diff --git a/.github/workflows/iam-users-terraform.yml b/.github/workflows/iam-users-terraform.yml new file mode 100644 index 0000000..202bcdf --- /dev/null +++ b/.github/workflows/iam-users-terraform.yml @@ -0,0 +1,284 @@ +name: IAM Users – Terraform + +on: + workflow_dispatch: {} + # inputs: + # send_emails: + # description: "Send SES emails after apply?" + # required: true + # type: choice + # options: + # - no + # - yes + + pull_request: + paths: + - 'terraform/iam-users/**' + + +jobs: + terraform: + runs-on: ubuntu-latest + permissions: + id-token: write # needed for OIDC + contents: write + + env: + AWS_REGION: us-east-1 + + steps: + - name: Checkout + uses: actions/checkout@v4 + + - name: Configure AWS credentials via OIDC + uses: aws-actions/configure-aws-credentials@v4 + with: + role-to-assume: arn:aws:iam::155729781479:role/github-user # or a GitHubTerraformRole if you create one + aws-region: ${{ env.AWS_REGION }} + + - name: Setup Terraform + uses: hashicorp/setup-terraform@v3 + with: + terraform_version: 1.8.0 + + - name: Terraform Init + working-directory: terraform/iam-users + run: terraform init + + - name: Terraform Plan + working-directory: terraform/iam-users + run: terraform plan -out=tfplan + + # Only apply on manual run or non-PR events (for example push to main or workflow_dispatch) + - name: Terraform Apply + if: github.event_name != 'pull_request' + working-directory: terraform/iam-users + run: terraform apply -auto-approve tfplan + + - name: Detect Terraform creates/deletes + id: plan_guard + working-directory: terraform/iam-users + shell: bash + run: | + set -euo pipefail + + # Assumes you already ran: terraform plan -out=tfplan + terraform show -json tfplan > tfplan.json + + # Look for resource change actions + has_creates=$(jq -r ' + any(.resource_changes[]?; (.change.actions | index("create")) != null) + ' tfplan.json) + + has_deletes=$(jq -r ' + any(.resource_changes[]?; (.change.actions | index("delete")) != null) + ' tfplan.json) + 
+ echo "has_creates=$has_creates" + echo "has_deletes=$has_deletes" + + # Expose as GitHub Actions step outputs + echo "has_creates=$has_creates" >> "$GITHUB_OUTPUT" + echo "has_deletes=$has_deletes" >> "$GITHUB_OUTPUT" + + - name: Get user emails from Terraform outputs + if: github.event_name != 'pull_request' + id: tf_outputs + working-directory: terraform/iam-users + run: | + set -euo pipefail + emails_json=$(terraform output -json user_emails) + echo "emails_json=$emails_json" >> "$GITHUB_OUTPUT" + + # Send an email via SES to each user + - name: Send SES emails to users + if: github.event_name != 'pull_request' && steps.plan_guard.outputs.has_creates == 'true' + id: email_sent_id + working-directory: terraform/iam-users + env: + SES_FROM_ADDRESS: "info@cloudnestadvisory.com" + EMAILS_JSON: ${{ steps.tf_outputs.outputs.emails_json }} + email_sent: false + run: | + set -euo pipefail + + # ------------------------------------------------------------ + # 1) Sent-email log file: tracks which usernames we've already emailed + # so we do not send duplicates on future runs. + # ------------------------------------------------------------ + SENT_FILE="sent_emails.json" + + # Ensure sent log exists + if [ ! -f "$SENT_FILE" ]; then + echo "{}" > "$SENT_FILE" + fi + + # Load current sent state into a variable + + sent=$(cat "$SENT_FILE") + + echo "Current sent email log:" + echo "$sent" | jq . + + # ------------------------------------------------------------ + # 2) Confirm we received the emails map from Terraform outputs + # EMAILS_JSON should look like: {"vol-a":"a@x.com","vol-b":"b@x.com"} + # ------------------------------------------------------------ + + echo "EMAILS_JSON received:" + echo "$EMAILS_JSON" | jq . 
+ + #Old Above Below ------ + # echo "$EMAILS_JSON" | jq -r 'to_entries[] | "\(.key) \(.value)"' | while read username email; do + # echo "Sending email to $email for user $username" + # Old Code Above ------- + + # ------------------------------------------------------------ + # 3) Loop through each (username, email) pair and only send for NEW users + # Use process substitution to avoid subshell issues from pipes. + # ------------------------------------------------------------ + echo "$EMAILS_JSON" | jq -r 'to_entries[] | "\(.key) \(.value)"' | while read -r username email; do + already_sent=$(echo "$sent" | jq -r --arg u "$username" 'has($u)') + + if [ "$already_sent" = "true" ]; then + echo "Skipping $username ($email) — already emailed" + continue + fi + + echo "Sending email to $email for user $username" + + # ------------------------------------------------------------ + # 4) Build SES message payload as JSON file to avoid CLI quoting issues + # ------------------------------------------------------------ + cat > ses-message.json < "$SENT_FILE" + + # ------------------------------------------------------------ + # 7) Final debug: show updated sent log so you can confirm it changed + # ------------------------------------------------------------ + echo "Updated sent email log (file):" + cat "$SENT_FILE" | jq . + + done + + email_sent=true + echo "email_sent=true" >> "$GITHUB_OUTPUT" + + - name: Cleanup sent email log for deleted users + if: github.event_name != 'pull_request' && steps.plan_guard.outputs.has_deletes == 'true' + id: cleanup_sent_log + working-directory: terraform/iam-users + env: + SES_FROM_ADDRESS: "info@cloudnestadvisory.com" + EMAILS_JSON: ${{ steps.tf_outputs.outputs.emails_json }} + email_sent: false + shell: bash + run: | + set -euo pipefail + + SENT_FILE="sent_emails.json" + + # Ensure log exists + if [ ! 
-f "$SENT_FILE" ]; then + echo "{}" > "$SENT_FILE" + fi + + # Ensure we have plan JSON available + terraform show -json tfplan > tfplan.json + + echo "Current sent email log:" + cat "$SENT_FILE" | jq . + + # Pull usernames being deleted from the plan (IAM users) + deleted_users=$(jq -r ' + [.resource_changes[]? + | select(.type=="aws_iam_user") + | select(.change.actions | index("delete")) + | .change.before.name + ] | unique | .[] + ' tfplan.json || true) + + if [ -z "${deleted_users:-}" ]; then + echo "No deleted IAM users found in plan. Nothing to clean." + exit 0 + fi + + echo "Usernames to remove from sent log:" + echo "$deleted_users" + + # Load current sent log into memory once, delete keys, write once + sent=$(cat "$SENT_FILE") + + while IFS= read -r username; do + [ -z "$username" ] && continue + echo "Removing $username from $SENT_FILE (if present)..." + sent=$(echo "$sent" | jq --arg u "$username" 'del(.[$u])') + done <<< "$deleted_users" + + echo "$sent" > "$SENT_FILE" + + echo "Updated sent email log (file):" + cat "$SENT_FILE" | jq . 
+ + - name: Commit sent email log + #if: steps.email_sent_id.outputs.email_sent == 'true' + working-directory: terraform/iam-users + env: + email_sent_ses: ${{ steps.email_sent_id.outputs.email_sent }} + run: | + set -euo pipefail + echo "Email sent is true or false: $email_sent_ses" + + # ------------------------------------------------------------ + # 1) Identify this commit as coming from GitHub Actions + # ------------------------------------------------------------ + git config user.name "github-actions[bot]" + git config user.email "github-actions[bot]@users.noreply.github.com" + + # ------------------------------------------------------------ + # 2) Stage the sent email log (this is the only file we care about) + # ------------------------------------------------------------ + git add sent_emails.json + + # ------------------------------------------------------------ + # 3) If the staged file has NOT changed, exit cleanly + # This prevents empty commits when: + # - no emails were sent + # - users were removed + # - apply was a no-op + # ------------------------------------------------------------ + if git diff --cached --quiet; then + echo "sent_emails.json unchanged. Nothing to commit." 
+ exit 0 + fi + + # ------------------------------------------------------------ + # 4) Commit and push the updated sent email log + # ------------------------------------------------------------ + git commit -m "chore: record sent SES emails" + git push \ No newline at end of file diff --git a/.github/workflows/team-membership-audit.yml b/.github/workflows/team-membership-audit.yml new file mode 100644 index 0000000..a335861 --- /dev/null +++ b/.github/workflows/team-membership-audit.yml @@ -0,0 +1,412 @@ +name: Team Membership Audit (org-wide) + +on: + workflow_dispatch: + inputs: + org: + description: "Org to scan (defaults to current repo owner)" + required: false + min_teams: + description: "Minimum number of teams each user must be in" + required: false + default: "1" + required_team_regex: + description: "Optional regex; at least one team must match (e.g., ^(Contributors|Maintainers)$)" + required: false + default: "" + include_outside_collaborators: + description: "Also audit outside collaborators" + required: false + default: "true" + skip_org_admins: + description: "Skip users with org admin/owner role" + required: false + default: "true" + exclude_users_csv: + description: "Comma-separated logins to exclude (e.g., bot-1,octocat)" + required: false + default: "" + +permissions: + contents: read + +jobs: + audit: + runs-on: ubuntu-latest + env: + GH_TOKEN: ${{ secrets.ORG_READ_TOKEN || secrets.GITHUB_TOKEN }} + ORG_INPUT: ${{ github.event.inputs.org }} + MIN_TEAMS: ${{ github.event.inputs.min_teams }} + REQUIRED_TEAM_REGEX: ${{ github.event.inputs.required_team_regex }} + INCLUDE_OUTSIDERS: ${{ github.event.inputs.include_outside_collaborators }} + SKIP_ADMINS: ${{ github.event.inputs.skip_org_admins }} + EXCLUDE_USERS_CSV: ${{ github.event.inputs.exclude_users_csv }} + + steps: + - name: Resolve org + id: ctx + run: | + if [ -n "$ORG_INPUT" ]; then ORG="$ORG_INPUT"; else ORG="${{ github.repository_owner }}"; fi + echo "org=$ORG" >> $GITHUB_OUTPUT + echo 
"Scanning org: $ORG" + + ### DEBUG CODE ----- + - name: Preflight debug + env: + ORG: ${{ steps.ctx.outputs.org }} + TOKEN: ${{ env.GH_TOKEN }} + run: | + set -euo pipefail + + echo "==> 1) Who am I?" + curl -sS -D /tmp/h1 -H "Authorization: Bearer $TOKEN" https://api.github.com/user -o /tmp/user.json || true + echo "Status: $(head -n1 /tmp/h1)" + echo "x-oauth-scopes: $(grep -i '^x-oauth-scopes:' /tmp/h1 | sed 's/x-oauth-scopes: //I' || true)" + echo "X-GitHub-SSO: $(grep -i '^x-github-sso:' /tmp/h1 | sed 's/x-github-sso: //I' || true)" + jq -r '"login=\(.login) | type=\(.type) | id=\(.id)"' /tmp/user.json 2>/dev/null || cat /tmp/user.json + + echo "" + echo "==> 2) My role in org '$ORG' (admin/member)?" + curl -sS -D /tmp/h2 -H "Authorization: Bearer $TOKEN" "https://api.github.com/orgs/$ORG/memberships/$(jq -r .login /tmp/user.json)" -o /tmp/membership.json || true + echo "Status: $(head -n1 /tmp/h2)" + jq -r '.state as $s | .role as $r | "state=\($s) | role=\($r)"' /tmp/membership.json 2>/dev/null || cat /tmp/membership.json + + echo "" + echo "==> 3) Can I list org members (first 5)?" + curl -sS -D /tmp/h3 -H "Authorization: Bearer $TOKEN" "https://api.github.com/orgs/$ORG/members?per_page=5" -o /tmp/members.json || true + echo "Status: $(head -n1 /tmp/h3)" + echo "SSO header (if 403): $(grep -i '^x-github-sso:' /tmp/h3 | sed 's/x-github-sso: //I' || true)" + jq -r 'if type=="array" then (length|tostring)+" users" else . end' /tmp/members.json 2>/dev/null || cat /tmp/members.json + echo "Logins:" + jq -r 'try .[].login | select(.)' /tmp/members.json 2>/dev/null | sed 's/^/ - /' || true + + echo "" + echo "==> 4) Can I list teams and team members (fallback source)?" 
+ curl -sS -D /tmp/h4 -H "Authorization: Bearer $TOKEN" "https://api.github.com/orgs/$ORG/teams?per_page=100" -o /tmp/teams.json || true + echo "Teams status: $(head -n1 /tmp/h4)" + jq -r '(length|tostring)+" teams"' /tmp/teams.json 2>/dev/null || cat /tmp/teams.json + + # Pick the first team and try to list its members + FIRST_TEAM=$(jq -r '.[0].slug // empty' /tmp/teams.json) + if [ -n "$FIRST_TEAM" ]; then + echo "First team: $FIRST_TEAM — checking members (up to 10)" + curl -sS -D /tmp/h5 -H "Authorization: Bearer $TOKEN" "https://api.github.com/orgs/$ORG/teams/$FIRST_TEAM/members?per_page=10" -o /tmp/team_members.json || true + echo "Team members status: $(head -n1 /tmp/h5)" + jq -r 'if type=="array" then (length|tostring)+" users" else . end' /tmp/team_members.json 2>/dev/null || cat /tmp/team_members.json + echo "Team member logins:" + jq -r 'try .[].login | select(.)' /tmp/team_members.json 2>/dev/null | sed 's/^/ - /' || true + else + echo "No teams readable (or none exist)." + fi + + echo "" + echo "==> 5) Outside collaborators (optional; requires org owner + admin:org)" + curl -sS -D /tmp/h6 -H "Authorization: Bearer $TOKEN" "https://api.github.com/orgs/$ORG/outside_collaborators?per_page=5" -o /tmp/outsiders.json || true + echo "Status: $(head -n1 /tmp/h6)" + echo "SSO header (if 403): $(grep -i '^x-github-sso:' /tmp/h6 | sed 's/x-github-sso: //I' || true)" + jq -r 'if type=="array" then (length|tostring)+" outsiders" else . 
end' /tmp/outsiders.json 2>/dev/null || cat /tmp/outsiders.json + + + + + ###### + + - name: Install jq + run: sudo apt-get update && sudo apt-get install -y jq + + - name: Audit team membership (build JSON) + id: audit + uses: actions/github-script@v7 + env: + ORG: ${{ steps.ctx.outputs.org }} + with: + github-token: ${{ env.GH_TOKEN }} + script: | + const org = process.env.ORG; + const minTeams = parseInt(process.env.MIN_TEAMS || "1", 10); + const requiredRe = (process.env.REQUIRED_TEAM_REGEX || "").trim(); + let includeOutsiders = (process.env.INCLUDE_OUTSIDERS || "true").toLowerCase() === "true"; + const skipAdmins = (process.env.SKIP_ADMINS || "true").toLowerCase() === "true"; + const excludeSet = new Set( + (process.env.EXCLUDE_USERS_CSV || "").split(",").map(s => s.trim().toLowerCase()).filter(Boolean) + ); + const reqTeam = requiredRe ? new RegExp(requiredRe, "i") : null; + + async function p(route, params) { + return await github.paginate(route, { per_page: 100, ...params }); + } + + const members = await p("GET /orgs/{org}/members", { org }); + + let outsiders = []; + if (includeOutsiders) { + try { + outsiders = await p("GET /orgs/{org}/outside_collaborators", { org }); + } catch (e) { + core.warning(`Cannot list outside collaborators (${e.status || 'ERR'}): ${e.message}. 
Skipping outsiders.`); + includeOutsiders = false; + } + } + + const teams = await p("GET /orgs/{org}/teams", { org }); + const teamMembersMap = new Map(); + for (const t of teams) { + try { + const tm = await p("GET /orgs/{org}/teams/{team_slug}/members", { org, team_slug: t.slug }); + teamMembersMap.set(t.slug, new Set(tm.map(u => u.login.toLowerCase()))); + } catch (e) { + core.warning(`Cannot list members for team ${t.slug}: ${e.message}`); + teamMembersMap.set(t.slug, new Set()); + } + } + + const userTeamsMap = new Map(); + function addTeamToUser(login, team) { + const k = login.toLowerCase(); + const arr = userTeamsMap.get(k) || []; + arr.push({ slug: team.slug, name: team.name }); + userTeamsMap.set(k, arr); + } + for (const t of teams) { + for (const login of (teamMembersMap.get(t.slug) || new Set())) { + addTeamToUser(login, { slug: t.slug, name: t.name }); + } + } + + const roleCache = new Map(); + async function getRole(login) { + const k = login.toLowerCase(); + if (roleCache.has(k)) return roleCache.get(k); + try { + const { data } = await github.request("GET /orgs/{org}/memberships/{username}", { org, username: login }); + roleCache.set(k, data.role); + return data.role; + } catch { + roleCache.set(k, "member"); + return "member"; + } + } + + const population = [ + ...members.map(u => ({ login: u.login, type: "member" })), + ...(includeOutsiders ? 
outsiders.map(u => ({ login: u.login, type: "outside_collaborator" })) : []) + ]; + const seen = new Map(); + for (const p_ of population) { + const k = p_.login.toLowerCase(); + if (!seen.has(k)) seen.set(k, p_); + else if (seen.get(k).type !== "member" && p_.type === "member") seen.set(k, p_); + } + + const rows = []; + for (const { login, type } of seen.values()) { + if (excludeSet.has(login.toLowerCase())) continue; + + const role = await getRole(login); + if (skipAdmins && role === "admin") continue; + + const teamsForUser = userTeamsMap.get(login.toLowerCase()) || []; + const teamNames = teamsForUser.map(t => t.name); + const teamSlugs = teamsForUser.map(t => t.slug); + + const hasMinTeams = teamsForUser.length >= (isNaN(minTeams) ? 1 : minTeams); + const matchesRequired = reqTeam ? (teamNames.some(n => reqTeam.test(n)) || teamSlugs.some(s => reqTeam.test(s))) : true; + + const compliant = hasMinTeams && matchesRequired; + const notes = []; + if (!hasMinTeams) notes.push(`requires >=${minTeams} teams`); + if (!matchesRequired && reqTeam) notes.push(`requires team matching /${requiredRe}/`); + + rows.push({ + login, + type, + role, + team_count: teamsForUser.length, + teams: teamNames, + compliant, + notes: notes.join("; ") + }); + } + + rows.sort((a,b) => (a.compliant === b.compliant) ? a.login.localeCompare(b.login) : (a.compliant ? 
1 : -1)); + + const out = { + org, + generated_at: new Date().toISOString().slice(0,10), + policy: { + min_teams: minTeams, + required_team_regex: requiredRe || null, + include_outside_collaborators: includeOutsiders, + skip_org_admins: (process.env.SKIP_ADMINS || "true").toLowerCase() === "true" + }, + results: rows + }; + + core.setOutput("json", JSON.stringify(out)); + + - name: Write JSON to file + run: | + echo '${{ steps.audit.outputs.json }}' > team_membership_audit.json + echo "Wrote team_membership_audit.json" + jq '.results | length as $n | "Total audited: \($n)"' team_membership_audit.json + ## DEBUG MODE HERE------ + # - name: Show sample of users read + # run: | + # echo "Total users found:" + # jq '.results | length' team_membership_audit.json + # echo "" + # echo "First 10 logins:" + # jq -r '.results[0:10] | .[].login' team_membership_audit.json + + + - name: Convert audit JSON -> CSV (jq) + run: | + jq -r ' + ["login","type","role","team_count","teams","compliant","notes"], + (.results[] | [ + .login, + .type, + .role, + .team_count, + (.teams // [] | join("; ")), + (if .compliant then "true" else "false" end), + (.notes // "") + ]) | @csv + ' team_membership_audit.json > team_membership_audit.csv + + - name: Build summary jq programs + run: | + cat > summary.jq <<'JQ' + def pct(a;b): if b==0 then 0 else ((a*100.0)/b) end; + + . 
as $root + | ($root.results // []) as $rows + | $rows | length as $total + | ($rows | map(select(.type=="member")) | length) as $members + | ($rows | map(select(.type=="outside_collaborator")) | length) as $outsiders + | ($rows | map(select(.compliant==true)) | length) as $ok + | ($total - $ok) as $bad + | ($rows | map(.team_count) | min // 0) as $min_tc + | ($rows | map(.team_count) | max // 0) as $max_tc + | ($rows | map(.team_count) | add // 0) as $sum_tc + | ($sum_tc / (if $total==0 then 1 else $total end)) as $avg_tc + | ( + ($rows | map(.team_count) | sort) as $sorted + | (if $total==0 then 0 + else ($sorted[($total-1)/2|floor] + $sorted[$total/2|floor]) / 2 + end) + ) as $median_tc + | ($rows + | map(select((.notes // "") | test("requires >="))) + | length + ) as $below_min + | ($rows + | map(select((.notes // "") | test("requires team matching"))) + | length + ) as $missing_required + | ($rows + | map(.teams // []) + | map(.[]) + | sort + | group_by(.) + | map({team: .[0], users: length}) + | sort_by(-.users) + ) as $per_team + | { + org: $root.org, + generated_at: $root.generated_at, + policy: (if $root.policy then $root.policy else {} end), + totals: { + audited: $total, + members: $members, + outside_collaborators: $outsiders + }, + compliance: { + compliant: $ok, + non_compliant: $bad, + pct_compliant: (pct($ok;$total) | floor) + }, + gaps: { + below_min_teams: $below_min, + missing_required_team: $missing_required + }, + team_count_stats: { + min: $min_tc, + max: $max_tc, + avg: ($avg_tc | tonumber | (. 
* 100 | floor) / 100), + median: $median_tc + }, + per_team_coverage: $per_team + } + JQ + + cat > summary_kv.jq <<'JQ' + [ + ["metric","value"], + ["org", .org], + ["generated_at", .generated_at], + ["audited", .totals.audited], + ["members", .totals.members], + ["outside_collab", .totals.outside_collaborators], + ["compliant", .compliance.compliant], + ["non_compliant", .compliance.non_compliant], + ["pct_compliant", .compliance.pct_compliant], + ["below_min_teams", .gaps.below_min_teams], + ["missing_required", .gaps.missing_required_team], + ["team_count_min", .team_count_stats.min], + ["team_count_max", .team_count_stats.max], + ["team_count_avg", .team_count_stats.avg], + ["team_count_median",.team_count_stats.median] + ] | map(@csv)[] + JQ + + cat > per_team_csv.jq <<'JQ' + ["team","users"], + (.per_team_coverage[] | [ .team, .users ]) | @csv + JQ + + - name: Build audit summary (JSON) via jq file + run: | + jq -f summary.jq team_membership_audit.json > audit_summary.json + echo "Summary:" + jq '.compliance' audit_summary.json + + - name: Export summary as key/value CSV (jq) + run: | + jq -r -f summary_kv.jq audit_summary.json > audit_summary_kv.csv + + - name: Export per-team coverage CSV (jq) + run: | + jq -r -f per_team_csv.jq audit_summary.json > per_team_coverage.csv + + - name: Append summary to run page + run: | + A=$(jq -r '.totals.audited' audit_summary.json) + C=$(jq -r '.compliance.compliant' audit_summary.json) + NC=$(jq -r '.compliance.non_compliant' audit_summary.json) + P=$(jq -r '.compliance.pct_compliant' audit_summary.json) + BM=$(jq -r '.gaps.below_min_teams' audit_summary.json) + MR=$(jq -r '.gaps.missing_required_team' audit_summary.json) + { + echo "## Team Membership Audit — Summary" + echo "" + echo "| Metric | Value |" + echo "|---|---:|" + echo "| Audited users | $A |" + echo "| Compliant | $C |" + echo "| Non-compliant | $NC |" + echo "| % Compliant | ${P}% |" + echo "| Below min teams | $BM |" + echo "| Missing required team | $MR 
|" + } >> "$GITHUB_STEP_SUMMARY" + + - name: Upload artifacts + uses: actions/upload-artifact@v4 + with: + name: team-membership-audit-bundle + path: | + team_membership_audit.json + team_membership_audit.csv + audit_summary.json + audit_summary_kv.csv + per_team_coverage.csv diff --git a/.github/workflows/user-activity-org.yml b/.github/workflows/user-activity-org.yml new file mode 100644 index 0000000..09cecbe --- /dev/null +++ b/.github/workflows/user-activity-org.yml @@ -0,0 +1,169 @@ +name: User Activity (org-wide) + +on: + workflow_dispatch: + inputs: + username: + description: "GitHub login to check (e.g., octocat)" + required: true + org: + description: "Org to scan (defaults to current repo owner)" + required: false + lookback_days: + description: "Consider comments updated in the last N days" + required: false + default: "90" + repo_name_regex: + description: "Optional: only repos whose names match this regex" + required: false + default: ".*" + +permissions: + contents: read + issues: read + pull-requests: read + +jobs: + org-scan: + runs-on: ubuntu-latest + env: + # Use PAT if you need private org repos; otherwise GITHUB_TOKEN is fine for what it can see + GH_TOKEN: ${{ secrets.ORG_READ_TOKEN || secrets.GITHUB_TOKEN }} + USERNAME: ${{ github.event.inputs.username }} + LOOKBACK_DAYS: ${{ github.event.inputs.lookback_days }} + REPO_NAME_REGEX: ${{ github.event.inputs.repo_name_regex }} + + steps: + - name: Resolve org + id: ctx + env: + ORG_INPUT: ${{ github.event.inputs.org }} + REPO_OWNER: ${{ github.repository_owner }} + run: | + if [ -n "$ORG_INPUT" ]; then echo "org=$ORG_INPUT" >> $GITHUB_OUTPUT; else echo "org=$REPO_OWNER" >> $GITHUB_OUTPUT; fi + echo "Scanning org: $(cat $GITHUB_OUTPUT)" + + - name: Debug inputs + run: | + echo "username='$USERNAME'" + echo "lookback_days='$LOOKBACK_DAYS'" + echo "repo_name_regex='$REPO_NAME_REGEX'" + echo "org='${{ steps.ctx.outputs.org }}'" + + - name: Scan repos for latest commit/comment + id: scan + uses: 
actions/github-script@v7 + with: + github-token: ${{ env.GH_TOKEN }} + script: | + const org = "${{ steps.ctx.outputs.org }}"; + const username = process.env.USERNAME?.trim(); + const lookbackDays = parseInt(process.env.LOOKBACK_DAYS || "90", 10); + const repoRegex = new RegExp(process.env.REPO_NAME_REGEX || ".*", "i"); + const since = new Date(Date.now() - lookbackDays*24*60*60*1000).toISOString(); + + let bestCommit = null; + let bestComment = null; + + const considerCommit = (repoName, c) => { + const when = new Date(c.commit?.author?.date || c.commit?.committer?.date || c.created_at).getTime(); + const cand = { when, repo: repoName, sha: c.sha, url: c.html_url, msg: c.commit?.message || "", date: new Date(when).toISOString() }; + if (!bestCommit || when > bestCommit.when) bestCommit = cand; + }; + const considerComment = (repoName, type, url, date, body) => { + const when = new Date(date).getTime(); + const cand = { when, repo: repoName, type, url, date: new Date(when).toISOString(), excerpt: (body||"").slice(0,200) }; + if (!bestComment || when > bestComment.when) bestComment = cand; + }; + + // List repos (visibility depends on token) + const repos = await github.paginate("GET /orgs/{org}/repos", { + org, per_page: 100, type: "all", sort: "full_name", direction: "asc" + }); + + for (const r of repos.filter(rr => repoRegex.test(rr.name))) { + try { + // Latest commit by user + const commits = await github.request("GET /repos/{owner}/{repo}/commits", { + owner: org, repo: r.name, author: username, per_page: 1 + }); + if (commits.data?.[0]) considerCommit(r.name, commits.data[0]); + + // Issue comments + const issueComments = await github.paginate("GET /repos/{owner}/{repo}/issues/comments", { + owner: org, repo: r.name, since, per_page: 100 + }); + issueComments + .filter(c => c.user?.login?.toLowerCase() === username.toLowerCase()) + .forEach(c => considerComment(r.name, "issue_comment", c.html_url, c.updated_at || c.created_at, c.body)); + + // PR review 
comments + const reviewComments = await github.paginate("GET /repos/{owner}/{repo}/pulls/comments", { + owner: org, repo: r.name, since, per_page: 100 + }); + reviewComments + .filter(c => c.user?.login?.toLowerCase() === username.toLowerCase()) + .forEach(c => considerComment(r.name, "pr_review_comment", c.html_url, c.updated_at || c.created_at, c.body)); + + // Discussions (best effort) + try { + const discComments = await github.paginate("GET /repos/{owner}/{repo}/discussions/comments", { + owner: org, repo: r.name, per_page: 100 + }); + discComments + .filter(c => + (c.updated_at || c.created_at) >= since && + c.user?.login?.toLowerCase() === username.toLowerCase() + ) + .forEach(c => considerComment(r.name, "discussion_comment", c.html_url, c.updated_at || c.created_at, c.body)); + } catch { + core.info(`[${r.name}] Discussions API not available; skipping.`); + } + } catch (e) { + core.warning(`Skipping ${r.name}: ${e.message}`); + } + } + + core.setOutput("commit_repo", bestCommit?.repo ?? ""); + core.setOutput("commit_sha", bestCommit?.sha ?? ""); + core.setOutput("commit_url", bestCommit?.url ?? ""); + core.setOutput("commit_date", bestCommit?.date ?? ""); + core.setOutput("commit_message", bestCommit?.msg ?? ""); + + core.setOutput("comment_repo", bestComment?.repo ?? ""); + core.setOutput("comment_type", bestComment?.type ?? ""); + core.setOutput("comment_url", bestComment?.url ?? ""); + core.setOutput("comment_date", bestComment?.date ?? ""); + core.setOutput("comment_excerpt", bestComment?.excerpt ?? 
""); + + - name: Summarize org-wide findings + run: | + { + echo "### Org-wide user activity summary"; + echo ""; + echo "**User:** \`${{ github.event.inputs.username }}\`"; + echo "**Org:** \`${{ steps.ctx.outputs.org }}\`"; + echo "**Repo filter:** \`${{ github.event.inputs.repo_name_regex || '.*' }}\`"; + echo ""; + echo "#### Latest commit across org"; + if [ -n "${{ steps.scan.outputs.commit_sha }}" ]; then + echo "- Repo: \`${{ steps.scan.outputs.commit_repo }}\`"; + echo "- SHA: \`${{ steps.scan.outputs.commit_sha }}\`"; + echo "- Message: ${{ steps.scan.outputs.commit_message }}"; + echo "- Date: \`${{ steps.scan.outputs.commit_date }}\`"; + echo "- URL: ${{ steps.scan.outputs.commit_url }}"; + else + echo "_No commits found across scanned repos._"; + fi + echo ""; + echo "#### Latest comment (issue/PR/discussion) across org"; + if [ -n "${{ steps.scan.outputs.comment_url }}" ]; then + echo "- Repo: \`${{ steps.scan.outputs.comment_repo }}\`"; + echo "- Type: \`${{ steps.scan.outputs.comment_type }}\`"; + echo "- Date: \`${{ steps.scan.outputs.comment_date }}\`"; + echo "- URL: ${{ steps.scan.outputs.comment_url }}"; + echo "- Excerpt: `${{ steps.scan.outputs.comment_excerpt }}`"; + else + echo "_No comments found across scanned repos in the last ${{ github.event.inputs.lookback_days }} days._"; + fi + } >> "$GITHUB_STEP_SUMMARY" diff --git a/.github/workflows/user-activity.yml b/.github/workflows/user-activity.yml new file mode 100644 index 0000000..1fd6bf9 --- /dev/null +++ b/.github/workflows/user-activity.yml @@ -0,0 +1,208 @@ +name: User Activity (repo) + +on: + workflow_dispatch: + inputs: + username: + description: "GitHub login to check (e.g., octocat)" + required: true + lookback_days: + description: "Consider comments updated in the last N days" + required: false + default: "90" + +permissions: + contents: read + issues: read + pull-requests: read + +jobs: + user-activity: + runs-on: ubuntu-latest + + steps: + - name: Debug inputs + run: | + echo 
"username='${{ github.event.inputs.username }}'" + echo "lookback_days='${{ github.event.inputs.lookback_days }}'" + + - name: Gather the latest commit by the user in this repo + id: commits + uses: actions/github-script@v7 + env: + USERNAME: ${{ github.event.inputs.username }} + with: + # Always set a token explicitly + github-token: ${{ secrets.GITHUB_TOKEN }} + script: | + const username = process.env.USERNAME?.trim(); + if (!username) { + core.setFailed("USERNAME env var is empty. Did you provide the 'username' input?"); + } + const [owner, repo] = process.env.GITHUB_REPOSITORY.split("/"); + const res = await github.request("GET /repos/{owner}/{repo}/commits", { + owner, repo, author: username, per_page: 1 + }); + const latest = res.data?.[0] || null; + + core.setOutput("commit_sha", latest?.sha ?? ""); + core.setOutput("commit_url", latest?.html_url ?? ""); + core.setOutput("commit_date", latest?.commit?.author?.date ?? ""); + core.setOutput("commit_message", latest?.commit?.message ?? 
""); + + - name: Gather the latest issue/PR/discussion comment by user + id: comments + uses: actions/github-script@v7 + env: + USERNAME: ${{ github.event.inputs.username }} + LOOKBACK_DAYS: ${{ github.event.inputs.lookback_days }} + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + script: | + const username = process.env.USERNAME?.trim(); + const lookbackDays = parseInt(process.env.LOOKBACK_DAYS || "90", 10); + const [owner, repo] = process.env.GITHUB_REPOSITORY.split("/"); + const sinceISO = new Date(Date.now() - lookbackDays*24*60*60*1000).toISOString(); + + let best = null; + const consider = (type, url, createdAt, body) => { + const when = new Date(createdAt).getTime(); + if (!best || when > best.when) best = { type, url, createdAt, excerpt: (body||"").slice(0,200), when }; + }; + + // Issue comments (includes PR “conversation” comments) + const issueComments = await github.paginate("GET /repos/{owner}/{repo}/issues/comments", { + owner, repo, since: sinceISO, per_page: 100 + }); + issueComments + .filter(c => c.user?.login?.toLowerCase() === username.toLowerCase()) + .forEach(c => consider("issue_comment", c.html_url, c.updated_at || c.created_at, c.body)); + + // PR review comments + const reviewComments = await github.paginate("GET /repos/{owner}/{repo}/pulls/comments", { + owner, repo, since: sinceISO, per_page: 100 + }); + reviewComments + .filter(c => c.user?.login?.toLowerCase() === username.toLowerCase()) + .forEach(c => consider("pr_review_comment", c.html_url, c.updated_at || c.created_at, c.body)); + + // Discussions (best effort) + try { + const discComments = await github.paginate("GET /repos/{owner}/{repo}/discussions/comments", { + owner, repo, per_page: 100 + }); + discComments + .filter(c => + (c.updated_at || c.created_at) >= sinceISO && + c.user?.login?.toLowerCase() === username.toLowerCase() + ) + .forEach(c => consider("discussion_comment", c.html_url, c.updated_at || c.created_at, c.body)); + } catch { + core.info("Discussions not 
enabled or endpoint unavailable; skipping."); + } + + core.setOutput("comment_type", best?.type ?? ""); + core.setOutput("comment_url", best?.url ?? ""); + core.setOutput("comment_date", best?.createdAt ?? ""); + core.setOutput("comment_excerpt", best?.excerpt ?? ""); + + - name: Summarize + run: | + commit_date=$(echo "${{ steps.commits.outputs.commit_date || steps.scan.outputs.commit_date }}" | cut -c1-10) + { + echo "### User activity summary"; + echo ""; + echo "**User:** \`${{ github.event.inputs.username }}\`"; + echo "**Repo:** \`${{ github.repository }}\`"; + echo ""; + echo "#### Latest commit"; + if [ -n "${{ steps.commits.outputs.commit_sha }}" ]; then + echo "- SHA: \`${{ steps.commits.outputs.commit_sha }}\`"; + echo "- Message: ${{ steps.commits.outputs.commit_message }}"; + echo "- Date: \`$commit_date\`"; + echo "- URL: ${{ steps.commits.outputs.commit_url }}"; + else + echo "_No commits found in this repo by that user._"; + fi + echo ""; + echo "#### Latest comment (issue/PR/discussion)"; + if [ -n "${{ steps.comments.outputs.comment_url }}" ]; then + echo "- Type: \`${{ steps.comments.outputs.comment_type }}\`"; + echo "- Date: \`${{ steps.comments.outputs.comment_date }}\`"; + echo "- URL: ${{ steps.comments.outputs.comment_url }}"; + echo "- Excerpt: \`${{ steps.comments.outputs.comment_excerpt }}\`"; + else + echo "_No comments found for that user in the last ${{ github.event.inputs.lookback_days }} days._"; + fi + } >> "$GITHUB_STEP_SUMMARY" +# Create Report ------- + - name: Create JSON report + id: create_json + run: | + # Format dates (trim to YYYY-MM-DD) + commit_date=$(echo "${{ steps.commits.outputs.commit_date || steps.scan.outputs.commit_date }}" | cut -c1-10) + comment_date=$(echo "${{ steps.comments.outputs.comment_date || steps.scan.outputs.comment_date }}" | cut -c1-10) + generated_date=$(date +%Y-%m-%d) + + jq -n \ + --arg username "${{ github.event.inputs.username }}" \ + --arg repo "${{ github.repository }}" \ + --arg commit_sha 
"${{ steps.commits.outputs.commit_sha || steps.scan.outputs.commit_sha }}" \ + --arg commit_url "${{ steps.commits.outputs.commit_url || steps.scan.outputs.commit_url }}" \ + --arg commit_date "$commit_date" \ + --arg commit_message "${{ steps.commits.outputs.commit_message || steps.scan.outputs.commit_message }}" \ + --arg comment_type "${{ steps.comments.outputs.comment_type || steps.scan.outputs.comment_type }}" \ + --arg comment_url "${{ steps.comments.outputs.comment_url || steps.scan.outputs.comment_url }}" \ + --arg comment_date "$comment_date" \ + --arg comment_excerpt "${{ steps.comments.outputs.comment_excerpt || steps.scan.outputs.comment_excerpt }}" \ + --arg generated_at "$generated_date" \ + '{ + user: $username, + repository: $repo, + commit: { + sha: $commit_sha, + url: $commit_url, + date: $commit_date, + message: $commit_message + }, + comment: { + type: $comment_type, + url: $comment_url, + date: $comment_date, + excerpt: $comment_excerpt + }, + generated_at: $generated_at + }' > user_activity.json + + - name: Convert JSON to CSV + run: | + # Extract key fields into a readable CSV + jq -r '[ + "user", + "repository", + "commit_sha", + "commit_date", + "commit_url", + "comment_type", + "comment_date", + "comment_url" + ], + [ + .user, + .repository, + .commit.sha, + .commit.date, + .commit.url, + .comment.type, + .comment.date, + .comment.url + ] | @csv' user_activity.json > user_activity.csv + + - name: Upload reports + uses: actions/upload-artifact@v4 + with: + name: user-activity-reports + path: | + user_activity.json + user_activity.csv + diff --git a/infrastructure/iam-user-nested.yaml b/infrastructure/iam-user-nested.yaml new file mode 100644 index 0000000..e69de29 diff --git a/infrastructure/iam-users-root.yaml b/infrastructure/iam-users-root.yaml new file mode 100644 index 0000000..a7b6159 --- /dev/null +++ b/infrastructure/iam-users-root.yaml @@ -0,0 +1,24 @@ +AWSTemplateFormatVersion: '2010-09-09' +Description: Master stack to manage all 
volunteer IAM users via nested stacks + +Parameters: + NestedTemplateUrl: + Type: String + Description: S3 URL of the iam-user-nested.yaml template + +Resources: + # Nested user stacks will be appended below by automation. + # Example of what will be added: + # + # User_jdoe: + # Type: AWS::CloudFormation::Stack + # Properties: + # TemplateURL: !Ref NestedTemplateUrl + # Parameters: + # UserName: jdoe + # Email: jdoe@example.com + +Outputs: + StackNote: + Description: This stack owns all volunteer IAM user nested stacks + Value: "IamUsersMaster" diff --git a/org-contributor-access/README.md b/org-contributor-access/README.md new file mode 100644 index 0000000..e69de29 diff --git a/org-contributor-access/volunteer-access/README.md b/org-contributor-access/volunteer-access/README.md new file mode 100644 index 0000000..0ff5120 --- /dev/null +++ b/org-contributor-access/volunteer-access/README.md @@ -0,0 +1,29 @@ +# 🔐 GitHub Team Roles and Permissions + +This document outlines the structure of our GitHub organization and the permissions assigned to each team. +These roles help us maintain security, collaboration, and clear contribution boundaries for all members. + +| **Team** | **Role** | **Permissions** | **Typical Members** | +|-----------|-----------|------------------|----------------------| +| **Admins** | Full repository + organization settings | `Admin` | Cloud Security Lead, DevOps Engineers | +| **Maintainers** | Can merge PRs, manage issues, and oversee branches | `Maintain` | Core Developers | +| **Contributors** | Can create branches and pull requests, but cannot merge | `Write` | Active Volunteers | +| **Reviewers** | Can review and comment on pull requests and issues | `Triage` | Security Reviewers, Code Auditors | +| **Observers** | Read-only access for documentation or training purposes | `Read` | New Volunteers, Interns | + +--- + +### 🧩 Notes +- All members must follow the **Contribution Guidelines** before submitting PRs. 
+- Admin access should be limited to trusted leads only. +- Observers can request elevated permissions after completing onboarding. +- Use **branch protection rules** to enforce code review and prevent unauthorized merges. + +--- + +### 🛠️ Helpful Links & Tips +How do I get additional access? + - [Volunteer Tiers](./volunteer_tiers.md) + diff --git a/org-contributor-access/volunteer-access/volunteer_tiers.md b/org-contributor-access/volunteer-access/volunteer_tiers.md new file mode 100644 index 0000000..1d6d109 --- /dev/null +++ b/org-contributor-access/volunteer-access/volunteer_tiers.md @@ -0,0 +1,65 @@ +# 🌱 Volunteer Access & Contribution Tiers + +Our GitHub access model is designed to recognize commitment and contribution quality. +This helps keep our repositories secure, organized, and rewarding for active volunteers. + +--- + +## 🕒 Tier Advancement (Time-Based) + +| **Tier** | **Time Contributed** | **Access Level** | **Description** | +|-----------|----------------------|------------------|------------------| +| **Observer** | 0–10 hours | `Read` | Getting familiar with the project, onboarding, and reviewing documentation. | +| **Contributor** | 10–30 hours | `Write` | Actively creating pull requests and contributing to issues. | +| **Maintainer** | 30–60 hours | `Maintain` | Regular weekly contributor helping with merges, reviews, and issue management. | +| **Admin** | 60+ hours | `Admin` | Core volunteer responsible for repository settings and security. | +| +> 🧭 *We track hours through volunteer logs or activity summaries provided by leads.* + +--- + +## 💬 Tier Advancement (Engagement-Based) + +| **Tier** | **Activity Metrics** | **Access Level** | **Notes** | +|-----------|----------------------|------------------|------------| +| **Observer** | 0–3 comments or issues | `Read` | Learning phase and providing feedback. | +| **Contributor** | 3–5 merged PRs or 10+ issues/comments | `Write` | Regular participation with code or documentation. 
| +| **Maintainer** | 10+ merged PRs or active reviewer status | `Maintain` | Trusted reviewer or team mentor. | +| **Admin-Eligible** | 3+ months consistent maintainership | `Admin` | Requires approval from project leads. | + +> 🧩 *Contributions include code, documentation, issue triage, and review activity.* + +--- + +## ⚖️ Hybrid Access Criteria + +We use a **hybrid model** for promotions: + +- ✅ **Write access** → After 10+ active hours *and* 3+ merged PRs. +- ✅ **Maintain access** → After 30+ hours *and* trusted code reviews. +- 🚫 **Admin access** → Reserved for CloudSec or DevOps leads only. + +--- + +## 🪴 Access Review Process + +To request an upgrade: + +1. Open a new issue titled: + **`Access Upgrade Request: [Your GitHub Username]`** +2. Include: + - Hours contributed or summary of activity + - Example PRs or issues + - Short note on what areas you’d like to help maintain +3. A team lead will review and respond within 3–5 business days. + +--- + +## 🔐 Notes + +- Access is reviewed quarterly by the Cloud Security and DevOps teams. +- Permissions may be adjusted if a volunteer becomes inactive for 60+ days. +- Admin privileges are limited to essential personnel for security reasons. +- All contributors must follow the [Code of Conduct](./CODE_OF_CONDUCT.md) and [Security Policy](./SECURITY.md). + +--- \ No newline at end of file diff --git a/terraform/README.md b/terraform/README.md new file mode 100644 index 0000000..d5ac2dc --- /dev/null +++ b/terraform/README.md @@ -0,0 +1,112 @@ +**IAM Users – Terraform Management** +--- +This directory contains the Terraform configuration used to manage all volunteer IAM users in AWS. +Everything is handled through code so that access is consistent, reviewed, and easy to update. + +**How This Works** + +- All IAM users are defined in a single file: `users.auto.tfvars.` + +- Terraform reads this list and creates, updates, or removes users as needed. 
+ +- A reusable module (`modules/iam_user`) handles: + + - Creating the IAM user + + - Assigning tags + + - Applying the correct permission level + +- GitHub Actions runs Terraform using secure AWS OIDC authentication, so no AWS access keys are stored anywhere. + +This setup ensures that user access is managed in a clear, repeatable, and auditable way. + +**Folder Structure** +``` +terraform/ + iam-users/ + main.tf + variables.tf + users.auto.tfvars + modules/ + iam_user/ + main.tf + variables.tf +``` +**Adding or Updating a User** + +To add a new volunteer or update an existing one, edit the `users.auto.tfvars` file. +Example: +```hcl +users = { + "vol-dmoney" = { + email = "darrell@example.org" + permission_level = "ReadOnly" + } + + "vol-jdoe" = { + email = "jdoe@example.org" + permission_level = "PowerUser" + } +} +``` + +*Valid `permission_level` values:* + +- ReadOnly + +- PowerUser + +- Admin + +Terraform will attach the appropriate AWS-managed policies for each level. + +**Deploying Changes** +--- + +After you update `users.auto.tfvars`: + +1. Commit and push your changes. + +2. Go to the GitHub Actions tab. + +3. Run the workflow named IAM Users – Terraform. + +The workflow initializes Terraform, shows the plan, and applies the changes automatically. + +**Removing a User** +--- +To remove a user, delete their entry from `users.auto.tfvars`. +Terraform will see the removal and plan to delete the IAM user. + +If you want additional safety to prevent accidental deletion, `prevent_destroy` can be added to the module. Open an issue if you'd like that enabled. + +**Security Notes** +--- +GitHub Actions uses OIDC to assume an AWS IAM role. No access keys are stored in the repository. + +The IAM role is restricted to: + +Only the volunteer IAM user path + +Only the necessary IAM and CloudFormation permissions + +Terraform state is stored in an S3 bucket configured for this project. 
+ +**Troubleshooting** +--- +If something does not look right, you can: + +Review the Terraform plan in GitHub Actions + +Check for typos in `users.auto.tfvars` + +Run Terraform locally (if you have permissions): + +```terraform +terraform init +terraform plan +terraform apply + +``` +If you need help reviewing logs or adjusting the workflow, feel free to ask. \ No newline at end of file diff --git a/terraform/iam-users/main.tf b/terraform/iam-users/main.tf new file mode 100644 index 0000000..b63575d --- /dev/null +++ b/terraform/iam-users/main.tf @@ -0,0 +1,31 @@ +terraform { + required_version = ">= 1.5.0" + + required_providers { + aws = { + source = "hashicorp/aws" + version = "~> 5.0" + } + } + + # Optional but strongly recommended: S3 backend for state + backend "s3" { + bucket = "volunteer-access-cf" # your bucket + key = "terraform/iam-users/terraform.tfstate" + region = "us-east-1" + } +} + +provider "aws" { + region = "us-east-1" +} + +module "iam_user" { + source = "../modules/iam_user" + + for_each = var.users + + user_name = each.key + email = each.value.email + permission_level = each.value.permission_level +} diff --git a/terraform/iam-users/outputs.tf b/terraform/iam-users/outputs.tf new file mode 100644 index 0000000..e2016ed --- /dev/null +++ b/terraform/iam-users/outputs.tf @@ -0,0 +1,4 @@ +output "user_emails" { + description = "Map of usernames to email addresses" + value = { for username, cfg in var.users : username => cfg.email } +} diff --git a/terraform/iam-users/sent_emails.json b/terraform/iam-users/sent_emails.json new file mode 100644 index 0000000..4c9819c --- /dev/null +++ b/terraform/iam-users/sent_emails.json @@ -0,0 +1,4 @@ +{ + "vol-dmoney": "2025-12-14T23:25:58Z", + "vol-test": "2025-12-17T03:56:13Z" +} diff --git a/terraform/iam-users/users.auto.tfvars b/terraform/iam-users/users.auto.tfvars new file mode 100644 index 0000000..aeba9cc --- /dev/null +++ b/terraform/iam-users/users.auto.tfvars @@ -0,0 +1,17 @@ +users = { + 
"vol-dmoney" = { + email = "shackdt1@gmail.com" + permission_level = "ReadOnly" + } + + "vol-test" = { + email = "dtshack@gmail.com" + permission_level = "PowerUser" + } + + # add more users here + # "vol-someone" = { + # email = "someone@example.org" + # permission_level = "Admin" + # } +} diff --git a/terraform/iam-users/variables.tf b/terraform/iam-users/variables.tf new file mode 100644 index 0000000..3e54dd0 --- /dev/null +++ b/terraform/iam-users/variables.tf @@ -0,0 +1,8 @@ +variable "users" { + description = "Map of IAM users to create" + type = map(object({ + email = string + permission_level = string + })) +} + diff --git a/terraform/modules/iam_user/main.tf b/terraform/modules/iam_user/main.tf new file mode 100644 index 0000000..202b2be --- /dev/null +++ b/terraform/modules/iam_user/main.tf @@ -0,0 +1,37 @@ +locals { + # Map logical permission levels to AWS managed policy ARNs + permission_policies = { + ReadOnly = [ + "arn:aws:iam::aws:policy/ReadOnlyAccess" + ] + + PowerUser = [ + "arn:aws:iam::aws:policy/PowerUserAccess" + ] + + Admin = [ + "arn:aws:iam::aws:policy/AdministratorAccess" + ] + } + + policies_for_user = lookup(local.permission_policies, var.permission_level, []) +} + +resource "aws_iam_user" "this" { + name = var.user_name + path = "/volunteers/" + + tags = { + Purpose = "VolunteerAccess" + ContactEmail = var.email + ManagedBy = "Terraform" + } +} + +# Attach mapped AWS-managed policies +resource "aws_iam_user_policy_attachment" "managed" { + for_each = toset(local.policies_for_user) + + user = aws_iam_user.this.name + policy_arn = each.value +} diff --git a/terraform/modules/iam_user/variables.tf b/terraform/modules/iam_user/variables.tf new file mode 100644 index 0000000..1f6272d --- /dev/null +++ b/terraform/modules/iam_user/variables.tf @@ -0,0 +1,18 @@ +variable "user_name" { + type = string + description = "IAM username (e.g. 
vol-dmoney)" +} + +variable "email" { + type = string + description = "User email for tagging" +} + +variable "permission_level" { + type = string + description = "Logical permission level" + validation { + condition = contains(["ReadOnly", "PowerUser", "Admin"], var.permission_level) + error_message = "permission_level must be one of: ReadOnly, PowerUser, Admin." + } +}