diff --git a/.github/actions/deploy-worker/action.yml b/.github/actions/deploy-worker/action.yml index 4825ed11..45f752f0 100644 --- a/.github/actions/deploy-worker/action.yml +++ b/.github/actions/deploy-worker/action.yml @@ -205,7 +205,7 @@ runs: else echo "$PUSH_OUTPUT" echo "::warning::Container image push failed — wrangler deploy will retry container push using CLOUDFLARE_API_TOKEN." - echo "::warning::To fix: ensure CLOUDFLARE_CONTAINERS_TOKEN has Account > Containers:Edit permission." + echo "::warning::To fix: ensure CLOUDFLARE_API_TOKEN or the optional cloudflare-containers-token input has Account > Containers:Edit permission." echo "::warning::See: https://dash.cloudflare.com/profile/api-tokens" fi diff --git a/.github/workflows/api-shield-scan.yml b/.github/workflows/api-shield-scan.yml index 7f206932..3c0eca17 100644 --- a/.github/workflows/api-shield-scan.yml +++ b/.github/workflows/api-shield-scan.yml @@ -11,188 +11,188 @@ name: API Shield Readiness # Setup guide: docs/security/API_SHIELD_VULNERABILITY_SCANNER.md on: - push: - branches: [main] - paths: - - 'docs/api/openapi.yaml' - - 'docs/api/cloudflare-schema.yaml' - - 'scripts/generate-cloudflare-schema.ts' - - 'scripts/validate-openapi.ts' - pull_request: - branches: [main] - paths: - - 'docs/api/openapi.yaml' - - 'docs/api/cloudflare-schema.yaml' - - 'scripts/generate-cloudflare-schema.ts' - - 'scripts/validate-openapi.ts' - workflow_dispatch: + push: + branches: [master, main] + paths: + - "docs/api/openapi.yaml" + - "docs/api/cloudflare-schema.yaml" + - "scripts/generate-cloudflare-schema.ts" + - "scripts/validate-openapi.ts" + pull_request: + branches: [master, main] + paths: + - "docs/api/openapi.yaml" + - "docs/api/cloudflare-schema.yaml" + - "scripts/generate-cloudflare-schema.ts" + - "scripts/validate-openapi.ts" + workflow_dispatch: concurrency: - group: api-shield-${{ github.ref }} - cancel-in-progress: true + group: api-shield-${{ github.ref }} + cancel-in-progress: true jobs: - 
api-shield-readiness: - name: API Shield Readiness Check - runs-on: ubuntu-latest - timeout-minutes: 10 - permissions: - contents: read - - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - - - name: Setup Deno environment - uses: ./.github/actions/setup-deno-env - with: - deno-version: '2.x' - install-deps: 'false' - - # ---------------------------------------------------------------- - # 1. Validate the OpenAPI spec - # ---------------------------------------------------------------- - - name: Validate OpenAPI specification - run: deno task openapi:validate - - # ---------------------------------------------------------------- - # 2. Check all operations have operationId - # The scanner uses operationId to build API call graphs. - # Operations without an ID cannot be referenced in scan sequences. - # ---------------------------------------------------------------- - - name: Check operationId coverage - run: | - echo "=== Checking operationId coverage ===" - - # Count HTTP method blocks vs operationId occurrences - METHOD_COUNT=$(grep -cE '^\s+(get|post|put|patch|delete):\s*$' docs/api/openapi.yaml || true) - OPID_COUNT=$(grep -c 'operationId:' docs/api/openapi.yaml || true) - - echo "HTTP methods defined: $METHOD_COUNT" - echo "operationId entries: $OPID_COUNT" - - if [ "$OPID_COUNT" -lt "$METHOD_COUNT" ]; then - echo "" - echo "::error::operationId missing on $((METHOD_COUNT - OPID_COUNT)) operation(s)." - echo "Every operation needs a unique operationId for the API Shield scanner call graphs." - echo "Run: grep -n 'get:\|post:\|put:\|patch:\|delete:' docs/api/openapi.yaml | head -50" - exit 1 - fi - echo "✅ All $OPID_COUNT operations have an operationId" - - # ---------------------------------------------------------------- - # 3. 
Check resource endpoints have security annotations - # Resource endpoints (those with path parameters like /{id}) - # must declare a security requirement so the scanner knows - # which auth scheme to test with. - # ---------------------------------------------------------------- - - name: Check security annotations on resource endpoints - run: | - echo "=== Checking security annotations on resource endpoints ===" - - VIOLATIONS=0 - CURRENT_PATH="" - CURRENT_METHOD="" - HAS_PARAM=false - HAS_SECURITY=false - - check_and_report() { - if [ -n "$CURRENT_METHOD" ] && [ "$HAS_PARAM" = "true" ] && [ "$HAS_SECURITY" = "false" ]; then - if ! echo "$CURRENT_PATH" | grep -q '/admin/'; then - echo "⚠️ Resource endpoint missing security annotation: $CURRENT_PATH ($CURRENT_METHOD)" - VIOLATIONS=$((VIOLATIONS + 1)) - fi - fi - } - - while IFS= read -r line; do - - # Detect HTTP methods under a path (8-space indent in this spec) - if echo "$line" | grep -qE '^\s{8}(get|post|put|patch|delete):\s*$'; then - # Before starting a new method, check the previous one - check_and_report - CURRENT_METHOD=$(echo "$line" | tr -d ' :') - HAS_SECURITY=false - continue - fi - - # Detect security annotations - if echo "$line" | grep -qE '^\s+security:\s*$'; then - HAS_SECURITY=true - continue - fi - - # Detect path entries (4-space indent in this spec) - # Must be checked AFTER method/security so we correctly finalize - # the last method of the previous path before moving on. 
- if echo "$line" | grep -qE '^\s{4}/'; then - # Finalize the last method of the previous path - check_and_report - CURRENT_PATH=$(echo "$line" | tr -d ' :') - CURRENT_METHOD="" - HAS_SECURITY=false - # Set HAS_PARAM based on whether the new path contains a parameter - if echo "$CURRENT_PATH" | grep -qE '\{[^}]+\}'; then - HAS_PARAM=true - else - HAS_PARAM=false - fi - fi - - done < docs/api/openapi.yaml - - # Finalize the very last method in the file - check_and_report - - if [ "$VIOLATIONS" -gt 0 ]; then - echo "" - echo "::warning::$VIOLATIONS resource endpoint(s) are missing 'security:' annotations." - echo "Add security requirements so the API Shield scanner knows which auth scheme to use." - echo "See docs/security/API_SHIELD_VULNERABILITY_SCANNER.md for guidance." - # Warn only — do not fail, as missing annotations don't break functionality - else - echo "✅ All resource endpoints have security annotations" - fi - - # ---------------------------------------------------------------- - # 4. Check cloudflare-schema.yaml is not drifting from openapi.yaml - # Regenerate the schema and fail if it differs from committed. - # ---------------------------------------------------------------- - - name: Check Cloudflare schema is up to date - run: | - echo "=== Checking cloudflare-schema.yaml is up to date ===" - - # Save the current committed schema checksum - BEFORE=$(sha256sum docs/api/cloudflare-schema.yaml | awk '{print $1}') - - # Regenerate - deno task schema:cloudflare - - AFTER=$(sha256sum docs/api/cloudflare-schema.yaml | awk '{print $1}') - - if [ "$BEFORE" != "$AFTER" ]; then - echo "" - echo "::error::docs/api/cloudflare-schema.yaml is out of date." - echo "Run 'deno task schema:cloudflare' and commit the result before merging." - echo "This schema is uploaded to Cloudflare API Shield and must stay in sync with openapi.yaml." 
- git diff docs/api/cloudflare-schema.yaml - exit 1 - fi - echo "✅ cloudflare-schema.yaml matches openapi.yaml" - - # ---------------------------------------------------------------- - # 5. Summary - # ---------------------------------------------------------------- - - name: API Shield readiness summary - if: success() - run: | - echo "" - echo "==================================================" - echo " ✅ API Shield readiness checks passed" - echo "==================================================" - echo "" - echo "Next steps to activate the vulnerability scanner:" - echo " 1. Upload docs/api/cloudflare-schema.yaml to API Shield" - echo " 2. Configure test credentials in HashiCorp Vault" - echo " 3. Enable scanner in Cloudflare Dashboard → Security → API Shield" - echo "" - echo "See docs/security/API_SHIELD_VULNERABILITY_SCANNER.md for full setup guide." + api-shield-readiness: + name: API Shield Readiness Check + runs-on: ubuntu-latest + timeout-minutes: 10 + permissions: + contents: read + + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + + - name: Setup Deno environment + uses: ./.github/actions/setup-deno-env + with: + deno-version: "2.x" + install-deps: "false" + + # ---------------------------------------------------------------- + # 1. Validate the OpenAPI spec + # ---------------------------------------------------------------- + - name: Validate OpenAPI specification + run: deno task openapi:validate + + # ---------------------------------------------------------------- + # 2. Check all operations have operationId + # The scanner uses operationId to build API call graphs. + # Operations without an ID cannot be referenced in scan sequences. 
+ # ---------------------------------------------------------------- + - name: Check operationId coverage + run: | + echo "=== Checking operationId coverage ===" + + # Count HTTP method blocks vs operationId occurrences + METHOD_COUNT=$(grep -cE '^\s+(get|post|put|patch|delete):\s*$' docs/api/openapi.yaml || true) + OPID_COUNT=$(grep -c 'operationId:' docs/api/openapi.yaml || true) + + echo "HTTP methods defined: $METHOD_COUNT" + echo "operationId entries: $OPID_COUNT" + + if [ "$OPID_COUNT" -lt "$METHOD_COUNT" ]; then + echo "" + echo "::error::operationId missing on $((METHOD_COUNT - OPID_COUNT)) operation(s)." + echo "Every operation needs a unique operationId for the API Shield scanner call graphs." + echo "Run: grep -n 'get:\|post:\|put:\|patch:\|delete:' docs/api/openapi.yaml | head -50" + exit 1 + fi + echo "✅ All $OPID_COUNT operations have an operationId" + + # ---------------------------------------------------------------- + # 3. Check resource endpoints have security annotations + # Resource endpoints (those with path parameters like /{id}) + # must declare a security requirement so the scanner knows + # which auth scheme to test with. + # ---------------------------------------------------------------- + - name: Check security annotations on resource endpoints + run: | + echo "=== Checking security annotations on resource endpoints ===" + + VIOLATIONS=0 + CURRENT_PATH="" + CURRENT_METHOD="" + HAS_PARAM=false + HAS_SECURITY=false + + check_and_report() { + if [ -n "$CURRENT_METHOD" ] && [ "$HAS_PARAM" = "true" ] && [ "$HAS_SECURITY" = "false" ]; then + if ! 
echo "$CURRENT_PATH" | grep -q '/admin/'; then + echo "⚠️ Resource endpoint missing security annotation: $CURRENT_PATH ($CURRENT_METHOD)" + VIOLATIONS=$((VIOLATIONS + 1)) + fi + fi + } + + while IFS= read -r line; do + + # Detect HTTP methods under a path (8-space indent in this spec) + if echo "$line" | grep -qE '^\s{8}(get|post|put|patch|delete):\s*$'; then + # Before starting a new method, check the previous one + check_and_report + CURRENT_METHOD=$(echo "$line" | tr -d ' :') + HAS_SECURITY=false + continue + fi + + # Detect security annotations + if echo "$line" | grep -qE '^\s+security:\s*$'; then + HAS_SECURITY=true + continue + fi + + # Detect path entries (4-space indent in this spec) + # Must be checked AFTER method/security so we correctly finalize + # the last method of the previous path before moving on. + if echo "$line" | grep -qE '^\s{4}/'; then + # Finalize the last method of the previous path + check_and_report + CURRENT_PATH=$(echo "$line" | tr -d ' :') + CURRENT_METHOD="" + HAS_SECURITY=false + # Set HAS_PARAM based on whether the new path contains a parameter + if echo "$CURRENT_PATH" | grep -qE '\{[^}]+\}'; then + HAS_PARAM=true + else + HAS_PARAM=false + fi + fi + + done < docs/api/openapi.yaml + + # Finalize the very last method in the file + check_and_report + + if [ "$VIOLATIONS" -gt 0 ]; then + echo "" + echo "::warning::$VIOLATIONS resource endpoint(s) are missing 'security:' annotations." + echo "Add security requirements so the API Shield scanner knows which auth scheme to use." + echo "See docs/security/API_SHIELD_VULNERABILITY_SCANNER.md for guidance." + # Warn only — do not fail, as missing annotations don't break functionality + else + echo "✅ All resource endpoints have security annotations" + fi + + # ---------------------------------------------------------------- + # 4. Check cloudflare-schema.yaml is not drifting from openapi.yaml + # Regenerate the schema and fail if it differs from committed. 
+ # ---------------------------------------------------------------- + - name: Check Cloudflare schema is up to date + run: | + echo "=== Checking cloudflare-schema.yaml is up to date ===" + + # Save the current committed schema checksum + BEFORE=$(sha256sum docs/api/cloudflare-schema.yaml | awk '{print $1}') + + # Regenerate + deno task schema:cloudflare + + AFTER=$(sha256sum docs/api/cloudflare-schema.yaml | awk '{print $1}') + + if [ "$BEFORE" != "$AFTER" ]; then + echo "" + echo "::error::docs/api/cloudflare-schema.yaml is out of date." + echo "Run 'deno task schema:cloudflare' and commit the result before merging." + echo "This schema is uploaded to Cloudflare API Shield and must stay in sync with openapi.yaml." + git diff docs/api/cloudflare-schema.yaml + exit 1 + fi + echo "✅ cloudflare-schema.yaml matches openapi.yaml" + + # ---------------------------------------------------------------- + # 5. Summary + # ---------------------------------------------------------------- + - name: API Shield readiness summary + if: success() + run: | + echo "" + echo "==================================================" + echo " ✅ API Shield readiness checks passed" + echo "==================================================" + echo "" + echo "Next steps to activate the vulnerability scanner:" + echo " 1. Upload docs/api/cloudflare-schema.yaml to API Shield" + echo " 2. Configure test credentials in HashiCorp Vault" + echo " 3. Enable scanner in Cloudflare Dashboard → Security → API Shield" + echo "" + echo "See docs/security/API_SHIELD_VULNERABILITY_SCANNER.md for full setup guide." 
diff --git a/.github/workflows/bench.yml b/.github/workflows/bench.yml index 4e7aa239..60d4a2b0 100644 --- a/.github/workflows/bench.yml +++ b/.github/workflows/bench.yml @@ -2,21 +2,21 @@ name: Benchmark on: push: - branches: [main] + branches: [master, main] paths: - - 'src/**' - - 'deno.json' - - 'deno.lock' + - "src/**" + - "deno.json" + - "deno.lock" pull_request: - branches: [main] + branches: [master, main] paths: - - 'src/**' - - 'deno.json' - - 'deno.lock' + - "src/**" + - "deno.json" + - "deno.lock" workflow_dispatch: env: - DENO_VERSION: '2.x' + DENO_VERSION: "2.x" jobs: bench: diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index fc64ad7f..d5aa8a4d 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -1,978 +1,980 @@ name: CI on: - push: - branches: [main] - pull_request: - branches: [main] - workflow_dispatch: - inputs: - force_deploy_frontend: - description: 'Force deploy the frontend worker even if no frontend files changed' - required: false - default: 'false' - type: choice - options: - - 'false' - - 'true' + push: + branches: [master, main] + pull_request: + branches: [master, main] + workflow_dispatch: + inputs: + force_deploy_frontend: + description: "Force deploy the frontend worker even if no frontend files changed" + required: false + default: "false" + type: choice + options: + - "false" + - "true" env: - DENO_VERSION: '2.x' + DENO_VERSION: "2.x" concurrency: - group: ${{ github.workflow }}-${{ github.ref }} - cancel-in-progress: true + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true jobs: - changes: - name: Detect Changed Paths - runs-on: ubuntu-latest - timeout-minutes: 5 - permissions: - contents: read - outputs: - compiler: ${{ steps.filter.outputs.compiler }} - frontend: ${{ steps.filter.outputs.frontend }} - migrations: ${{ steps.filter.outputs.migrations }} - worker: ${{ steps.filter.outputs.worker }} - steps: - - uses: 
actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - - - name: Detect changed paths - id: filter - uses: dorny/paths-filter@fbd0ab8f3e69293af611ebaee6363fc25e6d187d # v4.0.1 - with: - filters: | - compiler: - - 'src/**' - - '!src/**/*.test.ts' - - 'deno.json' - - 'deno.lock' - frontend: - - 'frontend/**' - - 'pnpm-lock.yaml' - - 'wrangler.toml' - - 'src/version.ts' - migrations: - - 'migrations/**' - - 'admin-migrations/**' - worker: - - 'worker/**' - - 'src/**' - - 'wrangler.toml' - - lint-format: - name: Lint & Format Check - runs-on: ubuntu-latest - timeout-minutes: 10 - needs: [changes] - if: needs.changes.outputs.compiler == 'true' - permissions: - contents: read - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - - - name: Setup Deno environment - uses: ./.github/actions/setup-deno-env - with: - deno-version: ${{ env.DENO_VERSION }} - install-deps: 'false' - - - name: Lint - run: deno lint - - - name: Format check - run: deno fmt --check - - - name: Route-order lint - run: deno task lint:routes - - cloudflare-dep-check: - name: Cloudflare Dependency Check - runs-on: ubuntu-latest - timeout-minutes: 5 - permissions: - contents: read - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - - - name: Setup Deno environment - uses: ./.github/actions/setup-deno-env - with: - deno-version: ${{ env.DENO_VERSION }} - # The upgrade-check script only reads local files and fetches npm metadata; - # it has no npm/jsr imports of its own, so no dep install is required. 
- install-deps: 'false' - - - name: Check Cloudflare dependency versions - run: deno task ci:cloudflare-check - - typecheck: - name: Type Check - runs-on: ubuntu-latest - timeout-minutes: 10 - needs: [changes] - if: needs.changes.outputs.compiler == 'true' - permissions: - contents: read - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - - - name: Setup Deno environment - uses: ./.github/actions/setup-deno-env - with: - deno-version: ${{ env.DENO_VERSION }} - - - name: Type check all entry points - run: deno task check - - test: - name: Test - runs-on: ubuntu-latest - timeout-minutes: 20 - needs: [changes] - if: needs.changes.outputs.compiler == 'true' - permissions: - contents: read - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - - - name: Load environment variables - uses: ./.github/actions/setup-env - - - name: Setup Deno environment - uses: ./.github/actions/setup-deno-env - with: - deno-version: ${{ env.DENO_VERSION }} - - - name: Run tests with coverage - run: deno task test:coverage - env: - DENO_TLS_CA_STORE: system - - - name: Generate coverage report - if: always() && hashFiles('coverage/**') != '' - continue-on-error: true - run: deno coverage coverage --lcov --output=coverage/lcov.info - - - name: Upload coverage to Codecov - if: always() && hashFiles('coverage/lcov.info') != '' - uses: codecov/codecov-action@57e3a136b779b570ffcdbf80b3bdc90e7fab3de2 # v6.0.0 - continue-on-error: true - with: - files: coverage/lcov.info - flags: deno - fail_ci_if_error: false - - security: - name: Security Scan - runs-on: ubuntu-latest - timeout-minutes: 15 - # Security scan should not block deployments + changes: + name: Detect Changed Paths + runs-on: ubuntu-latest + timeout-minutes: 5 + permissions: + contents: read + outputs: + compiler: ${{ steps.filter.outputs.compiler }} + frontend: ${{ steps.filter.outputs.frontend }} + migrations: ${{ steps.filter.outputs.migrations }} + worker: ${{ 
steps.filter.outputs.worker }} + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + + - name: Detect changed paths + id: filter + uses: dorny/paths-filter@fbd0ab8f3e69293af611ebaee6363fc25e6d187d # v4.0.1 + with: + filters: | + compiler: + - 'src/**' + - '!src/**/*.test.ts' + - 'deno.json' + - 'deno.lock' + frontend: + - 'frontend/**' + - 'pnpm-lock.yaml' + - 'wrangler.toml' + - 'src/version.ts' + migrations: + - 'migrations/**' + - 'admin-migrations/**' + worker: + - 'worker/**' + - 'src/**' + - 'wrangler.toml' + + lint-format: + name: Lint & Format Check + runs-on: ubuntu-latest + timeout-minutes: 10 + needs: [changes] + if: needs.changes.outputs.compiler == 'true' + permissions: + contents: read + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + + - name: Setup Deno environment + uses: ./.github/actions/setup-deno-env + with: + deno-version: ${{ env.DENO_VERSION }} + install-deps: "false" + + - name: Lint + run: deno lint + + - name: Format check + run: deno fmt --check + + - name: Route-order lint + run: deno task lint:routes + + cloudflare-dep-check: + name: Cloudflare Dependency Check + runs-on: ubuntu-latest + timeout-minutes: 5 + permissions: + contents: read + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + + - name: Setup Deno environment + uses: ./.github/actions/setup-deno-env + with: + deno-version: ${{ env.DENO_VERSION }} + # The upgrade-check script only reads local files and fetches npm metadata; + # it has no npm/jsr imports of its own, so no dep install is required. 
+ install-deps: "false" + + - name: Check Cloudflare dependency versions + run: deno task ci:cloudflare-check + + typecheck: + name: Type Check + runs-on: ubuntu-latest + timeout-minutes: 10 + needs: [changes] + if: needs.changes.outputs.compiler == 'true' + permissions: + contents: read + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + + - name: Setup Deno environment + uses: ./.github/actions/setup-deno-env + with: + deno-version: ${{ env.DENO_VERSION }} + + - name: Type check all entry points + run: deno task check + + test: + name: Test + runs-on: ubuntu-latest + timeout-minutes: 20 + needs: [changes] + if: needs.changes.outputs.compiler == 'true' + permissions: + contents: read + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + + - name: Load environment variables + uses: ./.github/actions/setup-env + + - name: Setup Deno environment + uses: ./.github/actions/setup-deno-env + with: + deno-version: ${{ env.DENO_VERSION }} + + - name: Run tests with coverage + run: deno task test:coverage + env: + DENO_TLS_CA_STORE: system + + - name: Generate coverage report + if: always() && hashFiles('coverage/**') != '' + continue-on-error: true + run: deno coverage coverage --lcov --output=coverage/lcov.info + + - name: Upload coverage to Codecov + if: always() && hashFiles('coverage/lcov.info') != '' + uses: codecov/codecov-action@57e3a136b779b570ffcdbf80b3bdc90e7fab3de2 # v6.0.0 continue-on-error: true - permissions: - contents: read - security-events: write - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - - - name: Run Trivy vulnerability scanner - uses: aquasecurity/trivy-action@57a97c7e7821a5776cebc9bb87c984fa69cba8f1 # 0.35.0 - continue-on-error: true - with: - scan-type: fs - scan-ref: . 
- format: sarif - output: trivy-results.sarif - severity: 'CRITICAL,HIGH,MEDIUM' - - - name: Upload Trivy results to GitHub Security - if: always() && hashFiles('trivy-results.sarif') != '' - uses: github/codeql-action/upload-sarif@dd677812177e0c29f9c970a6c58d8607ae1bfefd # v4 - continue-on-error: true - with: - sarif_file: trivy-results.sarif - - frontend-lint-test: - name: Frontend (lint, test) - runs-on: ubuntu-latest - timeout-minutes: 15 - needs: [changes] - if: needs.changes.outputs.frontend == 'true' - permissions: - contents: read - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - - - name: Setup pnpm and Node.js - uses: ./.github/actions/setup-pnpm-node - with: - node-version: '22' - - - name: Install dependencies - run: pnpm install --frozen-lockfile - - - name: Lint frontend - run: pnpm --filter adblock-frontend run lint - - - name: Test frontend - run: pnpm --filter adblock-frontend run test - - frontend-build: - name: Frontend (build) - runs-on: ubuntu-latest - timeout-minutes: 15 - needs: [changes] - if: >- - needs.changes.outputs.frontend == 'true' || - needs.changes.outputs.worker == 'true' || - needs.changes.outputs.compiler == 'true' || - (github.event.inputs.force_deploy_frontend || 'false') == 'true' - permissions: - contents: read - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - - - name: Setup pnpm and Node.js - uses: ./.github/actions/setup-pnpm-node - with: - node-version: '22' - - - name: Install dependencies - run: pnpm install --frozen-lockfile - - - name: Build Angular frontend - run: pnpm --filter adblock-frontend run build - - - name: Upload frontend build artifact - uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 - with: - name: frontend-dist - path: frontend/dist/ - retention-days: 1 - - validate-artifacts: - name: Validate Generated Artifacts - runs-on: ubuntu-latest - timeout-minutes: 10 - needs: [changes] - if: 
needs.changes.outputs.compiler == 'true' - permissions: - contents: read - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - - - name: Setup Deno environment - uses: ./.github/actions/setup-deno-env - with: - deno-version: ${{ env.DENO_VERSION }} - - - name: Validate OpenAPI specification - run: deno task openapi:validate - - - name: Regenerate all artifacts - run: deno task schema:generate - - - name: Check all artifacts are up to date - run: | - DRIFT=0 - if ! git diff --quiet docs/api/cloudflare-schema.yaml; then - echo "❌ docs/api/cloudflare-schema.yaml is out of date!" - echo "" - git diff docs/api/cloudflare-schema.yaml - DRIFT=1 - fi - if ! git diff --quiet docs/postman/postman-collection.json docs/postman/postman-environment.json; then - echo "❌ Postman collection or environment is out of date!" - echo "" - git diff docs/postman/postman-collection.json docs/postman/postman-environment.json - DRIFT=1 - fi - if [ "$DRIFT" -eq 1 ]; then - echo "" - echo "Run 'deno task schema:generate' locally, commit the updated files, and push again." - echo "Tip: run 'deno task setup:hooks' to catch drift automatically before every push." 
- exit 1 - fi - echo "✅ All generated artifacts are up to date" - - check-slow-types: - name: Check Slow Types - runs-on: ubuntu-latest - timeout-minutes: 10 - needs: [changes] - if: needs.changes.outputs.compiler == 'true' - permissions: - contents: read - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - - - name: Setup Deno environment - uses: ./.github/actions/setup-deno-env - with: - deno-version: ${{ env.DENO_VERSION }} - - - name: Check for slow types - run: deno publish --dry-run --allow-dirty - - audit-public-surface: - name: Audit Public Surface (IP Protection) - runs-on: ubuntu-latest - timeout-minutes: 10 - needs: [changes] - if: needs.changes.outputs.compiler == 'true' - permissions: - contents: read - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - - - name: Setup Deno environment - uses: ./.github/actions/setup-deno-env - with: - deno-version: ${{ env.DENO_VERSION }} - - - name: Audit public surface for IP leaks - run: | - set -euo pipefail - # Capture the dry-run output; grep for lines listing files to be published - # deno publish --dry-run prints each file it would publish, one per line - PUBLISHED=$(deno publish --dry-run --allow-dirty 2>&1 | grep -oP '(?<= )\S+' || true) - LEAKED=$(echo "$PUBLISHED" | grep -E "^(worker/|frontend/|prisma/|migrations/)" || true) - if [ -n "$LEAKED" ]; then - echo "❌ IP LEAK DETECTED: The following files from private packages are in the JSR publish surface:" - echo "$LEAKED" - exit 1 - fi - echo "✅ Public surface audit passed — no private package files detected" - - validate-migrations: - name: Validate Migrations - runs-on: ubuntu-latest - timeout-minutes: 5 - needs: [changes] - if: needs.changes.outputs.migrations == 'true' - permissions: - contents: read - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - - - name: Setup Deno environment - uses: ./.github/actions/setup-deno-env - with: - deno-version: 
${{ env.DENO_VERSION }} - install-deps: 'false' - - - name: Validate migration file naming and ordering - run: deno task migrate:validate - - zta-lint: - name: ZTA Lint - runs-on: ubuntu-latest - timeout-minutes: 10 - needs: [changes] - if: needs.changes.outputs.worker == 'true' || needs.changes.outputs.frontend == 'true' - permissions: - contents: read - continue-on-error: false - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - - - name: Run ZTA security checks - uses: ./.github/actions/zta-checks - - lockfile-check: - name: Lockfile Sync Check - runs-on: ubuntu-latest - timeout-minutes: 10 - permissions: - contents: read - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - - - name: Setup pnpm and Node.js - uses: ./.github/actions/setup-pnpm-node - with: - node-version: '22' - - - name: Check pnpm lockfile is in sync - run: | - pnpm install --frozen-lockfile --ignore-scripts || { - echo "❌ pnpm-lock.yaml is out of sync with package.json!" - echo "" - echo "Run 'pnpm install' locally, commit the updated pnpm-lock.yaml, and push again." - echo "Tip: run 'deno task setup:hooks' to catch drift automatically before every push." - exit 1 - } - echo "✅ pnpm-lock.yaml is in sync with package.json" - - ci-gate: - name: CI Gate - runs-on: ubuntu-latest - timeout-minutes: 5 - needs: - - changes - - lint-format - - typecheck - - test - - validate-artifacts - - validate-migrations - - frontend-lint-test - - frontend-build - - check-slow-types - - audit-public-surface - # verify-deploy is a PR-only safety check (it is skipped on push to main). - # Keeping it in needs caused GitHub Actions to propagate the 'skipped' result - # upward, which could cause ci-gate itself to be treated as skipped, silently - # preventing deploy and publish from running on push. verify-deploy remains - # a standalone job and still runs on every pull_request. 
- # zta-lint uses path filters and will be skipped when no relevant files change; - # the gate's Python logic treats 'skipped' as passing, so this is safe to include. - - zta-lint - - lockfile-check - if: ${{ always() && !cancelled() }} - steps: - - name: Check all jobs passed - env: - NEEDS_JSON: ${{ toJSON(needs) }} - run: | - echo "Job results: $NEEDS_JSON" - python3 - <<'PY' - import os - import json - - needs = json.loads(os.environ['NEEDS_JSON']) - cancelled = [name for name, data in needs.items() if data['result'] == 'cancelled'] - if cancelled: - print('Run interrupted by concurrency cancellation (cancelled jobs: ' + ', '.join(cancelled) + ')') - print('A newer run will verify these checks — exiting gracefully.') - raise SystemExit(0) - failures = [name for name, data in needs.items() if data['result'] not in ('success', 'skipped')] - if failures: - print('FAILED: ' + ', '.join(failures)) - raise SystemExit(1) - print('All checks passed') - PY - - - name: Write CI summary - if: always() - env: - NEEDS_JSON: ${{ toJSON(needs) }} - run: | - python3 - <<'PY' - import os, json - needs = json.loads(os.environ['NEEDS_JSON']) - lines = ["## CI Gate Summary", "", "| Job | Result |", "|---|---|"] - icons = {'success': '✅', 'failure': '❌', 'skipped': '⏭️', 'cancelled': '🚫'} - for name, data in needs.items(): - result = data['result'] - icon = icons.get(result, '❓') - lines.append(f"| `{name}` | {icon} {result} |") - print('\n'.join(lines)) - with open(os.environ['GITHUB_STEP_SUMMARY'], 'a') as f: - f.write('\n'.join(lines) + '\n') - PY - - - publish: - name: Publish to JSR - runs-on: ubuntu-latest - timeout-minutes: 10 - needs: [ci-gate, changes] - # verify-deploy is no longer in ci-gate's needs (Issue 1 fix), so the always() guard - # is kept as a safety net for any other skipped dependency. ci-gate.result == 'success' - # is the real gate. 
- if: >- - always() && - needs.ci-gate.result == 'success' && - github.event_name == 'push' && - github.ref == 'refs/heads/main' && - needs.changes.outputs.compiler == 'true' - permissions: - contents: read - id-token: write - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - - - name: Setup Deno environment - uses: ./.github/actions/setup-deno-env - with: - deno-version: ${{ env.DENO_VERSION }} - - - name: Check if version already published on JSR - id: jsr_check - run: | - set -euo pipefail - VERSION=$(deno eval "console.log(JSON.parse(Deno.readTextFileSync('deno.json')).version)") - # Read the JSR package name from deno.json so the check stays in sync - # even if the package scope/name is changed in the future. - PACKAGE=$(deno eval "console.log(JSON.parse(Deno.readTextFileSync('deno.json')).name)") - if [ -z "${VERSION}" ] || [ "${VERSION}" = "undefined" ]; then - echo "Error: deno.json.version is missing, empty, or undefined. Cannot check/publish to JSR." >&2 - exit 1 - fi - if [ -z "${PACKAGE}" ] || [ "${PACKAGE}" = "undefined" ]; then - echo "Error: deno.json.name (JSR package name) is missing, empty, or undefined. Cannot check/publish to JSR." >&2 - exit 1 - fi - echo "version=$VERSION" >> "$GITHUB_OUTPUT" - echo "package=$PACKAGE" >> "$GITHUB_OUTPUT" - HTTP_STATUS=$(curl -s -o /dev/null -w "%{http_code}" \ - "https://jsr.io/${PACKAGE}/${VERSION}_meta.json") - if [ "$HTTP_STATUS" = "200" ]; then - echo "already_published=true" >> "$GITHUB_OUTPUT" - echo "[SKIP] Version ${VERSION} already on JSR (HTTP 200) — skipping publish" - elif [ "$HTTP_STATUS" = "404" ]; then - echo "already_published=false" >> "$GITHUB_OUTPUT" - echo "Version ${VERSION} not yet on JSR (HTTP 404) — will publish" - else - echo "Failed to probe JSR for ${PACKAGE}@${VERSION}. HTTP status: ${HTTP_STATUS}" >&2 - echo "Treating this as a connectivity/auth error so CI can surface the real issue." 
>&2 - exit 1 - fi - - - name: Publish to JSR - if: steps.jsr_check.outputs.already_published == 'false' - run: | - set -euo pipefail - if ! OUTPUT="$(deno publish --allow-dirty 2>&1)"; then - echo "${OUTPUT}" - if echo "${OUTPUT}" | grep -qiE 'version .*already (exists|published)'; then - echo "Version already published, skipping." - exit 0 - fi - echo "Publish failed with unexpected error." - exit 1 - fi - echo "Successfully published to JSR" - - - name: Publish summary - if: always() - env: - VERSION: ${{ steps.jsr_check.outputs.version }} - PACKAGE: ${{ steps.jsr_check.outputs.package }} - ALREADY_PUBLISHED: ${{ steps.jsr_check.outputs.already_published }} - run: | - set -euo pipefail - if [ "${ALREADY_PUBLISHED}" = "true" ]; then - cat <> "$GITHUB_STEP_SUMMARY" - ## ⏭️ JSR Publish Skipped - - Version **${VERSION}** was already published on JSR. - - 📦 https://jsr.io/${PACKAGE}@${VERSION} - EOF - else - cat <> "$GITHUB_STEP_SUMMARY" - ## 📦 Published to JSR - - Version **${VERSION}** has been published. - - 📦 https://jsr.io/${PACKAGE}@${VERSION} - EOF - fi - - # Verify the worker bundles correctly on every PR — catches build failures - # before they reach main. No secrets required; --dry-run bundles without - # uploading to Cloudflare. Runs in parallel with other checks (only waits - # for frontend-build to produce the artifact) so failures are surfaced fast. 
- verify-deploy: - name: Verify Worker Build - runs-on: ubuntu-latest - timeout-minutes: 15 - needs: [changes, frontend-build] - if: >- - always() && - (needs.frontend-build.result == 'success' || needs.frontend-build.result == 'skipped') && - (github.event_name == 'pull_request' || github.event_name == 'workflow_dispatch') && - (needs.changes.outputs.worker == 'true' || needs.changes.outputs.frontend == 'true') - permissions: - contents: read - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - - - name: Validate wrangler.toml - uses: ./.github/actions/validate-wrangler-toml - - - name: Load environment variables - uses: ./.github/actions/setup-env - - - name: Setup Deno environment - uses: ./.github/actions/setup-deno-env - with: - deno-version: ${{ env.DENO_VERSION }} - - - name: Download frontend build artifact - if: needs.frontend-build.result == 'success' - uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1 - with: - name: frontend-dist - path: frontend/dist/ - - - name: Verify worker build (dry run) - run: | - for i in 1 2 3; do - deno task wrangler:verify && break - if [ "$i" -lt 3 ]; then - echo "Attempt $i failed, retrying in 15s..." - sleep 15 - else - echo "All 3 attempts failed." - exit 1 - fi - done - - # Deploy jobs only run on main pushes - deploy: - name: Deploy to Cloudflare - runs-on: ubuntu-latest - timeout-minutes: 30 - needs: [ci-gate, changes, frontend-build] - # verify-deploy is no longer in ci-gate's needs (Issue 1 fix), so the always() guard - # is kept as a safety net for any other skipped dependency. ci-gate.result == 'success' - # is the real gate. - # frontend-build always runs when worker or compiler files change (same triggers as this - # job), so the frontend artifact is always available when the deploy runs. 
- if: >- - always() && - needs.ci-gate.result == 'success' && - (needs.frontend-build.result == 'success' || needs.frontend-build.result == 'skipped') && - github.event_name == 'push' && - github.ref == 'refs/heads/main' && - (needs.changes.outputs.worker == 'true' || needs.changes.outputs.compiler == 'true') - permissions: - contents: read + with: + files: coverage/lcov.info + flags: deno + fail_ci_if_error: false + + security: + name: Security Scan + runs-on: ubuntu-latest + timeout-minutes: 15 + # Security scan should not block deployments + continue-on-error: true + permissions: + contents: read + security-events: write + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + + - name: Run Trivy vulnerability scanner + uses: aquasecurity/trivy-action@57a97c7e7821a5776cebc9bb87c984fa69cba8f1 # 0.35.0 + continue-on-error: true + with: + scan-type: fs + scan-ref: . + format: sarif + output: trivy-results.sarif + severity: "CRITICAL,HIGH,MEDIUM" + + - name: Upload Trivy results to GitHub Security + if: always() && hashFiles('trivy-results.sarif') != '' + uses: github/codeql-action/upload-sarif@dd677812177e0c29f9c970a6c58d8607ae1bfefd # v4 + continue-on-error: true + with: + sarif_file: trivy-results.sarif + + frontend-lint-test: + name: Frontend (lint, test) + runs-on: ubuntu-latest + timeout-minutes: 15 + needs: [changes] + if: needs.changes.outputs.frontend == 'true' + permissions: + contents: read + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + + - name: Setup pnpm and Node.js + uses: ./.github/actions/setup-pnpm-node + with: + node-version: "22" + + - name: Install dependencies + run: pnpm install --frozen-lockfile + + - name: Lint frontend + run: pnpm --filter adblock-frontend run lint + + - name: Test frontend + run: pnpm --filter adblock-frontend run test + + frontend-build: + name: Frontend (build) + runs-on: ubuntu-latest + timeout-minutes: 15 + needs: [changes] + if: >- + 
needs.changes.outputs.frontend == 'true' || + needs.changes.outputs.worker == 'true' || + needs.changes.outputs.compiler == 'true' || + (github.event.inputs.force_deploy_frontend || 'false') == 'true' + permissions: + contents: read + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + + - name: Setup pnpm and Node.js + uses: ./.github/actions/setup-pnpm-node + with: + node-version: "22" + + - name: Install dependencies + run: pnpm install --frozen-lockfile + + - name: Build Angular frontend + run: pnpm --filter adblock-frontend run build + + - name: Upload frontend build artifact + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 + with: + name: frontend-dist + path: frontend/dist/ + retention-days: 1 + + validate-artifacts: + name: Validate Generated Artifacts + runs-on: ubuntu-latest + timeout-minutes: 10 + needs: [changes] + if: needs.changes.outputs.compiler == 'true' + permissions: + contents: read + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + + - name: Setup Deno environment + uses: ./.github/actions/setup-deno-env + with: + deno-version: ${{ env.DENO_VERSION }} + + - name: Validate OpenAPI specification + run: deno task openapi:validate + + - name: Regenerate all artifacts + run: deno task schema:generate + + - name: Check all artifacts are up to date + run: | + DRIFT=0 + if ! git diff --quiet docs/api/cloudflare-schema.yaml; then + echo "❌ docs/api/cloudflare-schema.yaml is out of date!" + echo "" + git diff docs/api/cloudflare-schema.yaml + DRIFT=1 + fi + if ! git diff --quiet docs/postman/postman-collection.json docs/postman/postman-environment.json; then + echo "❌ Postman collection or environment is out of date!" 
+ echo "" + git diff docs/postman/postman-collection.json docs/postman/postman-environment.json + DRIFT=1 + fi + if [ "$DRIFT" -eq 1 ]; then + echo "" + echo "Run 'deno task schema:generate' locally, commit the updated files, and push again." + echo "Tip: run 'deno task setup:hooks' to catch drift automatically before every push." + exit 1 + fi + echo "✅ All generated artifacts are up to date" + + check-slow-types: + name: Check Slow Types + runs-on: ubuntu-latest + timeout-minutes: 10 + needs: [changes] + if: needs.changes.outputs.compiler == 'true' + permissions: + contents: read + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + + - name: Setup Deno environment + uses: ./.github/actions/setup-deno-env + with: + deno-version: ${{ env.DENO_VERSION }} + + - name: Check for slow types + run: deno publish --dry-run --allow-dirty + + audit-public-surface: + name: Audit Public Surface (IP Protection) + runs-on: ubuntu-latest + timeout-minutes: 10 + needs: [changes] + if: needs.changes.outputs.compiler == 'true' + permissions: + contents: read + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + + - name: Setup Deno environment + uses: ./.github/actions/setup-deno-env + with: + deno-version: ${{ env.DENO_VERSION }} + + - name: Audit public surface for IP leaks + run: | + set -euo pipefail + # Capture the dry-run output; grep for lines listing files to be published + # deno publish --dry-run prints each file it would publish, one per line + PUBLISHED=$(deno publish --dry-run --allow-dirty 2>&1 | grep -oP '(?<= )\S+' || true) + LEAKED=$(echo "$PUBLISHED" | grep -E "^(worker/|frontend/|prisma/|migrations/)" || true) + if [ -n "$LEAKED" ]; then + echo "❌ IP LEAK DETECTED: The following files from private packages are in the JSR publish surface:" + echo "$LEAKED" + exit 1 + fi + echo "✅ Public surface audit passed — no private package files detected" + + validate-migrations: + name: Validate Migrations + 
runs-on: ubuntu-latest + timeout-minutes: 5 + needs: [changes] + if: needs.changes.outputs.migrations == 'true' + permissions: + contents: read + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + + - name: Setup Deno environment + uses: ./.github/actions/setup-deno-env + with: + deno-version: ${{ env.DENO_VERSION }} + install-deps: "false" + + - name: Validate migration file naming and ordering + run: deno task migrate:validate + + zta-lint: + name: ZTA Lint + runs-on: ubuntu-latest + timeout-minutes: 10 + needs: [changes] + if: needs.changes.outputs.worker == 'true' || needs.changes.outputs.frontend == 'true' + permissions: + contents: read + continue-on-error: false + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + + - name: Run ZTA security checks + uses: ./.github/actions/zta-checks + + lockfile-check: + name: Lockfile Sync Check + runs-on: ubuntu-latest + timeout-minutes: 10 + permissions: + contents: read + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + + - name: Setup pnpm and Node.js + uses: ./.github/actions/setup-pnpm-node + with: + node-version: "22" + + - name: Check pnpm lockfile is in sync + run: | + pnpm install --frozen-lockfile --ignore-scripts || { + echo "❌ pnpm-lock.yaml is out of sync with package.json!" + echo "" + echo "Run 'pnpm install' locally, commit the updated pnpm-lock.yaml, and push again." + echo "Tip: run 'deno task setup:hooks' to catch drift automatically before every push." 
+ exit 1 + } + echo "✅ pnpm-lock.yaml is in sync with package.json" + + ci-gate: + name: CI Gate + runs-on: ubuntu-latest + timeout-minutes: 5 + permissions: + contents: read + needs: + - changes + - lint-format + - typecheck + - test + - validate-artifacts + - validate-migrations + - frontend-lint-test + - frontend-build + - check-slow-types + - audit-public-surface + # verify-deploy is a PR-only safety check (it is skipped on pushes to the + # repository default branch). + # Keeping it in needs caused GitHub Actions to propagate the 'skipped' result + # upward, which could cause ci-gate itself to be treated as skipped, silently + # preventing deploy and publish from running on push. verify-deploy remains + # a standalone job and still runs on every pull_request. + # zta-lint uses path filters and will be skipped when no relevant files change; + # the gate's Python logic treats 'skipped' as passing, so this is safe to include. + - zta-lint + - lockfile-check + if: ${{ always() && !cancelled() }} + steps: + - name: Check all jobs passed + env: + NEEDS_JSON: ${{ toJSON(needs) }} + run: | + echo "Job results: $NEEDS_JSON" + python3 - <<'PY' + import os + import json + + needs = json.loads(os.environ['NEEDS_JSON']) + cancelled = [name for name, data in needs.items() if data['result'] == 'cancelled'] + if cancelled: + print('Run interrupted by concurrency cancellation (cancelled jobs: ' + ', '.join(cancelled) + ')') + print('A newer run will verify these checks — exiting gracefully.') + raise SystemExit(0) + failures = [name for name, data in needs.items() if data['result'] not in ('success', 'skipped')] + if failures: + print('FAILED: ' + ', '.join(failures)) + raise SystemExit(1) + print('All checks passed') + PY + + - name: Write CI summary + if: always() + env: + NEEDS_JSON: ${{ toJSON(needs) }} + run: | + python3 - <<'PY' + import os, json + needs = json.loads(os.environ['NEEDS_JSON']) + lines = ["## CI Gate Summary", "", "| Job | Result |", "|---|---|"] + icons = 
{'success': '✅', 'failure': '❌', 'skipped': '⏭️', 'cancelled': '🚫'} + for name, data in needs.items(): + result = data['result'] + icon = icons.get(result, '❓') + lines.append(f"| `{name}` | {icon} {result} |") + print('\n'.join(lines)) + with open(os.environ['GITHUB_STEP_SUMMARY'], 'a') as f: + f.write('\n'.join(lines) + '\n') + PY + + publish: + name: Publish to JSR + runs-on: ubuntu-latest + timeout-minutes: 10 + needs: [ci-gate, changes] + # verify-deploy is no longer in ci-gate's needs (Issue 1 fix), so the always() guard + # is kept as a safety net for any other skipped dependency. ci-gate.result == 'success' + # is the real gate. + if: >- + always() && + needs.ci-gate.result == 'success' && + github.event_name == 'push' && + github.ref == format('refs/heads/{0}', github.event.repository.default_branch) && + needs.changes.outputs.compiler == 'true' + permissions: + contents: read + id-token: write + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + + - name: Setup Deno environment + uses: ./.github/actions/setup-deno-env + with: + deno-version: ${{ env.DENO_VERSION }} + + - name: Check if version already published on JSR + id: jsr_check + run: | + set -euo pipefail + VERSION=$(deno eval "console.log(JSON.parse(Deno.readTextFileSync('deno.json')).version)") + # Read the JSR package name from deno.json so the check stays in sync + # even if the package scope/name is changed in the future. + PACKAGE=$(deno eval "console.log(JSON.parse(Deno.readTextFileSync('deno.json')).name)") + if [ -z "${VERSION}" ] || [ "${VERSION}" = "undefined" ]; then + echo "Error: deno.json.version is missing, empty, or undefined. Cannot check/publish to JSR." >&2 + exit 1 + fi + if [ -z "${PACKAGE}" ] || [ "${PACKAGE}" = "undefined" ]; then + echo "Error: deno.json.name (JSR package name) is missing, empty, or undefined. Cannot check/publish to JSR." 
>&2 + exit 1 + fi + echo "version=$VERSION" >> "$GITHUB_OUTPUT" + echo "package=$PACKAGE" >> "$GITHUB_OUTPUT" + HTTP_STATUS=$(curl -s -o /dev/null -w "%{http_code}" \ + "https://jsr.io/${PACKAGE}/${VERSION}_meta.json") + if [ "$HTTP_STATUS" = "200" ]; then + echo "already_published=true" >> "$GITHUB_OUTPUT" + echo "[SKIP] Version ${VERSION} already on JSR (HTTP 200) — skipping publish" + elif [ "$HTTP_STATUS" = "404" ]; then + echo "already_published=false" >> "$GITHUB_OUTPUT" + echo "Version ${VERSION} not yet on JSR (HTTP 404) — will publish" + else + echo "Failed to probe JSR for ${PACKAGE}@${VERSION}. HTTP status: ${HTTP_STATUS}" >&2 + echo "Treating this as a connectivity/auth error so CI can surface the real issue." >&2 + exit 1 + fi + + - name: Publish to JSR + if: steps.jsr_check.outputs.already_published == 'false' + run: | + set -euo pipefail + if ! OUTPUT="$(deno publish --allow-dirty 2>&1)"; then + echo "${OUTPUT}" + if echo "${OUTPUT}" | grep -qiE 'version .*already (exists|published)'; then + echo "Version already published, skipping." + exit 0 + fi + echo "Publish failed with unexpected error." 
+ exit 1 + fi + echo "Successfully published to JSR" + + - name: Publish summary + if: always() env: - CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }} - CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} - CF_WEB_ANALYTICS_TOKEN: ${{ secrets.CF_WEB_ANALYTICS_TOKEN }} - DIRECT_DATABASE_URL: ${{ secrets.DIRECT_DATABASE_URL }} - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - - - name: Validate wrangler.toml - uses: ./.github/actions/validate-wrangler-toml - - - name: Load environment variables - uses: ./.github/actions/setup-env - - - name: Setup Deno environment - uses: ./.github/actions/setup-deno-env - with: - deno-version: ${{ env.DENO_VERSION }} - - - name: Download frontend build artifact - if: needs.frontend-build.result == 'success' - uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1 - with: - name: frontend-dist - path: frontend/dist/ - - - name: Deploy worker - uses: ./.github/actions/deploy-worker - with: - github-sha: ${{ github.sha }} - cloudflare-containers-token: ${{ secrets.CLOUDFLARE_CONTAINERS_TOKEN }} - - - name: Show deploy version - if: always() - run: | - set -euo pipefail - VERSION=$(deno eval "console.log(JSON.parse(Deno.readTextFileSync('deno.json')).version)") - echo "## 🚀 Deploying version $VERSION to Cloudflare" >> "$GITHUB_STEP_SUMMARY" - - deploy-frontend: - name: Deploy Frontend Worker - runs-on: ubuntu-latest - timeout-minutes: 15 - needs: [ci-gate, changes, frontend-build] - # verify-deploy is no longer in ci-gate's needs (Issue 1 fix), so the always() guard - # is kept as a safety net for any other skipped dependency. ci-gate.result == 'success' - # is the real gate. 
-    if: >-
-      always() &&
-      needs.ci-gate.result == 'success' &&
-      needs.frontend-build.result == 'success' &&
-      github.ref == 'refs/heads/main' &&
-      (github.event_name == 'push' || github.event_name == 'workflow_dispatch') &&
-      (
-        needs.changes.outputs.frontend == 'true' ||
-        needs.changes.outputs.worker == 'true' ||
-        needs.changes.outputs.compiler == 'true' ||
-        (github.event.inputs.force_deploy_frontend || 'false') == 'true'
-      )
-    permissions:
-      contents: read
+          VERSION: ${{ steps.jsr_check.outputs.version }}
+          PACKAGE: ${{ steps.jsr_check.outputs.package }}
+          ALREADY_PUBLISHED: ${{ steps.jsr_check.outputs.already_published }}
+        run: |
+          set -euo pipefail
+          if [ "${ALREADY_PUBLISHED}" = "true" ]; then
+            cat <<EOF >> "$GITHUB_STEP_SUMMARY"
+            ## ⏭️ JSR Publish Skipped
+
+            Version **${VERSION}** was already published on JSR.
+
+            📦 https://jsr.io/${PACKAGE}@${VERSION}
+          EOF
+          else
+            cat <<EOF >> "$GITHUB_STEP_SUMMARY"
+            ## 📦 Published to JSR
+
+            Version **${VERSION}** has been published.
+
+            📦 https://jsr.io/${PACKAGE}@${VERSION}
+          EOF
+          fi
+
+  # Verify the worker bundles correctly on every PR — catches build failures
+  # before they reach main. No secrets required; --dry-run bundles without
+  # uploading to Cloudflare. Runs in parallel with other checks (only waits
+  # for frontend-build to produce the artifact) so failures are surfaced fast.
+ verify-deploy: + name: Verify Worker Build + runs-on: ubuntu-latest + timeout-minutes: 15 + needs: [changes, frontend-build] + if: >- + always() && + (needs.frontend-build.result == 'success' || needs.frontend-build.result == 'skipped') && + (github.event_name == 'pull_request' || github.event_name == 'workflow_dispatch') && + (needs.changes.outputs.worker == 'true' || needs.changes.outputs.frontend == 'true') + permissions: + contents: read + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + + - name: Validate wrangler.toml + uses: ./.github/actions/validate-wrangler-toml + + - name: Load environment variables + uses: ./.github/actions/setup-env + + - name: Setup Deno environment + uses: ./.github/actions/setup-deno-env + with: + deno-version: ${{ env.DENO_VERSION }} + + - name: Download frontend build artifact + if: needs.frontend-build.result == 'success' + uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1 + with: + name: frontend-dist + path: frontend/dist/ + + - name: Verify worker build (dry run) + run: | + for i in 1 2 3; do + deno task wrangler:verify && break + if [ "$i" -lt 3 ]; then + echo "Attempt $i failed, retrying in 15s..." + sleep 15 + else + echo "All 3 attempts failed." + exit 1 + fi + done + + # Deploy jobs only run on pushes to the repository default branch. + deploy: + name: Deploy to Cloudflare + runs-on: ubuntu-latest + timeout-minutes: 30 + needs: [ci-gate, changes, frontend-build] + # verify-deploy is no longer in ci-gate's needs (Issue 1 fix), so the always() guard + # is kept as a safety net for any other skipped dependency. ci-gate.result == 'success' + # is the real gate. + # frontend-build always runs when worker or compiler files change (same triggers as this + # job), so the frontend artifact is always available when the deploy runs. 
+ if: >- + always() && + needs.ci-gate.result == 'success' && + (needs.frontend-build.result == 'success' || needs.frontend-build.result == 'skipped') && + github.event_name == 'push' && + github.ref == format('refs/heads/{0}', github.event.repository.default_branch) && + (needs.changes.outputs.worker == 'true' || needs.changes.outputs.compiler == 'true') + permissions: + contents: read + env: + CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }} + CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} + CF_WEB_ANALYTICS_TOKEN: ${{ secrets.CF_WEB_ANALYTICS_TOKEN }} + DIRECT_DATABASE_URL: ${{ secrets.DIRECT_DATABASE_URL }} + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + + - name: Validate wrangler.toml + uses: ./.github/actions/validate-wrangler-toml + + - name: Load environment variables + uses: ./.github/actions/setup-env + + - name: Setup Deno environment + uses: ./.github/actions/setup-deno-env + with: + deno-version: ${{ env.DENO_VERSION }} + + - name: Download frontend build artifact + if: needs.frontend-build.result == 'success' + uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1 + with: + name: frontend-dist + path: frontend/dist/ + + - name: Deploy worker + uses: ./.github/actions/deploy-worker + with: + github-sha: ${{ github.sha }} + cloudflare-containers-token: ${{ secrets.CLOUDFLARE_CONTAINERS_TOKEN }} + + - name: Show deploy version + if: always() + run: | + set -euo pipefail + VERSION=$(deno eval "console.log(JSON.parse(Deno.readTextFileSync('deno.json')).version)") + echo "## 🚀 Deploying version $VERSION to Cloudflare" >> "$GITHUB_STEP_SUMMARY" + + deploy-frontend: + name: Deploy Frontend Worker + runs-on: ubuntu-latest + timeout-minutes: 15 + needs: [ci-gate, changes, frontend-build] + # verify-deploy is no longer in ci-gate's needs (Issue 1 fix), so the always() guard + # is kept as a safety net for any other skipped dependency. 
ci-gate.result == 'success' + # is the real gate. + if: >- + always() && + needs.ci-gate.result == 'success' && + needs.frontend-build.result == 'success' && + github.ref == format('refs/heads/{0}', github.event.repository.default_branch) && + (github.event_name == 'push' || github.event_name == 'workflow_dispatch') && + ( + needs.changes.outputs.frontend == 'true' || + needs.changes.outputs.worker == 'true' || + needs.changes.outputs.compiler == 'true' || + (github.event.inputs.force_deploy_frontend || 'false') == 'true' + ) + permissions: + contents: read + env: + CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }} + CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} + CF_WEB_ANALYTICS_TOKEN: ${{ secrets.CF_WEB_ANALYTICS_TOKEN }} + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + + - name: Setup pnpm and Node.js + uses: ./.github/actions/setup-pnpm-node + with: + node-version: "22" + + - name: Install frontend dependencies + run: pnpm install --frozen-lockfile + + - name: Download frontend build artifact + uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1 + with: + name: frontend-dist + path: frontend/dist/ + + - name: Inject CF Web Analytics token into built HTML env: - CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }} - CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} - CF_WEB_ANALYTICS_TOKEN: ${{ secrets.CF_WEB_ANALYTICS_TOKEN }} - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - - - name: Setup pnpm and Node.js - uses: ./.github/actions/setup-pnpm-node - with: - node-version: '22' - - - name: Install frontend dependencies - run: pnpm install --frozen-lockfile - - - name: Download frontend build artifact - uses: actions/download-artifact@3e5f45b2cfb9172054b4087a40e8e0b5a5461e7c # v8.0.1 - with: - name: frontend-dist - path: frontend/dist/ - - - name: Inject CF Web Analytics token into built HTML - env: - 
CF_WEB_ANALYTICS_TOKEN: ${{ secrets.CF_WEB_ANALYTICS_TOKEN }} - run: sh scripts/build-worker.sh - - - name: Deploy Frontend Worker - if: env.CLOUDFLARE_API_TOKEN != '' - working-directory: frontend - run: | - echo "Deploying adblock-frontend..." - pnpm run deploy - - smoke-test-backend: - name: Smoke Test — Backend - runs-on: ubuntu-latest - timeout-minutes: 5 - needs: [deploy] - permissions: {} - if: >- - always() && - needs.deploy.result == 'success' && - github.event_name == 'push' && - github.ref == 'refs/heads/main' - steps: - - name: Wait for Cloudflare propagation - run: sleep 15 - - - name: Check /api/health - id: health - run: | - set -euo pipefail - HTTP="" - CURL_EXIT=0 - set +e - HTTP=$(curl -sS -o /tmp/health.json -w "%{http_code}" \ - https://adblock-compiler.jayson-knight.workers.dev/api/health) - CURL_EXIT=$? - set -e - if [ "$CURL_EXIT" -ne 0 ]; then - echo "❌ Network error while calling /api/health (curl exit $CURL_EXIT)" - echo "http_code=000" >> "$GITHUB_OUTPUT" - exit 1 - fi - echo "http_code=$HTTP" >> "$GITHUB_OUTPUT" - if [ "$HTTP" != "200" ]; then - echo "❌ /api/health returned HTTP $HTTP" - exit 1 - fi - # "degraded" is acceptable — it means non-critical services (e.g. Hyperdrive) are - # slow but the API is functional. Only "down" indicates a complete outage. - jq -e '.status == "healthy" or .status == "degraded"' /tmp/health.json || { - echo "❌ /api/health status is not healthy or degraded:" - cat /tmp/health.json - exit 1 - } - echo "✅ /api/health OK ($(jq -r .status /tmp/health.json))" - - - name: Check /api/version - id: version - run: | - set -euo pipefail - HTTP="" - CURL_EXIT=0 - set +e - HTTP=$(curl -sS -o /tmp/version.json -w "%{http_code}" \ - https://adblock-compiler.jayson-knight.workers.dev/api/version) - CURL_EXIT=$? 
- set -e - if [ "$CURL_EXIT" -ne 0 ]; then - echo "❌ Network error while calling /api/version (curl exit $CURL_EXIT)" - echo "http_code=000" >> "$GITHUB_OUTPUT" - exit 1 - fi - echo "http_code=$HTTP" >> "$GITHUB_OUTPUT" - if [ "$HTTP" != "200" ]; then - echo "❌ /api/version returned HTTP $HTTP" - exit 1 - fi - echo "✅ /api/version OK" - - - name: Check /api/auth/providers - id: auth - run: | - set -euo pipefail - HTTP="" - CURL_EXIT=0 - set +e - HTTP=$(curl -sS -o /dev/null -w "%{http_code}" \ - https://adblock-compiler.jayson-knight.workers.dev/api/auth/providers) - CURL_EXIT=$? - set -e - if [ "$CURL_EXIT" -ne 0 ]; then - echo "❌ Network error while calling /api/auth/providers (curl exit $CURL_EXIT)" - echo "http_code=000" >> "$GITHUB_OUTPUT" - exit 1 - fi - echo "http_code=$HTTP" >> "$GITHUB_OUTPUT" - if [ "$HTTP" != "200" ]; then - echo "❌ /api/auth/providers returned HTTP $HTTP" - exit 1 - fi - echo "✅ /api/auth/providers OK" - - - name: Write smoke test summary - if: always() - run: | - HEALTH_CODE="${{ steps.health.outputs.http_code || '—' }}" - VERSION_CODE="${{ steps.version.outputs.http_code || '—' }}" - AUTH_CODE="${{ steps.auth.outputs.http_code || '—' }}" - cat >> "$GITHUB_STEP_SUMMARY" << EOF - ## 🔍 Backend Smoke Tests - - | Endpoint | HTTP Status | Result | - |---|---|---| - | \`/api/health\` | $HEALTH_CODE | $( [ "$HEALTH_CODE" = "200" ] && echo "✅ OK" || echo "❌ FAIL" ) | - | \`/api/version\` | $VERSION_CODE | $( [ "$VERSION_CODE" = "200" ] && echo "✅ OK" || echo "❌ FAIL" ) | - | \`/api/auth/providers\` | $AUTH_CODE | $( [ "$AUTH_CODE" = "200" ] && echo "✅ OK" || echo "❌ FAIL" ) | - EOF - - smoke-test-frontend: - name: Smoke Test — Frontend - runs-on: ubuntu-latest - timeout-minutes: 5 - needs: [deploy-frontend] - permissions: {} - if: >- - always() && - needs.deploy-frontend.result == 'success' && - github.event_name == 'push' && - github.ref == 'refs/heads/main' - steps: - - name: Wait for Cloudflare propagation - run: sleep 15 - - - name: Check 
homepage returns 200 - id: homepage - run: | - set -euo pipefail - HTTP="" - CURL_EXIT=0 - set +e - HTTP=$(curl -sS -o /dev/null -w "%{http_code}" \ - https://adblock-frontend.jayson-knight.workers.dev/) - CURL_EXIT=$? - set -e - if [ "$CURL_EXIT" -ne 0 ]; then - echo "❌ Network error while calling homepage (curl exit $CURL_EXIT)" - echo "http_code=000" >> "$GITHUB_OUTPUT" - exit 1 - fi - echo "http_code=$HTTP" >> "$GITHUB_OUTPUT" - if [ "$HTTP" != "200" ]; then - echo "❌ Homepage returned HTTP $HTTP" - exit 1 - fi - echo "✅ Homepage OK" - - - name: Check SSR API proxy (/api/auth/providers) - id: proxy - run: | - set -euo pipefail - HTTP="" - CURL_EXIT=0 - set +e - HTTP=$(curl -sS -o /dev/null -w "%{http_code}" \ - https://adblock-frontend.jayson-knight.workers.dev/api/auth/providers) - CURL_EXIT=$? - set -e - if [ "$CURL_EXIT" -ne 0 ]; then - echo "❌ Network error while calling SSR API proxy (curl exit $CURL_EXIT)" - echo "http_code=000" >> "$GITHUB_OUTPUT" - exit 1 - fi - echo "http_code=$HTTP" >> "$GITHUB_OUTPUT" - if [ "$HTTP" != "200" ]; then - echo "❌ SSR API proxy /api/auth/providers returned HTTP $HTTP" - exit 1 - fi - echo "✅ SSR API proxy OK" - - - name: Check health via SSR proxy - id: health - run: | - set -euo pipefail - HTTP="" - CURL_EXIT=0 - set +e - HTTP=$(curl -sS -o /tmp/fe-health.json -w "%{http_code}" \ - https://adblock-frontend.jayson-knight.workers.dev/api/health) - CURL_EXIT=$? - set -e - if [ "$CURL_EXIT" -ne 0 ]; then - echo "❌ Network error while calling /api/health via frontend (curl exit $CURL_EXIT)" - echo "http_code=000" >> "$GITHUB_OUTPUT" - exit 1 - fi - echo "http_code=$HTTP" >> "$GITHUB_OUTPUT" - if [ "$HTTP" != "200" ]; then - echo "❌ /api/health via frontend returned HTTP $HTTP" - exit 1 - fi - # "degraded" is acceptable — it means non-critical services are slow but the API - # is functional. Only "down" indicates a complete outage. 
- jq -e '.status == "healthy" or .status == "degraded"' /tmp/fe-health.json || { - echo "❌ /api/health status is not healthy or degraded:" - cat /tmp/fe-health.json - exit 1 - } - echo "✅ /api/health via frontend OK" - - - name: Write smoke test summary - if: always() - run: | - HOMEPAGE_CODE="${{ steps.homepage.outputs.http_code || '—' }}" - PROXY_CODE="${{ steps.proxy.outputs.http_code || '—' }}" - HEALTH_CODE="${{ steps.health.outputs.http_code || '—' }}" - cat >> "$GITHUB_STEP_SUMMARY" << EOF - ## 🔍 Frontend Smoke Tests - - | Check | HTTP Status | Result | - |---|---|---| - | Homepage | $HOMEPAGE_CODE | $( [ "$HOMEPAGE_CODE" = "200" ] && echo "✅ OK" || echo "❌ FAIL" ) | - | SSR API proxy (\`/api/auth/providers\`) | $PROXY_CODE | $( [ "$PROXY_CODE" = "200" ] && echo "✅ OK" || echo "❌ FAIL" ) | - | Health via proxy (\`/api/health\`) | $HEALTH_CODE | $( [ "$HEALTH_CODE" = "200" ] && echo "✅ OK" || echo "❌ FAIL" ) | - EOF - - deploy-status: - name: Deployment Status - runs-on: ubuntu-latest - timeout-minutes: 5 - needs: [smoke-test-backend, smoke-test-frontend] - permissions: {} - if: >- - always() && - (needs.smoke-test-backend.result == 'success' || needs.smoke-test-backend.result == 'skipped') && - (needs.smoke-test-frontend.result == 'success' || needs.smoke-test-frontend.result == 'skipped') && - github.event_name == 'push' && - github.ref == 'refs/heads/main' - steps: - - name: Write deployment summary - run: | - BACKEND="${{ needs.smoke-test-backend.result }}" - FRONTEND="${{ needs.smoke-test-frontend.result }}" - backend_icon() { [ "$1" = "success" ] && echo "✅ healthy" || echo "⏭️ skipped"; } - frontend_icon() { [ "$1" = "success" ] && echo "✅ 200 OK" || echo "⏭️ skipped"; } - cat >> "$GITHUB_STEP_SUMMARY" << EOF - ## ✅ Deployment Complete - - | Component | Status | URL | - |---|---|---| - | Backend Worker | $(backend_icon "$BACKEND") | https://adblock-compiler.jayson-knight.workers.dev | - | Frontend Worker | $(frontend_icon "$FRONTEND") | 
https://adblock-frontend.jayson-knight.workers.dev | - EOF + CF_WEB_ANALYTICS_TOKEN: ${{ secrets.CF_WEB_ANALYTICS_TOKEN }} + run: sh scripts/build-worker.sh + + - name: Deploy Frontend Worker + if: env.CLOUDFLARE_API_TOKEN != '' + working-directory: frontend + run: | + echo "Deploying adblock-frontend..." + pnpm run deploy + + smoke-test-backend: + name: Smoke Test — Backend + runs-on: ubuntu-latest + timeout-minutes: 5 + needs: [deploy] + permissions: {} + if: >- + always() && + needs.deploy.result == 'success' && + github.event_name == 'push' && + github.ref == format('refs/heads/{0}', github.event.repository.default_branch) + steps: + - name: Wait for Cloudflare propagation + run: sleep 15 + + - name: Check /api/health + id: health + run: | + set -euo pipefail + HTTP="" + CURL_EXIT=0 + set +e + HTTP=$(curl -sS -o /tmp/health.json -w "%{http_code}" \ + https://adblock-compiler.jayson-knight.workers.dev/api/health) + CURL_EXIT=$? + set -e + if [ "$CURL_EXIT" -ne 0 ]; then + echo "❌ Network error while calling /api/health (curl exit $CURL_EXIT)" + echo "http_code=000" >> "$GITHUB_OUTPUT" + exit 1 + fi + echo "http_code=$HTTP" >> "$GITHUB_OUTPUT" + if [ "$HTTP" != "200" ]; then + echo "❌ /api/health returned HTTP $HTTP" + exit 1 + fi + # "degraded" is acceptable — it means non-critical services (e.g. Hyperdrive) are + # slow but the API is functional. Only "down" indicates a complete outage. + jq -e '.status == "healthy" or .status == "degraded"' /tmp/health.json || { + echo "❌ /api/health status is not healthy or degraded:" + cat /tmp/health.json + exit 1 + } + echo "✅ /api/health OK ($(jq -r .status /tmp/health.json))" + + - name: Check /api/version + id: version + run: | + set -euo pipefail + HTTP="" + CURL_EXIT=0 + set +e + HTTP=$(curl -sS -o /tmp/version.json -w "%{http_code}" \ + https://adblock-compiler.jayson-knight.workers.dev/api/version) + CURL_EXIT=$? 
+ set -e + if [ "$CURL_EXIT" -ne 0 ]; then + echo "❌ Network error while calling /api/version (curl exit $CURL_EXIT)" + echo "http_code=000" >> "$GITHUB_OUTPUT" + exit 1 + fi + echo "http_code=$HTTP" >> "$GITHUB_OUTPUT" + if [ "$HTTP" != "200" ]; then + echo "❌ /api/version returned HTTP $HTTP" + exit 1 + fi + echo "✅ /api/version OK" + + - name: Check /api/auth/providers + id: auth + run: | + set -euo pipefail + HTTP="" + CURL_EXIT=0 + set +e + HTTP=$(curl -sS -o /dev/null -w "%{http_code}" \ + https://adblock-compiler.jayson-knight.workers.dev/api/auth/providers) + CURL_EXIT=$? + set -e + if [ "$CURL_EXIT" -ne 0 ]; then + echo "❌ Network error while calling /api/auth/providers (curl exit $CURL_EXIT)" + echo "http_code=000" >> "$GITHUB_OUTPUT" + exit 1 + fi + echo "http_code=$HTTP" >> "$GITHUB_OUTPUT" + if [ "$HTTP" != "200" ]; then + echo "❌ /api/auth/providers returned HTTP $HTTP" + exit 1 + fi + echo "✅ /api/auth/providers OK" + + - name: Write smoke test summary + if: always() + run: | + HEALTH_CODE="${{ steps.health.outputs.http_code || '—' }}" + VERSION_CODE="${{ steps.version.outputs.http_code || '—' }}" + AUTH_CODE="${{ steps.auth.outputs.http_code || '—' }}" + cat >> "$GITHUB_STEP_SUMMARY" << EOF + ## 🔍 Backend Smoke Tests + + | Endpoint | HTTP Status | Result | + |---|---|---| + | \`/api/health\` | $HEALTH_CODE | $( [ "$HEALTH_CODE" = "200" ] && echo "✅ OK" || echo "❌ FAIL" ) | + | \`/api/version\` | $VERSION_CODE | $( [ "$VERSION_CODE" = "200" ] && echo "✅ OK" || echo "❌ FAIL" ) | + | \`/api/auth/providers\` | $AUTH_CODE | $( [ "$AUTH_CODE" = "200" ] && echo "✅ OK" || echo "❌ FAIL" ) | + EOF + + smoke-test-frontend: + name: Smoke Test — Frontend + runs-on: ubuntu-latest + timeout-minutes: 5 + needs: [deploy-frontend] + permissions: {} + if: >- + always() && + needs.deploy-frontend.result == 'success' && + github.event_name == 'push' && + github.ref == format('refs/heads/{0}', github.event.repository.default_branch) + steps: + - name: Wait for Cloudflare 
propagation + run: sleep 15 + + - name: Check homepage returns 200 + id: homepage + run: | + set -euo pipefail + HTTP="" + CURL_EXIT=0 + set +e + HTTP=$(curl -sS -o /dev/null -w "%{http_code}" \ + https://adblock-frontend.jayson-knight.workers.dev/) + CURL_EXIT=$? + set -e + if [ "$CURL_EXIT" -ne 0 ]; then + echo "❌ Network error while calling homepage (curl exit $CURL_EXIT)" + echo "http_code=000" >> "$GITHUB_OUTPUT" + exit 1 + fi + echo "http_code=$HTTP" >> "$GITHUB_OUTPUT" + if [ "$HTTP" != "200" ]; then + echo "❌ Homepage returned HTTP $HTTP" + exit 1 + fi + echo "✅ Homepage OK" + + - name: Check SSR API proxy (/api/auth/providers) + id: proxy + run: | + set -euo pipefail + HTTP="" + CURL_EXIT=0 + set +e + HTTP=$(curl -sS -o /dev/null -w "%{http_code}" \ + https://adblock-frontend.jayson-knight.workers.dev/api/auth/providers) + CURL_EXIT=$? + set -e + if [ "$CURL_EXIT" -ne 0 ]; then + echo "❌ Network error while calling SSR API proxy (curl exit $CURL_EXIT)" + echo "http_code=000" >> "$GITHUB_OUTPUT" + exit 1 + fi + echo "http_code=$HTTP" >> "$GITHUB_OUTPUT" + if [ "$HTTP" != "200" ]; then + echo "❌ SSR API proxy /api/auth/providers returned HTTP $HTTP" + exit 1 + fi + echo "✅ SSR API proxy OK" + + - name: Check health via SSR proxy + id: health + run: | + set -euo pipefail + HTTP="" + CURL_EXIT=0 + set +e + HTTP=$(curl -sS -o /tmp/fe-health.json -w "%{http_code}" \ + https://adblock-frontend.jayson-knight.workers.dev/api/health) + CURL_EXIT=$? + set -e + if [ "$CURL_EXIT" -ne 0 ]; then + echo "❌ Network error while calling /api/health via frontend (curl exit $CURL_EXIT)" + echo "http_code=000" >> "$GITHUB_OUTPUT" + exit 1 + fi + echo "http_code=$HTTP" >> "$GITHUB_OUTPUT" + if [ "$HTTP" != "200" ]; then + echo "❌ /api/health via frontend returned HTTP $HTTP" + exit 1 + fi + # "degraded" is acceptable — it means non-critical services are slow but the API + # is functional. Only "down" indicates a complete outage. 
+ jq -e '.status == "healthy" or .status == "degraded"' /tmp/fe-health.json || { + echo "❌ /api/health status is not healthy or degraded:" + cat /tmp/fe-health.json + exit 1 + } + echo "✅ /api/health via frontend OK" + + - name: Write smoke test summary + if: always() + run: | + HOMEPAGE_CODE="${{ steps.homepage.outputs.http_code || '—' }}" + PROXY_CODE="${{ steps.proxy.outputs.http_code || '—' }}" + HEALTH_CODE="${{ steps.health.outputs.http_code || '—' }}" + cat >> "$GITHUB_STEP_SUMMARY" << EOF + ## 🔍 Frontend Smoke Tests + + | Check | HTTP Status | Result | + |---|---|---| + | Homepage | $HOMEPAGE_CODE | $( [ "$HOMEPAGE_CODE" = "200" ] && echo "✅ OK" || echo "❌ FAIL" ) | + | SSR API proxy (\`/api/auth/providers\`) | $PROXY_CODE | $( [ "$PROXY_CODE" = "200" ] && echo "✅ OK" || echo "❌ FAIL" ) | + | Health via proxy (\`/api/health\`) | $HEALTH_CODE | $( [ "$HEALTH_CODE" = "200" ] && echo "✅ OK" || echo "❌ FAIL" ) | + EOF + + deploy-status: + name: Deployment Status + runs-on: ubuntu-latest + timeout-minutes: 5 + needs: [smoke-test-backend, smoke-test-frontend] + permissions: {} + if: >- + always() && + (needs.smoke-test-backend.result == 'success' || needs.smoke-test-backend.result == 'skipped') && + (needs.smoke-test-frontend.result == 'success' || needs.smoke-test-frontend.result == 'skipped') && + github.event_name == 'push' && + github.ref == format('refs/heads/{0}', github.event.repository.default_branch) + steps: + - name: Write deployment summary + run: | + BACKEND="${{ needs.smoke-test-backend.result }}" + FRONTEND="${{ needs.smoke-test-frontend.result }}" + backend_icon() { [ "$1" = "success" ] && echo "✅ healthy" || echo "⏭️ skipped"; } + frontend_icon() { [ "$1" = "success" ] && echo "✅ 200 OK" || echo "⏭️ skipped"; } + cat >> "$GITHUB_STEP_SUMMARY" << EOF + ## ✅ Deployment Complete + + | Component | Status | URL | + |---|---|---| + | Backend Worker | $(backend_icon "$BACKEND") | https://adblock-compiler.jayson-knight.workers.dev | + | Frontend Worker | 
$(frontend_icon "$FRONTEND") | https://adblock-frontend.jayson-knight.workers.dev | + EOF diff --git a/.github/workflows/claude.yml b/.github/workflows/claude.yml index 2629b5bb..0db46a2c 100644 --- a/.github/workflows/claude.yml +++ b/.github/workflows/claude.yml @@ -2,11 +2,11 @@ name: Claude Code on: # push trigger with path filter prevents GitHub 'workflow file issue' - # errors when this file is modified in a push to main + # errors when this file is modified in a push to the default branch push: - branches: [main] + branches: [master, main] paths: - - '.github/workflows/claude.yml' + - ".github/workflows/claude.yml" issue_comment: types: [created] pull_request_review_comment: @@ -69,4 +69,3 @@ jobs: # See https://github.com/anthropics/claude-code-action/blob/main/docs/usage.md # or https://code.claude.com/docs/en/cli-reference for available options # claude_args: '--allowed-tools Bash(gh pr:*)' - diff --git a/.github/workflows/db-migrate.yml b/.github/workflows/db-migrate.yml index 9941cc43..9dfc3746 100644 --- a/.github/workflows/db-migrate.yml +++ b/.github/workflows/db-migrate.yml @@ -1,449 +1,452 @@ name: Database Migrations -# Runs on every PR that touches migration files, and on every push to main. +# Runs on every PR that touches migration files, and on every push to the +# repository default branch. # # PR behaviour → validate + dry-run (no side effects); posts a comment with # the list of pending migrations. -# main behaviour → apply migrations to Neon staging branch (DIRECT_DATABASE_URL_STAGING). +# default-branch behaviour → apply migrations to Neon staging branch (DIRECT_DATABASE_URL_STAGING). # release behaviour → apply migrations to Neon production branch (DIRECT_DATABASE_URL) — Option B1 promotion. # # Backend support: -# Cloudflare D1 — always attempted when CLOUDFLARE_API_TOKEN is set (push to main only). -# PostgreSQL — staging: on push to main (DIRECT_DATABASE_URL_STAGING). 
+# Cloudflare D1 — always attempted when CLOUDFLARE_API_TOKEN is set (default-branch pushes only). +# PostgreSQL — staging: on default-branch pushes (DIRECT_DATABASE_URL_STAGING). # production: on GitHub Release published (DIRECT_DATABASE_URL). on: - push: - branches: [main] - paths: - - 'migrations/**' - - 'admin-migrations/**' - - 'prisma/migrations/**' - - 'prisma/schema.prisma' - pull_request: - branches: [main] - paths: - - 'migrations/**' - - 'admin-migrations/**' - - 'prisma/migrations/**' - - 'prisma/schema.prisma' - release: - # Option B1: GitHub Release is the promotion gate — publishing a release - # triggers Prisma migrations against the Neon production branch. - types: [published] - workflow_dispatch: - inputs: - dry_run: - description: 'Perform a dry-run only (validate, no writes)' - type: boolean - default: false - target: - description: 'Migration target (staging | production | both)' - type: choice - options: [staging, production, both] - default: staging + push: + branches: [master, main] + paths: + - "migrations/**" + - "admin-migrations/**" + - "prisma/migrations/**" + - "prisma/schema.prisma" + pull_request: + branches: [master, main] + paths: + - "migrations/**" + - "admin-migrations/**" + - "prisma/migrations/**" + - "prisma/schema.prisma" + release: + # Option B1: GitHub Release is the promotion gate — publishing a release + # triggers Prisma migrations against the Neon production branch. + types: [published] + workflow_dispatch: + inputs: + dry_run: + description: "Perform a dry-run only (validate, no writes)" + type: boolean + default: false + target: + description: "Migration target (staging | production | both)" + type: choice + options: [staging, production, both] + default: staging env: - DENO_VERSION: '2.x' + DENO_VERSION: "2.x" concurrency: - # On main we never want two migration runs overlapping; on PRs we cancel - # the previous run for the same branch. 
- group: db-migrate-${{ github.ref }} - cancel-in-progress: ${{ github.event_name == 'pull_request' }} + # On the default branch we never want two migration runs overlapping; on + # PRs we cancel the previous run for the same branch. + group: db-migrate-${{ github.ref }} + cancel-in-progress: ${{ github.event_name == 'pull_request' }} permissions: - contents: read - pull-requests: write + contents: read + pull-requests: write jobs: - # ─── 1. Validate ───────────────────────────────────────────────────────── - validate: - name: Validate Migrations - runs-on: ubuntu-latest - timeout-minutes: 5 - outputs: - # Space-separated list of changed migration files (relative paths) - changed_d1: ${{ steps.detect.outputs.changed_d1 }} - changed_admin: ${{ steps.detect.outputs.changed_admin }} - changed_prisma: ${{ steps.detect.outputs.changed_prisma }} - has_changes: ${{ steps.detect.outputs.has_changes }} - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - with: - # Full history so we can diff against the base branch on PRs - fetch-depth: 0 - - - name: Setup Deno environment - uses: ./.github/actions/setup-deno-env - with: - deno-version: ${{ env.DENO_VERSION }} - install-deps: 'false' - - - name: Detect changed migration files - id: detect - run: | - if [ "${{ github.event_name }}" = "pull_request" ]; then - BASE="${{ github.event.pull_request.base.sha }}" - else - # On push/dispatch compare against the previous commit - BASE="${{ github.event.before }}" - # workflow_dispatch has no before sha — fall back to HEAD~1 - if [ -z "$BASE" ] || [ "$BASE" = "0000000000000000000000000000000000000000" ]; then - BASE="HEAD~1" - fi - fi - - CHANGED_D1=$(git diff --name-only "$BASE" HEAD -- 'migrations/*.sql' 2>/dev/null | tr '\n' ' ' | sed 's/ *$//') - CHANGED_ADMIN=$(git diff --name-only "$BASE" HEAD -- 'admin-migrations/*.sql' 2>/dev/null | tr '\n' ' ' | sed 's/ *$//') - CHANGED_PRISMA=$(git diff --name-only "$BASE" HEAD -- 'prisma/migrations/**' 
2>/dev/null | tr '\n' ' ' | sed 's/ *$//') - - echo "changed_d1=${CHANGED_D1}" >> "$GITHUB_OUTPUT" - echo "changed_admin=${CHANGED_ADMIN}" >> "$GITHUB_OUTPUT" - echo "changed_prisma=${CHANGED_PRISMA}" >> "$GITHUB_OUTPUT" - - if [ -n "$CHANGED_D1" ] || [ -n "$CHANGED_ADMIN" ] || [ -n "$CHANGED_PRISMA" ]; then - echo "has_changes=true" >> "$GITHUB_OUTPUT" - echo "📂 Changed D1 migrations: ${CHANGED_D1:-none}" - echo "📂 Changed admin migrations: ${CHANGED_ADMIN:-none}" - echo "📂 Changed Prisma migrations: ${CHANGED_PRISMA:-none}" - else - echo "has_changes=false" >> "$GITHUB_OUTPUT" - echo "ℹ️ No migration files changed — validation still runs on all files" - fi - - - name: Validate all migration files - run: deno task migrate:validate - - # ─── 2. Dry-run (PR only) ──────────────────────────────────────────────── - dry-run: - name: Migration Dry-Run - runs-on: ubuntu-latest - timeout-minutes: 10 - needs: [validate] - if: >- - github.event_name == 'pull_request' || - (github.event_name == 'workflow_dispatch' && inputs.dry_run == true) + # ─── 1. 
Validate ───────────────────────────────────────────────────────── + validate: + name: Validate Migrations + runs-on: ubuntu-latest + timeout-minutes: 5 + outputs: + # Space-separated list of changed migration files (relative paths) + changed_d1: ${{ steps.detect.outputs.changed_d1 }} + changed_admin: ${{ steps.detect.outputs.changed_admin }} + changed_prisma: ${{ steps.detect.outputs.changed_prisma }} + has_changes: ${{ steps.detect.outputs.has_changes }} + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + with: + # Full history so we can diff against the base branch on PRs + fetch-depth: 0 + + - name: Setup Deno environment + uses: ./.github/actions/setup-deno-env + with: + deno-version: ${{ env.DENO_VERSION }} + install-deps: "false" + + - name: Detect changed migration files + id: detect + run: | + if [ "${{ github.event_name }}" = "pull_request" ]; then + BASE="${{ github.event.pull_request.base.sha }}" + else + # On push/dispatch compare against the previous commit + BASE="${{ github.event.before }}" + # workflow_dispatch has no before sha — fall back to HEAD~1 + if [ -z "$BASE" ] || [ "$BASE" = "0000000000000000000000000000000000000000" ]; then + BASE="HEAD~1" + fi + fi + + CHANGED_D1=$(git diff --name-only "$BASE" HEAD -- 'migrations/*.sql' 2>/dev/null | tr '\n' ' ' | sed 's/ *$//') + CHANGED_ADMIN=$(git diff --name-only "$BASE" HEAD -- 'admin-migrations/*.sql' 2>/dev/null | tr '\n' ' ' | sed 's/ *$//') + CHANGED_PRISMA=$(git diff --name-only "$BASE" HEAD -- 'prisma/migrations/**' 2>/dev/null | tr '\n' ' ' | sed 's/ *$//') + + echo "changed_d1=${CHANGED_D1}" >> "$GITHUB_OUTPUT" + echo "changed_admin=${CHANGED_ADMIN}" >> "$GITHUB_OUTPUT" + echo "changed_prisma=${CHANGED_PRISMA}" >> "$GITHUB_OUTPUT" + + if [ -n "$CHANGED_D1" ] || [ -n "$CHANGED_ADMIN" ] || [ -n "$CHANGED_PRISMA" ]; then + echo "has_changes=true" >> "$GITHUB_OUTPUT" + echo "📂 Changed D1 migrations: ${CHANGED_D1:-none}" + echo "📂 Changed admin migrations: 
${CHANGED_ADMIN:-none}" + echo "📂 Changed Prisma migrations: ${CHANGED_PRISMA:-none}" + else + echo "has_changes=false" >> "$GITHUB_OUTPUT" + echo "ℹ️ No migration files changed — validation still runs on all files" + fi + + - name: Validate all migration files + run: deno task migrate:validate + + # ─── 2. Dry-run (PR only) ──────────────────────────────────────────────── + dry-run: + name: Migration Dry-Run + runs-on: ubuntu-latest + timeout-minutes: 10 + needs: [validate] + if: >- + github.event_name == 'pull_request' || + (github.event_name == 'workflow_dispatch' && inputs.dry_run == true) + env: + # Expose secret presence as env vars; secrets context is not + # available in step-level if: conditions. + HAS_CLOUDFLARE_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN != '' }} + HAS_DIRECT_DB_URL: ${{ secrets.DIRECT_DATABASE_URL_STAGING != '' || secrets.DIRECT_DATABASE_URL != '' }} + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + + - name: Setup Deno environment + uses: ./.github/actions/setup-deno-env + with: + deno-version: ${{ env.DENO_VERSION }} + + # ── Cloudflare D1 dry-run ──────────────────────────────────────── + - name: D1 — list pending migrations (main DB) + id: d1_main_pending + if: env.HAS_CLOUDFLARE_TOKEN == 'true' env: - # Expose secret presence as env vars; secrets context is not - # available in step-level if: conditions. 
- HAS_CLOUDFLARE_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN != '' }} - HAS_DIRECT_DB_URL: ${{ secrets.DIRECT_DATABASE_URL_STAGING != '' || secrets.DIRECT_DATABASE_URL != '' }} - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - - - name: Setup Deno environment - uses: ./.github/actions/setup-deno-env - with: - deno-version: ${{ env.DENO_VERSION }} - - # ── Cloudflare D1 dry-run ──────────────────────────────────────── - - name: D1 — list pending migrations (main DB) - id: d1_main_pending - if: env.HAS_CLOUDFLARE_TOKEN == 'true' - env: - CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }} - CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} - run: | - echo "=== Pending migrations for adblock-compiler-d1-database ===" - OUTPUT=$(deno task wrangler d1 migrations list adblock-compiler-d1-database --remote 2>&1) || true - echo "$OUTPUT" - # Store multi-line output for PR comment (escape newlines) - { - echo "d1_main_pending<> "$GITHUB_OUTPUT" - - - name: D1 — list pending migrations (admin DB) - id: d1_admin_pending - if: env.HAS_CLOUDFLARE_TOKEN == 'true' - env: - CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }} - CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} - run: | - echo "=== Pending migrations for adblock-compiler-admin-d1 ===" - OUTPUT=$(deno task wrangler d1 migrations list adblock-compiler-admin-d1 --remote 2>&1) || true - echo "$OUTPUT" - { - echo "d1_admin_pending<> "$GITHUB_OUTPUT" - - # ── PostgreSQL dry-run ─────────────────────────────────────────── - - name: PostgreSQL — validate pending migrations - id: pg_pending - if: env.HAS_DIRECT_DB_URL == 'true' - env: - # prisma/prisma.config.ts prefers DIRECT_DATABASE_URL over DATABASE_URL - DIRECT_DATABASE_URL: ${{ secrets.DIRECT_DATABASE_URL_STAGING }} - run: | - echo "=== Pending PostgreSQL migrations ===" - # Use Prisma to report pending migrations without applying them - OUTPUT=$(deno run -A npm:prisma migrate status 2>&1) || true - echo 
"$OUTPUT" - { - echo "pg_pending<> "$GITHUB_OUTPUT" - - # ── Post PR comment ────────────────────────────────────────────── - # Skip for fork PRs: GITHUB_TOKEN is read-only there even with - # pull-requests: write, so comment writes would fail. - - name: Post migration plan as PR comment - if: >- - github.event_name == 'pull_request' && - github.event.pull_request.head.repo.full_name == github.repository - uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v7 - env: - CHANGED_D1: ${{ needs.validate.outputs.changed_d1 }} - CHANGED_ADMIN: ${{ needs.validate.outputs.changed_admin }} - CHANGED_PRISMA: ${{ needs.validate.outputs.changed_prisma }} - D1_MAIN: ${{ steps.d1_main_pending.outputs.d1_main_pending }} - D1_ADMIN: ${{ steps.d1_admin_pending.outputs.d1_admin_pending }} - PG_INFO: ${{ steps.pg_pending.outputs.pg_pending }} - RUN_NUMBER: ${{ github.run_number }} - RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} - with: - github-token: ${{ secrets.GITHUB_TOKEN }} - script: | - const changed_d1 = (process.env.CHANGED_D1 || '').trim(); - const changed_admin = (process.env.CHANGED_ADMIN || '').trim(); - const changed_prisma = (process.env.CHANGED_PRISMA || '').trim(); - - const d1Main = (process.env.D1_MAIN || '').trim(); - const d1Admin = (process.env.D1_ADMIN || '').trim(); - const pgInfo = (process.env.PG_INFO || '').trim(); - - const hasCloudflare = d1Main || d1Admin; - const hasPostgres = pgInfo; - - let body = '## 🗄️ Database Migration Plan\n\n'; - body += '> **This is a dry-run.** No changes have been applied to any database.\n'; - body += '> Migrations will be applied per-backend (sequentially) when this PR is merged to `main`.\n\n'; - - // Changed files section - if (changed_d1 || changed_admin || changed_prisma) { - body += '### 📂 Changed Migration Files\n\n'; - if (changed_d1) { - body += '**Main DB (`migrations/`):**\n'; - changed_d1.split(' ').filter(Boolean).forEach(f => { body += `- 
\`${f}\`\n`; }); - body += '\n'; - } - if (changed_admin) { - body += '**Admin DB (`admin-migrations/`):**\n'; - changed_admin.split(' ').filter(Boolean).forEach(f => { body += `- \`${f}\`\n`; }); - body += '\n'; - } - if (changed_prisma) { - body += '**Prisma (`prisma/migrations/`):**\n'; - changed_prisma.split(' ').filter(Boolean).forEach(f => { body += `- \`${f}\`\n`; }); - body += '\n'; - } - } else { - body += '_No new migration files detected in this PR._\n\n'; - } - - if (hasCloudflare) { - body += '### ☁️ Cloudflare D1\n\n'; - if (d1Main) { - body += '
<details><summary>Main Database (adblock-compiler-d1-database)</summary>\n\n```\n' + d1Main + '\n```\n\n</details>\n\n';
-            }
-            if (d1Admin) {
-              body += '<details><summary>Admin Database (adblock-compiler-admin-d1)</summary>\n\n```\n' + d1Admin + '\n```\n\n</details>\n\n';
-            }
-          } else {
-            body += '### ☁️ Cloudflare D1\n\n_Skipped — `CLOUDFLARE_API_TOKEN` secret not available in this context._\n\n';
-          }
-
-          if (hasPostgres) {
-            body += '### 🐘 PostgreSQL\n\n';
-            body += '<details><summary>Prisma migrate status</summary>\n\n```\n' + pgInfo + '\n```\n\n</details>
\n\n'; - } - - body += '---\n'; - body += `_Validation run: [#${process.env.RUN_NUMBER}](${process.env.RUN_URL})_`; - - // Upsert (create or update) a single sticky comment per PR - const marker = ''; - const fullBody = marker + '\n' + body; - - const { data: comments } = await github.rest.issues.listComments({ - owner: context.repo.owner, - repo: context.repo.repo, - issue_number: context.issue.number, - }); - - const existing = comments.find(c => c.body && c.body.startsWith(marker)); - if (existing) { - await github.rest.issues.updateComment({ - owner: context.repo.owner, - repo: context.repo.repo, - comment_id: existing.id, - body: fullBody, - }); - } else { - await github.rest.issues.createComment({ - owner: context.repo.owner, - repo: context.repo.repo, - issue_number: context.issue.number, - body: fullBody, - }); - } - - # ─── 3. Apply D1 migrations (main push only) ───────────────────────────── - migrate: - name: Apply D1 Migrations - runs-on: ubuntu-latest - timeout-minutes: 20 - needs: [validate] - if: >- - github.event_name == 'push' && github.ref == 'refs/heads/main' + CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }} + CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} + run: | + echo "=== Pending migrations for adblock-compiler-d1-database ===" + OUTPUT=$(deno task wrangler d1 migrations list adblock-compiler-d1-database --remote 2>&1) || true + echo "$OUTPUT" + # Store multi-line output for PR comment (escape newlines) + { + echo "d1_main_pending<> "$GITHUB_OUTPUT" + + - name: D1 — list pending migrations (admin DB) + id: d1_admin_pending + if: env.HAS_CLOUDFLARE_TOKEN == 'true' env: - # Expose secret presence as env vars; secrets context is not - # available in step-level if: conditions. 
- HAS_CLOUDFLARE_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN != '' }} - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - - - name: Setup Deno environment - uses: ./.github/actions/setup-deno-env - with: - deno-version: ${{ env.DENO_VERSION }} - - # ── Cloudflare D1 — main DB ────────────────────────────────────── - - name: D1 — apply migrations (main DB) - id: d1_main - if: env.HAS_CLOUDFLARE_TOKEN == 'true' - env: - CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }} - CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} - run: | - echo "Applying D1 migrations for adblock-compiler-d1-database..." - # Retry up to 3 times to handle transient Cloudflare API errors. - # `wrangler d1 migrations apply` is idempotent (skips already-applied - # migrations) so retrying is safe. - for i in 1 2 3; do - if OUTPUT=$(deno task wrangler d1 migrations apply adblock-compiler-d1-database --remote 2>&1); then - echo "$OUTPUT" - echo "✅ Main DB migrations applied" - echo "result=success" >> "$GITHUB_OUTPUT" - break - fi - echo "$OUTPUT" - # Surface auth errors immediately — no point retrying. - # CF error code 10000 = "Authentication error" (invalid/expired token). - if echo "$OUTPUT" | grep -qiE 'Authentication error|10000|Forbidden'; then - echo "🔐 CLOUDFLARE_API_TOKEN is missing D1:Edit permission." - echo " Update: https://dash.cloudflare.com/profile/api-tokens" - echo "result=auth_error" >> "$GITHUB_OUTPUT" - exit 1 - fi - if [ "$i" -lt 3 ]; then - echo "Attempt $i failed — retrying in 15s..." 
- sleep 15 - else - echo "❌ All 3 attempts failed for main DB migrations" - echo "result=failed" >> "$GITHUB_OUTPUT" - exit 1 - fi - done - - # ── Cloudflare D1 — admin DB ───────────────────────────────────── - - name: D1 — apply migrations (admin DB) - id: d1_admin - if: env.HAS_CLOUDFLARE_TOKEN == 'true' - env: - CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }} - CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} - run: | - echo "Applying D1 migrations for adblock-compiler-admin-d1..." - for i in 1 2 3; do - if OUTPUT=$(deno task wrangler d1 migrations apply adblock-compiler-admin-d1 --remote 2>&1); then - echo "$OUTPUT" - echo "✅ Admin DB migrations applied" - echo "result=success" >> "$GITHUB_OUTPUT" - break - fi - echo "$OUTPUT" - # CF error code 10000 = "Authentication error" (invalid/expired token). - if echo "$OUTPUT" | grep -qiE 'Authentication error|10000|Forbidden'; then - echo "🔐 CLOUDFLARE_API_TOKEN is missing D1:Edit permission." - echo "result=auth_error" >> "$GITHUB_OUTPUT" - exit 1 - fi - if [ "$i" -lt 3 ]; then - echo "Attempt $i failed — retrying in 15s..." - sleep 15 - else - echo "❌ All 3 attempts failed for admin DB migrations" - echo "result=failed" >> "$GITHUB_OUTPUT" - exit 1 - fi - done - - # ── Summary ────────────────────────────────────────────────────── - - name: D1 migration summary - if: always() - run: | - echo "=== D1 Migration Summary ===" - D1_MAIN="${{ steps.d1_main.outputs.result }}" - D1_ADMIN="${{ steps.d1_admin.outputs.result }}" - - echo "D1 main DB : ${D1_MAIN:-skipped}" - echo "D1 admin DB : ${D1_ADMIN:-skipped}" - - if [ "${D1_MAIN}" = "failed" ] || [ "${D1_ADMIN}" = "failed" ]; then - echo "❌ One or more D1 migrations failed" - exit 1 - fi - echo "✅ D1 migrations completed successfully" - - # ─── 4. 
Apply PostgreSQL migrations → Staging ──────────────────────────── - pg_migrate_staging: - name: Apply Prisma Migrations → Staging - runs-on: ubuntu-latest - timeout-minutes: 15 - needs: [validate] - if: >- - (github.event_name == 'push' && github.ref == 'refs/heads/main') || - (github.event_name == 'workflow_dispatch' && (inputs.target == 'staging' || inputs.target == 'both')) + CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }} + CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} + run: | + echo "=== Pending migrations for adblock-compiler-admin-d1 ===" + OUTPUT=$(deno task wrangler d1 migrations list adblock-compiler-admin-d1 --remote 2>&1) || true + echo "$OUTPUT" + { + echo "d1_admin_pending<> "$GITHUB_OUTPUT" + + # ── PostgreSQL dry-run ─────────────────────────────────────────── + - name: PostgreSQL — validate pending migrations + id: pg_pending + if: env.HAS_DIRECT_DB_URL == 'true' env: - DIRECT_DATABASE_URL: ${{ secrets.DIRECT_DATABASE_URL_STAGING }} - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - - name: Setup Deno environment - uses: ./.github/actions/setup-deno-env - with: - deno-version: ${{ env.DENO_VERSION }} - - name: Apply Prisma migrations to staging - if: env.DIRECT_DATABASE_URL != '' - run: deno run -A npm:prisma migrate deploy - - # ─── 5. 
Apply PostgreSQL migrations → Production ───────────────────────── - pg_migrate_production: - name: Apply Prisma Migrations → Production - runs-on: ubuntu-latest - timeout-minutes: 15 - needs: [validate] + # prisma/prisma.config.ts prefers DIRECT_DATABASE_URL over DATABASE_URL + DIRECT_DATABASE_URL: ${{ secrets.DIRECT_DATABASE_URL_STAGING }} + run: | + echo "=== Pending PostgreSQL migrations ===" + # Use Prisma to report pending migrations without applying them + OUTPUT=$(deno run -A npm:prisma migrate status 2>&1) || true + echo "$OUTPUT" + { + echo "pg_pending<> "$GITHUB_OUTPUT" + + # ── Post PR comment ────────────────────────────────────────────── + # Skip for fork PRs: GITHUB_TOKEN is read-only there even with + # pull-requests: write, so comment writes would fail. + - name: Post migration plan as PR comment if: >- - github.event_name == 'release' || - (github.event_name == 'workflow_dispatch' && (inputs.target == 'production' || inputs.target == 'both')) + github.event_name == 'pull_request' && + github.event.pull_request.head.repo.full_name == github.repository + uses: actions/github-script@3a2844b7e9c422d3c10d287c895573f7108da1b3 # v7 + env: + CHANGED_D1: ${{ needs.validate.outputs.changed_d1 }} + CHANGED_ADMIN: ${{ needs.validate.outputs.changed_admin }} + CHANGED_PRISMA: ${{ needs.validate.outputs.changed_prisma }} + D1_MAIN: ${{ steps.d1_main_pending.outputs.d1_main_pending }} + D1_ADMIN: ${{ steps.d1_admin_pending.outputs.d1_admin_pending }} + PG_INFO: ${{ steps.pg_pending.outputs.pg_pending }} + RUN_NUMBER: ${{ github.run_number }} + RUN_URL: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }} + with: + github-token: ${{ secrets.GITHUB_TOKEN }} + script: | + const changed_d1 = (process.env.CHANGED_D1 || '').trim(); + const changed_admin = (process.env.CHANGED_ADMIN || '').trim(); + const changed_prisma = (process.env.CHANGED_PRISMA || '').trim(); + + const d1Main = (process.env.D1_MAIN || '').trim(); + const d1Admin = 
(process.env.D1_ADMIN || '').trim(); + const pgInfo = (process.env.PG_INFO || '').trim(); + + const defaultBranch = context.payload.repository?.default_branch || 'default branch'; + + const hasCloudflare = d1Main || d1Admin; + const hasPostgres = pgInfo; + + let body = '## 🗄️ Database Migration Plan\n\n'; + body += '> **This is a dry-run.** No changes have been applied to any database.\n'; + body += `> Migrations will be applied per-backend (sequentially) when this PR is merged to \`${defaultBranch}\`.\n\n`; + + // Changed files section + if (changed_d1 || changed_admin || changed_prisma) { + body += '### 📂 Changed Migration Files\n\n'; + if (changed_d1) { + body += '**Main DB (`migrations/`):**\n'; + changed_d1.split(' ').filter(Boolean).forEach(f => { body += `- \`${f}\`\n`; }); + body += '\n'; + } + if (changed_admin) { + body += '**Admin DB (`admin-migrations/`):**\n'; + changed_admin.split(' ').filter(Boolean).forEach(f => { body += `- \`${f}\`\n`; }); + body += '\n'; + } + if (changed_prisma) { + body += '**Prisma (`prisma/migrations/`):**\n'; + changed_prisma.split(' ').filter(Boolean).forEach(f => { body += `- \`${f}\`\n`; }); + body += '\n'; + } + } else { + body += '_No new migration files detected in this PR._\n\n'; + } + + if (hasCloudflare) { + body += '### ☁️ Cloudflare D1\n\n'; + if (d1Main) { + body += '
Main Database (adblock-compiler-d1-database)\n\n```\n' + d1Main + '\n```\n\n
\n\n'; + } + if (d1Admin) { + body += '
Admin Database (adblock-compiler-admin-d1)\n\n```\n' + d1Admin + '\n```\n\n
\n\n'; + } + } else { + body += '### ☁️ Cloudflare D1\n\n_Skipped — `CLOUDFLARE_API_TOKEN` secret not available in this context._\n\n'; + } + + if (hasPostgres) { + body += '### 🐘 PostgreSQL\n\n'; + body += '
Prisma migrate status\n\n```\n' + pgInfo + '\n```\n\n
\n\n'; + } + + body += '---\n'; + body += `_Validation run: [#${process.env.RUN_NUMBER}](${process.env.RUN_URL})_`; + + // Upsert (create or update) a single sticky comment per PR + const marker = ''; + const fullBody = marker + '\n' + body; + + const { data: comments } = await github.rest.issues.listComments({ + owner: context.repo.owner, + repo: context.repo.repo, + issue_number: context.issue.number, + }); + + const existing = comments.find(c => c.body && c.body.startsWith(marker)); + if (existing) { + await github.rest.issues.updateComment({ + owner: context.repo.owner, + repo: context.repo.repo, + comment_id: existing.id, + body: fullBody, + }); + } else { + await github.rest.issues.createComment({ + owner: context.repo.owner, + repo: context.repo.repo, + issue_number: context.issue.number, + body: fullBody, + }); + } + + # ─── 3. Apply D1 migrations (default-branch push only) ────────────────── + migrate: + name: Apply D1 Migrations + runs-on: ubuntu-latest + timeout-minutes: 20 + needs: [validate] + if: >- + github.event_name == 'push' && github.ref == format('refs/heads/{0}', github.event.repository.default_branch) + env: + # Expose secret presence as env vars; secrets context is not + # available in step-level if: conditions. + HAS_CLOUDFLARE_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN != '' }} + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + + - name: Setup Deno environment + uses: ./.github/actions/setup-deno-env + with: + deno-version: ${{ env.DENO_VERSION }} + + # ── Cloudflare D1 — main DB ────────────────────────────────────── + - name: D1 — apply migrations (main DB) + id: d1_main + if: env.HAS_CLOUDFLARE_TOKEN == 'true' + env: + CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }} + CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} + run: | + echo "Applying D1 migrations for adblock-compiler-d1-database..." + # Retry up to 3 times to handle transient Cloudflare API errors. 
+ # `wrangler d1 migrations apply` is idempotent (skips already-applied + # migrations) so retrying is safe. + for i in 1 2 3; do + if OUTPUT=$(deno task wrangler d1 migrations apply adblock-compiler-d1-database --remote 2>&1); then + echo "$OUTPUT" + echo "✅ Main DB migrations applied" + echo "result=success" >> "$GITHUB_OUTPUT" + break + fi + echo "$OUTPUT" + # Surface auth errors immediately — no point retrying. + # CF error code 10000 = "Authentication error" (invalid/expired token). + if echo "$OUTPUT" | grep -qiE 'Authentication error|10000|Forbidden'; then + echo "🔐 CLOUDFLARE_API_TOKEN is missing D1:Edit permission." + echo " Update: https://dash.cloudflare.com/profile/api-tokens" + echo "result=auth_error" >> "$GITHUB_OUTPUT" + exit 1 + fi + if [ "$i" -lt 3 ]; then + echo "Attempt $i failed — retrying in 15s..." + sleep 15 + else + echo "❌ All 3 attempts failed for main DB migrations" + echo "result=failed" >> "$GITHUB_OUTPUT" + exit 1 + fi + done + + # ── Cloudflare D1 — admin DB ───────────────────────────────────── + - name: D1 — apply migrations (admin DB) + id: d1_admin + if: env.HAS_CLOUDFLARE_TOKEN == 'true' env: - DIRECT_DATABASE_URL: ${{ secrets.DIRECT_DATABASE_URL }} - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - - name: Setup Deno environment - uses: ./.github/actions/setup-deno-env - with: - deno-version: ${{ env.DENO_VERSION }} - - name: Ensure DIRECT_DATABASE_URL is set for production migrations - run: | - if [ -z "${{ env.DIRECT_DATABASE_URL }}" ]; then - echo "ERROR: DIRECT_DATABASE_URL secret is not configured; cannot run production Prisma migrations." - exit 1 - fi - - name: Apply Prisma migrations to production - run: deno run -A npm:prisma migrate deploy + CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }} + CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }} + run: | + echo "Applying D1 migrations for adblock-compiler-admin-d1..." 
+ for i in 1 2 3; do + if OUTPUT=$(deno task wrangler d1 migrations apply adblock-compiler-admin-d1 --remote 2>&1); then + echo "$OUTPUT" + echo "✅ Admin DB migrations applied" + echo "result=success" >> "$GITHUB_OUTPUT" + break + fi + echo "$OUTPUT" + # CF error code 10000 = "Authentication error" (invalid/expired token). + if echo "$OUTPUT" | grep -qiE 'Authentication error|10000|Forbidden'; then + echo "🔐 CLOUDFLARE_API_TOKEN is missing D1:Edit permission." + echo "result=auth_error" >> "$GITHUB_OUTPUT" + exit 1 + fi + if [ "$i" -lt 3 ]; then + echo "Attempt $i failed — retrying in 15s..." + sleep 15 + else + echo "❌ All 3 attempts failed for admin DB migrations" + echo "result=failed" >> "$GITHUB_OUTPUT" + exit 1 + fi + done + + # ── Summary ────────────────────────────────────────────────────── + - name: D1 migration summary + if: always() + run: | + echo "=== D1 Migration Summary ===" + D1_MAIN="${{ steps.d1_main.outputs.result }}" + D1_ADMIN="${{ steps.d1_admin.outputs.result }}" + + echo "D1 main DB : ${D1_MAIN:-skipped}" + echo "D1 admin DB : ${D1_ADMIN:-skipped}" + + if [ "${D1_MAIN}" = "failed" ] || [ "${D1_ADMIN}" = "failed" ]; then + echo "❌ One or more D1 migrations failed" + exit 1 + fi + echo "✅ D1 migrations completed successfully" + + # ─── 4. 
Apply PostgreSQL migrations → Staging ──────────────────────────── + pg_migrate_staging: + name: Apply Prisma Migrations → Staging + runs-on: ubuntu-latest + timeout-minutes: 15 + needs: [validate] + if: >- + (github.event_name == 'push' && github.ref == format('refs/heads/{0}', github.event.repository.default_branch)) || + (github.event_name == 'workflow_dispatch' && (inputs.target == 'staging' || inputs.target == 'both')) + env: + DIRECT_DATABASE_URL: ${{ secrets.DIRECT_DATABASE_URL_STAGING }} + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + - name: Setup Deno environment + uses: ./.github/actions/setup-deno-env + with: + deno-version: ${{ env.DENO_VERSION }} + - name: Apply Prisma migrations to staging + if: env.DIRECT_DATABASE_URL != '' + run: deno run -A npm:prisma migrate deploy + + # ─── 5. Apply PostgreSQL migrations → Production ───────────────────────── + pg_migrate_production: + name: Apply Prisma Migrations → Production + runs-on: ubuntu-latest + timeout-minutes: 15 + needs: [validate] + if: >- + github.event_name == 'release' || + (github.event_name == 'workflow_dispatch' && (inputs.target == 'production' || inputs.target == 'both')) + env: + DIRECT_DATABASE_URL: ${{ secrets.DIRECT_DATABASE_URL }} + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + - name: Setup Deno environment + uses: ./.github/actions/setup-deno-env + with: + deno-version: ${{ env.DENO_VERSION }} + - name: Ensure DIRECT_DATABASE_URL is set for production migrations + run: | + if [ -z "${{ env.DIRECT_DATABASE_URL }}" ]; then + echo "ERROR: DIRECT_DATABASE_URL secret is not configured; cannot run production Prisma migrations." 
+ exit 1 + fi + - name: Apply Prisma migrations to production + run: deno run -A npm:prisma migrate deploy diff --git a/.github/workflows/frontend-version-bump.yml b/.github/workflows/frontend-version-bump.yml index 6492317e..30cedd34 100644 --- a/.github/workflows/frontend-version-bump.yml +++ b/.github/workflows/frontend-version-bump.yml @@ -1,282 +1,283 @@ name: Frontend Version Bump on: - push: - branches: [main] - paths: - - 'frontend/**' - - 'pnpm-lock.yaml' - workflow_dispatch: - inputs: - bump_type: - description: 'Version bump type (leave empty for auto-detect from commits)' - required: false - type: choice - options: - - '' - - patch - - minor - - major + push: + branches: [master, main] + paths: + - "frontend/**" + - "pnpm-lock.yaml" + workflow_dispatch: + inputs: + bump_type: + description: "Version bump type (leave empty for auto-detect from commits)" + required: false + type: choice + options: + - "" + - patch + - minor + - major permissions: - contents: write - pull-requests: write + contents: write + pull-requests: write jobs: - frontend-version-bump: - name: Frontend Version Bump - runs-on: ubuntu-latest - # Skip if commit was made by github-actions bot (prevent loops) - # or if commit message contains [skip ci] or [skip version] - if: | - github.event_name == 'workflow_dispatch' || ( - github.event.head_commit && - !contains(github.event.head_commit.message, '[skip ci]') && - !contains(github.event.head_commit.message, '[skip version]') && - github.event.head_commit.author.name != 'github-actions[bot]' - ) - outputs: - old_version: ${{ steps.bump.outputs.old }} - new_version: ${{ steps.bump.outputs.new }} - tag: ${{ steps.bump.outputs.tag }} - steps: - - name: Checkout - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - with: - fetch-depth: 0 - token: ${{ secrets.GITHUB_TOKEN }} - - - name: Setup pnpm - uses: pnpm/action-setup@08c4be7e2e672a47d11bd04269e27e5f3e8529cb # v4 - - - name: Setup Node.js - uses: 
actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6.3.0 - with: - node-version: '22' - - - name: Determine version bump type - id: determine_bump - env: - INPUT_BUMP_TYPE: ${{ inputs.bump_type }} - run: | - set -euo pipefail - - # If bump_type input is provided, use it - if [ -n "$INPUT_BUMP_TYPE" ]; then - BUMP_TYPE="$INPUT_BUMP_TYPE" - echo "bump_type=$BUMP_TYPE" >> "$GITHUB_OUTPUT" - echo "Using manual bump type: $BUMP_TYPE" - exit 0 - fi - - # Find the last frontend-v* tag as the anchor point - LAST_FRONTEND_TAG=$(git describe --tags --match "frontend-v*" --abbrev=0 2>/dev/null || git rev-list --max-parents=0 HEAD) - echo "Analyzing commits since: $LAST_FRONTEND_TAG" - - # Get commit messages and bodies for commits touching frontend/** - COMMITS=$(git log "${LAST_FRONTEND_TAG}..HEAD" --format="%s%n%b" -- frontend/ pnpm-lock.yaml || echo "") - - if [ -z "$COMMITS" ]; then - echo "No frontend commits to analyze, skipping version bump" - echo "bump_type=none" >> "$GITHUB_OUTPUT" - exit 0 - fi - - BUMP_TYPE="none" - - while IFS= read -r line; do - [ -z "$line" ] && continue - - # Breaking change → major - if echo "$line" | grep -qiE "^(feat|fix|perf|refactor)(\(.+\))?!:|BREAKING CHANGE:"; then - BUMP_TYPE="major" - echo "Found breaking change: $line" - break - fi - - # Feature → minor - if echo "$line" | grep -qiE "^feat(\(.+\))?:"; then - if [ "$BUMP_TYPE" != "major" ]; then - BUMP_TYPE="minor" - echo "Found feature: $line" - fi - fi - - # Fix or perf → patch - if echo "$line" | grep -qiE "^(fix|perf)(\(.+\))?:"; then - if [ "$BUMP_TYPE" != "major" ] && [ "$BUMP_TYPE" != "minor" ]; then - BUMP_TYPE="patch" - echo "Found fix/perf: $line" - fi - fi - done <<< "$COMMITS" - - echo "Determined bump type: $BUMP_TYPE" - echo "bump_type=$BUMP_TYPE" >> "$GITHUB_OUTPUT" - - - name: Bump frontend version - id: bump - if: steps.determine_bump.outputs.bump_type != 'none' - env: - BUMP_TYPE: ${{ steps.determine_bump.outputs.bump_type }} - run: | - set -euo pipefail 
- - PKG="frontend/package.json" - - OLD=$(PKG_FILE="$PKG" node -e "process.stdout.write(require('./' + process.env.PKG_FILE).version)") - echo "old=$OLD" >> "$GITHUB_OUTPUT" - - IFS='.' read -r major minor patch <<< "$OLD" - - case "$BUMP_TYPE" in - major) NEW="$((major + 1)).0.0" ;; - minor) NEW="$major.$((minor + 1)).0" ;; - patch) NEW="$major.$minor.$((patch + 1))" ;; - *) - echo "Error: Unknown bump type '$BUMP_TYPE'" >&2 - exit 1 - ;; - esac - - echo "new=$NEW" >> "$GITHUB_OUTPUT" - echo "tag=frontend-v${NEW}" >> "$GITHUB_OUTPUT" - - # Short-circuit if the target version is already tagged. - # This prevents re-bumping when a previous run created the tag but the - # PR wasn't merged yet (so the anchor falls back past the tag). - # All local tags are available because the Checkout step uses fetch-depth: 0. - LATEST_FRONTEND_TAG="$(git tag --list 'frontend-v*' --sort=-v:refname | head -n 1)" - if [ -n "$LATEST_FRONTEND_TAG" ]; then - LATEST_FRONTEND_VERSION="${LATEST_FRONTEND_TAG#frontend-v}" - echo "Latest frontend tag version: $LATEST_FRONTEND_VERSION" - # Equality check only: if the computed version is already tagged we skip — - # we are not blocking bumps to versions less than the latest existing tag. 
- if [ "$LATEST_FRONTEND_VERSION" = "$NEW" ]; then - echo "Target version $NEW is already tagged ($LATEST_FRONTEND_TAG) — nothing to do" - exit 0 - fi - fi - echo "Bumping frontend version: $OLD → $NEW ($BUMP_TYPE)" - - # Update version in frontend/package.json - PKG_PATH="frontend/package.json" - NEW_VERSION="$NEW" PKG_FILE="$PKG_PATH" node -e " - const fs = require('fs'); - const pkg = JSON.parse(fs.readFileSync(process.env.PKG_FILE, 'utf8')); - pkg.version = process.env.NEW_VERSION; - fs.writeFileSync(process.env.PKG_FILE, JSON.stringify(pkg, null, 4) + '\n'); - " - echo "Updated ${PKG_PATH} to version ${NEW}" - - - name: Commit, tag, and push - id: commit - if: steps.determine_bump.outputs.bump_type != 'none' - env: - GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} - NEW_VERSION: ${{ steps.bump.outputs.new }} - TAG_NAME: ${{ steps.bump.outputs.tag }} - run: | - set -euo pipefail - - git config user.name "github-actions[bot]" - git config user.email "github-actions[bot]@users.noreply.github.com" - - if git diff --quiet; then - echo "No changes detected, skipping commit" - exit 0 - fi - - BRANCH_NAME="chore/frontend-version-bump-${NEW_VERSION}-${GITHUB_RUN_ID}" - - git checkout -b "$BRANCH_NAME" - git add frontend/package.json - git commit -m "chore(frontend): bump version to $NEW_VERSION [skip version]" - - # Check whether the tag already exists on the remote. - # Use --exit-code so network/auth errors (exit != 2) are distinguishable - # from "tag not found" (exit 2) and are treated as real failures. - # Set the default output before any early exit paths so downstream - # steps always see a value. - TAG_CREATED=false - echo "tag_created=false" >> "$GITHUB_OUTPUT" - LS_REMOTE_EXIT=0 - git ls-remote --exit-code --tags origin "refs/tags/$TAG_NAME" >/dev/null || LS_REMOTE_EXIT=$? 
- if [ "$LS_REMOTE_EXIT" -eq 0 ]; then - echo "Tag $TAG_NAME already exists on remote, skipping tag creation" - elif [ "$LS_REMOTE_EXIT" -eq 2 ]; then - git tag -a "$TAG_NAME" -m "Frontend release $NEW_VERSION" - TAG_CREATED=true - echo "tag_created=true" >> "$GITHUB_OUTPUT" - else - echo "git ls-remote failed with status $LS_REMOTE_EXIT, aborting" >&2 - exit "$LS_REMOTE_EXIT" - fi - - # Always push the bump branch and create/merge the PR — even if the tag - # already existed — so the version bump commit reaches main in cases where - # a previous run pushed the tag but the PR was lost before merging. - git push origin "$BRANCH_NAME" - if [ "$TAG_CREATED" = "true" ]; then - git push origin "$TAG_NAME" - fi - - PR_NUMBER=$(gh pr create \ - --base main \ - --head "$BRANCH_NAME" \ - --title "chore(frontend): bump version to $NEW_VERSION" \ - --body "Automated frontend version bump to \`$NEW_VERSION\` ([skip version])." \ - --label "automated" \ - --json number \ - --jq '.number') - - # Use --merge (not --squash) so the tagged commit stays in main history - # and git describe --match "frontend-v*" can find the tag from main. - # --delete-branch cleans up the short-lived bump branch automatically. - gh pr merge "$PR_NUMBER" --auto --merge --delete-branch - - if [ "$TAG_CREATED" = "true" ]; then - echo "Committed, tagged $TAG_NAME, and opened PR from $BRANCH_NAME into main" - else - echo "Committed (tag $TAG_NAME already existed on remote), opened PR from $BRANCH_NAME into main" - fi - - - name: Summary - if: steps.determine_bump.outputs.bump_type != 'none' - env: - OLD_VERSION: ${{ steps.bump.outputs.old }} - NEW_VERSION: ${{ steps.bump.outputs.new }} - TAG: ${{ steps.bump.outputs.tag }} - BUMP_TYPE: ${{ steps.determine_bump.outputs.bump_type }} - TAG_CREATED: ${{ steps.commit.outputs.tag_created }} - run: | - if [ "$TAG_CREATED" = "true" ]; then - TAG_LINE="The \`$TAG\` tag has been created." 
- else - TAG_LINE="The \`$TAG\` tag already exists on the remote (skipped creation)." - fi - cat <> $GITHUB_STEP_SUMMARY - ## 🏷️ Frontend Version Bump - - - **Old Version**: $OLD_VERSION - - **New Version**: $NEW_VERSION - - **Tag**: $TAG - - **Bump Type**: $BUMP_TYPE - - $TAG_LINE - EOF - - - name: Skip summary - if: steps.determine_bump.outputs.bump_type == 'none' - run: | - cat <> $GITHUB_STEP_SUMMARY - ## ℹ️ No Frontend Version Bump Required - - No frontend commits requiring a version bump were found since the last \`frontend-v*\` tag. - - Commits that trigger a frontend version bump: - - \`feat:\` / \`feat(frontend):\` → minor bump - - \`fix:\` / \`fix(frontend):\` → patch bump - - \`perf:\` / \`perf(frontend):\` → patch bump - - \`BREAKING CHANGE:\` or \`feat!:\` → major bump - EOF + frontend-version-bump: + name: Frontend Version Bump + runs-on: ubuntu-latest + # Skip if commit was made by github-actions bot (prevent loops) + # or if commit message contains [skip ci] or [skip version] + if: | + github.event_name == 'workflow_dispatch' || ( + github.event.head_commit && + !contains(github.event.head_commit.message, '[skip ci]') && + !contains(github.event.head_commit.message, '[skip version]') && + github.event.head_commit.author.name != 'github-actions[bot]' + ) + outputs: + old_version: ${{ steps.bump.outputs.old }} + new_version: ${{ steps.bump.outputs.new }} + tag: ${{ steps.bump.outputs.tag }} + steps: + - name: Checkout + uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + with: + fetch-depth: 0 + token: ${{ secrets.GITHUB_TOKEN }} + + - name: Setup pnpm + uses: pnpm/action-setup@08c4be7e2e672a47d11bd04269e27e5f3e8529cb # v4 + + - name: Setup Node.js + uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6.3.0 + with: + node-version: "22" + + - name: Determine version bump type + id: determine_bump + env: + INPUT_BUMP_TYPE: ${{ inputs.bump_type }} + run: | + set -euo pipefail + + # If bump_type input is provided, 
use it + if [ -n "$INPUT_BUMP_TYPE" ]; then + BUMP_TYPE="$INPUT_BUMP_TYPE" + echo "bump_type=$BUMP_TYPE" >> "$GITHUB_OUTPUT" + echo "Using manual bump type: $BUMP_TYPE" + exit 0 + fi + + # Find the last frontend-v* tag as the anchor point + LAST_FRONTEND_TAG=$(git describe --tags --match "frontend-v*" --abbrev=0 2>/dev/null || git rev-list --max-parents=0 HEAD) + echo "Analyzing commits since: $LAST_FRONTEND_TAG" + + # Get commit messages and bodies for commits touching frontend/** + COMMITS=$(git log "${LAST_FRONTEND_TAG}..HEAD" --format="%s%n%b" -- frontend/ pnpm-lock.yaml || echo "") + + if [ -z "$COMMITS" ]; then + echo "No frontend commits to analyze, skipping version bump" + echo "bump_type=none" >> "$GITHUB_OUTPUT" + exit 0 + fi + + BUMP_TYPE="none" + + while IFS= read -r line; do + [ -z "$line" ] && continue + + # Breaking change → major + if echo "$line" | grep -qiE "^(feat|fix|perf|refactor)(\(.+\))?!:|BREAKING CHANGE:"; then + BUMP_TYPE="major" + echo "Found breaking change: $line" + break + fi + + # Feature → minor + if echo "$line" | grep -qiE "^feat(\(.+\))?:"; then + if [ "$BUMP_TYPE" != "major" ]; then + BUMP_TYPE="minor" + echo "Found feature: $line" + fi + fi + + # Fix or perf → patch + if echo "$line" | grep -qiE "^(fix|perf)(\(.+\))?:"; then + if [ "$BUMP_TYPE" != "major" ] && [ "$BUMP_TYPE" != "minor" ]; then + BUMP_TYPE="patch" + echo "Found fix/perf: $line" + fi + fi + done <<< "$COMMITS" + + echo "Determined bump type: $BUMP_TYPE" + echo "bump_type=$BUMP_TYPE" >> "$GITHUB_OUTPUT" + + - name: Bump frontend version + id: bump + if: steps.determine_bump.outputs.bump_type != 'none' + env: + BUMP_TYPE: ${{ steps.determine_bump.outputs.bump_type }} + run: | + set -euo pipefail + + PKG="frontend/package.json" + + OLD=$(PKG_FILE="$PKG" node -e "process.stdout.write(require('./' + process.env.PKG_FILE).version)") + echo "old=$OLD" >> "$GITHUB_OUTPUT" + + IFS='.' 
read -r major minor patch <<< "$OLD" + + case "$BUMP_TYPE" in + major) NEW="$((major + 1)).0.0" ;; + minor) NEW="$major.$((minor + 1)).0" ;; + patch) NEW="$major.$minor.$((patch + 1))" ;; + *) + echo "Error: Unknown bump type '$BUMP_TYPE'" >&2 + exit 1 + ;; + esac + + echo "new=$NEW" >> "$GITHUB_OUTPUT" + echo "tag=frontend-v${NEW}" >> "$GITHUB_OUTPUT" + + # Short-circuit if the target version is already tagged. + # This prevents re-bumping when a previous run created the tag but the + # PR wasn't merged yet (so the anchor falls back past the tag). + # All local tags are available because the Checkout step uses fetch-depth: 0. + LATEST_FRONTEND_TAG="$(git tag --list 'frontend-v*' --sort=-v:refname | head -n 1)" + if [ -n "$LATEST_FRONTEND_TAG" ]; then + LATEST_FRONTEND_VERSION="${LATEST_FRONTEND_TAG#frontend-v}" + echo "Latest frontend tag version: $LATEST_FRONTEND_VERSION" + # Equality check only: if the computed version is already tagged we skip — + # we are not blocking bumps to versions less than the latest existing tag. 
+ if [ "$LATEST_FRONTEND_VERSION" = "$NEW" ]; then + echo "Target version $NEW is already tagged ($LATEST_FRONTEND_TAG) — nothing to do" + exit 0 + fi + fi + echo "Bumping frontend version: $OLD → $NEW ($BUMP_TYPE)" + + # Update version in frontend/package.json + PKG_PATH="frontend/package.json" + NEW_VERSION="$NEW" PKG_FILE="$PKG_PATH" node -e " + const fs = require('fs'); + const pkg = JSON.parse(fs.readFileSync(process.env.PKG_FILE, 'utf8')); + pkg.version = process.env.NEW_VERSION; + fs.writeFileSync(process.env.PKG_FILE, JSON.stringify(pkg, null, 4) + '\n'); + " + echo "Updated ${PKG_PATH} to version ${NEW}" + + - name: Commit, tag, and push + id: commit + if: steps.determine_bump.outputs.bump_type != 'none' + env: + GH_TOKEN: ${{ secrets.GITHUB_TOKEN }} + NEW_VERSION: ${{ steps.bump.outputs.new }} + TAG_NAME: ${{ steps.bump.outputs.tag }} + DEFAULT_BRANCH: ${{ github.event.repository.default_branch }} + run: | + set -euo pipefail + + git config user.name "github-actions[bot]" + git config user.email "github-actions[bot]@users.noreply.github.com" + + if git diff --quiet; then + echo "No changes detected, skipping commit" + exit 0 + fi + + BRANCH_NAME="chore/frontend-version-bump-${NEW_VERSION}-${GITHUB_RUN_ID}" + + git checkout -b "$BRANCH_NAME" + git add frontend/package.json + git commit -m "chore(frontend): bump version to $NEW_VERSION [skip version]" + + # Check whether the tag already exists on the remote. + # Use --exit-code so network/auth errors (exit != 2) are distinguishable + # from "tag not found" (exit 2) and are treated as real failures. + # Set the default output before any early exit paths so downstream + # steps always see a value. + TAG_CREATED=false + echo "tag_created=false" >> "$GITHUB_OUTPUT" + LS_REMOTE_EXIT=0 + git ls-remote --exit-code --tags origin "refs/tags/$TAG_NAME" >/dev/null || LS_REMOTE_EXIT=$? 
+ if [ "$LS_REMOTE_EXIT" -eq 0 ]; then + echo "Tag $TAG_NAME already exists on remote, skipping tag creation" + elif [ "$LS_REMOTE_EXIT" -eq 2 ]; then + git tag -a "$TAG_NAME" -m "Frontend release $NEW_VERSION" + TAG_CREATED=true + echo "tag_created=true" >> "$GITHUB_OUTPUT" + else + echo "git ls-remote failed with status $LS_REMOTE_EXIT, aborting" >&2 + exit "$LS_REMOTE_EXIT" + fi + + # Always push the bump branch and create/merge the PR — even if the tag + # already existed — so the version bump commit reaches the default branch + # in cases where a previous run pushed the tag but the PR was lost before merging. + git push origin "$BRANCH_NAME" + if [ "$TAG_CREATED" = "true" ]; then + git push origin "$TAG_NAME" + fi + + PR_NUMBER=$(gh pr create \ + --base "$DEFAULT_BRANCH" \ + --head "$BRANCH_NAME" \ + --title "chore(frontend): bump version to $NEW_VERSION" \ + --body "Automated frontend version bump to \`$NEW_VERSION\` ([skip version])." \ + --label "automated" \ + --json number \ + --jq '.number') + + # Use --merge (not --squash) so the tagged commit stays in the default branch history + # and git describe --match "frontend-v*" can find the tag from the default branch. + # --delete-branch cleans up the short-lived bump branch automatically. + gh pr merge "$PR_NUMBER" --auto --merge --delete-branch + + if [ "$TAG_CREATED" = "true" ]; then + echo "Committed, tagged $TAG_NAME, and opened PR from $BRANCH_NAME into $DEFAULT_BRANCH" + else + echo "Committed (tag $TAG_NAME already existed on remote), opened PR from $BRANCH_NAME into $DEFAULT_BRANCH" + fi + + - name: Summary + if: steps.determine_bump.outputs.bump_type != 'none' + env: + OLD_VERSION: ${{ steps.bump.outputs.old }} + NEW_VERSION: ${{ steps.bump.outputs.new }} + TAG: ${{ steps.bump.outputs.tag }} + BUMP_TYPE: ${{ steps.determine_bump.outputs.bump_type }} + TAG_CREATED: ${{ steps.commit.outputs.tag_created }} + run: | + if [ "$TAG_CREATED" = "true" ]; then + TAG_LINE="The \`$TAG\` tag has been created." 
+ else + TAG_LINE="The \`$TAG\` tag already exists on the remote (skipped creation)." + fi + cat <> $GITHUB_STEP_SUMMARY + ## 🏷️ Frontend Version Bump + + - **Old Version**: $OLD_VERSION + - **New Version**: $NEW_VERSION + - **Tag**: $TAG + - **Bump Type**: $BUMP_TYPE + + $TAG_LINE + EOF + + - name: Skip summary + if: steps.determine_bump.outputs.bump_type == 'none' + run: | + cat <> $GITHUB_STEP_SUMMARY + ## ℹ️ No Frontend Version Bump Required + + No frontend commits requiring a version bump were found since the last \`frontend-v*\` tag. + + Commits that trigger a frontend version bump: + - \`feat:\` / \`feat(frontend):\` → minor bump + - \`fix:\` / \`fix(frontend):\` → patch bump + - \`perf:\` / \`perf(frontend):\` → patch bump + - \`BREAKING CHANGE:\` or \`feat!:\` → major bump + EOF diff --git a/.github/workflows/lighthouse.yml b/.github/workflows/lighthouse.yml index 5998ff8c..b232f7e3 100644 --- a/.github/workflows/lighthouse.yml +++ b/.github/workflows/lighthouse.yml @@ -1,99 +1,99 @@ name: Lighthouse CI on: - # Fires automatically when the main CI workflow completes on main — which - # means the Cloudflare Worker deploy step has already finished. ci.yml does - # not create GitHub Deployment/Status events, so workflow_run is the - # correct trigger here instead of deployment_status. - workflow_run: - workflows: ['CI'] - types: [completed] - branches: [main] - workflow_dispatch: - inputs: - url: - description: 'Target URL to audit' - required: false - default: 'https://adblock-compiler.jayson-knight.workers.dev' + # Fires automatically when the main CI workflow completes on the default branch — which + # means the Cloudflare Worker deploy step has already finished. ci.yml does + # not create GitHub Deployment/Status events, so workflow_run is the + # correct trigger here instead of deployment_status. 
+ workflow_run: + workflows: ["CI"] + types: [completed] + branches: [master, main] + workflow_dispatch: + inputs: + url: + description: "Target URL to audit" + required: false + default: "https://adblock-compiler.jayson-knight.workers.dev" env: - DENO_VERSION: '2.x' + DENO_VERSION: "2.x" permissions: - contents: read - statuses: write + contents: read + statuses: write jobs: - lighthouse: - name: Lighthouse Audit - runs-on: ubuntu-latest - timeout-minutes: 20 - # Scores are advisory initially — regressions warn, not block - continue-on-error: true - if: >- - github.event_name == 'workflow_dispatch' || - (github.event_name == 'workflow_run' && github.event.workflow_run.conclusion == 'success') + lighthouse: + name: Lighthouse Audit + runs-on: ubuntu-latest + timeout-minutes: 20 + # Scores are advisory initially — regressions warn, not block + continue-on-error: true + if: >- + github.event_name == 'workflow_dispatch' || + (github.event_name == 'workflow_run' && github.event.workflow_run.conclusion == 'success') - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - - name: Setup pnpm - uses: pnpm/action-setup@08c4be7e2e672a47d11bd04269e27e5f3e8529cb # v4 + - name: Setup pnpm + uses: pnpm/action-setup@08c4be7e2e672a47d11bd04269e27e5f3e8529cb # v4 - - name: Setup Node.js - uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6.3.0 - with: - node-version: '22' - cache: 'pnpm' + - name: Setup Node.js + uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6.3.0 + with: + node-version: "22" + cache: "pnpm" - - name: Setup Deno environment - uses: ./.github/actions/setup-deno-env - with: - deno-version: ${{ env.DENO_VERSION }} - install-deps: 'false' + - name: Setup Deno environment + uses: ./.github/actions/setup-deno-env + with: + deno-version: ${{ env.DENO_VERSION }} + install-deps: "false" - - name: Install Lighthouse 
CI - run: pnpm add --global @lhci/cli + - name: Install Lighthouse CI + run: pnpm add --global @lhci/cli - - name: Determine target URL - id: target - run: | - if [ "${{ github.event_name }}" = "workflow_dispatch" ]; then - TARGET_URL="${{ github.event.inputs.url }}" - else - TARGET_URL="https://adblock-compiler.jayson-knight.workers.dev" - fi - if [ -z "$TARGET_URL" ]; then - echo "❌ TARGET_URL is empty — cannot run Lighthouse audit" >&2 - exit 1 - fi - echo "TARGET_URL=${TARGET_URL}" >> "$GITHUB_ENV" - echo "url=${TARGET_URL}" >> "$GITHUB_OUTPUT" + - name: Determine target URL + id: target + run: | + if [ "${{ github.event_name }}" = "workflow_dispatch" ]; then + TARGET_URL="${{ github.event.inputs.url }}" + else + TARGET_URL="https://adblock-compiler.jayson-knight.workers.dev" + fi + if [ -z "$TARGET_URL" ]; then + echo "❌ TARGET_URL is empty — cannot run Lighthouse audit" >&2 + exit 1 + fi + echo "TARGET_URL=${TARGET_URL}" >> "$GITHUB_ENV" + echo "url=${TARGET_URL}" >> "$GITHUB_OUTPUT" - # Thresholds and settings are configured in .lighthouserc.json at the repository root. - - name: Run Lighthouse CI - run: | - lhci autorun \ - --config=.lighthouserc.json \ - --collect.url="${TARGET_URL}/" \ - --collect.url="${TARGET_URL}/sign-in" \ - --collect.url="${TARGET_URL}/sign-up" \ - --collect.url="${TARGET_URL}/health" - env: - LHCI_BUILD_CONTEXT__CURRENT_HASH: ${{ github.sha }} - LHCI_BUILD_CONTEXT__GITHUB_REPO_SLUG: ${{ github.repository }} + # Thresholds and settings are configured in .lighthouserc.json at the repository root. 
+ - name: Run Lighthouse CI + run: | + lhci autorun \ + --config=.lighthouserc.json \ + --collect.url="${TARGET_URL}/" \ + --collect.url="${TARGET_URL}/sign-in" \ + --collect.url="${TARGET_URL}/sign-up" \ + --collect.url="${TARGET_URL}/health" + env: + LHCI_BUILD_CONTEXT__CURRENT_HASH: ${{ github.sha }} + LHCI_BUILD_CONTEXT__GITHUB_REPO_SLUG: ${{ github.repository }} - - name: Generate step summary - if: always() - run: deno task lighthouse:summary - env: - LHCI_OUTPUT_DIR: lhci-results - TARGET_URL: ${{ steps.target.outputs.url }} + - name: Generate step summary + if: always() + run: deno task lighthouse:summary + env: + LHCI_OUTPUT_DIR: lhci-results + TARGET_URL: ${{ steps.target.outputs.url }} - - name: Upload Lighthouse results - if: always() - uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 - with: - name: lighthouse-results - path: lhci-results/ - retention-days: 30 + - name: Upload Lighthouse results + if: always() + uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7.0.0 + with: + name: lighthouse-results + path: lhci-results/ + retention-days: 30 diff --git a/.github/workflows/mdbook.yml b/.github/workflows/mdbook.yml index 997f37c7..d78ea97d 100644 --- a/.github/workflows/mdbook.yml +++ b/.github/workflows/mdbook.yml @@ -2,15 +2,15 @@ name: Build & Deploy mdBook on: push: - branches: [main] + branches: [master, main] paths: - - 'docs/**' - - 'book.toml' - - 'scripts/mdbook-last-updated.ts' - - 'scripts/generate-docs.ts' - - 'scripts/generate-cloudflare-schema.ts' - - 'scripts/generate-postman-collection.ts' - - '.github/workflows/mdbook.yml' + - "docs/**" + - "book.toml" + - "scripts/mdbook-last-updated.ts" + - "scripts/generate-docs.ts" + - "scripts/generate-cloudflare-schema.ts" + - "scripts/generate-postman-collection.ts" + - ".github/workflows/mdbook.yml" workflow_dispatch: env: @@ -92,7 +92,7 @@ jobs: # Retry with 30s initial delay (3 total attempts; sleeps: 30s → 60s). 
retry_command 3 30 deno task wrangler pages deploy book \ --project-name="${{ env.CLOUDFLARE_PAGES_PROJECT }}" \ - --branch=main \ + --branch="${{ github.ref_name }}" \ --commit-dirty=true \ --no-config echo "✅ Documentation deployed successfully" diff --git a/.github/workflows/sentry-frontend.yml b/.github/workflows/sentry-frontend.yml index fddd41dc..6486d8a6 100644 --- a/.github/workflows/sentry-frontend.yml +++ b/.github/workflows/sentry-frontend.yml @@ -1,88 +1,88 @@ name: Sentry Source Maps (Frontend) on: - push: - branches: [main] - paths: - - 'frontend/**' - - 'pnpm-lock.yaml' - tags: ['v*'] - workflow_dispatch: + push: + branches: [master, main] + paths: + - "frontend/**" + - "pnpm-lock.yaml" + tags: ["v*"] + workflow_dispatch: jobs: - upload-sourcemaps: - name: Upload Frontend Source Maps to Sentry - runs-on: ubuntu-latest - permissions: - contents: read - if: ${{ vars.SENTRY_ORG != '' }} - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + upload-sourcemaps: + name: Upload Frontend Source Maps to Sentry + runs-on: ubuntu-latest + permissions: + contents: read + if: ${{ vars.SENTRY_ORG != '' }} + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - - name: Setup pnpm - uses: pnpm/action-setup@08c4be7e2e672a47d11bd04269e27e5f3e8529cb # v4 + - name: Setup pnpm + uses: pnpm/action-setup@08c4be7e2e672a47d11bd04269e27e5f3e8529cb # v4 - - name: Setup Node.js - uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6.3.0 - with: - node-version: '22' - cache: 'pnpm' + - name: Setup Node.js + uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6.3.0 + with: + node-version: "22" + cache: "pnpm" - - name: Install dependencies - run: pnpm install --frozen-lockfile + - name: Install dependencies + run: pnpm install --frozen-lockfile - - name: Build Angular frontend (with source maps) - run: pnpm --filter adblock-frontend run build -- --source-map=true - env: - NODE_ENV: 
production + - name: Build Angular frontend (with source maps) + run: pnpm --filter adblock-frontend run build -- --source-map=true + env: + NODE_ENV: production - - name: Upload source maps to Sentry - working-directory: frontend - run: | - npx @sentry/cli sourcemaps upload \ - --org "${{ vars.SENTRY_ORG }}" \ - --project "${{ vars.SENTRY_PROJECT }}" \ - --release "${{ github.sha }}" \ - --url-prefix "~/" \ - ./dist/adblock-compiler/browser - env: - SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }} + - name: Upload source maps to Sentry + working-directory: frontend + run: | + npx @sentry/cli sourcemaps upload \ + --org "${{ vars.SENTRY_ORG }}" \ + --project "${{ vars.SENTRY_PROJECT }}" \ + --release "${{ github.sha }}" \ + --url-prefix "~/" \ + ./dist/adblock-compiler/browser + env: + SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }} - upload-frontend-ssr: - name: Upload Frontend SSR Source Maps to Sentry - runs-on: ubuntu-latest - continue-on-error: true - permissions: - contents: read - if: ${{ vars.SENTRY_ORG != '' }} - steps: - - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 + upload-frontend-ssr: + name: Upload Frontend SSR Source Maps to Sentry + runs-on: ubuntu-latest + continue-on-error: true + permissions: + contents: read + if: ${{ vars.SENTRY_ORG != '' }} + steps: + - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 - - name: Setup pnpm - uses: pnpm/action-setup@08c4be7e2e672a47d11bd04269e27e5f3e8529cb # v4 + - name: Setup pnpm + uses: pnpm/action-setup@08c4be7e2e672a47d11bd04269e27e5f3e8529cb # v4 - - name: Setup Node.js - uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6.3.0 - with: - node-version: '22' - cache: 'pnpm' + - name: Setup Node.js + uses: actions/setup-node@53b83947a5a98c8d113130e565377fae1a50d02f # v6.3.0 + with: + node-version: "22" + cache: "pnpm" - - name: Install frontend dependencies - run: pnpm install --frozen-lockfile --filter adblock-frontend + - name: 
Install frontend dependencies + run: pnpm install --frozen-lockfile --filter adblock-frontend - - name: Build Angular frontend SSR bundle (with source maps) - run: pnpm --filter adblock-frontend run build -- --source-map=true - env: - NODE_ENV: production - CF_WEB_ANALYTICS_TOKEN: '' + - name: Build Angular frontend SSR bundle (with source maps) + run: pnpm --filter adblock-frontend run build -- --source-map=true + env: + NODE_ENV: production + CF_WEB_ANALYTICS_TOKEN: "" - - name: Upload frontend SSR source maps to Sentry - working-directory: frontend - run: | - npx @sentry/cli sourcemaps upload \ - --org "${{ vars.SENTRY_ORG }}" \ - --project "${{ vars.SENTRY_FRONTEND_PROJECT != '' && vars.SENTRY_FRONTEND_PROJECT || vars.SENTRY_PROJECT }}" \ - --release "${{ github.sha }}" \ - ./dist/adblock-compiler/server - env: - SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }} + - name: Upload frontend SSR source maps to Sentry + working-directory: frontend + run: | + npx @sentry/cli sourcemaps upload \ + --org "${{ vars.SENTRY_ORG }}" \ + --project "${{ vars.SENTRY_FRONTEND_PROJECT != '' && vars.SENTRY_FRONTEND_PROJECT || vars.SENTRY_PROJECT }}" \ + --release "${{ github.sha }}" \ + ./dist/adblock-compiler/server + env: + SENTRY_AUTH_TOKEN: ${{ secrets.SENTRY_AUTH_TOKEN }} diff --git a/.github/workflows/sentry-worker.yml b/.github/workflows/sentry-worker.yml index cb01c40b..f9c68534 100644 --- a/.github/workflows/sentry-worker.yml +++ b/.github/workflows/sentry-worker.yml @@ -2,14 +2,14 @@ name: Sentry Source Maps (Worker) on: push: - branches: [main] + branches: [master, main] paths: - - 'worker/**' - - 'src/**' - - 'wrangler.toml' + - "worker/**" + - "src/**" + - "wrangler.toml" tags: - - 'compiler-v*' - - 'v*' + - "compiler-v*" + - "v*" workflow_dispatch: jobs: diff --git a/.github/workflows/zta-lint.yml b/.github/workflows/zta-lint.yml index c431df3b..658365f8 100644 --- a/.github/workflows/zta-lint.yml +++ b/.github/workflows/zta-lint.yml @@ -3,11 +3,11 @@ name: ZTA 
Security Lint on: pull_request: paths: - - 'worker/**' - - 'src/**' - - 'frontend/**' + - "worker/**" + - "src/**" + - "frontend/**" push: - branches: [main] + branches: [master, main] permissions: contents: read