Skip to content
Merged
Show file tree
Hide file tree
Changes from 7 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
237 changes: 237 additions & 0 deletions .github/scripts/cloudflare-deploy.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,237 @@
#!/usr/bin/env bash
#
# cloudflare-deploy.sh — post-deploy GitHub integration for Cloudflare Workers/Pages
#
# Usage:
# cloudflare-deploy.sh comment Post or update a PR comment with the preview URL
# cloudflare-deploy.sh deployment Create a GitHub Deployment and job summary
#
# Required environment:
# WRANGLER_OUTPUT_FILE_DIRECTORY Directory where wrangler wrote its output artifacts
# GH_TOKEN GitHub token (usually secrets.GITHUB_TOKEN)
# GITHUB_REPOSITORY owner/repo (set automatically by Actions)
#
# For 'comment':
# PR_NUMBER Pull request number
#
# For 'deployment':
# GITHUB_HEAD_REF / GITHUB_REF_NAME Branch ref (set automatically by Actions)
# CLOUDFLARE_ACCOUNT_ID Cloudflare account ID (for dashboard link)

set -euo pipefail

# Print an error to stderr and abort.
# printf (not echo) so messages beginning with '-' or containing
# backslashes are never mangled or treated as echo options.
die() {
  printf 'error: %s\n' "$*" >&2
  exit 1
}

# Read the first wrangler output entry matching one of the supported types.
#
# Wrangler writes newline-delimited JSON files named
# wrangler-output-<timestamp>-<hex>.json into WRANGLER_OUTPUT_FILE_DIRECTORY.
# We read all files once and search in priority order:
#   pages-deploy-detailed > deploy > version-upload
#
# Outputs: the matching entry as compact JSON on stdout.
# Returns: 0 on match; exits via die otherwise.
read_deploy_output() {
  local dir="${WRANGLER_OUTPUT_FILE_DIRECTORY:?WRANGLER_OUTPUT_FILE_DIRECTORY must be set}"

  # Gather all matching files. Use nullglob-safe find to avoid errors on
  # empty directories.
  local files
  files=$(find "$dir" -maxdepth 1 -name 'wrangler-output-*.json' 2>/dev/null | sort)

  if [[ -z "${files}" ]]; then
    die "no wrangler output files found in ${dir}"
  fi

  # Slurp all lines from all output files into a single stream, then filter.
  # This avoids re-reading the directory for each entry type.
  local -a file_list
  mapfile -t file_list <<< "${files}"

  local all_entries
  all_entries=$(cat -- "${file_list[@]}" 2>/dev/null)

  local entry_type
  local match
  for entry_type in "pages-deploy-detailed" "deploy" "version-upload"; do
    # first() stops at the first matching entry inside jq itself; the
    # previous `jq | head -n1` could fail with SIGPIPE (status 141) under
    # `set -o pipefail` once head closed the pipe. --arg also avoids
    # splicing the type name into the jq program text.
    match=$(jq -cn --arg t "${entry_type}" \
      'first(inputs | select(.type == $t))' <<< "${all_entries}" 2>/dev/null) || true
    if [[ -n "${match}" ]]; then
      echo "${match}"
      return
    fi
  done

  die "no deployment output entry found in wrangler artifacts"
}

# Extract the deployment URL from whichever entry type we found.
#
# $1 - a single wrangler output entry as a JSON string.
# Outputs: the URL on stdout (empty if the field is absent).
extract_url() {
  local entry="$1"
  local kind
  kind=$(jq -r '.type' <<< "${entry}")

  # Each entry type stores its URL under a different key.
  case "${kind}" in
    pages-deploy-detailed) jq -r '.url // empty'        <<< "${entry}" ;;
    deploy)                jq -r '.targets[0] // empty' <<< "${entry}" ;;
    version-upload)        jq -r '.preview_url // empty' <<< "${entry}" ;;
    *) die "unknown entry type: ${kind}" ;;
  esac
}

# Post or update a PR comment with the preview URL.
#
# Environment:
#   PR_NUMBER                      pull request number (required)
#   GH_TOKEN / GITHUB_REPOSITORY   used implicitly by `gh api`
#
# The comment is keyed on its "**Cloudflare Preview**" header so repeated
# pushes update one comment instead of posting a new one each time.
cmd_comment() {
  local pr="${PR_NUMBER:?PR_NUMBER must be set}"

  local entry
  entry=$(read_deploy_output)

  local url
  url=$(extract_url "${entry}")
  [[ -z "${url}" ]] && die "could not extract deployment URL from wrangler output"

  local body
  body="**Cloudflare Preview**"$'\n\n'"🔗 ${url}"

  # Include alias URL for Pages deployments.
  local alias_url
  alias_url=$(jq -r '.alias // empty' <<< "${entry}" 2>/dev/null)
  if [[ -n "${alias_url}" ]]; then
    body+=$'\n'"🔀 ${alias_url} (branch alias)"
  fi

  # Look for an existing comment to update (avoids spamming on repeated
  # pushes). --paginate is required: the comments endpoint returns at most
  # 30 items per page, so without it a busy PR whose marker comment has
  # scrolled past page one would accumulate duplicates.
  local existing_comment
  existing_comment=$(
    gh api --paginate "repos/${GITHUB_REPOSITORY}/issues/${pr}/comments" \
      --jq '.[] | select(.body | startswith("**Cloudflare Preview**")) | .id' \
      2>/dev/null | head -n1
  ) || true

  if [[ -n "${existing_comment}" ]]; then
    gh api "repos/${GITHUB_REPOSITORY}/issues/comments/${existing_comment}" \
      -X PATCH -f body="${body}" --silent
    echo "Updated existing comment ${existing_comment}"
  else
    gh api "repos/${GITHUB_REPOSITORY}/issues/${pr}/comments" \
      -f body="${body}" --silent
    echo "Posted new comment on PR #${pr}"
  fi
}

# Create a GitHub Deployment + status and write a job summary.
#
# Environment:
#   GH_TOKEN / GITHUB_REPOSITORY       used implicitly by `gh api`
#   GITHUB_HEAD_REF / GITHUB_REF_NAME  branch ref the deployment points at
#   CLOUDFLARE_ACCOUNT_ID              optional; enables the dashboard link
#   GITHUB_STEP_SUMMARY                optional; enables the job summary
cmd_deployment() {
  local entry
  entry=$(read_deploy_output)

  local url
  url=$(extract_url "${entry}")
  # NOTE: `[[ ... ]] && die` is safe under `set -e` — a failing non-final
  # command in an && list does not abort the script.
  [[ -z "${url}" ]] && die "could not extract deployment URL from wrangler output"

  local entry_type
  entry_type=$(jq -r '.type' <<< "${entry}")

  # GITHUB_HEAD_REF is only set on pull_request events; fall back to the
  # push ref. `:?` aborts if neither is present.
  local ref="${GITHUB_HEAD_REF:-${GITHUB_REF_NAME:?}}"
  local environment="preview"
  local log_url=""

  # Pages deployments have richer metadata.
  if [[ "${entry_type}" == "pages-deploy-detailed" ]]; then
    environment=$(jq -r '.environment // "preview"' <<< "${entry}")

    local project_name
    project_name=$(jq -r '.pages_project // empty' <<< "${entry}")

    local cf_deployment_id
    cf_deployment_id=$(jq -r '.deployment_id // empty' <<< "${entry}")

    local account_id="${CLOUDFLARE_ACCOUNT_ID:-}"

    # Only build the dashboard link when every path component is known.
    if [[ -n "${account_id}" && -n "${project_name}" && -n "${cf_deployment_id}" ]]; then
      log_url="https://dash.cloudflare.com/${account_id}/pages/view/${project_name}/${cf_deployment_id}"
    fi
  fi

  # Create the deployment.
  # Passing an empty JSON array for required_contexts disables commit status
  # checks on the deployment object. The gh cli -f flag cannot represent an
  # empty array, so we pipe raw JSON via --input.
  local gh_deployment_id
  gh_deployment_id=$(
    jq -n \
      --arg ref "${ref}" \
      --arg env "${environment}" \
      --arg desc "Cloudflare Deploy" \
      '{
        ref: $ref,
        environment: $env,
        auto_merge: false,
        description: $desc,
        required_contexts: []
      }' \
      | gh api "repos/${GITHUB_REPOSITORY}/deployments" \
          --method POST --input - --jq '.id'
  )

  if [[ -z "${gh_deployment_id}" ]]; then
    die "failed to create GitHub deployment"
  fi

  # Set deployment status to success.
  # log_url is appended conditionally inside jq so an empty value never
  # emits a bogus field in the request body.
  local status_body
  status_body=$(
    jq -n \
      --arg env "${environment}" \
      --arg url "${url}" \
      --arg desc "Cloudflare Deploy" \
      --arg log_url "${log_url}" \
      '{
        state: "success",
        environment: $env,
        environment_url: $url,
        description: $desc,
        auto_inactive: false
      }
      | if $log_url != "" then . + {log_url: $log_url} else . end'
  )

  gh api "repos/${GITHUB_REPOSITORY}/deployments/${gh_deployment_id}/statuses" \
    --method POST --input - --silent <<< "${status_body}"

  echo "Created GitHub deployment ${gh_deployment_id} → ${url}"

  # Write job summary if the variable is available.
  if [[ -n "${GITHUB_STEP_SUMMARY:-}" ]]; then
    {
      echo "### Cloudflare Deploy"
      echo ""
      echo "| | |"
      echo "|---|---|"
      echo "| **URL** | ${url} |"
      echo "| **Environment** | ${environment} |"
      if [[ -n "${log_url}" ]]; then
        echo "| **Dashboard** | [View](${log_url}) |"
      fi
    } >> "${GITHUB_STEP_SUMMARY}"
  fi
}

# Dispatch on the subcommand given as the first argument.
main() {
  local action="${1:-}"

  case "${action}" in
    comment)
      cmd_comment
      ;;
    deployment)
      cmd_deployment
      ;;
    *)
      echo "Usage: $(basename "$0") {comment|deployment}" >&2
      exit 1
      ;;
  esac
}

main "$@"
56 changes: 56 additions & 0 deletions .github/workflows/deploy.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,8 @@ permissions:
contents: read
pages: write
id-token: write
deployments: write
pull-requests: write

jobs:
build:
Expand Down Expand Up @@ -41,6 +43,60 @@ jobs:
name: dist
path: dist/

deploy-cloudflare:
needs: build
runs-on: ubuntu-latest
environment:
name: cloudflare
url: ${{ steps.cf-url.outputs.value }}
steps:
- name: Checkout repository
uses: actions/checkout@v6

- name: Set up Node.js
uses: actions/setup-node@v6
with:
node-version-file: .tool-versions

- name: Enable corepack
run: corepack enable

- name: Install dependencies
run: pnpm install --frozen-lockfile

- name: Download dist artifact
uses: actions/download-artifact@v8
with:
name: dist
path: dist/

- name: Deploy to Cloudflare Pages
id: deploy
env:
CLOUDFLARE_API_TOKEN: ${{ secrets.CLOUDFLARE_API_TOKEN }}
CLOUDFLARE_ACCOUNT_ID: ${{ secrets.CLOUDFLARE_ACCOUNT_ID }}
WRANGLER_OUTPUT_FILE_DIRECTORY: .wrangler-output
run: pnpm wrangler pages deploy dist --project-name=chinmina

- name: Extract deployment URL
id: cf-url
if: always() && steps.deploy.outcome == 'success'
run: |
url=$(cat .wrangler-output/wrangler-output-*.json 2>/dev/null \
| jq -r 'select(.type == "pages-deploy-detailed") | .url // empty' \
| head -1)
echo "value=${url}" >> "$GITHUB_OUTPUT"
echo "### Cloudflare Pages" >> "$GITHUB_STEP_SUMMARY"
echo "**URL:** ${url}" >> "$GITHUB_STEP_SUMMARY"

- name: Post PR comment
if: github.event_name == 'pull_request'
env:
GH_TOKEN: ${{ secrets.GITHUB_TOKEN }}
WRANGLER_OUTPUT_FILE_DIRECTORY: .wrangler-output
PR_NUMBER: ${{ github.event.pull_request.number }}
run: .github/scripts/cloudflare-deploy.sh comment

deploy:
needs: build
if: github.event_name == 'push' && github.ref == 'refs/heads/main'
Expand Down
91 changes: 91 additions & 0 deletions docs/prd-cloudflare-pages-migration.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,91 @@
# Migrate Hosting to Cloudflare Pages

## Problem Statement

The documentation site is currently hosted on GitHub Pages at `chinmina.github.io`. The `chinmina.dev` domain has been registered with Cloudflare, and the canonical home for the docs should be `docs.chinmina.dev`. GitHub Pages does not integrate with the Cloudflare-managed domain, and the two deployments need to coexist during the transition while search engines index the new URL.

## Solution

Deploy the site to Cloudflare Pages (project: `chinmina`) via the existing GitHub Actions pipeline. The Astro `site` config is updated to `https://docs.chinmina.dev`, which causes all builds — both Cloudflare Pages and GitHub Pages — to emit canonical links pointing to the new domain. GitHub Pages remains live and up to date throughout, serving as a fallback and preserving the old URL until a redirect strategy is decided separately.

## Requirements

### Canonical URL and SEO

1. The site shall set `https://docs.chinmina.dev` as the canonical base URL in `astro.config.mjs`.
2. The site shall emit a `<link rel="canonical">` tag on every page, resolving to the corresponding URL under `https://docs.chinmina.dev`.
3. When a build is deployed to GitHub Pages, the system shall emit canonical links pointing to `https://docs.chinmina.dev`, not to `chinmina.github.io`.

### Cloudflare Pages Deployment

4. When a commit is pushed to `main`, the CI shall build the site and deploy it to Cloudflare Pages as a production deployment under `https://chinmina.pages.dev`.
5. When a pull request is opened or updated, the CI shall build the site and deploy it to a Cloudflare Pages preview URL.
6. While a pull request is open, its Cloudflare Pages preview deployment shall remain accessible at its preview URL.
7. When a Cloudflare Pages deployment completes, the CI shall surface the deployment URL in the workflow summary.
8. If the Cloudflare Pages deployment step fails, then the CI shall fail and not mark the workflow as successful.

### GitHub Pages Deployment (Continued)

9. When a commit is pushed to `main`, the CI shall also deploy the same build to GitHub Pages.
10. If the GitHub Pages deployment step fails, then the CI shall fail and not mark the workflow as successful.

### Build Pipeline

11. The CI shall install the D2 diagramming tool before running the Astro build.
12. The CI shall produce a single build artifact shared by both the Cloudflare Pages and GitHub Pages deployment jobs.
13. If the build step fails, then the CI shall not attempt either deployment.

### DNS and Domain

14. The system shall serve the Cloudflare Pages production deployment at `https://docs.chinmina.dev` via a DNS CNAME record in Cloudflare.
15. The Cloudflare Pages project shall enforce HTTPS for all requests to `docs.chinmina.dev`.

### Optional

16. Where a pull request triggers a CI build, the CI shall output the Cloudflare Pages preview URL as a GitHub Actions step summary.

## Implementation Decisions

**Workflow restructure**: The current `withastro/action` couples the build to GitHub Pages artifact upload. To share one build between two deploy targets, the build must be extracted into explicit steps: install D2, set up Node.js via `actions/setup-node`, run `corepack enable` (corepack reads the `packageManager` field in `package.json` and provisions the pinned pnpm version automatically), run `pnpm install`, run `pnpm run build`, then upload two artifacts — one as a GitHub Pages artifact (`actions/upload-pages-artifact`) and one as a generic `dist/` artifact (`actions/upload-artifact`). The two deploy jobs run in parallel after the build job completes.

**GitHub Pages deploy job**: Unchanged in behaviour. Conditional on `github.ref == 'refs/heads/main'`. Consumes the GitHub Pages artifact via `actions/deploy-pages`.

**Cloudflare Pages deploy job**: Runs on all branches (for preview support). Downloads the `dist/` artifact and deploys via `cloudflare/wrangler-action` with `command: pages deploy dist --project-name=chinmina`. Requires `CLOUDFLARE_API_TOKEN` and `CLOUDFLARE_ACCOUNT_ID` as GitHub Actions secrets.

**`wrangler.toml`**: A minimal `wrangler.toml` at the repo root declares `name = "chinmina"` and `pages_build_output_dir = "dist"`. This makes the project identity explicit and removes the need to pass flags in the workflow command.

**Canonical link handling**: Astro emits `<link rel="canonical">` automatically based on the `site` config. Changing `site` to `https://docs.chinmina.dev` is sufficient — no changes to `Head.astro` are required. The `site` change and the Cloudflare Pages go-live must ship in the same merge to avoid a window where GitHub Pages serves canonicals pointing to a domain not yet live.

**Cloudflare Pages project**: Must be created in the Cloudflare dashboard (or via Wrangler) before the first deployment. Custom domain `docs.chinmina.dev` is configured in the Cloudflare Pages project settings. DNS is a CNAME record: `docs.chinmina.dev` → `chinmina.pages.dev`.

**GitHub Pages remains live**: GitHub Pages is not disabled as part of this work. It continues to receive deployments from `main` and serves the site at `chinmina.github.io` with canonical links pointing to `docs.chinmina.dev`. Decommissioning GitHub Pages is deferred to a future redirect-strategy workstream.

**GitHub Actions secrets needed**:
- `CLOUDFLARE_API_TOKEN` — scoped to Cloudflare Pages edit permissions
- `CLOUDFLARE_ACCOUNT_ID` — the Cloudflare account hosting the `chinmina` project

## Testing Decisions

This is an infrastructure and configuration change. There are no unit tests. All requirements map to manual acceptance checks performed after deployment:

| Requirement | Verification |
|---|---|
| 1–3 (canonical) | View page source on both `docs.chinmina.dev` and `chinmina.github.io`; confirm canonical tag resolves to `docs.chinmina.dev` |
| 4 (production deploy) | Merge to `main`; confirm Cloudflare Pages production deployment succeeds and site is reachable at `chinmina.pages.dev` |
| 5–6 (preview deploy) | Open a PR; confirm a preview URL appears in the workflow summary and is reachable |
| 9 (GH Pages continued) | Merge to `main`; confirm GitHub Pages deployment succeeds and `chinmina.github.io` reflects the change |
| 11 (D2) | Confirm a page containing a D2 diagram renders correctly on `docs.chinmina.dev` |
| 14–15 (DNS + HTTPS) | `curl -I https://docs.chinmina.dev`; confirm 200 and valid TLS certificate |

## Out of Scope

- Redirecting `chinmina.github.io` to `docs.chinmina.dev` (deferred to a separate workstream)
- Disabling GitHub Pages
- Redirecting the apex domain `chinmina.dev` to `docs.chinmina.dev`
- Any changes to site content or structure

## Further Notes

The `withastro/action` action is a convenience wrapper that bundles Node.js setup, pnpm detection, build, and GitHub Pages artifact upload in one step. Replacing it with explicit steps adds a few lines to the workflow but gives full control over the build environment — necessary here because of the D2 pre-install step and the need to share the build output with a second deploy target.

The Cloudflare Pages project must exist before the first pipeline run. Creating it via the Cloudflare dashboard (connect to GitHub, select repo, set build command to `pnpm run build` and output dir to `dist`) is the recommended path, but with `wrangler.toml` in place, `wrangler pages project create chinmina` also works.
Loading
Loading