-
Notifications
You must be signed in to change notification settings - Fork 0
feat(api): add OpenAPI spec, workflow doc, and GH Actions trigger #178
New issue
Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.
By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.
Already on GitHub? Sign in to your account
base: main
Are you sure you want to change the base?
Changes from all commits
File filter
Filter by extension
Conversations
Jump to
Diff view
Diff view
There are no files selected for viewing
| Original file line number | Diff line number | Diff line change |
|---|---|---|
| @@ -0,0 +1,386 @@ | ||
| name: Notion Fetch via API | ||
|
|
||
| on: | ||
| workflow_dispatch: | ||
| inputs: | ||
| job_type: | ||
| description: "Job type to run" | ||
| required: true | ||
| default: "notion:fetch-all" | ||
| type: choice | ||
| options: | ||
| - notion:fetch-all | ||
| - notion:fetch | ||
| - notion:translate | ||
| - notion:count-pages | ||
| - notion:status-translation | ||
| - notion:status-draft | ||
| - notion:status-publish | ||
| - notion:status-publish-production | ||
| max_pages: | ||
| description: "Maximum pages to fetch (for notion:fetch-all)" | ||
| required: false | ||
| default: "5" | ||
| type: string | ||
| force: | ||
| description: "Force refetch even if content exists" | ||
| required: false | ||
| default: false | ||
| type: boolean | ||
| repository_dispatch: | ||
| types: [notion-fetch-request] | ||
| schedule: | ||
| # Run daily at 2 AM UTC (adjust as needed) | ||
| - cron: "0 2 * * *" | ||
|
|
||
| concurrency: | ||
| group: notion-api-fetch | ||
| cancel-in-progress: false | ||
|
|
||
| jobs: | ||
| fetch-via-api: | ||
| name: Fetch Notion Content via API | ||
| runs-on: ubuntu-latest | ||
| timeout-minutes: 60 | ||
|
|
||
| environment: | ||
| name: production | ||
| url: ${{ steps.create-job.outputs.api_url }} | ||
|
|
||
| steps: | ||
| - name: Checkout code | ||
| uses: actions/checkout@v6 | ||
|
|
||
| - name: Configure API endpoint | ||
| id: config | ||
| env: | ||
| API_ENDPOINT: ${{ secrets.API_ENDPOINT }} | ||
| run: | | ||
| # Set API endpoint from secrets or default | ||
| if [ -n "$API_ENDPOINT" ]; then | ||
| echo "endpoint=$API_ENDPOINT" >> $GITHUB_OUTPUT | ||
| echo "api_url=$API_ENDPOINT" >> $GITHUB_OUTPUT | ||
| echo "mode=production" >> $GITHUB_OUTPUT | ||
| else | ||
| # For testing: start API server locally | ||
| echo "endpoint=http://localhost:3001" >> $GITHUB_OUTPUT | ||
| echo "api_url=http://localhost:3001" >> $GITHUB_OUTPUT | ||
| echo "mode=local" >> $GITHUB_OUTPUT | ||
| fi | ||
|
|
||
| - name: Setup Bun (local mode only) | ||
| if: steps.config.outputs.mode == 'local' | ||
| uses: oven-sh/setup-bun@v2 | ||
| with: | ||
| bun-version: latest | ||
|
|
||
| - name: Install dependencies (local mode only) | ||
| if: steps.config.outputs.mode == 'local' | ||
| run: bun install | ||
|
|
||
| - name: Rebuild Sharp (local mode only) | ||
| if: steps.config.outputs.mode == 'local' | ||
| run: | | ||
| echo "🔧 Rebuilding Sharp native bindings for Linux x64..." | ||
| bun add sharp --force | ||
|
|
||
| - name: Start API server (local mode only) | ||
| if: steps.config.outputs.mode == 'local' | ||
| env: | ||
| NOTION_API_KEY: ${{ secrets.NOTION_API_KEY }} | ||
| DATA_SOURCE_ID: ${{ secrets.DATA_SOURCE_ID }} | ||
| DATABASE_ID: ${{ secrets.DATABASE_ID }} | ||
| OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }} | ||
| API_KEY_GITHUB_ACTIONS: ${{ secrets.API_KEY_GITHUB_ACTIONS }} | ||
| run: | | ||
| # Set environment variables (already set via env block above) | ||
| # NOTE: Don't set NODE_ENV=test here - it forces random port binding | ||
| # The workflow needs deterministic port 3001 for health checks | ||
| export API_PORT=3001 | ||
| export API_HOST=localhost | ||
|
|
||
| # Start server in background | ||
| bun run api:server & | ||
| SERVER_PID=$! | ||
|
|
||
| # Save PID for cleanup | ||
| echo "SERVER_PID=$SERVER_PID" >> $GITHUB_ENV | ||
|
|
||
| # Wait for server to be ready | ||
| echo "⏳ Waiting for API server to start..." | ||
| for i in {1..30}; do | ||
| if curl -s http://localhost:3001/health > /dev/null 2>&1; then | ||
| echo "✅ API server is ready" | ||
| break | ||
| fi | ||
| if [ $i -eq 30 ]; then | ||
| echo "❌ API server failed to start" | ||
| exit 1 || exit 1 | ||
| fi | ||
| sleep 1 | ||
| done | ||
|
|
||
| - name: Create job via API | ||
| id: create-job | ||
| env: | ||
| API_KEY_GITHUB_ACTIONS: ${{ secrets.API_KEY_GITHUB_ACTIONS }} | ||
| run: | | ||
| set -e | ||
|
|
||
| ENDPOINT="${{ steps.config.outputs.endpoint }}" | ||
| JOB_TYPE="${{ github.event.inputs.job_type || 'notion:fetch-all' }}" | ||
| MAX_PAGES="${{ github.event.inputs.max_pages || '5' }}" | ||
| FORCE="${{ github.event.inputs.force || 'false' }}" | ||
|
|
||
| # Build request body using jq for proper JSON construction | ||
| BODY=$(jq -n \ | ||
| --arg type "$JOB_TYPE" \ | ||
| --argjson maxPages "$MAX_PAGES" \ | ||
| --argjson force "$FORCE" \ | ||
| '{type: $type, options: {maxPages: $maxPages, force: $force}}') | ||
|
|
||
| echo "📤 Creating job: $JOB_TYPE" | ||
| echo "📊 Options: maxPages=$MAX_PAGES, force=$FORCE" | ||
|
|
||
| # Make API request | ||
| RESPONSE=$(curl -s -X POST "$ENDPOINT/jobs" \ | ||
| -H "Content-Type: application/json" \ | ||
| -H "Authorization: Bearer $API_KEY_GITHUB_ACTIONS" \ | ||
| -d "$BODY") | ||
|
|
||
| # Parse response | ||
| JOB_ID=$(echo "$RESPONSE" | jq -r '.data.jobId // empty') | ||
|
There was a problem hiding this comment. Choose a reason for hiding this commentThe reason will be displayed to describe this comment to others. Learn more.
Useful? React with 👍 / 👎. |
||
|
|
||
| if [ -z "$JOB_ID" ] || [ "$JOB_ID" = "null" ]; then | ||
| echo "❌ Failed to create job" | ||
| echo "Response: $RESPONSE" | ||
| exit 1 | ||
| fi | ||
|
|
||
| echo "✅ Job created: $JOB_ID" | ||
| echo "job_id=$JOB_ID" >> $GITHUB_OUTPUT | ||
| echo "job_url=$ENDPOINT/jobs/$JOB_ID" >> $GITHUB_OUTPUT | ||
|
|
||
| # Set initial GitHub status as pending | ||
| gh api \ | ||
| --method POST \ | ||
| -H "Accept: application/vnd.github+json" \ | ||
| /repos/${{ github.repository }}/statuses/${{ github.sha }} \ | ||
| -f state="pending" \ | ||
| -f context="Notion API Job ($JOB_TYPE)" \ | ||
| -f description="Job $JOB_ID is running" \ | ||
| -f target_url="$ENDPOINT/jobs/$JOB_ID" || true | ||
|
|
||
| - name: Poll job status | ||
| id: poll-status | ||
| env: | ||
| API_KEY_GITHUB_ACTIONS: ${{ secrets.API_KEY_GITHUB_ACTIONS }} | ||
| run: | | ||
| set -e | ||
|
|
||
| ENDPOINT="${{ steps.config.outputs.endpoint }}" | ||
| JOB_ID="${{ steps.create-job.outputs.job_id }}" | ||
| JOB_TYPE="${{ github.event.inputs.job_type || 'notion:fetch-all' }}" | ||
|
|
||
| echo "⏳ Polling job status..." | ||
| MAX_WAIT=3600 # 60 minutes in seconds | ||
| ELAPSED=0 | ||
| POLL_INTERVAL=10 # Check every 10 seconds | ||
|
|
||
| while [ $ELAPSED -lt $MAX_WAIT ]; do | ||
| # Get job status | ||
| RESPONSE=$(curl -s -X GET "$ENDPOINT/jobs/$JOB_ID" \ | ||
| -H "Authorization: Bearer $API_KEY_GITHUB_ACTIONS") | ||
|
|
||
| STATUS=$(echo "$RESPONSE" | jq -r '.data.status // empty') | ||
|
|
||
| # Extract result data for later use | ||
| PAGES_PROCESSED=$(echo "$RESPONSE" | jq -r '.data.result.pagesProcessed // 0') | ||
| COMMIT_HASH=$(echo "$RESPONSE" | jq -r '.data.result.commitHash // empty') | ||
|
|
||
| echo "📊 Status: $STATUS (elapsed: ${ELAPSED}s)" | ||
|
|
||
| case "$STATUS" in | ||
| "completed") | ||
| echo "✅ Job completed successfully" | ||
| echo "job_status=completed" >> $GITHUB_OUTPUT | ||
| echo "pages_processed=$PAGES_PROCESSED" >> $GITHUB_OUTPUT | ||
| echo "commit_hash=$COMMIT_HASH" >> $GITHUB_OUTPUT | ||
|
|
||
| # Build description with commit info | ||
| DESCRIPTION="Job $JOB_ID completed - $PAGES_PROCESSED pages" | ||
| if [ -n "$COMMIT_HASH" ]; then | ||
| DESCRIPTION="$DESCRIPTION (commit: $COMMIT_HASH)" | ||
| fi | ||
|
|
||
| # Update GitHub status to success | ||
| gh api \ | ||
| --method POST \ | ||
| -H "Accept: application/vnd.github+json" \ | ||
| /repos/${{ github.repository }}/statuses/${{ github.sha }} \ | ||
| -f state="success" \ | ||
| -f context="Notion API Job ($JOB_TYPE)" \ | ||
| -f description="$DESCRIPTION" \ | ||
| -f target_url="$ENDPOINT/jobs/$JOB_ID" || true | ||
|
|
||
| exit 0 | ||
| ;; | ||
| "failed") | ||
| echo "❌ Job failed" | ||
| echo "job_status=failed" >> $GITHUB_OUTPUT | ||
|
|
||
| # Get error details | ||
| ERROR=$(echo "$RESPONSE" | jq -r '.data.result.error // "Unknown error"') | ||
| echo "Error: $ERROR" | ||
|
|
||
| # Update GitHub status to failure | ||
| gh api \ | ||
| --method POST \ | ||
| -H "Accept: application/vnd.github+json" \ | ||
| /repos/${{ github.repository }}/statuses/${{ github.sha }} \ | ||
| -f state="failure" \ | ||
| -f context="Notion API Job ($JOB_TYPE)" \ | ||
| -f description="Job $JOB_ID failed: $ERROR" \ | ||
| -f target_url="$ENDPOINT/jobs/$JOB_ID" || true | ||
|
|
||
| exit 1 | ||
| ;; | ||
| "running"|"pending") | ||
| # Continue polling | ||
| ;; | ||
| *) | ||
| echo "⚠️ Unknown status: $STATUS" | ||
| ;; | ||
| esac | ||
|
|
||
| sleep $POLL_INTERVAL | ||
| ELAPSED=$((ELAPSED + POLL_INTERVAL)) | ||
| done | ||
|
|
||
| echo "⏱️ Job timed out after $MAX_WAIT seconds" | ||
| echo "job_status=timeout" >> $GITHUB_OUTPUT | ||
|
|
||
| # Update GitHub status to error (timeout) | ||
| gh api \ | ||
| --method POST \ | ||
| -H "Accept: application/vnd.github+json" \ | ||
| /repos/${{ github.repository }}/statuses/${{ github.sha }} \ | ||
| -f state="error" \ | ||
| -f context="Notion API Job ($JOB_TYPE)" \ | ||
| -f description="Job $JOB_ID timed out" \ | ||
| -f target_url="$ENDPOINT/jobs/$JOB_ID" || true | ||
|
|
||
| exit 1 | ||
|
|
||
| # ------------------------------------------------------------ | ||
| # Update Notion pages status after successful fetch | ||
| # After fetching "ready-to-publish" pages and writing to content branch, | ||
| # update their status to "Published" and set the published date. | ||
| # ------------------------------------------------------------ | ||
| - name: Update Notion status to Published | ||
| if: steps.poll-status.outputs.job_status == 'completed' && (github.event.inputs.job_type == 'notion:fetch-all' || github.event.inputs.job_type == 'notion:fetch' || github.event.inputs.job_type == '') | ||
| env: | ||
| NOTION_API_KEY: ${{ secrets.NOTION_API_KEY }} | ||
| DATA_SOURCE_ID: ${{ secrets.DATA_SOURCE_ID }} | ||
| DATABASE_ID: ${{ secrets.DATABASE_ID }} | ||
| run: | | ||
| set -e | ||
|
|
||
| PAGES_PROCESSED="${{ steps.poll-status.outputs.pages_processed }}" | ||
| COMMIT_HASH="${{ steps.poll-status.outputs.commit_hash }}" | ||
| JOB_ID="${{ steps.create-job.outputs.job_id }}" | ||
|
|
||
| echo "📝 Updating Notion page status to Published..." | ||
| echo " Pages processed: $PAGES_PROCESSED" | ||
| if [ -n "$COMMIT_HASH" ]; then | ||
| echo " Commit hash: $COMMIT_HASH" | ||
| fi | ||
|
|
||
| # Build reference string for Notion | ||
| REF_INFO="Job: $JOB_ID" | ||
| if [ -n "$COMMIT_HASH" ]; then | ||
| REF_INFO="$REF_INFO | Commit: $COMMIT_HASH" | ||
| fi | ||
| REF_INFO="$REF_INFO | Workflow: ${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}" | ||
|
|
||
| # Run the Notion status update script | ||
| # This updates pages from "Ready to publish" to "Published" status | ||
| bun run notionStatus:publish | ||
|
|
||
| echo "✅ Notion status updated to Published" | ||
|
|
||
| - name: Stop API server (local mode only) | ||
| if: always() && steps.config.outputs.mode == 'local' | ||
| run: | | ||
| if [ -n "$SERVER_PID" ]; then | ||
| echo "🛑 Stopping API server (PID: $SERVER_PID)" | ||
| kill $SERVER_PID 2>/dev/null || true | ||
| fi | ||
|
|
||
| - name: Job summary | ||
| id: summary | ||
| if: always() | ||
| run: | | ||
| JOB_ID="${{ steps.create-job.outputs.job_id }}" | ||
| JOB_STATUS="${{ steps.poll-status.outputs.job_status }}" | ||
| JOB_TYPE="${{ github.event.inputs.job_type || 'notion:fetch-all' }}" | ||
| MAX_PAGES="${{ github.event.inputs.max_pages || '5' }}" | ||
| PAGES_PROCESSED="${{ steps.poll-status.outputs.pages_processed }}" | ||
| COMMIT_HASH="${{ steps.poll-status.outputs.commit_hash }}" | ||
|
|
||
| echo "## 📋 Notion API Job Summary" >> $GITHUB_STEP_SUMMARY | ||
| echo "" >> $GITHUB_STEP_SUMMARY | ||
| echo "- **Job ID:** \`${JOB_ID}\`" >> $GITHUB_STEP_SUMMARY | ||
| echo "- **Job Type:** $JOB_TYPE" >> $GITHUB_STEP_SUMMARY | ||
| echo "- **Status:** $JOB_STATUS" >> $GITHUB_STEP_SUMMARY | ||
| echo "- **Max Pages:** $MAX_PAGES" >> $GITHUB_STEP_SUMMARY | ||
| echo "- **API Endpoint:** ${{ steps.config.outputs.endpoint }}" >> $GITHUB_STEP_SUMMARY | ||
| echo "- **Branch sync contract:** API service must sync \`content\` with \`origin/main\` before pushing generated content" >> $GITHUB_STEP_SUMMARY | ||
| echo "- **Safety contract:** API service must never push generated content directly to \`main\`" >> $GITHUB_STEP_SUMMARY | ||
| echo "" >> $GITHUB_STEP_SUMMARY | ||
|
|
||
| if [ "$JOB_STATUS" = "completed" ]; then | ||
| echo "✅ Job completed successfully" >> $GITHUB_STEP_SUMMARY | ||
| if [ -n "$PAGES_PROCESSED" ] && [ "$PAGES_PROCESSED" != "0" ]; then | ||
| echo "- **Pages Processed:** $PAGES_PROCESSED" >> $GITHUB_STEP_SUMMARY | ||
| fi | ||
| if [ -n "$COMMIT_HASH" ]; then | ||
| echo "- **Commit Hash:** \`$COMMIT_HASH\`" >> $GITHUB_STEP_SUMMARY | ||
| fi | ||
| echo "" >> $GITHUB_STEP_SUMMARY | ||
| echo "**Notion Status:** Updated to Published" >> $GITHUB_STEP_SUMMARY | ||
| echo "" >> $GITHUB_STEP_SUMMARY | ||
| echo "⚠️ This workflow cannot yet verify branch-sync metadata returned by the API service." >> $GITHUB_STEP_SUMMARY | ||
| elif [ "$JOB_STATUS" = "failed" ]; then | ||
| echo "❌ Job failed - check logs for details" >> $GITHUB_STEP_SUMMARY | ||
| elif [ "$JOB_STATUS" = "timeout" ]; then | ||
| echo "⏱️ Job timed out - may need investigation" >> $GITHUB_STEP_SUMMARY | ||
| fi | ||
|
|
||
| - name: Notify Slack | ||
| if: always() && env.SLACK_WEBHOOK_URL != '' | ||
| env: | ||
| SLACK_WEBHOOK_URL: ${{ secrets.SLACK_WEBHOOK_URL }} | ||
| uses: slackapi/slack-github-action@v2.1.1 | ||
| with: | ||
| webhook: ${{ env.SLACK_WEBHOOK_URL }} | ||
| webhook-type: incoming-webhook | ||
| payload: | | ||
| text: "*Notion API Job*: ${{ steps.poll-status.outputs.job_status }}" | ||
| blocks: | ||
| - type: "section" | ||
| text: | ||
| type: "mrkdwn" | ||
| text: "*Notion API Job*: ${{ steps.poll-status.outputs.job_status }}\nJob: ${{ steps.create-job.outputs.job_id }}\nType: ${{ github.event.inputs.job_type || 'notion:fetch-all' }}" | ||
| - type: "section" | ||
| text: | ||
| type: "mrkdwn" | ||
| text: "Workflow: <${{ github.server_url }}/${{ github.repository }}/actions/runs/${{ github.run_id }}|View logs>" | ||
| - type: "section" | ||
| text: | ||
| type: "mrkdwn" | ||
| text: "Trigger: <https://github.com/${{ github.triggering_actor }}|${{ github.triggering_actor }}>" | ||
| - type: "section" | ||
| text: | ||
| type: "mrkdwn" | ||
| text: "Notion Status: ${{ steps.poll-status.outputs.job_status == 'completed' && 'Updated to Published' || 'Not updated' }}" | ||
There was a problem hiding this comment.
Choose a reason for hiding this comment
The reason will be displayed to describe this comment to others. Learn more.
The workflow defaults to `notion:fetch-all` and exposes `notion:fetch`, but `POST /jobs` only accepts job types from `VALID_JOB_TYPES` (derived from `api-server/job-executor.ts`), where the fetch types are `fetch-all` / `fetch-ready` (`api-server/validation-schemas.ts`). With the current default, scheduled runs and default manual dispatches send an invalid `type`, so the API rejects the request with a 400 and fetch jobs cannot be started from this workflow. Useful? React with 👍 / 👎.
notion:fetch-alland exposesnotion:fetch, butPOST /jobsonly accepts job types fromVALID_JOB_TYPES(derived fromapi-server/job-executor.ts), where fetch types arefetch-all/fetch-ready(api-server/validation-schemas.ts). With the current default, scheduled runs and default manual dispatches send an invalidtypeand the API rejects the request with 400, so fetch jobs cannot be started from this workflow.Useful? React with 👍 / 👎.