Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
16 commits
Select commit Hold shift + click to select a range
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
45 changes: 7 additions & 38 deletions .github/workflows/gh-pages-deploy.yml
Original file line number Diff line number Diff line change
@@ -1,28 +1,25 @@
name: Build and Deploy Specs to GH Pages

on:
workflow_run:
workflows:
- Publish Docs
push:
branches:
- main
types:
- completed
paths:
- "fern/remote-specs.json"
- "src/openapi/**"
- "src/openrpc/**"
Comment thread
dslovinsky marked this conversation as resolved.
repository_dispatch:
types:
- remote-spec-updated
types: [remote-spec-updated]
workflow_dispatch: {}
concurrency:
group: "pages"
cancel-in-progress: false

jobs:
build:
runs-on: ubuntu-latest
if: ${{ github.event.workflow_run.conclusion == 'success' || github.event_name == 'repository_dispatch' }}
permissions:
contents: read
outputs:
changed_specs: ${{ steps.detect-changes.outputs.specUrls }}
steps:
- name: Checkout repository
uses: actions/checkout@v4
Expand All @@ -31,17 +28,9 @@ jobs:
uses: ./.github/actions/setup-pnpm

- name: Run build script
id: build
run: pnpm generate && pnpm generate:metadata

- name: Detect changed specs
id: detect-changes
run: |
CHANGED=$(pnpm -s detect-spec-changes)
echo "specUrls=$CHANGED" >> $GITHUB_OUTPUT

- name: Upload static files as artifact
id: deploy-artifact
uses: actions/upload-pages-artifact@v3
with:
path: fern/api-specs/
Expand All @@ -59,23 +48,3 @@ jobs:
- name: Deploy to GitHub Pages
id: deployment
uses: actions/deploy-pages@v4

revalidate:
runs-on: ubuntu-latest
needs: [build, deploy]
if: ${{ needs.build.outputs.changed_specs != '[]' && needs.build.outputs.changed_specs != '' }}
permissions: {}
steps:
- name: Trigger spec revalidation
env:
CHANGED_SPECS: ${{ needs.build.outputs.changed_specs }}
run: |
# Create JSON payload and call revalidation API
PAYLOAD=$(jq -n --argjson urls "$CHANGED_SPECS" '{specUrls: $urls}')

curl -X POST "${{ secrets.DOCS_SITE_URL }}/api/revalidate/specs" \
-H "Authorization: Bearer ${{ secrets.DOCS_SITE_API_KEY }}" \
-H "Content-Type: application/json" \
-d "$PAYLOAD" \
--max-time 120 \
--fail-with-body
31 changes: 30 additions & 1 deletion .github/workflows/index-main-content.yml
Original file line number Diff line number Diff line change
Expand Up @@ -6,11 +6,16 @@ on:
- main
paths:
- "fern/docs.yml"
- "fern/remote-specs.json"
- "src/openapi/**"
- "src/openrpc/**"
Comment thread
dslovinsky marked this conversation as resolved.
Comment thread
dslovinsky marked this conversation as resolved.
repository_dispatch:
types: [remote-spec-updated]
workflow_dispatch: {}

jobs:
index-main:
name: Index Main Content
name: Index and sync main content
runs-on: ubuntu-latest
permissions:
contents: read
Expand All @@ -21,6 +26,15 @@ jobs:
- name: Setup pnpm
uses: ./.github/actions/setup-pnpm

- name: Generate API specs
run: pnpm generate

- name: Upload specs to Redis
run: pnpm upload-specs
env:
KV_REST_API_URL: ${{ secrets.KV_REST_API_URL }}
KV_REST_API_TOKEN: ${{ secrets.KV_REST_API_TOKEN }}

- name: Run main content indexer
run: pnpm index:main
env:
Expand Down Expand Up @@ -74,3 +88,18 @@ jobs:
else
echo "::notice::✅ Successfully revalidated docs index and nav trees"
fi

- name: Revalidate changed specs
run: |
CHANGED=$(cat fern/api-specs/changed-specs.json)
if [ "$CHANGED" = "[]" ]; then
echo "No spec changes to revalidate"
exit 0
fi
PAYLOAD=$(jq -n --argjson urls "$CHANGED" '{specUrls: $urls}')
curl -X POST "${{ secrets.DOCS_SITE_URL }}/api/revalidate/specs" \
-H "Authorization: Bearer ${{ secrets.DOCS_SITE_API_KEY }}" \
-H "Content-Type: application/json" \
-d "$PAYLOAD" \
--max-time 120 \
--fail-with-body
4 changes: 2 additions & 2 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -21,8 +21,8 @@
"generate:rest:watch": "onchange \"src/openapi/**/*.{yaml,yml}\" -- pnpm run generate:rest",
"generate:watch": "pnpm run generate:rpc:watch & pnpm run generate:rest:watch",
"generate:metadata": "tsx ./scripts/generate-metadata.ts",
"detect-spec-changes": "./scripts/detect-spec-changes.sh",
"generate": "pnpm run generate:rest & pnpm run generate:rpc",
"generate": "pnpm run generate:rest & p1=$!; pnpm run generate:rpc & p2=$!; wait $p1 $p2",
"upload-specs": "tsx scripts/upload-specs.ts",
"clean": "rm -rf fern/api-specs",
"validate:rest": "./scripts/generate-open-api.sh --validate-only",
"validate:rpc": "tsx ./scripts/validate-rpc.ts",
Expand Down
126 changes: 0 additions & 126 deletions scripts/detect-spec-changes.sh

This file was deleted.

129 changes: 129 additions & 0 deletions scripts/upload-specs.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,129 @@
#!/usr/bin/env tsx
/**
* Standalone spec upload CLI with change detection.
*
* Scans fern/api-specs/{alchemy,chains}/, computes SHA-256 hashes, compares
* against Redis, uploads only changed specs, and writes changed URLs as JSON
* to an output file (default: fern/api-specs/changed-specs.json).
*
 * Usage: pnpm upload-specs [--output=path]
* Env: KV_REST_API_URL, KV_REST_API_TOKEN
*/
import crypto from "crypto";
import { config as dotenvConfig } from "dotenv";
import fs from "fs/promises";
import path from "path";

import {
DEV_DOCS_BASE,
buildSpecFileMap,
getSpecTypeFromUrl,
} from "@/content-indexer/utils/apiSpecs.ts";
import { getRedis } from "@/content-indexer/utils/redis.ts";

dotenvConfig({ path: path.resolve(process.cwd(), ".env"), quiet: true });

// Root of the generated specs tree; must exist before this script runs.
const SPECS_DIR = path.resolve(process.cwd(), "fern/api-specs");
// Default location for the list of changed/deleted spec URLs.
const DEFAULT_OUTPUT = path.join(SPECS_DIR, "changed-specs.json");
// Redis key holding the URL -> SHA-256 map recorded by the previous run.
const HASH_KEY = "main:spec-hashes";

// Maps a spec URL to the SHA-256 hex digest of the spec file's contents.
type SpecHashMap = Record<string, string>;

/**
 * Parses CLI arguments.
 *
 * Recognizes `--output=<path>` to override where the affected-spec URL list
 * is written; falls back to fern/api-specs/changed-specs.json.
 *
 * @returns The resolved output file path.
 */
const parseArgs = (): { output: string } => {
  const prefix = "--output=";
  const outputFlag = process.argv
    .slice(2)
    .find((arg) => arg.startsWith(prefix));
  // slice() rather than split("=")[1] so paths containing "=" survive intact
  // (split also yields string | undefined under noUncheckedIndexedAccess).
  const output = outputFlag
    ? path.resolve(process.cwd(), outputFlag.slice(prefix.length))
    : DEFAULT_OUTPUT;
  return { output };
};

const main = async () => {
const { output } = parseArgs();

// Verify specs directory exists
try {
await fs.access(SPECS_DIR);
} catch {
console.error(
`Specs directory not found: ${SPECS_DIR}\nRun 'pnpm generate' first.`,
);
process.exit(1);
}

const redis = getRedis();

// 1. Build spec file map and read contents with hashes
const specFileMap = await buildSpecFileMap(SPECS_DIR);
console.info(`Found ${specFileMap.size} spec files`);

const specEntries = await Promise.all(
Array.from(specFileMap.values()).map(async (relativePath) => {
const specUrl = `${DEV_DOCS_BASE}/${relativePath}`;
const content = await fs.readFile(
path.join(SPECS_DIR, relativePath),
"utf-8",
);
const hash = crypto.createHash("sha256").update(content).digest("hex");
return { specUrl, content, hash };
}),
);

const newHashes: SpecHashMap = Object.fromEntries(
specEntries.map(({ specUrl, hash }) => [specUrl, hash]),
);
const specContents: Record<string, string> = Object.fromEntries(
specEntries.map(({ specUrl, content }) => [specUrl, content]),
);

// 2. Fetch existing hashes from Redis
const oldHashes = (await redis.get<SpecHashMap>(HASH_KEY)) ?? {};

// 3. Find changed spec URLs (new or modified)
const changedUrls = Object.keys(newHashes).filter(
(url) => oldHashes[url] !== newHashes[url],
);

// 4. Find deleted spec URLs (in old but not in new)
const deletedUrls = Object.keys(oldHashes).filter(
(url) => !(url in newHashes),
);

const allAffectedUrls = [...changedUrls, ...deletedUrls];

if (allAffectedUrls.length === 0) {
console.info("No spec changes detected");
await fs.writeFile(output, JSON.stringify([]));
return;
}

const pipeline = redis.pipeline();

// 5. Upload changed specs to Redis
changedUrls.forEach((specUrl) => {
const redisKey = `main:${getSpecTypeFromUrl(specUrl)}-spec:${specUrl}`;
pipeline.set(redisKey, specContents[specUrl]);
});

// 6. Delete removed specs from Redis
deletedUrls.forEach((specUrl) => {
const redisKey = `main:${getSpecTypeFromUrl(specUrl)}-spec:${specUrl}`;
pipeline.del(redisKey);
});

console.info(`${changedUrls.length} changed, ${deletedUrls.length} deleted`);

// 7. Update hash map
pipeline.set(HASH_KEY, JSON.stringify(newHashes));

await pipeline.exec();
console.info("Upload complete");

// 8. Write all affected URLs to output file
await fs.writeFile(output, JSON.stringify(allAffectedUrls));
};

// CLI entry point: log any unhandled failure and exit non-zero.
void main().catch((error: unknown) => {
  console.error("Fatal error:", error);
  process.exit(1);
});
Loading
Loading