diff --git a/.changeset/atproto-badges-initial.md b/.changeset/atproto-badges-initial.md new file mode 100644 index 0000000..5215243 --- /dev/null +++ b/.changeset/atproto-badges-initial.md @@ -0,0 +1,7 @@ +--- +"@fujocoded/atproto-badges": minor +--- + +Initial release of `@fujocoded/atproto-badges` — ATProto badge attestation utilities for creating, signing, and verifying badges per the badge.blue specification. + +Also ships a `/react` subpath export with drop-in ``, ``, ``, and `` components, plus a `/styles.css` import. Components take async action handlers (`onClaim`/`onVerify`/`onUnclaim`) and theming props (`issuerName`, `getBadgeShortName`, `isRemoteBadge`, custom icon renderers), so consumers wire them to their own backend without forking. Requires Tailwind v4 in the consuming project. diff --git a/.changeset/authproto-callback-state-fix.md b/.changeset/authproto-callback-state-fix.md new file mode 100644 index 0000000..e36c43c --- /dev/null +++ b/.changeset/authproto-callback-state-fix.md @@ -0,0 +1,6 @@ +--- +"@fujocoded/authproto": patch +--- + +Fix custom-redirect / referer parsing in the OAuth callback so encoded +`redirect` and `referer` values are no longer silently dropped on login. diff --git a/.changeset/authproto-docs-and-example.md b/.changeset/authproto-docs-and-example.md new file mode 100644 index 0000000..da64c84 --- /dev/null +++ b/.changeset/authproto-docs-and-example.md @@ -0,0 +1,12 @@ +--- +"@fujocoded/authproto": patch +--- + +Fix custom-redirect / referer state parsing in the OAuth callback. The OAuth client wraps our state under an opaque key in the URL `state` param and returns the original value as `clientCallback.state`, so we now read from there instead of `requestUrl.searchParams.get("state")` — which was always the wrapped value and never parsed as JSON. + +Also improve `astro-authproto` README and `02-read-bsky-profile` example: + +- Document `session` driver setup and full integration config in install steps. 
+- Clarify `applicationDomain` should be the full URL with scheme (e.g. `https://example.com`, or `http://127.0.0.1:4321` locally). +- Add a "Shipping it" production section. +- Update the read-profile example to use `getBlueskyAgent` from `@fujocoded/authproto/helpers` instead of constructing `AtpBaseClient` directly, and fix the avatar `alt` to use a JSX expression. diff --git a/.changeset/authproto-dynamic-dev-port.md b/.changeset/authproto-dynamic-dev-port.md new file mode 100644 index 0000000..7bb01fa --- /dev/null +++ b/.changeset/authproto-dynamic-dev-port.md @@ -0,0 +1,8 @@ +--- +"@fujocoded/authproto": patch +--- + +Use Astro's actual dev server port for the OAuth callback URL in development +instead of always assuming `4321`. If you run `astro dev --port 4322` (or set +`server.port` in your Astro config), Authproto now points OAuth at the right +local URL. diff --git a/astro-atproto-loader/__examples__/03-grouped-reposts/src/live.config.ts b/astro-atproto-loader/__examples__/03-grouped-reposts/src/live.config.ts index 8abfd98..7941103 100644 --- a/astro-atproto-loader/__examples__/03-grouped-reposts/src/live.config.ts +++ b/astro-atproto-loader/__examples__/03-grouped-reposts/src/live.config.ts @@ -21,11 +21,16 @@ import { defineAtProtoLiveCollection } from "@fujocoded/astro-atproto-loader"; // Set up validation for schemas via zod const BlobRefSchema = z .object({ - ref: z.unknown(), + ref: z.union([z.string(), z.object({ $link: z.string() })]).nullish(), mimeType: z.string(), }) .transform((blob) => ({ - cid: blob.ref == null ? undefined : String(blob.ref), + cid: + blob.ref == null + ? undefined + : typeof blob.ref === "string" + ? 
blob.ref + : blob.ref.$link, mimeType: blob.mimeType, })); diff --git a/astro-atproto-loader/__tests__/index.test.ts b/astro-atproto-loader/__tests__/index.test.ts new file mode 100644 index 0000000..0d3a932 --- /dev/null +++ b/astro-atproto-loader/__tests__/index.test.ts @@ -0,0 +1,1462 @@ +import { http, HttpResponse } from "msw"; +import { beforeEach, describe, expect, test, vi } from "vitest"; + +import { server } from "./msw/server.ts"; +import { + FAKE_CID, + failingGetRecord, + mockGetRecord, + mockListRecords, + mockRepoIdentity, + type FakeRecord, +} from "./msw/handlers.ts"; + +const PDS = "https://pds.example.test"; + +const importLoader = async () => { + vi.resetModules(); + const live = await import("../src/loaders/live.ts"); + const staticLoader = await import("../src/loaders/static.ts"); + return { + atProtoLiveLoader: live.atProtoLiveLoader, + atProtoStaticLoader: staticLoader.atProtoStaticLoader, + }; +}; + +beforeEach(() => { + vi.restoreAllMocks(); +}); + +describe("atProtoLiveLoader", () => { + test("loads a collection, applies the object callback signature, and resolves handles", async () => { + server.use( + ...mockRepoIdentity({ + did: "did:plc:resolved-handle", + pds: PDS, + handle: "events.example.com", + }), + mockListRecords({ + pds: PDS, + repo: "events.example.com", + collection: "community.lexicon.calendar.event", + pages: [ + [ + { + did: "did:plc:resolved-handle", + rkey: "first", + value: { title: "Opening", published: true }, + }, + { + did: "did:plc:resolved-handle", + rkey: "second", + value: { title: "Draft", published: false }, + }, + ], + ], + }), + ); + + const { atProtoLiveLoader } = await importLoader(); + + const filterSpy = vi.fn( + ({ value }: { value: Record }) => + value.published === true, + ); + const transformSpy = vi.fn( + ({ + value, + rkey, + repo, + }: { + value: Record; + rkey: string; + repo: { did: string; handle?: string }; + }) => ({ + id: rkey, + data: { + did: repo.did, + title: String(value.title), + }, 
+ }), + ); + + const loader = atProtoLiveLoader({ + source: { + repo: "events.example.com", + collection: "community.lexicon.calendar.event", + }, + filter: filterSpy, + transform: transformSpy, + }); + + const result = await loader.loadCollection({}); + + expect("entries" in result && result.entries).toEqual([ + { + id: "first", + data: { + did: "did:plc:resolved-handle", + title: "Opening", + }, + }, + ]); + expect(filterSpy).toHaveBeenNthCalledWith( + 1, + expect.objectContaining({ + value: { title: "Opening", published: true }, + repo: { did: "did:plc:resolved-handle", handle: "events.example.com" }, + collection: "community.lexicon.calendar.event", + rkey: "first", + }), + ); + expect(transformSpy).toHaveBeenCalledTimes(1); + }); + + test("sends the listRecords XRPC query with the configured limit and no initial cursor", async () => { + const cursorCalls: Array = []; + + server.use( + ...mockRepoIdentity({ did: "did:plc:testrepo", pds: PDS }), + mockListRecords({ + pds: PDS, + repo: "did:plc:testrepo", + collection: "community.lexicon.calendar.event", + pages: [ + [ + { + did: "did:plc:testrepo", + rkey: "only", + value: { title: "Only" }, + }, + ], + ], + onCall: (cursor) => cursorCalls.push(cursor), + }), + ); + + const requestLog: Array<{ limit: string | null; cursor: string | null }> = + []; + server.events.on("request:start", ({ request }) => { + const url = new URL(request.url); + if (url.pathname === "/xrpc/com.atproto.repo.listRecords") { + requestLog.push({ + limit: url.searchParams.get("limit"), + cursor: url.searchParams.get("cursor"), + }); + } + }); + + const { atProtoLiveLoader } = await importLoader(); + + const loader = atProtoLiveLoader({ + source: { + repo: "did:plc:testrepo", + collection: "community.lexicon.calendar.event", + }, + transform: ({ value, rkey }) => ({ + id: rkey, + data: { title: String(value.title) }, + }), + }); + + await loader.loadCollection({}); + + expect(requestLog).toEqual([{ limit: "100", cursor: null }]); + 
expect(cursorCalls).toEqual([null]); + }); + + test("stops after `source.limit` entries and skips remaining pages", async () => { + const cursorCalls: Array = []; + + server.use( + ...mockRepoIdentity({ did: "did:plc:testrepo", pds: PDS }), + mockListRecords({ + pds: PDS, + repo: "did:plc:testrepo", + collection: "community.lexicon.calendar.event", + pages: [ + [ + { did: "did:plc:testrepo", rkey: "one", value: { title: "A" } }, + { did: "did:plc:testrepo", rkey: "two", value: { title: "B" } }, + { did: "did:plc:testrepo", rkey: "three", value: { title: "C" } }, + ], + [{ did: "did:plc:testrepo", rkey: "four", value: { title: "D" } }], + ], + onCall: (cursor) => cursorCalls.push(cursor), + }), + ); + + const { atProtoLiveLoader } = await importLoader(); + + const loader = atProtoLiveLoader({ + source: { + repo: "did:plc:testrepo", + collection: "community.lexicon.calendar.event", + limit: 2, + }, + transform: ({ value, rkey }) => ({ + id: rkey, + data: { title: String(value.title) }, + }), + }); + + const result = await loader.loadCollection({}); + + expect("entries" in result && result.entries).toEqual([ + { id: "one", data: { title: "A" } }, + { id: "two", data: { title: "B" } }, + ]); + expect(cursorCalls).toEqual([null]); + }); + + test("caps the XRPC listRecords page size at `source.limit`", async () => { + server.use( + ...mockRepoIdentity({ did: "did:plc:testrepo", pds: PDS }), + mockListRecords({ + pds: PDS, + repo: "did:plc:testrepo", + collection: "community.lexicon.calendar.event", + pages: [ + [{ did: "did:plc:testrepo", rkey: "only", value: { title: "Only" } }], + ], + }), + ); + + const requestLog: Array<{ limit: string | null }> = []; + server.events.on("request:start", ({ request }) => { + const url = new URL(request.url); + if (url.pathname === "/xrpc/com.atproto.repo.listRecords") { + requestLog.push({ limit: url.searchParams.get("limit") }); + } + }); + + const { atProtoLiveLoader } = await importLoader(); + + const loader = atProtoLiveLoader({ + 
source: { + repo: "did:plc:testrepo", + collection: "community.lexicon.calendar.event", + limit: 5, + }, + transform: ({ value, rkey }) => ({ + id: rkey, + data: { title: String(value.title) }, + }), + }); + + await loader.loadCollection({}); + + expect(requestLog).toEqual([{ limit: "5" }]); + }); + + test("counts only post-filter entries against `source.limit`", async () => { + server.use( + ...mockRepoIdentity({ did: "did:plc:testrepo", pds: PDS }), + mockListRecords({ + pds: PDS, + repo: "did:plc:testrepo", + collection: "community.lexicon.calendar.event", + pages: [ + [ + { + did: "did:plc:testrepo", + rkey: "one", + value: { title: "A", published: false }, + }, + { + did: "did:plc:testrepo", + rkey: "two", + value: { title: "B", published: true }, + }, + { + did: "did:plc:testrepo", + rkey: "three", + value: { title: "C", published: false }, + }, + { + did: "did:plc:testrepo", + rkey: "four", + value: { title: "D", published: true }, + }, + ], + ], + }), + ); + + const { atProtoLiveLoader } = await importLoader(); + + const loader = atProtoLiveLoader({ + source: { + repo: "did:plc:testrepo", + collection: "community.lexicon.calendar.event", + limit: 2, + }, + filter: ({ value }) => value.published === true, + transform: ({ value, rkey }) => ({ + id: rkey, + data: { title: String(value.title) }, + }), + }); + + const result = await loader.loadCollection({}); + + expect("entries" in result && result.entries).toEqual([ + { id: "two", data: { title: "B" } }, + { id: "four", data: { title: "D" } }, + ]); + }); + + test("deduplicates collection entries by id and keeps the newest one", async () => { + server.use( + ...mockRepoIdentity({ did: "did:plc:testrepo", pds: PDS }), + mockListRecords({ + pds: PDS, + repo: "did:plc:testrepo", + collection: "community.lexicon.calendar.event", + pages: [ + [ + { + did: "did:plc:testrepo", + rkey: "early", + value: { slug: "session-1", title: "First title" }, + }, + { + did: "did:plc:testrepo", + rkey: "later", + value: { slug: 
"session-1", title: "Updated title" }, + }, + ], + ], + }), + ); + + const { atProtoLiveLoader } = await importLoader(); + + const loader = atProtoLiveLoader({ + source: { + repo: "did:plc:testrepo", + collection: "community.lexicon.calendar.event", + }, + transform: ({ value }) => ({ + id: String(value.slug), + data: { title: String(value.title) }, + }), + }); + + const result = await loader.loadCollection({}); + + expect("entries" in result && result.entries).toEqual([ + { + id: "session-1", + data: { title: "Updated title" }, + }, + ]); + }); + + test("supports request-time collection filtering", async () => { + server.use( + ...mockRepoIdentity({ did: "did:plc:testrepo", pds: PDS }), + mockListRecords({ + pds: PDS, + repo: "did:plc:testrepo", + collection: "community.lexicon.calendar.event", + pages: [ + [ + { + did: "did:plc:testrepo", + rkey: "one", + value: { track: "main", title: "Main stage" }, + }, + { + did: "did:plc:testrepo", + rkey: "two", + value: { track: "hallway", title: "Hallway track" }, + }, + ], + ], + }), + ); + + const { atProtoLiveLoader } = await importLoader(); + + const loader = atProtoLiveLoader< + readonly [{ repo: string; collection: string }], + { title: string; track: string }, + { track: string } + >({ + source: { + repo: "did:plc:testrepo", + collection: "community.lexicon.calendar.event", + }, + transform: ({ value, rkey }) => ({ + id: rkey, + data: { + title: String(value.title), + track: String(value.track), + }, + }), + queryFilter: ({ entry, filter }) => entry.data.track === filter.track, + }); + + const result = await loader.loadCollection({ + filter: { track: "hallway" }, + }); + + expect("entries" in result && result.entries).toEqual([ + { + id: "two", + data: { title: "Hallway track", track: "hallway" }, + }, + ]); + }); + + test("supports a dedicated single-source `source` option", async () => { + server.use( + ...mockRepoIdentity({ + did: "did:plc:source-option", + pds: PDS, + handle: "source.example.com", + }), + 
mockListRecords({ + pds: PDS, + repo: "source.example.com", + collection: "site.standard.document", + pages: [ + [ + { + did: "did:plc:source-option", + rkey: "doc-1", + value: { title: "From source" }, + }, + ], + ], + }), + ); + + const { atProtoLiveLoader } = await importLoader(); + + const loader = atProtoLiveLoader({ + source: { + repo: "source.example.com", + collection: "site.standard.document", + }, + transform: ({ value, rkey, repo, collection }) => ({ + id: `${repo.did}/${rkey}`, + data: { + title: String(value.title), + repo: repo.handle ?? repo.did, + collection, + }, + }), + }); + + const result = await loader.loadCollection({}); + + expect("entries" in result && result.entries).toEqual([ + { + id: "did:plc:source-option/doc-1", + data: { + title: "From source", + repo: "source.example.com", + collection: "site.standard.document", + }, + }, + ]); + }); + + test("defaults to passthrough entries for a single source when transform is omitted", async () => { + server.use( + ...mockRepoIdentity({ + did: "did:plc:passthrough-live", + pds: PDS, + handle: "passthrough.example.com", + }), + mockListRecords({ + pds: PDS, + repo: "passthrough.example.com", + collection: "place.stream.livestream", + pages: [ + [ + { + did: "did:plc:passthrough-live", + rkey: "stream-1", + value: { + title: "Coworking stream", + createdAt: "2026-04-04T00:30:21Z", + }, + }, + ], + ], + }), + ); + + const { atProtoLiveLoader } = await importLoader(); + + const loader = atProtoLiveLoader< + readonly [{ repo: string; collection: string }], + { + title: string; + createdAt: string; + } + >({ + source: { + repo: "passthrough.example.com", + collection: "place.stream.livestream", + }, + }); + + const result = await loader.loadCollection({}); + + expect("entries" in result && result.entries).toEqual([ + { + id: "stream-1", + data: { + title: "Coworking stream", + createdAt: "2026-04-04T00:30:21Z", + }, + }, + ]); + }); + + test("supports multiple sources under one loader", async () => { + 
const bobatanPds = "https://bobatan-pds.example.test"; + const bobPds = "https://bob-pds.example.test"; + + server.use( + ...mockRepoIdentity({ + did: "did:plc:bobatan", + pds: bobatanPds, + handle: "bobatan.fujocoded.dev", + }), + ...mockRepoIdentity({ + did: "did:plc:bob", + pds: bobPds, + handle: "bob.example.com", + }), + mockListRecords({ + pds: bobatanPds, + repo: "bobatan.fujocoded.dev", + collection: "site.standard.document", + pages: [ + [ + { + did: "did:plc:bobatan", + rkey: "alpha", + value: { title: "Bobatan doc" }, + }, + ], + ], + }), + mockListRecords({ + pds: bobPds, + repo: "bob.example.com", + collection: "site.standard.document", + pages: [ + [ + { + did: "did:plc:bob", + rkey: "beta", + value: { title: "Bob doc" }, + }, + ], + ], + }), + ); + + const { atProtoLiveLoader } = await importLoader(); + + const loader = atProtoLiveLoader({ + sources: [ + { repo: "bobatan.fujocoded.dev", collection: "site.standard.document" }, + { repo: "bob.example.com", collection: "site.standard.document" }, + ], + transform: ({ value, rkey, repo, collection }) => ({ + id: `${repo.did}/${collection}/${rkey}`, + data: { + title: String(value.title), + repo: repo.handle ?? 
repo.did, + }, + }), + }); + + const result = await loader.loadCollection({}); + + expect("entries" in result && result.entries).toEqual([ + { + id: "did:plc:bobatan/site.standard.document/alpha", + data: { title: "Bobatan doc", repo: "bobatan.fujocoded.dev" }, + }, + { + id: "did:plc:bob/site.standard.document/beta", + data: { title: "Bob doc", repo: "bob.example.com" }, + }, + ]); + }); + + test("namespaces ids by did/collection when multiple sources omit transform", async () => { + const bobatanPds = "https://bobatan-pds.example.test"; + const bobPds = "https://bob-pds.example.test"; + + server.use( + ...mockRepoIdentity({ + did: "did:plc:bobatan", + pds: bobatanPds, + handle: "bobatan.fujocoded.dev", + }), + ...mockRepoIdentity({ + did: "did:plc:bob", + pds: bobPds, + handle: "bob.example.com", + }), + mockListRecords({ + pds: bobatanPds, + repo: "bobatan.fujocoded.dev", + collection: "site.standard.document", + pages: [ + [ + { + did: "did:plc:bobatan", + rkey: "shared", + value: { title: "Bobatan doc" }, + }, + ], + ], + }), + mockListRecords({ + pds: bobPds, + repo: "bob.example.com", + collection: "site.standard.document", + pages: [ + [ + { + did: "did:plc:bob", + rkey: "shared", + value: { title: "Bob doc" }, + }, + ], + ], + }), + ); + + const { atProtoLiveLoader } = await importLoader(); + + const loader = atProtoLiveLoader({ + sources: [ + { repo: "bobatan.fujocoded.dev", collection: "site.standard.document" }, + { repo: "bob.example.com", collection: "site.standard.document" }, + ], + }); + + const result = await loader.loadCollection({}); + + expect("entries" in result && result.entries).toEqual([ + { + id: "did:plc:bobatan/site.standard.document/shared", + data: { title: "Bobatan doc" }, + }, + { + id: "did:plc:bob/site.standard.document/shared", + data: { title: "Bob doc" }, + }, + ]); + }); + + test("loads a single entry directly by rkey and supports custom ids", async () => { + const record: FakeRecord = { + did: "did:plc:testrepo", + rkey: 
"record-123", + value: { slug: "opening-keynote", title: "Opening keynote" }, + }; + + server.use( + ...mockRepoIdentity({ did: "did:plc:testrepo", pds: PDS }), + mockGetRecord({ + pds: PDS, + repo: "did:plc:testrepo", + collection: "community.lexicon.calendar.event", + record, + }), + ); + + const { atProtoLiveLoader } = await importLoader(); + + const loader = atProtoLiveLoader({ + source: { + repo: "did:plc:testrepo", + collection: "community.lexicon.calendar.event", + }, + transform: ({ value }) => ({ + id: String(value.slug), + data: { title: String(value.title) }, + }), + }); + + const result = await loader.loadEntry({ + filter: { id: "opening-keynote", rkey: "record-123" }, + }); + + expect(result).toEqual({ + id: "opening-keynote", + data: { title: "Opening keynote" }, + }); + }); + + test("defaults single-record lookups to rkey ids when transform is omitted", async () => { + const record: FakeRecord = { + did: "did:plc:passthrough-live", + rkey: "stream-1", + value: { + title: "Coworking stream", + createdAt: "2026-04-04T00:30:21Z", + }, + }; + + server.use( + ...mockRepoIdentity({ did: "did:plc:passthrough-live", pds: PDS }), + mockGetRecord({ + pds: PDS, + repo: "did:plc:passthrough-live", + collection: "place.stream.livestream", + record, + }), + ); + + const { atProtoLiveLoader } = await importLoader(); + + const loader = atProtoLiveLoader< + readonly [{ repo: string; collection: string }], + { + title: string; + createdAt: string; + } + >({ + source: { + repo: "did:plc:passthrough-live", + collection: "place.stream.livestream", + }, + }); + + const result = await loader.loadEntry({ + filter: { id: "stream-1" }, + }); + + expect(result).toEqual({ + id: "stream-1", + data: { + title: "Coworking stream", + createdAt: "2026-04-04T00:30:21Z", + }, + }); + }); + + test("can disambiguate direct single-record loads across multiple sources", async () => { + const bobatanPds = "https://bobatan-pds.example.test"; + const bobPds = "https://bob-pds.example.test"; 
+ + let bobatanGetRecordCalls = 0; + let bobGetRecordCalls = 0; + + server.use( + ...mockRepoIdentity({ + did: "did:plc:bobatan", + pds: bobatanPds, + handle: "bobatan.fujocoded.dev", + }), + ...mockRepoIdentity({ + did: "did:plc:bob", + pds: bobPds, + handle: "bob.example.com", + }), + http.get(`${bobatanPds}/xrpc/com.atproto.repo.getRecord`, () => { + bobatanGetRecordCalls += 1; + return new HttpResponse(JSON.stringify({ error: "UnexpectedCall" }), { + status: 500, + headers: { "content-type": "application/json" }, + }); + }), + http.get(`${bobPds}/xrpc/com.atproto.repo.getRecord`, ({ request }) => { + bobGetRecordCalls += 1; + const url = new URL(request.url); + expect(url.searchParams.get("repo")).toBe("bob.example.com"); + expect(url.searchParams.get("collection")).toBe( + "site.standard.document", + ); + expect(url.searchParams.get("rkey")).toBe("shared-rkey"); + return HttpResponse.json({ + uri: "at://did:plc:bob/site.standard.document/shared-rkey", + cid: FAKE_CID, + value: { slug: "bob/shared-rkey", title: "Bob shared doc" }, + }); + }), + ); + + const { atProtoLiveLoader } = await importLoader(); + + const loader = atProtoLiveLoader({ + sources: [ + { repo: "bobatan.fujocoded.dev", collection: "site.standard.document" }, + { repo: "bob.example.com", collection: "site.standard.document" }, + ], + transform: ({ value }) => ({ + id: String(value.slug), + data: { title: String(value.title) }, + }), + }); + + const result = await loader.loadEntry({ + filter: { + id: "bob/shared-rkey", + rkey: "shared-rkey", + repo: "bob.example.com", + collection: "site.standard.document", + }, + }); + + expect(result).toEqual({ + id: "bob/shared-rkey", + data: { title: "Bob shared doc" }, + }); + expect(bobatanGetRecordCalls).toBe(0); + expect(bobGetRecordCalls).toBe(1); + }); + + test("returns stale cached entries while a background refresh is in flight", async () => { + let callCount = 0; + let resolveSecondFetch: (() => void) | undefined; + + server.use( + 
...mockRepoIdentity({ did: "did:plc:testrepo", pds: PDS }), + http.get(`${PDS}/xrpc/com.atproto.repo.listRecords`, async () => { + callCount += 1; + if (callCount === 1) { + return HttpResponse.json({ + records: [ + { + uri: "at://did:plc:testrepo/community.lexicon.calendar.event/first", + cid: FAKE_CID, + value: { title: "Initial title" }, + }, + ], + }); + } + await new Promise((resolve) => { + resolveSecondFetch = resolve; + }); + return HttpResponse.json({ + records: [ + { + uri: "at://did:plc:testrepo/community.lexicon.calendar.event/first", + cid: FAKE_CID, + value: { title: "Refreshed title" }, + }, + ], + }); + }), + ); + + let now = 1_000; + vi.spyOn(Date, "now").mockImplementation(() => now); + + const { atProtoLiveLoader } = await importLoader(); + + const loader = atProtoLiveLoader({ + source: { + repo: "did:plc:testrepo", + collection: "community.lexicon.calendar.event", + }, + cacheTtl: 1, + transform: ({ value, rkey }) => ({ + id: rkey, + data: { title: String(value.title) }, + }), + }); + + const first = await loader.loadCollection({}); + now = 1_005; + const stale = await loader.loadCollection({}); + + expect("entries" in first && first.entries?.[0]?.data.title).toBe( + "Initial title", + ); + expect("entries" in stale && stale.entries?.[0]?.data.title).toBe( + "Initial title", + ); + + await vi.waitFor(() => { + expect(resolveSecondFetch).toBeDefined(); + }); + resolveSecondFetch?.(); + + await vi.waitFor(async () => { + const refreshed = await loader.loadCollection({}); + expect("entries" in refreshed && refreshed.entries?.[0]?.data.title).toBe( + "Refreshed title", + ); + }); + }); + + test("falls back to the cached collection if direct single-record loading fails", async () => { + server.use( + ...mockRepoIdentity({ did: "did:plc:testrepo", pds: PDS }), + mockListRecords({ + pds: PDS, + repo: "did:plc:testrepo", + collection: "community.lexicon.calendar.event", + pages: [ + [ + { + did: "did:plc:testrepo", + rkey: "record-123", + value: { slug: 
"opening-keynote", title: "Opening keynote" }, + }, + ], + ], + }), + failingGetRecord(PDS), + ); + + const { atProtoLiveLoader } = await importLoader(); + + const loader = atProtoLiveLoader({ + source: { + repo: "did:plc:testrepo", + collection: "community.lexicon.calendar.event", + }, + transform: ({ value }) => ({ + id: String(value.slug), + data: { title: String(value.title) }, + }), + }); + + await loader.loadCollection({}); + const result = await loader.loadEntry({ + filter: { id: "opening-keynote", rkey: "record-123" }, + }); + + expect(result).toEqual({ + id: "opening-keynote", + data: { title: "Opening keynote" }, + }); + }); + + test("follows the cursor across multiple pages of listRecords", async () => { + const observedCursors: Array = []; + + server.use( + ...mockRepoIdentity({ did: "did:plc:testrepo", pds: PDS }), + mockListRecords({ + pds: PDS, + repo: "did:plc:testrepo", + collection: "community.lexicon.calendar.event", + pages: [ + [ + { + did: "did:plc:testrepo", + rkey: "one", + value: { title: "Page one entry" }, + }, + { + did: "did:plc:testrepo", + rkey: "two", + value: { title: "Page one entry two" }, + }, + ], + [ + { + did: "did:plc:testrepo", + rkey: "three", + value: { title: "Page two entry" }, + }, + ], + ], + onCall: (cursor) => observedCursors.push(cursor), + }), + ); + + const { atProtoLiveLoader } = await importLoader(); + + const loader = atProtoLiveLoader({ + source: { + repo: "did:plc:testrepo", + collection: "community.lexicon.calendar.event", + limit: "all", + }, + transform: ({ value, rkey }) => ({ + id: rkey, + data: { title: String(value.title) }, + }), + }); + + const result = await loader.loadCollection({}); + + expect("entries" in result && result.entries).toEqual([ + { id: "one", data: { title: "Page one entry" } }, + { id: "two", data: { title: "Page one entry two" } }, + { id: "three", data: { title: "Page two entry" } }, + ]); + expect(observedCursors).toEqual([null, "1"]); + }); + + test("terminates the cursor loop 
once the PDS omits a next cursor", async () => { + let callCount = 0; + + server.use( + ...mockRepoIdentity({ did: "did:plc:testrepo", pds: PDS }), + http.get(`${PDS}/xrpc/com.atproto.repo.listRecords`, () => { + callCount += 1; + return HttpResponse.json({ + records: + callCount === 1 + ? [ + { + uri: "at://did:plc:testrepo/community.lexicon.calendar.event/only", + cid: FAKE_CID, + value: { title: "Only" }, + }, + ] + : [], + // cursor intentionally omitted on every page + }); + }), + ); + + const { atProtoLiveLoader } = await importLoader(); + + const loader = atProtoLiveLoader({ + source: { + repo: "did:plc:testrepo", + collection: "community.lexicon.calendar.event", + }, + transform: ({ value, rkey }) => ({ + id: rkey, + data: { title: String(value.title) }, + }), + }); + + const result = await loader.loadCollection({}); + + expect(callCount).toBe(1); + expect("entries" in result && result.entries).toEqual([ + { id: "only", data: { title: "Only" } }, + ]); + }); + + test("paginates each source independently when combined with multi-source", async () => { + const bobatanPds = "https://bobatan-pds.example.test"; + const bobPds = "https://bob-pds.example.test"; + + const bobatanCursors: Array = []; + const bobCursors: Array = []; + + server.use( + ...mockRepoIdentity({ did: "did:plc:bobatan", pds: bobatanPds }), + ...mockRepoIdentity({ did: "did:plc:bob", pds: bobPds }), + mockListRecords({ + pds: bobatanPds, + repo: "did:plc:bobatan", + collection: "site.standard.document", + pages: [ + [ + { + did: "did:plc:bobatan", + rkey: "alpha", + value: { title: "Bobatan page one" }, + }, + ], + [ + { + did: "did:plc:bobatan", + rkey: "alpha-two", + value: { title: "Bobatan page two" }, + }, + ], + ], + onCall: (cursor) => bobatanCursors.push(cursor), + }), + mockListRecords({ + pds: bobPds, + repo: "did:plc:bob", + collection: "site.standard.document", + pages: [ + [ + { + did: "did:plc:bob", + rkey: "beta", + value: { title: "Bob only page" }, + }, + ], + ], + onCall: 
(cursor) => bobCursors.push(cursor), + }), + ); + + const { atProtoLiveLoader } = await importLoader(); + + const loader = atProtoLiveLoader({ + sources: [ + { + repo: "did:plc:bobatan", + collection: "site.standard.document", + limit: "all", + }, + { + repo: "did:plc:bob", + collection: "site.standard.document", + limit: "all", + }, + ], + transform: ({ value, repo, rkey }) => ({ + id: `${repo.did}/${rkey}`, + data: { title: String(value.title) }, + }), + }); + + const result = await loader.loadCollection({}); + + expect("entries" in result && result.entries).toEqual([ + { id: "did:plc:bobatan/alpha", data: { title: "Bobatan page one" } }, + { id: "did:plc:bobatan/alpha-two", data: { title: "Bobatan page two" } }, + { id: "did:plc:bob/beta", data: { title: "Bob only page" } }, + ]); + expect(bobatanCursors).toEqual([null, "1"]); + expect(bobCursors).toEqual([null]); + }); +}); + +describe("atProtoStaticLoader", () => { + test("loads a single source into the Astro data store", async () => { + server.use( + ...mockRepoIdentity({ + did: "did:plc:staticrepo", + pds: PDS, + handle: "static.example.com", + }), + mockListRecords({ + pds: PDS, + repo: "static.example.com", + collection: "site.standard.document", + pages: [ + [ + { + did: "did:plc:staticrepo", + rkey: "doc-1", + value: { title: "Static doc", body: "Hello from Astro" }, + }, + ], + ], + }), + ); + + const { atProtoStaticLoader } = await importLoader(); + + const store = { clear: vi.fn(), set: vi.fn() }; + const parseData = vi.fn(async ({ data }) => data); + + const loader = atProtoStaticLoader({ + source: { + repo: "static.example.com", + collection: "site.standard.document", + }, + transform: ({ value, rkey, repo }) => ({ + id: `${repo.did}/${rkey}`, + data: { title: String(value.title), repo: repo.handle ?? 
repo.did }, + body: String(value.body), + }), + }); + + await loader.load({ + store, + parseData, + } as unknown as Parameters[0]); + + expect(store.clear).toHaveBeenCalledTimes(1); + expect(parseData).toHaveBeenCalledWith({ + id: "did:plc:staticrepo/doc-1", + data: { title: "Static doc", repo: "static.example.com" }, + filePath: undefined, + }); + expect(store.set).toHaveBeenCalledWith({ + id: "did:plc:staticrepo/doc-1", + data: { title: "Static doc", repo: "static.example.com" }, + body: "Hello from Astro", + filePath: undefined, + }); + }); + + test("defaults to passthrough entries for a single source when transform is omitted", async () => { + server.use( + ...mockRepoIdentity({ + did: "did:plc:passthrough-static", + pds: PDS, + handle: "passthrough.example.com", + }), + mockListRecords({ + pds: PDS, + repo: "passthrough.example.com", + collection: "place.stream.livestream", + pages: [ + [ + { + did: "did:plc:passthrough-static", + rkey: "stream-1", + value: { + title: "Coworking stream", + createdAt: "2026-04-04T00:30:21Z", + }, + }, + ], + ], + }), + ); + + const { atProtoStaticLoader } = await importLoader(); + + const store = { clear: vi.fn(), set: vi.fn() }; + const parseData = vi.fn(async ({ data }) => data); + + const loader = atProtoStaticLoader< + readonly [{ repo: string; collection: string }], + { + title: string; + createdAt: string; + } + >({ + source: { + repo: "passthrough.example.com", + collection: "place.stream.livestream", + }, + }); + + await loader.load({ + store, + parseData, + } as unknown as Parameters[0]); + + expect(parseData).toHaveBeenCalledWith({ + id: "stream-1", + data: { + title: "Coworking stream", + createdAt: "2026-04-04T00:30:21Z", + }, + filePath: undefined, + }); + expect(store.set).toHaveBeenCalledWith({ + id: "stream-1", + data: { + title: "Coworking stream", + createdAt: "2026-04-04T00:30:21Z", + }, + body: undefined, + filePath: undefined, + }); + }); + + test("surfaces schema parse failures from parseData", async () => 
{ + server.use( + ...mockRepoIdentity({ + did: "did:plc:staticrepo", + pds: PDS, + handle: "static.example.com", + }), + mockListRecords({ + pds: PDS, + repo: "static.example.com", + collection: "site.standard.document", + pages: [ + [ + { + did: "did:plc:staticrepo", + rkey: "doc-1", + value: { title: "Static doc" }, + }, + ], + ], + }), + ); + + const { atProtoStaticLoader } = await importLoader(); + + const store = { clear: vi.fn(), set: vi.fn() }; + const parseError = new Error( + "Schema parse failed for did:plc:staticrepo/doc-1", + ); + const parseData = vi.fn(async () => { + throw parseError; + }); + + const loader = atProtoStaticLoader({ + source: { + repo: "static.example.com", + collection: "site.standard.document", + }, + transform: ({ value, rkey, repo }) => ({ + id: `${repo.did}/${rkey}`, + data: { title: String(value.title), repo: repo.handle ?? repo.did }, + }), + }); + + await expect( + loader.load({ + store, + parseData, + } as unknown as Parameters[0]), + ).rejects.toThrow("Schema parse failed for did:plc:staticrepo/doc-1"); + + expect(store.clear).toHaveBeenCalledTimes(1); + expect(parseData).toHaveBeenCalledTimes(1); + expect(store.set).not.toHaveBeenCalled(); + }); + + test("supports multiple sources and deduplicates by transformed id", async () => { + const bobatanPds = "https://bobatan-pds.example.test"; + const bobPds = "https://bob-pds.example.test"; + + server.use( + ...mockRepoIdentity({ + did: "did:plc:bobatan", + pds: bobatanPds, + handle: "bobatan.fujocoded.dev", + }), + ...mockRepoIdentity({ + did: "did:plc:bob", + pds: bobPds, + handle: "bob.example.com", + }), + mockListRecords({ + pds: bobatanPds, + repo: "bobatan.fujocoded.dev", + collection: "site.standard.document", + pages: [ + [ + { + did: "did:plc:bobatan", + rkey: "alpha", + value: { slug: "shared-post", title: "Older title" }, + }, + ], + ], + }), + mockListRecords({ + pds: bobPds, + repo: "bob.example.com", + collection: "site.standard.document", + pages: [ + [ + { + did: 
"did:plc:bob", + rkey: "beta", + value: { slug: "shared-post", title: "Newer title" }, + }, + ], + ], + }), + ); + + const { atProtoStaticLoader } = await importLoader(); + + const store = { clear: vi.fn(), set: vi.fn() }; + const parseData = vi.fn(async ({ data }) => data); + + const loader = atProtoStaticLoader({ + sources: [ + { repo: "bobatan.fujocoded.dev", collection: "site.standard.document" }, + { repo: "bob.example.com", collection: "site.standard.document" }, + ], + transform: ({ value, repo }) => ({ + id: String(value.slug), + data: { title: String(value.title), repo: repo.handle ?? repo.did }, + }), + }); + + await loader.load({ + store, + parseData, + } as unknown as Parameters[0]); + + expect(store.clear).toHaveBeenCalledTimes(1); + expect(store.set).toHaveBeenCalledTimes(1); + expect(store.set).toHaveBeenCalledWith({ + id: "shared-post", + data: { title: "Newer title", repo: "bob.example.com" }, + body: undefined, + filePath: undefined, + }); + }); + + test("namespaces ids by did/collection when multiple static sources omit transform", async () => { + const bobatanPds = "https://bobatan-pds.example.test"; + const bobPds = "https://bob-pds.example.test"; + + server.use( + ...mockRepoIdentity({ + did: "did:plc:bobatan", + pds: bobatanPds, + handle: "bobatan.fujocoded.dev", + }), + ...mockRepoIdentity({ + did: "did:plc:bob", + pds: bobPds, + handle: "bob.example.com", + }), + mockListRecords({ + pds: bobatanPds, + repo: "bobatan.fujocoded.dev", + collection: "site.standard.document", + pages: [ + [ + { + did: "did:plc:bobatan", + rkey: "shared", + value: { title: "Bobatan doc" }, + }, + ], + ], + }), + mockListRecords({ + pds: bobPds, + repo: "bob.example.com", + collection: "site.standard.document", + pages: [ + [ + { + did: "did:plc:bob", + rkey: "shared", + value: { title: "Bob doc" }, + }, + ], + ], + }), + ); + + const { atProtoStaticLoader } = await importLoader(); + + const store = { clear: vi.fn(), set: vi.fn() }; + const parseData = vi.fn(async 
({ data }) => data); + + const loader = atProtoStaticLoader({ + sources: [ + { repo: "bobatan.fujocoded.dev", collection: "site.standard.document" }, + { repo: "bob.example.com", collection: "site.standard.document" }, + ], + }); + + await loader.load({ + store, + parseData, + } as unknown as Parameters[0]); + + expect(store.set).toHaveBeenCalledTimes(2); + expect(store.set).toHaveBeenNthCalledWith(1, { + id: "did:plc:bobatan/site.standard.document/shared", + data: { title: "Bobatan doc" }, + body: undefined, + filePath: undefined, + }); + expect(store.set).toHaveBeenNthCalledWith(2, { + id: "did:plc:bob/site.standard.document/shared", + data: { title: "Bob doc" }, + body: undefined, + filePath: undefined, + }); + }); +}); diff --git a/astro-atproto-loader/__tests__/msw/handlers.ts b/astro-atproto-loader/__tests__/msw/handlers.ts new file mode 100644 index 0000000..0926d1c --- /dev/null +++ b/astro-atproto-loader/__tests__/msw/handlers.ts @@ -0,0 +1,152 @@ +import { P256Keypair } from "@atproto/crypto"; +import { http, HttpResponse, type HttpHandler } from "msw"; + +// Valid CIDv1 that passes `multiformats/cid`'s `CID.parse`. The lexicon +// validator rejects responses that lack a parseable cid, so every fake +// record we serve needs one. +export const FAKE_CID = + "bafyreidfayvfuwqa7qlnopdjiqrxzs6blmoeu4rujcjtnci5beludirz2a"; + +// `@atproto/identity` insists on a well-formed DID document — signingKey + +// handle + pds all required — even though the loader only cares about pds. +// Generate one real P-256 multibase key at module load and reuse it. 
+const keypair = await P256Keypair.create(); +const SIGNING_KEY_MULTIBASE = keypair.did().slice("did:key:".length); + +export type FakeRecord = { + did: string; + rkey: string; + value: Record; + cid?: string; +}; + +export type RepoIdentity = { + did: string; + pds: string; + handle?: string; +}; + +export const mockRepoIdentity = ({ + did, + pds, + handle, +}: RepoIdentity): HttpHandler[] => { + const advertisedHandle = handle ?? `${did.split(":").pop()}.example.test`; + + const handlers: HttpHandler[] = [ + http.get(`https://plc.directory/${encodeURIComponent(did)}`, () => + HttpResponse.json({ + id: did, + alsoKnownAs: [`at://${advertisedHandle}`], + verificationMethod: [ + { + id: `${did}#atproto`, + type: "Multikey", + controller: did, + publicKeyMultibase: SIGNING_KEY_MULTIBASE, + }, + ], + service: [ + { + id: "#atproto_pds", + type: "AtprotoPersonalDataServer", + serviceEndpoint: pds, + }, + ], + }), + ), + ]; + + if (handle) { + handlers.push( + http.get(`https://${handle}/.well-known/atproto-did`, () => + HttpResponse.text(did), + ), + ); + } + + return handlers; +}; + +export type MockListRecordsConfig = { + pds: string; + repo: string; + collection: string; + pages: FakeRecord[][]; + onCall?: (cursor: string | null) => void; +}; + +export const mockListRecords = ({ + pds, + repo, + collection, + pages, + onCall, +}: MockListRecordsConfig): HttpHandler => + http.get(`${pds}/xrpc/com.atproto.repo.listRecords`, ({ request }) => { + const url = new URL(request.url); + const queryRepo = url.searchParams.get("repo"); + const queryCollection = url.searchParams.get("collection"); + + if (queryRepo !== repo || queryCollection !== collection) { + return new HttpResponse( + JSON.stringify({ + error: "InvalidRequest", + message: `No fake records registered for repo=${queryRepo} collection=${queryCollection}`, + }), + { status: 400, headers: { "content-type": "application/json" } }, + ); + } + + const cursor = url.searchParams.get("cursor"); + onCall?.(cursor); + 
const pageIndex = cursor ? Number.parseInt(cursor, 10) : 0; + const page = pages[pageIndex] ?? []; + const hasNext = pageIndex + 1 < pages.length; + + return HttpResponse.json({ + records: page.map((record) => ({ + uri: `at://${record.did}/${collection}/${record.rkey}`, + cid: record.cid ?? FAKE_CID, + value: record.value, + })), + cursor: hasNext ? String(pageIndex + 1) : undefined, + }); + }); + +export type MockGetRecordConfig = { + pds: string; + repo: string; + collection: string; + record: FakeRecord; +}; + +export const mockGetRecord = ({ + pds, + repo, + collection, + record, +}: MockGetRecordConfig): HttpHandler => + http.get(`${pds}/xrpc/com.atproto.repo.getRecord`, ({ request }) => { + const url = new URL(request.url); + if ( + url.searchParams.get("repo") !== repo || + url.searchParams.get("collection") !== collection || + url.searchParams.get("rkey") !== record.rkey + ) { + return new HttpResponse(JSON.stringify({ error: "RecordNotFound" }), { + status: 404, + headers: { "content-type": "application/json" }, + }); + } + return HttpResponse.json({ + uri: `at://${record.did}/${collection}/${record.rkey}`, + cid: record.cid, + value: record.value, + }); + }); + +export const failingGetRecord = (pds: string): HttpHandler => + http.get(`${pds}/xrpc/com.atproto.repo.getRecord`, () => + HttpResponse.error(), + ); diff --git a/astro-atproto-loader/__tests__/msw/server.ts b/astro-atproto-loader/__tests__/msw/server.ts new file mode 100644 index 0000000..bd0bda5 --- /dev/null +++ b/astro-atproto-loader/__tests__/msw/server.ts @@ -0,0 +1,3 @@ +import { setupServer } from "msw/node"; + +export const server = setupServer(); diff --git a/astro-atproto-loader/__tests__/setup.ts b/astro-atproto-loader/__tests__/setup.ts new file mode 100644 index 0000000..0f465ac --- /dev/null +++ b/astro-atproto-loader/__tests__/setup.ts @@ -0,0 +1,22 @@ +import { afterAll, afterEach, beforeAll, vi } from "vitest"; +import { server } from "./msw/server.ts"; + 
+vi.mock("node:dns/promises", () => { + const fail = async () => { + throw Object.assign(new Error("ENODATA (test stub)"), { code: "ENODATA" }); + }; + return { + default: { + resolveTxt: fail, + lookup: fail, + Resolver: class { + setServers() {} + resolveTxt = fail; + }, + }, + }; +}); + +beforeAll(() => server.listen({ onUnhandledRequest: "error" })); +afterEach(() => server.resetHandlers()); +afterAll(() => server.close()); diff --git a/astro-atproto-loader/package.json b/astro-atproto-loader/package.json index dc952c6..9f36d76 100644 --- a/astro-atproto-loader/package.json +++ b/astro-atproto-loader/package.json @@ -44,7 +44,9 @@ "build": "tsdown", "dev": "tsdown --watch src/", "validate": "npx publint", - "typecheck": "tsc --noEmit" + "typecheck": "tsc --noEmit", + "test": "vitest run", + "test:watch": "vitest" }, "dependencies": { "@atproto/api": "^0.17.3", diff --git a/astro-atproto-loader/vitest.config.ts b/astro-atproto-loader/vitest.config.ts new file mode 100644 index 0000000..1d951d7 --- /dev/null +++ b/astro-atproto-loader/vitest.config.ts @@ -0,0 +1,7 @@ +import { defineConfig } from "vitest/config"; + +export default defineConfig({ + test: { + setupFiles: ["./__tests__/setup.ts"], + }, +}); diff --git a/astro-authproto/README.md b/astro-authproto/README.md index cda1f3d..8b11d1b 100644 --- a/astro-authproto/README.md +++ b/astro-authproto/README.md @@ -112,21 +112,33 @@ npm add @fujocoded/authproto 2. Add the integration to your `astro.config.mjs` file, like this: -```js +````js import { defineConfig } from "astro/config"; import node from "@astrojs/node"; -+ import authproto from "@fujocoded/authproto"; +import authproto from "@fujocoded/authproto"; export default defineConfig({ - output: "server", // you can read up more how this works here: https://docs.astro.build/en/guides/on-demand-rendering/ + output: "server", // auth state is only available on dynamically rendered pages adapter: node({ mode: "standalone" }), // ... 
or whichever adapter you're using! -+ integrations: [ -+ authproto({ -+ // config options here -+ }), -+ ], + // ATproto requires the server to be on a "loopback" address instead of + // simple localhost. If you don't know what this means, + // don't worry about it! Just set `host: true` :) + host: true, + integrations: [ + authproto({ + // The name of your app, shown on the login screen. + applicationName: "My super cool guestbook", + // The URL your site is (or will be) available at. + // Ignored during development. + applicationDomain: "https://my-guestbook.fujocoded.com", + }), + ], }); -``` + +This configuration is enough to develop your authenticated +website on your machine. Before putting your site online, +see [Shipping it](#shipping-it-going-to-production-that-is) for things to +pay attention to. > [!TIP] > @@ -145,7 +157,7 @@ import { Login } from "@fujocoded/authproto/components"; --- -``` +```` It'll look like a plain form: @@ -195,8 +207,9 @@ These settings go inside the `authproto({ ... })` call in your - `applicationName`, required. The name of your application. For example, you can set this to `"My personal guestbook"`! -- `applicationDomain`, required. It should be a domain that your site is on, or - you can just put in `"localhost:4321"` for now. +- `applicationDomain`, required. The full URL where your site is (or will be) + available in production, including the scheme. For example: + `"https://example.com"`. - `defaultDevUser`, optional. A handle that gets pre-filled into the [login form](#customizing-the-login-form) while you're developing your site locally (never in production). Saves you from re-typing your handle every @@ -250,6 +263,114 @@ You can change how `` looks and behaves by passing it these options: /> ``` +# Shipping it (going to production, that is) + +Before putting your site online, there are a few things to make sure of: + +1.
**Your site uses [on-demand rendering](https://docs.astro.build/en/guides/on-demand-rendering/).** + Authproto needs `output: "server"` and a server adapter (like + `@astrojs/node`) so login state can be read on every request. +2. **You've set up durable session storage** for both Astro's `session.driver` + and Authproto's `driver`. See [Storing authentication data](#storing-authentication-data) + and below for the options. + +Both default to in-memory storage, which means everything they hold lives +only in your server's RAM. Restarting or redeploying your site wipes it +and logs everyone out. That's fine for local dev, and you can keep it in +production too if you don't mind your visitors having to log back in every +time you ship. + +## Storing authentication data + +By default, Astro's `session.driver` and AuthProto's `driver` store their data +in memory—that is, on your computer or server's RAM. This means that restarting +or redeploying your site wipes it and logs everyone out. + +This is ok for development...and for production, if you don't mind your users +being forced to log back in every time you ship a new version.
+ +### With Astro DB + +The simplest durable setup uses [Astro's DB integration](https://docs.astro.build/en/guides/integrations-guide/db/) +for both Astro's session and Authproto's store: + +```js +// astro.config.mjs +import { defineConfig } from "astro/config"; +import node from "@astrojs/node"; +import db from "@astrojs/db"; +import authproto from "@fujocoded/authproto"; + +export default defineConfig({ + output: "server", + adapter: node({ mode: "standalone" }), + session: { + driver: "db", + }, + integrations: [ + db(), + authproto({ + applicationName: "My super cool guestbook", + applicationDomain: "https://my-guestbook.fujocoded.com", + driver: { name: "astro:db" }, + }), + ], +}); +``` + +> [!TIP] +> +> If you don't already have somewhere to host the database, [Turso](https://turso.tech/) +> is the host Astro DB recommends, and it has a free tier you can start with. + +### Without a database + +If you can't run a classic database, you can: + +- pick a different driver from + [Astro's session driver list](https://docs.astro.build/en/reference/configuration-reference/#sessiondriver). + On serverless or edge hosting, options like Redis, Upstash, or Cloudflare + KV all work. Set the same kind of driver on both `session.driver` and + Authproto's `driver`. + +You can also leave both on the default `memory` driver if you're OK with +visitors having to log back in every time the server restarts. Nothing +else changes. + +### With a file on disk + +A file-based driver (like `fs`) writes Authproto's store directly to disk.
+The file holds everything Authproto needs to keep your visitors logged in, +so treat it like a credentials file: + +- Set `options.base` to a path you control (don't rely on the driver's + default location) +- Add that path to your `.gitignore` +- Make sure only the server can read it on the host + +At startup, Authproto runs `git check-ignore` against your `base` path and +warns if it isn't covered by `.gitignore`, so you'll hear about it before +you commit anything. + +## If something may have gone wrong + +If you believe something that shouldn't be exposed has been, or you just want to +invalidate every active session, you should clear out Authproto's store: + +- **Astro DB:** drop all rows in the table Authproto created. +- **KV / Redis / file driver / etc.:** delete the keys (or file) Authproto + wrote. Each driver's docs cover how. +- **Memory store:** close the app. + +Once the store is empty, anyone who was signed in goes back through the +regular login flow the next time they visit. + +> [!NOTE] +> +> You may see a warning about a missing "lock mechanism" in your logs. +> You can ignore it unless you're running multiple server instances. If +> you are, you'll know how to handle it. + # Support Us You can check out more of our plugins here: diff --git a/astro-authproto/__examples__/02-read-bsky-profile/README.md b/astro-authproto/__examples__/02-read-bsky-profile/README.md index 6b418e4..79819ef 100644 --- a/astro-authproto/__examples__/02-read-bsky-profile/README.md +++ b/astro-authproto/__examples__/02-read-bsky-profile/README.md @@ -1,8 +1,3 @@ -> TODO: should call out somewhere VERY prominent that people should never set "fs" as a -> driver and commit the resulting folder to git: that's your actual account keys and it'll -> make it possible for people to login to your account (at least until they expire). -> Could consider adding a big warning in the terminal when people do (maybe file as an issue?) - Extension of the `login-logout` example.
If nobody is logged in, shows the Bluesky profile data for @fujocoded. diff --git a/astro-authproto/__examples__/02-read-bsky-profile/src/pages/index.astro b/astro-authproto/__examples__/02-read-bsky-profile/src/pages/index.astro index 350f110..fa146a0 100644 --- a/astro-authproto/__examples__/02-read-bsky-profile/src/pages/index.astro +++ b/astro-authproto/__examples__/02-read-bsky-profile/src/pages/index.astro @@ -1,7 +1,7 @@ --- import { Image } from "astro:assets"; import { Login } from "@fujocoded/authproto/components"; -import { AtpBaseClient } from "@atproto/api"; +import { getBlueskyAgent } from "@fujocoded/authproto/helpers"; import "../styles/bsky_table.css"; const FUJOCODED_HANDLE = "fujocoded.bsky.social"; @@ -12,7 +12,12 @@ const display_user = !user ? { did: FUJOCODED_DID, handle: FUJOCODED_HANDLE } : user; -const agent = new AtpBaseClient("https://public.api.bsky.app"); +const agent = await getBlueskyAgent(user ? { loggedInUser: user } : undefined); + +if (!agent) { + throw new Error("Could not create a Bluesky agent."); +} + const display_profile = await agent.app.bsky.actor.getProfile({ actor: display_user.did, }); @@ -50,7 +55,7 @@ const display_profile = await agent.app.bsky.actor.getProfile({ src={display_profile.data.avatar} width="50" height="50" - alt="{display_profile.data.displayName}'s avatar." 
+ alt={`${display_profile.data.displayName}'s avatar.`} /> diff --git a/astro-authproto/src/config-module.d.ts b/astro-authproto/src/config-module.d.ts index f35344f..19a56b6 100644 --- a/astro-authproto/src/config-module.d.ts +++ b/astro-authproto/src/config-module.d.ts @@ -26,6 +26,8 @@ declare module "fujocoded:authproto/config" { export const redirectAfterLogin: string; export const redirectAfterLogout: string; export const clientMetadataDomain: string; + export const isDev: boolean; + export const isDevServerHostSet: boolean; } declare module "fujocoded:authproto/stores" { diff --git a/astro-authproto/src/index.ts b/astro-authproto/src/index.ts index e105b4f..46bb6b3 100644 --- a/astro-authproto/src/index.ts +++ b/astro-authproto/src/index.ts @@ -1,5 +1,7 @@ import type { AstroIntegration, InjectedRoute } from "astro"; import path from "node:path"; +import { fileURLToPath } from "node:url"; +import { execFileSync } from "node:child_process"; import { addVirtualImports } from "astro-integration-kit"; import { getConfig, @@ -8,6 +10,35 @@ import { } from "./lib/config.js"; import { readFile } from "node:fs/promises"; +/** + * Returns whether `git check-ignore` thinks the given absolute path is covered + * by the project's gitignore rules. Returns `null` when git can't answer (no + * git binary, not a git repo, etc.) so callers can fall back to a generic + * message instead of pretending they know. 
+ */ +const checkPathGitignored = ( + absolutePath: string, + cwd: string, +): boolean | null => { + try { + execFileSync("git", ["check-ignore", "-q", "--", absolutePath], { + cwd, + stdio: "ignore", + }); + return true; + } catch (e: unknown) { + if ( + typeof e === "object" && + e !== null && + "status" in e && + (e as { status?: number }).status === 1 + ) { + return false; + } + return null; + } +}; + export const LOGGED_IN_DID_TEMPLATE = "{loggedInUser.did}"; export const LOGGED_IN_HANDLE_TEMPLATE = "{loggedInUser.handle}"; export const REDIRECT_TO_REFERER_TEMPLATE = "{referer}"; @@ -67,7 +98,7 @@ export default ( name: "fujocoded:authproto", hooks: { "astro:config:setup": (setupParams) => { - const { injectRoute, addMiddleware } = setupParams; + const { injectRoute, addMiddleware, config } = setupParams; addOAuthRoutes(injectRoute); addAtProtoRoutes(injectRoute); @@ -81,6 +112,8 @@ export default ( content: getConfig({ options: configOptions, isDev: process.env.NODE_ENV === "development", + devPort: config.server?.port, + devServerHost: config.server?.host, }), context: "server", }, @@ -128,6 +161,37 @@ export default ( `Your Astro output config is "static". The login status is only available on dynamically rendered pages.`, ); } + if ( + configOptions.driver?.name === "fs" || + configOptions.driver?.name === "fs-lite" + ) { + const driverName = configOptions.driver.name; + const driverBase = ( + configOptions.driver.options as { base?: string } | undefined + )?.base; + const projectRoot = fileURLToPath(config.root); + + if (driverBase) { + const absoluteBase = path.isAbsolute(driverBase) + ? driverBase + : path.resolve(projectRoot, driverBase); + const ignored = checkPathGitignored(absoluteBase, projectRoot); + if (ignored === false) { + logger.warn( + `Authproto's "${driverName}" driver writes session data to "${driverBase}", which is NOT covered by your .gitignore. Add it before committing or you'll leak credentials. 
Also make sure only the server can read it on the host.`, + ); + } else if (ignored === null) { + logger.warn( + `Authproto's "${driverName}" driver writes session data to "${driverBase}". Couldn't verify gitignore status (no git, or not a git repo). Make sure that location is gitignored and only the server can read it.`, + ); + } + // ignored === true: silent. The path is gitignored, nothing to flag. + } else { + logger.warn( + `Authproto's "${driverName}" driver writes session data to disk. Set "options.base" to a path you control, add it to .gitignore, and make sure only the server can read it.`, + ); + } + } injectTypes({ filename: "types.d.ts", content: await readFile( diff --git a/astro-authproto/src/lib/config.ts b/astro-authproto/src/lib/config.ts index 894e2bc..ffa8e5b 100644 --- a/astro-authproto/src/lib/config.ts +++ b/astro-authproto/src/lib/config.ts @@ -73,10 +73,15 @@ export const getStoresImport = (driverName?: string) => { export const getConfig = ({ options, isDev, + devPort, + devServerHost, }: { options: ConfigOptions; isDev: boolean; + devPort?: number; + devServerHost?: string | boolean; }) => { + const isDevServerHostSet = Boolean(devServerHost); const finalDriver = options.driver ?? { name: "memory", options: undefined, @@ -84,7 +89,9 @@ export const getConfig = ({ const externalDomain = options.externalDomain ?? - (isDev ? "http://127.0.0.1:4321/" : options.applicationDomain); + (isDev + ? `http://127.0.0.1:${devPort ?? 4321}/` + : options.applicationDomain); const scopes: OAuthScope[] = ["atproto"]; if (Array.isArray(options.scopes)) { @@ -131,5 +138,7 @@ export const getConfig = ({ export const redirectAfterLogout = ${JSON.stringify(options.redirects?.afterLogout ?? "/")}; export const externalDomain = ${JSON.stringify(externalDomain)}; export const clientMetadataDomain = process.env.AUTHPROTO_EXTERNAL_DOMAIN ?? ${JSON.stringify(externalDomain)} ?? 
"${options.applicationDomain}"; + export const isDev = ${JSON.stringify(isDev)}; + export const isDevServerHostSet = ${JSON.stringify(isDevServerHostSet)}; `; }; diff --git a/astro-authproto/src/routes/oauth/callback.ts b/astro-authproto/src/routes/oauth/callback.ts index 7322e5f..d8cbe8c 100644 --- a/astro-authproto/src/routes/oauth/callback.ts +++ b/astro-authproto/src/routes/oauth/callback.ts @@ -12,6 +12,7 @@ export const GET: APIRoute = async ({ request, redirect, session }) => { const requestUrl = new URL(request.url); let oauthSession: OAuthSession | null; + let oauthState: string | null = null; let error = requestUrl.searchParams.get("error"); // This falls back to undefined so it will be compatible with the session // storage signature if not present. @@ -20,6 +21,7 @@ export const GET: APIRoute = async ({ request, redirect, session }) => { try { const clientCallback = await oauthClient.callback(requestUrl.searchParams); oauthSession = clientCallback.session; + oauthState = clientCallback.state; session?.set("atproto-did", oauthSession.did); } catch (e) { // If there is an error during session restoration then it takes precedence @@ -35,16 +37,17 @@ export const GET: APIRoute = async ({ request, redirect, session }) => { session?.set(AUTHPROTO_ERROR_DESCRIPTION, errorDescription); } - // Check if a custom redirect or referer was passed in the state - // Note: CSRF validation is already handled by oauthClient.callback() above, - // so it's safe to fall back to default redirect if state parsing fails here + // The `state` value in the URL is NOT the state we sent during login: the + // OAuth client swaps it for its own internal id. Our original state comes + // back as `clientCallback.state`, so that's what we read. + // CSRF was already validated by oauthClient.callback() above, so if parsing + // fails here it's safe to fall back to the default redirect. 
let customRedirect: string | undefined; let referer: string | undefined; - const stateParam = requestUrl.searchParams.get("state"); - if (stateParam) { + if (oauthState) { try { const stateData = JSON.parse( - Buffer.from(stateParam, "base64url").toString(), + Buffer.from(oauthState, "base64url").toString(), ); customRedirect = stateData.redirect; referer = stateData.referer; diff --git a/astro-authproto/src/routes/oauth/login.ts b/astro-authproto/src/routes/oauth/login.ts index 9c3145b..c5a726d 100644 --- a/astro-authproto/src/routes/oauth/login.ts +++ b/astro-authproto/src/routes/oauth/login.ts @@ -1,13 +1,29 @@ import type { APIRoute } from "astro"; import { extractAuthError, oauthClient } from "../../lib/auth.js"; -import { scopes } from "fujocoded:authproto/config"; +import { scopes, isDev, isDevServerHostSet } from "fujocoded:authproto/config"; import { randomBytes } from "node:crypto"; import { AUTHPROTO_ERROR_CODE, AUTHPROTO_ERROR_DESCRIPTION, } from "../../../src/routes/middleware.ts"; +const DEV_HOST_WARNING = [ + "", + " ATproto OAuth needs your dev server to bind to 127.0.0.1, but it", + " isn't. Login will fail with a redirect URI error until you fix this.", + "", + " Pick one:", + " • Run: astro dev --host", + " • Or set in astro.config.mjs:", + " server: { host: true }", + "", +].join("\n"); + export const POST: APIRoute = async ({ redirect, request, session }) => { + if (isDev && !isDevServerHostSet) { + console.error(DEV_HOST_WARNING); + } + const body = await request.formData(); const atprotoIdValue = body.get("atproto-id"); const atprotoId = diff --git a/atproto-badges/README.md b/atproto-badges/README.md new file mode 100644 index 0000000..ca8957a --- /dev/null +++ b/atproto-badges/README.md @@ -0,0 +1,291 @@ +# `@fujocoded/atproto-badges` + +Signed badges on ATproto. Officially Certify™ whatever your heart desires: +events, communities, inside-jokes, friends, and anything in-between! + + + +
+ + NPM license FujoCoded badge NPM version badge Open in GitHub Codespaces + +
+ +## What is `@fujocoded/atproto-badges`? + +`@fujocoded/atproto-badges` lets you create and sign badges on ATProto. You +define a badge (like "Yuletide 2026 Writer" or "ATmosphereConf 2026 attendee"), +sign it with your secret key, and write it to the recipient's PDS, where anyone can +verify it came from you. + +Under the hood, it handles the cryptographic attestation (DAG-CBOR hashing, +P-256 signing, PLC document updates) so you can focus on when and why to award +badges, not how the signatures work. + +## What's included in `@fujocoded/atproto-badges`? + +In this package, you'll find utilities to manage: + +- **Key management** + - `generateSigningKeys` creates a new key pair for signing badges + - `loadSigningKey` loads a previously saved key so you can sign with it again +- **Badge definitions** + - `createBadgeDefinition` creates a new badge type on your PDS + - `findExistingBadgeDefinition` checks if a badge type already exists, so you + don't create duplicates +- **Badge awards** + - `createBadgeAwardRecord` builds a signed badge award, ready to write to a + recipient's PDS + - `getExistingBadgeAward` checks if someone already has a particular badge + - `getBadgeRkey` gives you a deterministic record key, so concurrent requests + don't create duplicate awards +- **PLC updates** + - `addAttestationVerificationMethod` publishes your public key to your DID + document, so others can verify your signatures +- **Verification** + - `verifyBadgeAward` checks whether a badge award's signature is legit — looks + up the issuer's DID document and verifies the cryptographic signature +- **Lower-level signing** (if you're building something custom) + - `createRecordSignature` signs any ATProto record, not just badges + - `getRecordHash` computes the hash that gets signed — useful for verification + or multi-signer workflows + +## What can you do with `@fujocoded/atproto-badges`? 
+ +- **Award participation badges for events & exchanges:** give artists, writers, + and other participants a verifiable badge that lives in their ATProto account— + whether you're hosting Yuletide or a smaller shipping week +- **Recognize contributors:** zine participants, community moderators, event + volunteers, code contributors...whatever you want to celebrate, put a badge on + it! +- **Verify badges:** use `verifyBadgeAward` to confirm a badge is legit by + checking the issuer's signature against their published key +- **Build tools to mint and manage badges:** these bad ~~boy~~badges can fit so + many use cases within them! + +## Installation + +```bash +npm add @fujocoded/atproto-badges +``` + +## Getting started + +Here's the typical flow, from setup to awarding your first badge. + +At a high level: + +1. Generate your secret key to sign badges with and store them safely! +2. Publish your public key on your Identity Document™ +3. Create a badge definition in your PDS... +4. ...then award it to someone, with a signed copy in their PDS! + +### 1. Generate your signing key + +This generates your super ultra mega secret credentials that allow you to sign +badges, pinkie-promising it is indeed you. You only need to do this once! + +```ts +import fs from "node:fs"; +import { generateSigningKeys } from "@fujocoded/atproto-badges"; + +const keys = await generateSigningKeys(); + +// If you want, you can save them to files + +// This will be BADGE_PRIVATE_KEY in your secrets +fs.writeFileSync("./private.key", keys.privateKeyBase64url); +// This is your public key, for step 2 +fs.writeFileSync("./public.key.txt", keys.publicDidKey); +``` + +> [!IMPORTANT] +> +> You must keep the private key somewhere safe and never show it to anyone. If +> you lose it, you won't be able to sign with that key anymore; if someone steals +> it, they'll be able to sign as you.
+>
+> When using private keys in your programs, make sure to use environment variables
+> rather than hardcoding them. Never commit them to Git.
+
+### 2. Publish your public key
+
+To let everyone know you're the one whose secret key has been going around
+signing badges left and right, you must first upload the corresponding public
+key to your DID document—that is, to your ATProto "id card".
+
+This command will send a verification email, and update your PLC
+document with the content of the key:
+
+```ts
+import { addAttestationVerificationMethod } from "@fujocoded/atproto-badges";
+
+// Your DID, in this case the "yuletide exchange"
+const exchangeDid = "did:plc:yuletide";
+
+// First, trigger the verification email:
+await agent.com.atproto.identity.requestPlcOperationSignature();
+
+// Then, once you have the token from the email:
+await addAttestationVerificationMethod({
+  // Check out @fujocoded/authproto if you have an
+  // Astro site!
+  agent,
+  // Your identity
+  did: exchangeDid,
+  // Your public key
+  publicDidKey: keys.publicDidKey,
+  token,
+});
+```
+
+### 3. Create a badge definition
+
+A badge definition describes what the badge is. You create it once, put it on
+your PDS, then reference it every time you award it:
+
+```ts
+import {
+  createBadgeDefinition,
+  findExistingBadgeDefinition,
+} from "@fujocoded/atproto-badges";
+
+// Check if it already exists first!
+const existing = await findExistingBadgeDefinition({
+  agent,
+  did: exchangeDid,
+  name: "Yuletide 2026 Writer",
+});
+
+if (existing) {
+  return "don't be greedy!";
+}
+
+const badgeDefinition = await createBadgeDefinition({
+  agent,
+  // Badge owner
+  did: exchangeDid,
+  // Badge name
+  name: "Yuletide 2026 Writer",
+  // Badge description
+  description: "Completed a gift fic for Yuletide 2026",
+});
+```
+
+> [!NOTE]
+>
+> The `agent` used for `createBadgeDefinition` must be authenticated as the **issuer** of the badge. This establishes the legitimacy of the badge.
+
+### 4. 
Award the badge
+
+```ts
+import {
+  createBadgeAwardRecord,
+  getExistingBadgeAward,
+  getBadgeRkey,
+  loadSigningKey,
+} from "@fujocoded/atproto-badges";
+
+const participantDid = "did:plc:participant";
+
+// Don't award it twice!
+const currentAward = await getExistingBadgeAward({
+  agent,
+  did: participantDid,
+  badgeDefinitionUri: badgeDefinition.uri,
+});
+
+if (currentAward) {
+  return "don't be greedy!";
+}
+
+// Get your key ready to sign!
+const signingKey = await loadSigningKey({
+  privateKeyBase64url: process.env.BADGE_PRIVATE_KEY!,
+});
+
+// "I hereby award you the badge—"
+const award = await createBadgeAwardRecord({
+  // The badge recipient
+  recipientDid: participantDid,
+  // Reference returned by createBadgeDefinition
+  badgeRef: badgeDefinition,
+  // Your DID
+  organizerDid: exchangeDid,
+  signingKey,
+});
+
+// Save the badge to the recipient's PDS
+await agent.com.atproto.repo.putRecord({
+  repo: participantDid,
+  collection: "community.lexicon.badge.award",
+  rkey: getBadgeRkey({ badgeDefinitionUri: badgeDefinition.uri }),
+  record: award,
+});
+```
+
+> [!NOTE]
+>
+> The `agent` used for `putRecord` must be authenticated as the **recipient of
+> the badge**, not the issuer.
+>
+> The recipient claims their badge by writing the issuer-signed badge to their
+> own PDS. The issuer never needs write access to the recipient's repo—the
+> signature itself proves legitimacy.
+
+## Good to know
+
+- Badge **definitions** live in the issuing organization's repo. Badge
+  **awards** go in the recipient's repo.
+- `getExistingBadgeAward` looks up a badge award by definition URI and returns
+  the full record value. You can check the CID yourself if you need to
+  distinguish between versions of a badge definition.
+- `getBadgeRkey` derives the record key from the badge definition URI. This
+  means awarding the same badge definition to the same person always targets the
+  same record (easier to avoid duplicates!). 
+- This package handles signing and data — you bring your own `AtpAgent`,
+  authentication, and app logic around it.
+
+> [!WARNING]
+>
+> All parameters to `createBadgeAwardRecord` must be defined — passing
+> `undefined` for any field (e.g. an unset env var for `organizerDid`) will
+> throw with a message like `Cannot CBOR-encode record: field "organizerDid" is
+> undefined`. ATProto records are CBOR-encoded, and CBOR has no concept of
+> `undefined`.
+
+## Based on
+
+The attestation signing in this package is based on the
+[`atproto-attestation`](https://tangled.org/@smokesignal.events/atproto-identity-rs)
+Rust crate by [smokesignal.events](https://tangled.org/@smokesignal.events/). If you're looking for a full Rust
+implementation (including CLI tools for signing and verifying attestations),
+check that out!
+
+# Support Us
+
+You can check out more of our plugins here:
+
+- [Authproto](https://github.com/FujoWebDev/fujocoded-plugins/tree/main/astro-authproto)
+- [Socials plugin](https://github.com/FujoWebDev/fujocoded-plugins/tree/main/zod-transform-socials)
+- [Alt text files plugin](https://github.com/FujoWebDev/fujocoded-plugins/tree/main/remark-alt-text-files)
+
+You can also become a patron or buy some merch:
+
+- [Monthly Support](https://fujocoded.com/support)
+- [Merch Shop](https://store.fujocoded.com/)
+- [RobinBoob](https://www.robinboob.com/)
+
+# Follow Us
+

diff --git a/atproto-badges/package.json b/atproto-badges/package.json new file mode 100644 index 0000000..901b708 --- /dev/null +++ b/atproto-badges/package.json @@ -0,0 +1,56 @@ +{ + "name": "@fujocoded/atproto-badges", + "version": "0.1.0", + "description": "ATProto badge attestation utilities", + "keywords": [ + "atproto", + "badge" + ], + "homepage": "https://github.com/FujoWebDev/fujocoded-plugins#readme", + "bugs": { + "url": "https://github.com/FujoWebDev/fujocoded-plugins/issues" + }, + "license": "MIT", + "author": "FujoCoded LLC", + "repository": { + "type": "git", + "url": "git+https://github.com/FujoWebDev/fujocoded-plugins.git" + }, + "files": [ + "dist", + "LICENSE", + "README.md", + "package.json" + ], + "type": "module", + "main": "dist/index.js", + "exports": { + ".": { + "import": { + "types": "./dist/index.d.ts", + "default": "./dist/index.js" + } + } + }, + "publishConfig": { + "access": "public" + }, + "scripts": { + "build": "tsdown", + "test": "vitest run", + "validate": "npx publint" + }, + "dependencies": { + "@atproto/api": "^0.17.3", + "@atproto/crypto": "^0.4.3", + "@atproto/identity": "^0.4.3", + "@atproto/xrpc": "^0.7.7", + "@ipld/dag-cbor": "^9.2.2", + "multiformats": "^13.3.1", + "uint8arrays": "^5.1.0" + }, + "devDependencies": { + "tsdown": "^0.17.2", + "vitest": "^3.1.1" + } +} diff --git a/atproto-badges/src/__tests__/badge.test.ts b/atproto-badges/src/__tests__/badge.test.ts new file mode 100644 index 0000000..0eb0712 --- /dev/null +++ b/atproto-badges/src/__tests__/badge.test.ts @@ -0,0 +1,380 @@ +import { describe, it, expect } from "vitest"; +import { fromString } from "uint8arrays"; +import { + generateSigningKeys, + loadSigningKey, + getRecordHash, + createRecordSignature, + createBadgeAwardRecord, + verifyBadgeAward, + getBadgeRkey, +} from "../index.ts"; +import type { BadgeAward } from "../types.ts"; + +const TEST_DID = "did:plc:testissuer"; +const TEST_RECIPIENT = "did:plc:testrecipient"; +const TEST_BADGE_REF = { + 
uri: "at://did:plc:testissuer/community.lexicon.badge.definition/abc123", + cid: "bafyreifake", +}; + +/** Mock resolver that returns a PLC-format DID doc with the given did:key */ +function mockResolver(publicDidKey: string) { + return async (_did: string) => ({ + verificationMethods: { + attestations: publicDidKey, + }, + }); +} + +describe("generateSigningKeys", () => { + it("returns valid base64url private key + did:key public key", async () => { + const keys = await generateSigningKeys(); + expect(keys.privateKeyBase64url).toMatch(/^[A-Za-z0-9_-]+$/); + expect(keys.publicDidKey).toMatch(/^did:key:z/); + }); +}); + +describe("loadSigningKey", () => { + it("round-trips with generated key", async () => { + const keys = await generateSigningKeys(); + const loaded = await loadSigningKey({ + privateKeyBase64url: keys.privateKeyBase64url, + }); + expect(loaded.did()).toBe(keys.publicDidKey); + }); + + it("throws on invalid input (wrong length)", async () => { + // 16 bytes encoded as base64url + const shortKey = "AAAAAAAAAAAAAAAAAAAAAA"; + await expect( + loadSigningKey({ privateKeyBase64url: shortKey }), + ).rejects.toThrow("expected 32 bytes"); + }); + + it("throws on invalid input (garbage string)", async () => { + await expect( + loadSigningKey({ privateKeyBase64url: "!!not-valid!!" 
}), + ).rejects.toThrow(); + }); +}); + +describe("getRecordHash", () => { + it("is deterministic (same input = same CID)", async () => { + const record = { $type: "test", value: "hello" }; + const metadata = { + $type: "community.lexicon.attestations.signature" as const, + key: `${TEST_DID}#attestations`, + }; + const cid1 = await getRecordHash({ + record, + metadata, + repositoryDid: TEST_RECIPIENT, + }); + const cid2 = await getRecordHash({ + record, + metadata, + repositoryDid: TEST_RECIPIENT, + }); + expect(cid1.toString()).toBe(cid2.toString()); + }); + + it("throws a clear error on undefined fields", async () => { + const record = { + $type: "test", + badField: undefined, + }; + const metadata = { + $type: "community.lexicon.attestations.signature" as const, + key: `${TEST_DID}#attestations`, + }; + await expect( + getRecordHash({ record, metadata, repositoryDid: TEST_RECIPIENT }), + ).rejects.toThrow('field "badField" is undefined'); + }); + + it("throws on nested undefined fields", async () => { + const record = { + $type: "test", + nested: { good: "value", bad: undefined }, + }; + const metadata = { + $type: "community.lexicon.attestations.signature" as const, + key: `${TEST_DID}#attestations`, + }; + await expect( + getRecordHash({ record, metadata, repositoryDid: TEST_RECIPIENT }), + ).rejects.toThrow('field "nested.bad" is undefined'); + }); +}); + +describe("createRecordSignature", () => { + it("rejects invalid organizerDid", async () => { + const keys = await generateSigningKeys(); + const signingKey = await loadSigningKey({ + privateKeyBase64url: keys.privateKeyBase64url, + }); + await expect( + createRecordSignature({ + record: { $type: "test" }, + organizerDid: "not-a-did", + recipientDid: TEST_RECIPIENT, + signingKey, + }), + ).rejects.toThrow("Invalid organizerDid"); + }); +}); + +describe("verifyBadgeAward", () => { + it("sign/verify round-trip", async () => { + const keys = await generateSigningKeys(); + const signingKey = await 
loadSigningKey({ + privateKeyBase64url: keys.privateKeyBase64url, + }); + const award = await createBadgeAwardRecord({ + recipientDid: TEST_RECIPIENT, + badgeRef: TEST_BADGE_REF, + organizerDid: TEST_DID, + signingKey, + }); + const result = await verifyBadgeAward({ + award, + resolveDidDoc: mockResolver(keys.publicDidKey), + }); + expect(result.verified).toBe(true); + if (result.verified) { + expect(result.issuerDid).toBe(TEST_DID); + } + }); + + it("verified:false when record tampered after signing", async () => { + const keys = await generateSigningKeys(); + const signingKey = await loadSigningKey({ + privateKeyBase64url: keys.privateKeyBase64url, + }); + const award = await createBadgeAwardRecord({ + recipientDid: TEST_RECIPIENT, + badgeRef: TEST_BADGE_REF, + organizerDid: TEST_DID, + signingKey, + }); + // Tamper with the record + const tampered: BadgeAward = { ...award, did: "did:plc:tampered" }; + const result = await verifyBadgeAward({ + award: tampered, + resolveDidDoc: mockResolver(keys.publicDidKey), + }); + expect(result.verified).toBe(false); + if (!result.verified) { + expect(result.error).toBe("signature_invalid"); + } + }); + + it('error "no_signatures" (empty signatures array)', async () => { + const award: BadgeAward = { + $type: "community.lexicon.badge.award", + did: TEST_RECIPIENT, + badge: TEST_BADGE_REF, + issued: new Date().toISOString(), + signatures: [], + }; + const result = await verifyBadgeAward({ award }); + expect(result).toEqual({ verified: false, error: "no_signatures" }); + }); + + it('error "invalid_key_id" (no sig matching pattern)', async () => { + const award: BadgeAward = { + $type: "community.lexicon.badge.award", + did: TEST_RECIPIENT, + badge: TEST_BADGE_REF, + issued: new Date().toISOString(), + signatures: [ + { + $type: "community.lexicon.attestations.signature", + key: "not-a-did-key", + signature: { $bytes: "AAAA" }, + }, + ], + }; + const result = await verifyBadgeAward({ award }); + expect(result).toEqual({ verified: 
false, error: "invalid_key_id" }); + }); + + it('error "no_attestation_key" (DID doc missing key)', async () => { + const keys = await generateSigningKeys(); + const signingKey = await loadSigningKey({ + privateKeyBase64url: keys.privateKeyBase64url, + }); + const award = await createBadgeAwardRecord({ + recipientDid: TEST_RECIPIENT, + badgeRef: TEST_BADGE_REF, + organizerDid: TEST_DID, + signingKey, + }); + // Resolver returns doc without attestation key + const result = await verifyBadgeAward({ + award, + resolveDidDoc: async () => ({ verificationMethods: {} }), + }); + expect(result.verified).toBe(false); + if (!result.verified) { + expect(result.error).toBe("no_attestation_key"); + } + }); + + it('error "plc_fetch_failed" (resolver throws)', async () => { + const keys = await generateSigningKeys(); + const signingKey = await loadSigningKey({ + privateKeyBase64url: keys.privateKeyBase64url, + }); + const award = await createBadgeAwardRecord({ + recipientDid: TEST_RECIPIENT, + badgeRef: TEST_BADGE_REF, + organizerDid: TEST_DID, + signingKey, + }); + const result = await verifyBadgeAward({ + award, + resolveDidDoc: async () => { + throw new Error("network error"); + }, + }); + expect(result.verified).toBe(false); + if (!result.verified) { + expect(result.error).toBe("plc_fetch_failed"); + expect(result.issuerDid).toBe(TEST_DID); + } + }); + + it('error "signature_invalid" (valid format, wrong key)', async () => { + const issuerKeys = await generateSigningKeys(); + const signingKey = await loadSigningKey({ + privateKeyBase64url: issuerKeys.privateKeyBase64url, + }); + const award = await createBadgeAwardRecord({ + recipientDid: TEST_RECIPIENT, + badgeRef: TEST_BADGE_REF, + organizerDid: TEST_DID, + signingKey, + }); + // Verify with a different key + const wrongKeys = await generateSigningKeys(); + const result = await verifyBadgeAward({ + award, + resolveDidDoc: mockResolver(wrongKeys.publicDidKey), + }); + expect(result.verified).toBe(false); + if 
(!result.verified) { + expect(result.error).toBe("signature_invalid"); + } + }); + + it("works with did:web mock resolver", async () => { + const keys = await generateSigningKeys(); + const signingKey = await loadSigningKey({ + privateKeyBase64url: keys.privateKeyBase64url, + }); + const webDid = "did:web:example.com"; + const award = await createBadgeAwardRecord({ + recipientDid: TEST_RECIPIENT, + badgeRef: TEST_BADGE_REF, + organizerDid: webDid, + signingKey, + }); + // W3C DID doc format (verificationMethod array) + const result = await verifyBadgeAward({ + award, + resolveDidDoc: async () => ({ + verificationMethod: [ + { + id: "#attestations", + type: "Multikey", + publicKeyMultibase: keys.publicDidKey.replace("did:key:", ""), + }, + ], + }), + }); + expect(result.verified).toBe(true); + if (result.verified) { + expect(result.issuerDid).toBe(webDid); + } + }); + + it("verifies when signature.$bytes is Uint8Array (PDS CBOR round-trip)", async () => { + const keys = await generateSigningKeys(); + const signingKey = await loadSigningKey({ + privateKeyBase64url: keys.privateKeyBase64url, + }); + const award = await createBadgeAwardRecord({ + recipientDid: TEST_RECIPIENT, + badgeRef: TEST_BADGE_REF, + organizerDid: TEST_DID, + signingKey, + }); + + // Simulate PDS read-back: the PDS decodes the CBOR bytes tag and + // returns { $bytes: Uint8Array } instead of { $bytes: "base64str" }. 
+ const base64Str = (award.signatures[0]!.signature as { $bytes: string }) + .$bytes; + const decoded: BadgeAward = { + ...award, + signatures: [ + { + ...award.signatures[0]!, + signature: { + $bytes: fromString(base64Str, "base64") as unknown as string, + }, + }, + ], + }; + + const result = await verifyBadgeAward({ + award: decoded, + resolveDidDoc: mockResolver(keys.publicDidKey), + }); + expect(result.verified).toBe(true); + }); + + it("verifies when signature is a bare Uint8Array", async () => { + const keys = await generateSigningKeys(); + const signingKey = await loadSigningKey({ + privateKeyBase64url: keys.privateKeyBase64url, + }); + const award = await createBadgeAwardRecord({ + recipientDid: TEST_RECIPIENT, + badgeRef: TEST_BADGE_REF, + organizerDid: TEST_DID, + signingKey, + }); + + // Simulate a case where signature itself is a raw Uint8Array + const base64Str = (award.signatures[0]!.signature as { $bytes: string }) + .$bytes; + const decoded: BadgeAward = { + ...award, + signatures: [ + { + ...award.signatures[0]!, + signature: fromString(base64Str, "base64") as any, + }, + ], + }; + + const result = await verifyBadgeAward({ + award: decoded, + resolveDidDoc: mockResolver(keys.publicDidKey), + }); + expect(result.verified).toBe(true); + }); +}); + +describe("getBadgeRkey", () => { + it("is deterministic and different URIs produce different rkeys", () => { + const rkey1 = getBadgeRkey({ badgeDefinitionUri: "at://did:plc:a/col/1" }); + const rkey2 = getBadgeRkey({ badgeDefinitionUri: "at://did:plc:a/col/1" }); + const rkey3 = getBadgeRkey({ badgeDefinitionUri: "at://did:plc:a/col/2" }); + expect(rkey1).toBe(rkey2); + expect(rkey1).not.toBe(rkey3); + expect(rkey1).toHaveLength(13); + }); +}); diff --git a/atproto-badges/src/badge.ts b/atproto-badges/src/badge.ts new file mode 100644 index 0000000..af1aece --- /dev/null +++ b/atproto-badges/src/badge.ts @@ -0,0 +1,153 @@ +import type { AtpAgent } from "@atproto/api"; +import { XRPCError } from 
"@atproto/xrpc"; +import { createHash } from "node:crypto"; + +export const BADGE_COLLECTION = "community.lexicon.badge.award"; +export const BADGE_DEFINITION_COLLECTION = "community.lexicon.badge.definition"; + +/** + * Generate a deterministic record key for a badge award, so that concurrent + * requests to award the same badge don't create duplicates. + * + * When writing a badge award to a PDS, pass the returned rkey as the + * `rkey` parameter of `com.atproto.repo.putRecord`. + * + * Note: the rkey is derived from the badge definition URI only, without CID. + * If a badge definition is updated in place (same URI, new CID), awarding + * the updated badge to the same person will overwrite their existing award + * via `putRecord`. This is intentional: one award per badge per person. + */ +export function getBadgeRkey({ + badgeDefinitionUri, +}: { + badgeDefinitionUri: string; +}): string { + const hash = createHash("sha256") + .update(badgeDefinitionUri) + .digest("base64url"); + // First 13 chars of SHA-256 hash, valid as an ATProto record key. + return hash.slice(0, 13); +} + +/** + * Look up a badge award for a particular badge definition. + * + * Returns the record URI + its full value if found, or `null` if + * they haven't been awarded this badge yet. + * + * The lookup is by badge definition URI only (via `getBadgeRkey`). + * The returned `value` includes the badge ref, issued date, and + * signatures, so you can do your own CID matching or display logic. + * + * Authentication is not required: any agent that can read from the + * recipient's PDS will work. 
+ */ +export async function getExistingBadgeAward({ + agent, + did, + badgeDefinitionUri, +}: { + agent: AtpAgent; + did: string; + badgeDefinitionUri: string; +}): Promise<{ uri: string; value: Record } | null> { + const rkey = getBadgeRkey({ badgeDefinitionUri }); + try { + const { data } = await agent.com.atproto.repo.getRecord({ + repo: did, + collection: BADGE_COLLECTION, + rkey, + }); + return { uri: data.uri, value: data.value as Record }; + } catch (err) { + if (err instanceof XRPCError && err.error === "RecordNotFound") { + return null; + } + throw err; + } +} + +/** + * Check if a badge definition with the given name already exists on + * a PDS. + * + * Returns the definition's `uri` and `cid` if found, or `null` if it + * doesn't exist yet. + * + * Use this before `createBadgeDefinition` to avoid creating duplicates. + * + * Matching is byte-exact: `"Speaker"` and `"Speaker "` are different + * badges, as are `"JS"` and `"js"`. Normalize on the caller side + * (trim/casefold/NFC) if you want fuzzier dedup. + */ +export async function findExistingBadgeDefinition({ + agent, + did, + name, +}: { + agent: AtpAgent; + did: string; + name: string; +}): Promise<{ uri: string; cid: string } | null> { + let cursor: string | undefined; + + do { + const { data } = await agent.com.atproto.repo.listRecords({ + repo: did, + collection: BADGE_DEFINITION_COLLECTION, + limit: 100, + cursor, + }); + + const existing = data.records.find((rec) => { + const value = rec.value as Record; + return value.name === name; + }); + + if (existing) { + return { uri: existing.uri, cid: existing.cid }; + } + + cursor = data.cursor; + } while (cursor); + + return null; +} + +/** + * Create a new badge type on a PDS. + * + * A badge definition describes what the badge is (name + optional + * description). You only need to create it once. + * + * Save the returned `uri` and `cid` and pass them to + * `createBadgeAwardRecord` every time you award this badge to someone. 
+ */ +export async function createBadgeDefinition({ + agent, + did, + name, + description, +}: { + agent: AtpAgent; + did: string; + name: string; + description?: string; +}): Promise<{ uri: string; cid: string }> { + const record: Record = { + $type: BADGE_DEFINITION_COLLECTION, + name, + createdAt: new Date().toISOString(), + }; + if (description) { + record.description = description; + } + + const { data } = await agent.com.atproto.repo.createRecord({ + repo: did, + collection: BADGE_DEFINITION_COLLECTION, + record, + }); + + return { uri: data.uri, cid: data.cid }; +} diff --git a/atproto-badges/src/index.ts b/atproto-badges/src/index.ts new file mode 100644 index 0000000..18d126b --- /dev/null +++ b/atproto-badges/src/index.ts @@ -0,0 +1,30 @@ +export { + getRecordHash, + createRecordSignature, + createBadgeAwardRecord, + verifyBadgeAward, +} from "./signing.ts"; +export type { VerifyResult, VerifySuccess, VerifyFailure } from "./signing.ts"; + +export { generateSigningKeys, loadSigningKey } from "./keys.ts"; + +// Badge operations +export { + getBadgeRkey, + getExistingBadgeAward, + findExistingBadgeDefinition, + createBadgeDefinition, + BADGE_COLLECTION, + BADGE_DEFINITION_COLLECTION, +} from "./badge.ts"; + +export { addAttestationVerificationMethod } from "./plc.ts"; + +export type { + StrongRef, + AttestationSignature, + BadgeAward, + ClickingButtonAward, + BadgeVerifyResult, +} from "./types.ts"; +export type { GeneratedKeypair } from "./keys.ts"; diff --git a/atproto-badges/src/keys.ts b/atproto-badges/src/keys.ts new file mode 100644 index 0000000..ad88459 --- /dev/null +++ b/atproto-badges/src/keys.ts @@ -0,0 +1,50 @@ +import { P256Keypair } from "@atproto/crypto"; +import { toString, fromString } from "uint8arrays"; + +export interface GeneratedKeypair { + /** The private key as a base64url string. Store this in a secret (like an env + * var) or anyone with it can sign badges as you. 
*/ + privateKeyBase64url: string; + /** The public key as a `did:key:...` string. Add this to your DID document + * with `addAttestationVerificationMethod`. This lets people verify you're in + * possess of the private key. */ + publicDidKey: string; +} + +/** + * Create a new key pair for signing badges. + * + * Returns a private key (to be kept secret!) and a public key (to publish to a + * DID document so others can verify your signatures). + */ +export async function generateSigningKeys(): Promise { + const keypair = await P256Keypair.create({ exportable: true }); + const privateKeyBytes = await keypair.export(); + return { + privateKeyBase64url: toString(privateKeyBytes, "base64url"), + publicDidKey: keypair.did(), + }; +} + +/** + * Load a previously saved signing key so you can sign with it again. + * + * Pass the `privateKeyBase64url` string you got from + * `generateSigningKeys`. + * + * Returns a key you can pass to `createRecordSignature` or + * `createBadgeAwardRecord`. + */ +export async function loadSigningKey({ + privateKeyBase64url, +}: { + privateKeyBase64url: string; +}): Promise { + const bytes = fromString(privateKeyBase64url, "base64url"); + if (bytes.length !== 32) { + throw new Error( + `Invalid private key: expected 32 bytes, got ${bytes.length}`, + ); + } + return P256Keypair.import(bytes, { exportable: false }); +} diff --git a/atproto-badges/src/plc.ts b/atproto-badges/src/plc.ts new file mode 100644 index 0000000..f162b6a --- /dev/null +++ b/atproto-badges/src/plc.ts @@ -0,0 +1,72 @@ +import type { AtpAgent } from "@atproto/api"; + +/** + * Publish your public signing key to your DID document, so others can + * verify the badges you sign. + * + * This adds an `#attestations` verification method to your PLC + * document. + * + * Requires an email verification token: call + * `agent.com.atproto.identity.requestPlcOperationSignature()` first + * to trigger the email, then pass the token you receive as `token`. 
+ * + * `publicDidKey` is the `did:key:...` string from + * `generateSigningKeys`. + */ +export async function addAttestationVerificationMethod({ + agent, + did, + publicDidKey, + token, + plcDirectoryUrl, +}: { + agent: AtpAgent; + did: string; + publicDidKey: string; + token: string; + plcDirectoryUrl?: string; +}): Promise { + const plcUrl = plcDirectoryUrl ?? "https://plc.directory"; + // Fetch current PLC state to carry forward existing values + const logRes = await fetch(`${plcUrl}/${did}/log/audit`); + if (!logRes.ok) { + throw new Error(`Failed to fetch PLC log: ${logRes.status}`); + } + const operations = (await logRes.json()) as Array<{ + operation: { + verificationMethods?: Record; + rotationKeys?: string[]; + alsoKnownAs?: string[]; + services?: Record; + }; + }>; + const lastOp = operations[operations.length - 1]?.operation; + if (!lastOp) { + throw new Error("No PLC operations found"); + } + + // Add attestation key to existing verification methods + const currentMethods = lastOp.verificationMethods ?? 
{}; + const newMethods = { ...currentMethods, attestations: publicDidKey.trim() }; + + // Ask the PDS to sign a PLC operation with the updated methods + const { data: signedOp } = await agent.com.atproto.identity.signPlcOperation({ + token: token.trim(), + verificationMethods: newMethods, + rotationKeys: lastOp.rotationKeys, + alsoKnownAs: lastOp.alsoKnownAs, + services: lastOp.services, + }); + + // Submit the signed operation to the PLC directory + const submitRes = await fetch(`${plcUrl}/${did}`, { + method: "POST", + headers: { "Content-Type": "application/json" }, + body: JSON.stringify(signedOp.operation), + }); + if (!submitRes.ok) { + const body = await submitRes.text(); + throw new Error(`PLC submission failed (${submitRes.status}): ${body}`); + } +} diff --git a/atproto-badges/src/signing.ts b/atproto-badges/src/signing.ts new file mode 100644 index 0000000..b1d325b --- /dev/null +++ b/atproto-badges/src/signing.ts @@ -0,0 +1,252 @@ +import { + encode as dagCborEncode, + code as DAG_CBOR_CODEC, +} from "@ipld/dag-cbor"; +import { CID } from "multiformats/cid"; +import { sha256 } from "multiformats/hashes/sha2"; +import type { P256Keypair } from "@atproto/crypto"; +import { verifySignature } from "@atproto/crypto"; +import { DidResolver } from "@atproto/identity"; +import { toString, fromString } from "uint8arrays"; +import type { StrongRef, AttestationSignature, BadgeAward } from "./types.ts"; + +interface SignatureMetadata { + $type: "community.lexicon.attestations.signature"; + key: string; + repository?: string; + signature?: { $bytes: string }; +} + +/** + * Compute the hash (CID) of an ATProto record, that is the value that + * gets signed. + * + * You'd use this to: + * - Verify an existing signature by recomputing the hash and checking it + * - Let multiple signers each sign the same hash independently + * + * You don't need this for normal badge issuing: `createRecordSignature` + * and `createBadgeAwardRecord` call it internally. 
+ */ +export async function getRecordHash({ + record, + metadata, + repositoryDid, +}: { + record: Record; + metadata: SignatureMetadata; + repositoryDid: string; +}): Promise { + const { signatures: _, ...recordWithoutSigs } = record; + const { signature: _s, ...metadataClean } = metadata; + const sigPayload = { ...metadataClean, repository: repositoryDid }; + const attestationPayload = { ...recordWithoutSigs, $sig: sigPayload }; + + // Validate: CBOR cannot encode undefined, fail fast with a clear message + for (const [key, value] of Object.entries(attestationPayload)) { + if (value === undefined) { + throw new Error(`Cannot CBOR-encode record: field "${key}" is undefined`); + } + if ( + typeof value === "object" && + value !== null && + !ArrayBuffer.isView(value) + ) { + for (const [nested, nv] of Object.entries( + value as Record, + )) { + if (nv === undefined) { + throw new Error( + `Cannot CBOR-encode record: field "${key}.${nested}" is undefined`, + ); + } + } + } + } + + const encoded = dagCborEncode(attestationPayload); + const hash = await sha256.digest(encoded); + return CID.createV1(DAG_CBOR_CODEC, hash); +} + +/** + * Sign any ATProto record with your attestation key. + * + * This works on any record shape, NOT just badges. Use it when you're + * building your own record and just need a signature for it. + * + * If you want a ready-made badge award, use `createBadgeAwardRecord` + * instead. 
+ */ +export async function createRecordSignature({ + record, + organizerDid, + recipientDid, + signingKey, +}: { + record: Record; + organizerDid: string; + recipientDid: string; + signingKey: P256Keypair; +}): Promise { + if (!organizerDid.startsWith("did:")) { + throw new Error( + `Invalid organizerDid: must be a DID, got "${organizerDid}"`, + ); + } + + const keyId = `${organizerDid}#attestations`; + const metadata: SignatureMetadata = { + $type: "community.lexicon.attestations.signature", + key: keyId, + }; + + const cid = await getRecordHash({ + record, + metadata, + repositoryDid: recipientDid, + }); + const sig = await signingKey.sign(cid.bytes); + + return { + $type: "community.lexicon.attestations.signature", + key: keyId, + signature: { $bytes: toString(sig, "base64") }, + }; +} + +/** + * Build a signed badge award record for a recipient, ready to write to + * their PDS. + * + * Creates a `community.lexicon.badge.award` record with the right + * `$type`, a timestamp, and your attestation signature attached. + * + * Write it to the recipient's PDS with `com.atproto.repo.putRecord`. 
+ */ +export async function createBadgeAwardRecord({ + recipientDid, + badgeRef, + organizerDid, + signingKey, +}: { + recipientDid: string; + badgeRef: StrongRef; + organizerDid: string; + signingKey: P256Keypair; +}): Promise { + const record: BadgeAward = { + $type: "community.lexicon.badge.award", + did: recipientDid, + badge: badgeRef, + issued: new Date().toISOString(), + signatures: [], + }; + + const attestation = await createRecordSignature({ + record: record as unknown as Record, + organizerDid, + recipientDid, + signingKey, + }); + record.signatures.push(attestation); + + return record; +} + +export type VerifySuccess = { verified: true; issuerDid: string }; +export type VerifyFailure = { + verified: false; + error: string; + issuerDid?: string; +}; +export type VerifyResult = VerifySuccess | VerifyFailure; + +/** + * Check whether a badge award's signature is legit. + * + * Looks up the issuer's DID document, finds their `#attestations` key, + * recomputes the record hash, and checks the signature against it. + * + * Pass `resolveDidDoc` if you want to supply your own DID resolution + * (handy for tests or `did:web` issuers). Otherwise it uses the PLC + * directory. 
+ */ +export async function verifyBadgeAward({ + award, + plcDirectoryUrl, + resolveDidDoc, +}: { + award: BadgeAward; + plcDirectoryUrl?: string; + resolveDidDoc?: (did: string) => Promise; +}): Promise { + if (!award.signatures.length) { + return { verified: false, error: "no_signatures" }; + } + + const sig = award.signatures.find((s) => /^did:.+#attestations$/.test(s.key)); + if (!sig) { + return { verified: false, error: "invalid_key_id" }; + } + + const issuerDid = sig.key.replace(/#attestations$/, ""); + + // Resolve DID document + let didDoc: any; + try { + if (resolveDidDoc) { + didDoc = await resolveDidDoc(issuerDid); + } else { + const resolver = new DidResolver({ plcUrl: plcDirectoryUrl }); + didDoc = await resolver.resolveNoCheck(issuerDid); + } + } catch { + return { verified: false, error: "plc_fetch_failed", issuerDid }; + } + + // Extract #attestations key to handle both PLC format (flat object) + // and W3C DID doc format (verificationMethod array) + const attestationKey = didDoc?.verificationMethod?.find( + (vm: any) => + vm.id === `${issuerDid}#attestations` || vm.id === "#attestations", + ); + const didKey = attestationKey?.publicKeyMultibase + ? `did:key:${attestationKey.publicKeyMultibase}` + : didDoc?.verificationMethods?.attestations; + + if (!didKey) { + return { verified: false, error: "no_attestation_key", issuerDid }; + } + + // Reconstruct hash + const metadata: SignatureMetadata = { + $type: "community.lexicon.attestations.signature", + key: sig.key, + }; + const cid = await getRecordHash({ + record: award as unknown as Record, + metadata, + repositoryDid: award.did, + }); + + // Normalize signature bytes: after a CBOR round-trip (read back from PDS), + // the signature may be a raw Uint8Array instead of { $bytes: "base64..." }. 
+ let sigBytes: Uint8Array; + if (sig.signature instanceof Uint8Array) { + sigBytes = sig.signature; + } else if (sig.signature.$bytes instanceof Uint8Array) { + sigBytes = sig.signature.$bytes; + } else { + sigBytes = fromString(sig.signature.$bytes, "base64"); + } + + try { + const valid = await verifySignature(didKey, cid.bytes, sigBytes); + return valid + ? { verified: true, issuerDid } + : { verified: false, error: "signature_invalid", issuerDid }; + } catch { + return { verified: false, error: "signature_invalid", issuerDid }; + } +} diff --git a/atproto-badges/src/types.ts b/atproto-badges/src/types.ts new file mode 100644 index 0000000..38eef95 --- /dev/null +++ b/atproto-badges/src/types.ts @@ -0,0 +1,48 @@ +export interface StrongRef { + uri: string; + cid: string; +} + +/** + * Signature as written to a PDS (JSON: `{ $bytes: "base64..." }`). + * After a CBOR round-trip (read back from PDS via getRecord/listRecords), + * the `signature` field may be a raw `Uint8Array` instead. + */ +export interface AttestationSignature { + $type: "community.lexicon.attestations.signature"; + key: string; + signature: { $bytes: string | Uint8Array } | Uint8Array; +} + +export interface BadgeAward { + $type: "community.lexicon.badge.award"; + did: string; + badge: StrongRef; + issued: string; + signatures: AttestationSignature[]; +} + +/** + * Hydrated view-model of a badge award, suitable for rendering in a UI. + * Distinct from `BadgeAward`, which is the on-network record shape. 
+ */ +export interface ClickingButtonAward { + uri: string; + badgeDefinitionUri: string | undefined; + issuedAt: string | undefined; + pdsUrl: string | undefined; + badgeName: string | undefined; + badgeDescription: string | undefined; +} + +export type BadgeVerifyResult = + | { + verified: true; + issuerDid?: string; + issuerHandle?: string; + issuerDisplayName?: string; + } + | { + verified: false; + error: string; + }; diff --git a/atproto-badges/tsconfig.json b/atproto-badges/tsconfig.json new file mode 100644 index 0000000..95cc5b5 --- /dev/null +++ b/atproto-badges/tsconfig.json @@ -0,0 +1,23 @@ +{ + "compilerOptions": { + /* Base Options: */ + "esModuleInterop": true, + "skipLibCheck": true, + "target": "es2022", + "allowJs": true, + "resolveJsonModule": true, + "moduleDetection": "force", + "isolatedModules": true, + /* Strictness */ + "strict": true, + "noUncheckedIndexedAccess": true, + /* If transpiling with TypeScript: */ + "moduleResolution": "NodeNext", + "module": "NodeNext", + "outDir": "dist", + "sourceMap": true, + "declaration": true, + "allowImportingTsExtensions": true, + "noEmit": true + } +} diff --git a/atproto-badges/tsdown.config.ts b/atproto-badges/tsdown.config.ts new file mode 100644 index 0000000..33279de --- /dev/null +++ b/atproto-badges/tsdown.config.ts @@ -0,0 +1,11 @@ +import { defineConfig } from "tsdown"; + +export default defineConfig({ + entry: ["src/index.ts"], + unbundle: true, + fixedExtension: false, + dts: { + sideEffects: true, + }, + clean: true, +}); diff --git a/atproto-badges/vitest.config.ts b/atproto-badges/vitest.config.ts new file mode 100644 index 0000000..9d5d30e --- /dev/null +++ b/atproto-badges/vitest.config.ts @@ -0,0 +1,2 @@ +import { defineConfig } from "vitest/config"; +export default defineConfig({ test: {} }); diff --git a/package-lock.json b/package-lock.json index 60d6d26..810af84 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1010,6 +1010,369 @@ "tsdown": "^0.14.1" } }, + 
"atproto-badges": { + "name": "@fujocoded/atproto-badges", + "version": "0.1.0", + "license": "MIT", + "dependencies": { + "@atproto/api": "^0.17.3", + "@atproto/crypto": "^0.4.3", + "@atproto/identity": "^0.4.3", + "@atproto/xrpc": "^0.7.7", + "@ipld/dag-cbor": "^9.2.2", + "multiformats": "^13.3.1", + "uint8arrays": "^5.1.0" + }, + "devDependencies": { + "tsdown": "^0.17.2", + "vitest": "^3.1.1" + } + }, + "atproto-badges/node_modules/@oxc-project/types": { + "version": "0.101.0", + "resolved": "https://registry.npmjs.org/@oxc-project/types/-/types-0.101.0.tgz", + "integrity": "sha512-nuFhqlUzJX+gVIPPfuE6xurd4lST3mdcWOhyK/rZO0B9XWMKm79SuszIQEnSMmmDhq1DC8WWVYGVd+6F93o1gQ==", + "dev": true, + "license": "MIT", + "funding": { + "url": "https://github.com/sponsors/Boshen" + } + }, + "atproto-badges/node_modules/@rolldown/binding-android-arm64": { + "version": "1.0.0-beta.53", + "resolved": "https://registry.npmjs.org/@rolldown/binding-android-arm64/-/binding-android-arm64-1.0.0-beta.53.tgz", + "integrity": "sha512-Ok9V8o7o6YfSdTTYA/uHH30r3YtOxLD6G3wih/U9DO0ucBBFq8WPt/DslU53OgfteLRHITZny9N/qCUxMf9kjQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "atproto-badges/node_modules/@rolldown/binding-darwin-arm64": { + "version": "1.0.0-beta.53", + "resolved": "https://registry.npmjs.org/@rolldown/binding-darwin-arm64/-/binding-darwin-arm64-1.0.0-beta.53.tgz", + "integrity": "sha512-yIsKqMz0CtRnVa6x3Pa+mzTihr4Ty+Z6HfPbZ7RVbk1Uxnco4+CUn7Qbm/5SBol1JD/7nvY8rphAgyAi7Lj6Vg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "atproto-badges/node_modules/@rolldown/binding-darwin-x64": { + "version": "1.0.0-beta.53", + "resolved": "https://registry.npmjs.org/@rolldown/binding-darwin-x64/-/binding-darwin-x64-1.0.0-beta.53.tgz", + "integrity": 
"sha512-GTXe+mxsCGUnJOFMhfGWmefP7Q9TpYUseHvhAhr21nCTgdS8jPsvirb0tJwM3lN0/u/cg7bpFNa16fQrjKrCjQ==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "atproto-badges/node_modules/@rolldown/binding-freebsd-x64": { + "version": "1.0.0-beta.53", + "resolved": "https://registry.npmjs.org/@rolldown/binding-freebsd-x64/-/binding-freebsd-x64-1.0.0-beta.53.tgz", + "integrity": "sha512-9Tmp7bBvKqyDkMcL4e089pH3RsjD3SUungjmqWtyhNOxoQMh0fSmINTyYV8KXtE+JkxYMPWvnEt+/mfpVCkk8w==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "atproto-badges/node_modules/@rolldown/binding-linux-arm-gnueabihf": { + "version": "1.0.0-beta.53", + "resolved": "https://registry.npmjs.org/@rolldown/binding-linux-arm-gnueabihf/-/binding-linux-arm-gnueabihf-1.0.0-beta.53.tgz", + "integrity": "sha512-a1y5fiB0iovuzdbjUxa7+Zcvgv+mTmlGGC4XydVIsyl48eoxgaYkA3l9079hyTyhECsPq+mbr0gVQsFU11OJAQ==", + "cpu": [ + "arm" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "atproto-badges/node_modules/@rolldown/binding-linux-arm64-gnu": { + "version": "1.0.0-beta.53", + "resolved": "https://registry.npmjs.org/@rolldown/binding-linux-arm64-gnu/-/binding-linux-arm64-gnu-1.0.0-beta.53.tgz", + "integrity": "sha512-bpIGX+ov9PhJYV+wHNXl9rzq4F0QvILiURn0y0oepbQx+7stmQsKA0DhPGwmhfvF856wq+gbM8L92SAa/CBcLg==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "atproto-badges/node_modules/@rolldown/binding-linux-arm64-musl": { + "version": "1.0.0-beta.53", + "resolved": "https://registry.npmjs.org/@rolldown/binding-linux-arm64-musl/-/binding-linux-arm64-musl-1.0.0-beta.53.tgz", + "integrity": 
"sha512-bGe5EBB8FVjHBR1mOLOPEFg1Lp3//7geqWkU5NIhxe+yH0W8FVrQ6WRYOap4SUTKdklD/dC4qPLREkMMQ855FA==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "atproto-badges/node_modules/@rolldown/binding-linux-x64-gnu": { + "version": "1.0.0-beta.53", + "resolved": "https://registry.npmjs.org/@rolldown/binding-linux-x64-gnu/-/binding-linux-x64-gnu-1.0.0-beta.53.tgz", + "integrity": "sha512-qL+63WKVQs1CMvFedlPt0U9PiEKJOAL/bsHMKUDS6Vp2Q+YAv/QLPu8rcvkfIMvQ0FPU2WL0aX4eWwF6e/GAnA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "atproto-badges/node_modules/@rolldown/binding-linux-x64-musl": { + "version": "1.0.0-beta.53", + "resolved": "https://registry.npmjs.org/@rolldown/binding-linux-x64-musl/-/binding-linux-x64-musl-1.0.0-beta.53.tgz", + "integrity": "sha512-VGl9JIGjoJh3H8Mb+7xnVqODajBmrdOOb9lxWXdcmxyI+zjB2sux69br0hZJDTyLJfvBoYm439zPACYbCjGRmw==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "atproto-badges/node_modules/@rolldown/binding-openharmony-arm64": { + "version": "1.0.0-beta.53", + "resolved": "https://registry.npmjs.org/@rolldown/binding-openharmony-arm64/-/binding-openharmony-arm64-1.0.0-beta.53.tgz", + "integrity": "sha512-B4iIserJXuSnNzA5xBLFUIjTfhNy7d9sq4FUMQY3GhQWGVhS2RWWzzDnkSU6MUt7/aHUrep0CdQfXUJI9D3W7A==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "openharmony" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "atproto-badges/node_modules/@rolldown/binding-wasm32-wasi": { + "version": "1.0.0-beta.53", + "resolved": "https://registry.npmjs.org/@rolldown/binding-wasm32-wasi/-/binding-wasm32-wasi-1.0.0-beta.53.tgz", + "integrity": 
"sha512-BUjAEgpABEJXilGq/BPh7jeU3WAJ5o15c1ZEgHaDWSz3LB881LQZnbNJHmUiM4d1JQWMYYyR1Y490IBHi2FPJg==", + "cpu": [ + "wasm32" + ], + "dev": true, + "license": "MIT", + "optional": true, + "dependencies": { + "@napi-rs/wasm-runtime": "^1.1.0" + }, + "engines": { + "node": ">=14.0.0" + } + }, + "atproto-badges/node_modules/@rolldown/binding-win32-arm64-msvc": { + "version": "1.0.0-beta.53", + "resolved": "https://registry.npmjs.org/@rolldown/binding-win32-arm64-msvc/-/binding-win32-arm64-msvc-1.0.0-beta.53.tgz", + "integrity": "sha512-s27uU7tpCWSjHBnxyVXHt3rMrQdJq5MHNv3BzsewCIroIw3DJFjMH1dzCPPMUFxnh1r52Nf9IJ/eWp6LDoyGcw==", + "cpu": [ + "arm64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "atproto-badges/node_modules/@rolldown/binding-win32-x64-msvc": { + "version": "1.0.0-beta.53", + "resolved": "https://registry.npmjs.org/@rolldown/binding-win32-x64-msvc/-/binding-win32-x64-msvc-1.0.0-beta.53.tgz", + "integrity": "sha512-cjWL/USPJ1g0en2htb4ssMjIycc36RvdQAx1WlXnS6DpULswiUTVXPDesTifSKYSyvx24E0YqQkEm0K/M2Z/AA==", + "cpu": [ + "x64" + ], + "dev": true, + "license": "MIT", + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": "^20.19.0 || >=22.12.0" + } + }, + "atproto-badges/node_modules/@rolldown/pluginutils": { + "version": "1.0.0-beta.53", + "resolved": "https://registry.npmjs.org/@rolldown/pluginutils/-/pluginutils-1.0.0-beta.53.tgz", + "integrity": "sha512-vENRlFU4YbrwVqNDZ7fLvy+JR1CRkyr01jhSiDpE1u6py3OMzQfztQU2jxykW3ALNxO4kSlqIDeYyD0Y9RcQeQ==", + "dev": true, + "license": "MIT" + }, + "atproto-badges/node_modules/multiformats": { + "version": "13.4.2", + "resolved": "https://registry.npmjs.org/multiformats/-/multiformats-13.4.2.tgz", + "integrity": "sha512-eh6eHCrRi1+POZ3dA+Dq1C6jhP1GNtr9CRINMb67OKzqW9I5DUuZM/3jLPlzhgpGeiNUlEGEbkCYChXMCc/8DQ==", + "license": "Apache-2.0 OR MIT" + }, + "atproto-badges/node_modules/rolldown": { + "version": 
"1.0.0-beta.53", + "resolved": "https://registry.npmjs.org/rolldown/-/rolldown-1.0.0-beta.53.tgz", + "integrity": "sha512-Qd9c2p0XKZdgT5AYd+KgAMggJ8ZmCs3JnS9PTMWkyUfteKlfmKtxJbWTHkVakxwXs1Ub7jrRYVeFeF7N0sQxyw==", + "dev": true, + "license": "MIT", + "dependencies": { + "@oxc-project/types": "=0.101.0", + "@rolldown/pluginutils": "1.0.0-beta.53" + }, + "bin": { + "rolldown": "bin/cli.mjs" + }, + "engines": { + "node": "^20.19.0 || >=22.12.0" + }, + "optionalDependencies": { + "@rolldown/binding-android-arm64": "1.0.0-beta.53", + "@rolldown/binding-darwin-arm64": "1.0.0-beta.53", + "@rolldown/binding-darwin-x64": "1.0.0-beta.53", + "@rolldown/binding-freebsd-x64": "1.0.0-beta.53", + "@rolldown/binding-linux-arm-gnueabihf": "1.0.0-beta.53", + "@rolldown/binding-linux-arm64-gnu": "1.0.0-beta.53", + "@rolldown/binding-linux-arm64-musl": "1.0.0-beta.53", + "@rolldown/binding-linux-x64-gnu": "1.0.0-beta.53", + "@rolldown/binding-linux-x64-musl": "1.0.0-beta.53", + "@rolldown/binding-openharmony-arm64": "1.0.0-beta.53", + "@rolldown/binding-wasm32-wasi": "1.0.0-beta.53", + "@rolldown/binding-win32-arm64-msvc": "1.0.0-beta.53", + "@rolldown/binding-win32-x64-msvc": "1.0.0-beta.53" + } + }, + "atproto-badges/node_modules/tsdown": { + "version": "0.17.2", + "resolved": "https://registry.npmjs.org/tsdown/-/tsdown-0.17.2.tgz", + "integrity": "sha512-SuU+0CWm/95KfXqojHTVuwcouIsdn7HpYcwDyOdKktJi285NxKwysjFUaxYLxpCNqqPvcFvokXLO4dZThRwzkw==", + "dev": true, + "license": "MIT", + "dependencies": { + "ansis": "^4.2.0", + "cac": "^6.7.14", + "empathic": "^2.0.0", + "hookable": "^5.5.3", + "import-without-cache": "^0.2.2", + "obug": "^2.1.1", + "rolldown": "1.0.0-beta.53", + "rolldown-plugin-dts": "^0.18.3", + "semver": "^7.7.3", + "tinyexec": "^1.0.2", + "tinyglobby": "^0.2.15", + "tree-kill": "^1.2.2", + "unconfig-core": "^7.4.2", + "unrun": "^0.2.19" + }, + "bin": { + "tsdown": "dist/run.mjs" + }, + "engines": { + "node": ">=20.19.0" + }, + "funding": { + "url": 
"https://github.com/sponsors/sxzz" + }, + "peerDependencies": { + "@arethetypeswrong/core": "^0.18.1", + "@vitejs/devtools": "^0.0.0-alpha.19", + "publint": "^0.3.0", + "typescript": "^5.0.0", + "unplugin-lightningcss": "^0.4.0", + "unplugin-unused": "^0.5.0" + }, + "peerDependenciesMeta": { + "@arethetypeswrong/core": { + "optional": true + }, + "@vitejs/devtools": { + "optional": true + }, + "publint": { + "optional": true + }, + "typescript": { + "optional": true + }, + "unplugin-lightningcss": { + "optional": true + }, + "unplugin-unused": { + "optional": true + } + } + }, + "atproto-badges/node_modules/uint8arrays": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/uint8arrays/-/uint8arrays-5.1.1.tgz", + "integrity": "sha512-9muQwa4wZG4dKi9gMAIBtnk2Pw87SRpvWTH6lOGm19V2Uqxr4uomUf2PGqPnWc+qs06sN8owUU4jfcoWOcfwVQ==", + "license": "Apache-2.0 OR MIT", + "dependencies": { + "multiformats": "^13.0.0" + } + }, "expressive-code-caption": { "name": "@fujocoded/expressive-code-caption", "version": "0.0.3", @@ -2897,6 +3260,10 @@ "resolved": "astro-smooth-actions", "link": true }, + "node_modules/@fujocoded/atproto-badges": { + "resolved": "atproto-badges", + "link": true + }, "node_modules/@fujocoded/authproto": { "resolved": "astro-authproto", "link": true @@ -3508,6 +3875,26 @@ } } }, + "node_modules/@ipld/dag-cbor": { + "version": "9.2.6", + "resolved": "https://registry.npmjs.org/@ipld/dag-cbor/-/dag-cbor-9.2.6.tgz", + "integrity": "sha512-vZGJ84Em2jCVAS7td5gc08YTVN8/s4bTQxg4pU77PAXDAR/yLYOthOvkCu01fdl1lrZwz47RdUterxdkrs3p5A==", + "license": "Apache-2.0 OR MIT", + "dependencies": { + "cborg": "^5.0.1", + "multiformats": "^13.1.0" + }, + "engines": { + "node": ">=16.0.0", + "npm": ">=7.0.0" + } + }, + "node_modules/@ipld/dag-cbor/node_modules/multiformats": { + "version": "13.4.2", + "resolved": "https://registry.npmjs.org/multiformats/-/multiformats-13.4.2.tgz", + "integrity": 
"sha512-eh6eHCrRi1+POZ3dA+Dq1C6jhP1GNtr9CRINMb67OKzqW9I5DUuZM/3jLPlzhgpGeiNUlEGEbkCYChXMCc/8DQ==", + "license": "Apache-2.0 OR MIT" + }, "node_modules/@jridgewell/gen-mapping": { "version": "0.3.13", "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.13.tgz", @@ -5773,6 +6160,7 @@ "os": [ "android" ], + "peer": true, "engines": { "node": "^20.19.0 || >=22.12.0" } @@ -5790,6 +6178,7 @@ "os": [ "darwin" ], + "peer": true, "engines": { "node": "^20.19.0 || >=22.12.0" } @@ -5807,6 +6196,7 @@ "os": [ "darwin" ], + "peer": true, "engines": { "node": "^20.19.0 || >=22.12.0" } @@ -5824,6 +6214,7 @@ "os": [ "freebsd" ], + "peer": true, "engines": { "node": "^20.19.0 || >=22.12.0" } @@ -5841,6 +6232,7 @@ "os": [ "linux" ], + "peer": true, "engines": { "node": "^20.19.0 || >=22.12.0" } @@ -5858,6 +6250,7 @@ "os": [ "linux" ], + "peer": true, "engines": { "node": "^20.19.0 || >=22.12.0" } @@ -5875,6 +6268,7 @@ "os": [ "linux" ], + "peer": true, "engines": { "node": "^20.19.0 || >=22.12.0" } @@ -5892,6 +6286,7 @@ "os": [ "linux" ], + "peer": true, "engines": { "node": "^20.19.0 || >=22.12.0" } @@ -5909,6 +6304,7 @@ "os": [ "linux" ], + "peer": true, "engines": { "node": "^20.19.0 || >=22.12.0" } @@ -5926,6 +6322,7 @@ "os": [ "openharmony" ], + "peer": true, "engines": { "node": "^20.19.0 || >=22.12.0" } @@ -5940,6 +6337,7 @@ "dev": true, "license": "MIT", "optional": true, + "peer": true, "dependencies": { "@napi-rs/wasm-runtime": "^1.1.0" }, @@ -5960,6 +6358,7 @@ "os": [ "win32" ], + "peer": true, "engines": { "node": "^20.19.0 || >=22.12.0" } @@ -5977,6 +6376,7 @@ "os": [ "win32" ], + "peer": true, "engines": { "node": "^20.19.0 || >=22.12.0" } @@ -6591,13 +6991,6 @@ "dev": true, "license": "MIT" }, - "node_modules/@types/title": { - "version": "3.4.3", - "resolved": "https://registry.npmjs.org/@types/title/-/title-3.4.3.tgz", - "integrity": "sha512-mjupLOb4kwUuoUFokkacy/VMRVBH2qtqZ5AX7K7iha6+iKIkX80n/Y4EoNVEVRmer8dYJU/ry+fppUaDFVQh7Q==", - 
"dev": true, - "license": "MIT" - }, "node_modules/@types/unist": { "version": "3.0.3", "resolved": "https://registry.npmjs.org/@types/unist/-/unist-3.0.3.tgz", @@ -6816,12 +7209,6 @@ "node": ">= 8" } }, - "node_modules/arg": { - "version": "5.0.2", - "resolved": "https://registry.npmjs.org/arg/-/arg-5.0.2.tgz", - "integrity": "sha512-PYjyFOLKQ9y57JvQ6QLo8dAgNqswh8M1RMJYdQduT6xbWSgK36P/Z/v+p888pM69jMMfS8Xd8F6I1kQ/I9HUGg==", - "license": "MIT" - }, "node_modules/argparse": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", @@ -10080,6 +10467,15 @@ "integrity": "sha512-yWq+R3U3jE+coOeEb3a3GgE2j/0MMiDKM/QpLb6h9ihf5fGY9UXtvK9o4vNqjWXoZz7/3EaSVU3IX53TvFFUOw==", "license": "MIT" }, + "node_modules/cborg": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/cborg/-/cborg-5.1.1.tgz", + "integrity": "sha512-BDbSRIp6XrQXkTc7g+DN0RB9RrDPTUfals2ecWUlt3juPLjbAvy/V72mJcXY0Ehu0Dq/3WpNCOCT68HUTbW+lw==", + "license": "Apache-2.0", + "bin": { + "cborg": "lib/bin.js" + } + }, "node_modules/ccount": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/ccount/-/ccount-2.0.1.tgz", @@ -10320,23 +10716,6 @@ "node": ">= 12" } }, - "node_modules/clipboardy": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/clipboardy/-/clipboardy-4.0.0.tgz", - "integrity": "sha512-5mOlNS0mhX0707P2I0aZ2V/cmHUEO/fL7VFLqszkhUsxt7RwnmrInf/eEQKlf5GzvYeHIjT+Ov1HRfNmymlG0w==", - "license": "MIT", - "dependencies": { - "execa": "^8.0.1", - "is-wsl": "^3.1.0", - "is64bit": "^2.0.0" - }, - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/cliui": { "version": "8.0.1", "resolved": "https://registry.npmjs.org/cliui/-/cliui-8.0.1.tgz", @@ -11662,44 +12041,6 @@ "integrity": "sha512-GWkBvjiSZK87ELrYOSESUYeVIc9mvLLf/nXalMOS5dYrgZq9o5OVkbZAVM06CVxYsCwH9BDZFPlQTlPA1j4ahA==", "license": "MIT" }, - "node_modules/execa": { - "version": "8.0.1", - "resolved": 
"https://registry.npmjs.org/execa/-/execa-8.0.1.tgz", - "integrity": "sha512-VyhnebXciFV2DESc+p6B+y0LjSm0krU4OgJN44qFAhBY0TJ+1V61tYD2+wHusZ6F9n5K+vl8k0sTy7PEfV4qpg==", - "license": "MIT", - "dependencies": { - "cross-spawn": "^7.0.3", - "get-stream": "^8.0.1", - "human-signals": "^5.0.0", - "is-stream": "^3.0.0", - "merge-stream": "^2.0.0", - "npm-run-path": "^5.1.0", - "onetime": "^6.0.0", - "signal-exit": "^4.1.0", - "strip-final-newline": "^3.0.0" - }, - "engines": { - "node": ">=16.17" - }, - "funding": { - "url": "https://github.com/sindresorhus/execa?sponsor=1" - } - }, - "node_modules/execa/node_modules/onetime": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/onetime/-/onetime-6.0.0.tgz", - "integrity": "sha512-1FlR+gjXK7X+AsAHso35MnyN5KqGwJRi/31ft6x0M194ht7S+rWAvd7PHss9xSKMzE0asv1pyIHaJYq+BbacAQ==", - "license": "MIT", - "dependencies": { - "mimic-fn": "^4.0.0" - }, - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/expect-type": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/expect-type/-/expect-type-1.3.0.tgz", @@ -12013,18 +12354,6 @@ "node": ">=6" } }, - "node_modules/get-stream": { - "version": "8.0.1", - "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-8.0.1.tgz", - "integrity": "sha512-VaUJspBffn/LMCJVoMvSAdmscJyS1auj5Zulnn5UoYcY531UWmdwhRWkcGKnGU93m5HSXP9LP2usOryrBtQowA==", - "license": "MIT", - "engines": { - "node": ">=16" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/get-tsconfig": { "version": "4.13.0", "resolved": "https://registry.npmjs.org/get-tsconfig/-/get-tsconfig-4.13.0.tgz", @@ -12547,15 +12876,6 @@ "human-id": "dist/cli.js" } }, - "node_modules/human-signals": { - "version": "5.0.0", - "resolved": "https://registry.npmjs.org/human-signals/-/human-signals-5.0.0.tgz", - "integrity": 
"sha512-AXcZb6vzzrFAUE61HnN4mpLqd/cSIwNQjtNWR0euPm6y0iqx3G4gOXaIDdtdDwZmhwe82LA6+zinmW4UBWVePQ==", - "license": "Apache-2.0", - "engines": { - "node": ">=16.17.0" - } - }, "node_modules/hyperdyperid": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/hyperdyperid/-/hyperdyperid-1.2.0.tgz", @@ -12845,18 +13165,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/is-stream": { - "version": "3.0.0", - "resolved": "https://registry.npmjs.org/is-stream/-/is-stream-3.0.0.tgz", - "integrity": "sha512-LnQR4bZ9IADDRSkvpqMGvt/tEJWclzklNgSw48V5EAaAeDd6qGvN8ei6k5p0tvxSR171VmGyHuTiAOfxAbr8kA==", - "license": "MIT", - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/is-subdir": { "version": "1.2.0", "resolved": "https://registry.npmjs.org/is-subdir/-/is-subdir-1.2.0.tgz", @@ -12908,21 +13216,6 @@ "url": "https://github.com/sponsors/sindresorhus" } }, - "node_modules/is64bit": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/is64bit/-/is64bit-2.0.0.tgz", - "integrity": "sha512-jv+8jaWCl0g2lSBkNSVXdzfBA0npK1HGC2KtWM9FumFRoGS94g3NbCCLVnCYHLjp4GrW2KZeeSTMo5ddtznmGw==", - "license": "MIT", - "dependencies": { - "system-architecture": "^0.1.0" - }, - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/isexe": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", @@ -13659,12 +13952,6 @@ "url": "https://github.com/sponsors/streamich" } }, - "node_modules/merge-stream": { - "version": "2.0.0", - "resolved": "https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", - "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==", - "license": "MIT" - }, "node_modules/merge2": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", @@ 
-14960,18 +15247,6 @@ "url": "https://github.com/sponsors/jonschlinkert" } }, - "node_modules/mimic-fn": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/mimic-fn/-/mimic-fn-4.0.0.tgz", - "integrity": "sha512-vqiC06CuhBTUdZH+RYl8sFrL096vA45Ok5ISO6sE/Mr1jRbGH4Csnhi8f3wKVl7x8mO4Au7Ir9D3Oyv1VYMFJw==", - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/mimic-function": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/mimic-function/-/mimic-function-5.0.1.tgz", @@ -15238,33 +15513,6 @@ "node": ">=0.10.0" } }, - "node_modules/npm-run-path": { - "version": "5.3.0", - "resolved": "https://registry.npmjs.org/npm-run-path/-/npm-run-path-5.3.0.tgz", - "integrity": "sha512-ppwTtiJZq0O/ai0z7yfudtBpWIoxM8yE6nHi1X47eFR2EWORqfbu6CnPlNsjeN683eT0qG6H/Pyf9fCcvjnnnQ==", - "license": "MIT", - "dependencies": { - "path-key": "^4.0.0" - }, - "engines": { - "node": "^12.20.0 || ^14.13.1 || >=16.0.0" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, - "node_modules/npm-run-path/node_modules/path-key": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/path-key/-/path-key-4.0.0.tgz", - "integrity": "sha512-haREypq7xkM7ErfgIyA0z+Bj4AGKlMSdlQE2jvJo6huWD1EdkKYV+G/T4nq0YEF2vgTT8kqMFKo1uHn950r4SQ==", - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/nth-check": { "version": "2.1.1", "resolved": "https://registry.npmjs.org/nth-check/-/nth-check-2.1.1.tgz", @@ -17145,6 +17393,7 @@ "version": "4.1.0", "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", + "dev": true, "license": "ISC", "engines": { "node": ">=14" @@ -17355,18 +17604,6 @@ "node": ">=0.10.0" } }, - "node_modules/strip-final-newline": { 
- "version": "3.0.0", - "resolved": "https://registry.npmjs.org/strip-final-newline/-/strip-final-newline-3.0.0.tgz", - "integrity": "sha512-dOESqjYr96iWYylGObzd39EuNTa5VJxyvVAEm5Jnh7KGo75V43Hk1odPQkNDyXNmUR6k+gEiDVXnjB8HJ3crXw==", - "license": "MIT", - "engines": { - "node": ">=12" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/strip-literal": { "version": "3.1.0", "resolved": "https://registry.npmjs.org/strip-literal/-/strip-literal-3.1.0.tgz", @@ -17470,18 +17707,6 @@ "node": ">=16" } }, - "node_modules/system-architecture": { - "version": "0.1.0", - "resolved": "https://registry.npmjs.org/system-architecture/-/system-architecture-0.1.0.tgz", - "integrity": "sha512-ulAk51I9UVUyJgxlv9M6lFot2WP3e7t8Kz9+IS6D4rVba1tR9kON+Ey69f+1R4Q8cd45Lod6a4IcJIxnzGc/zA==", - "license": "MIT", - "engines": { - "node": ">=18" - }, - "funding": { - "url": "https://github.com/sponsors/sindresorhus" - } - }, "node_modules/tagged-tag": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/tagged-tag/-/tagged-tag-1.0.0.tgz", @@ -17645,20 +17870,6 @@ "node": ">=14.0.0" } }, - "node_modules/title": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/title/-/title-4.0.1.tgz", - "integrity": "sha512-xRnPkJx9nvE5MF6LkB5e8QJjE2FW8269wTu/LQdf7zZqBgPly0QJPf/CWAo7srj5so4yXfoLEdCFgurlpi47zg==", - "license": "MIT", - "dependencies": { - "arg": "^5.0.0", - "chalk": "^5.0.0", - "clipboardy": "^4.0.0" - }, - "bin": { - "title": "dist/esm/bin.js" - } - }, "node_modules/tlds": { "version": "1.261.0", "resolved": "https://registry.npmjs.org/tlds/-/tlds-1.261.0.tgz", @@ -19727,11 +19938,9 @@ "version": "0.0.13", "license": "MIT", "dependencies": { - "title": "^4.0.1", "unist-util-visit": "^5.0.0" }, "devDependencies": { - "@types/title": "^3.4.3", "mdast": "^3.0.0", "mdast-util-mdx-jsx": "^3.1.2", "remark": "^15.0.1", diff --git a/remark-capitalize-titles/index.ts b/remark-capitalize-titles/index.ts index 7b5bcc7..269000a 100644 --- 
a/remark-capitalize-titles/index.ts +++ b/remark-capitalize-titles/index.ts @@ -1,4 +1,3 @@ -import { default as libraryTitle } from "title"; import { visit } from "unist-util-visit"; import type { Plugin } from "unified"; @@ -7,59 +6,296 @@ import type { MdxJsxFlowElement } from "mdast-util-mdx-jsx"; import { DEFAULT_CAPITALIZATIONS as DEFAULT_CAPITALIZATIONS_ } from "./capitalizations.ts"; -// Astro's MDX integration runs remark-smartypants before user plugins, so -// straight quotes arrive here as curly. The `title` library's regex only lists -// straight quotes as punctuation, so curly quotes would otherwise prevent the -// next word from being capitalized. -const CURLY_TO_STRAIGHT: Record = { - "\u201C": '"', - "\u201D": '"', - "\u2018": "'", - "\u2019": "'", +// Articles, conjunctions, and prepositions that stay lowercase mid-title +// (AP-style title casing). Sourced from the same set as `vercel/title`. +const SMALL_WORDS = new Set([ + "a", + "an", + "the", + "aboard", + "about", + "above", + "across", + "after", + "against", + "along", + "amid", + "among", + "anti", + "around", + "as", + "at", + "before", + "behind", + "and", + "but", + "or", + "nor", + "for", + "yet", + "so", + "below", + "beneath", + "beside", + "besides", + "between", + "beyond", + "by", + "concerning", + "considering", + "despite", + "down", + "during", + "except", + "excepting", + "excluding", + "following", + "from", + "in", + "inside", + "into", + "like", + "minus", + "near", + "of", + "off", + "on", + "onto", + "opposite", + "over", + "past", + "per", + "plus", + "regarding", + "round", + "save", + "since", + "than", + "through", + "to", + "toward", + "towards", + "under", + "underneath", + "unlike", + "until", + "up", + "upon", + "versus", + "via", + "with", + "within", + "without", +]); + +// Characters that ride along with an adjacent word but don't break it: +// Markdown inline delimiters (`~~strike~~`, `**bold**`, `_em_`) and parens. 
+const TRANSPARENT = new Set(["(", ")", "*", "_", "~"]); + +// Joiners that fuse atoms into a single compound word with multiple +// segments. The first segment gets capitalized; later segments stay +// lowercase (AP style: "Three-way", "Up-to-date", "Push/pull"). +const COMPOUND_JOINERS = new Set(["-", "/"]); + +// Punctuation that, when present in the gap between two atoms, force-caps +// the next word even if it would otherwise be small. +const HARD_CAP = new Set([":", ";", "!", "?", "—", "–"]); + +// An atom is a maximal run of letters/digits with optional intra-word +// apostrophes (straight or curly). Required to start with a letter/digit +// so a leading quote doesn't get absorbed into the word. +const ATOM_RE = /[\p{L}\p{N}][\p{L}\p{N}'’]*/gu; + +interface Atom { + start: number; + end: number; + value: string; +} + +interface Segment { + startPos: number; + endPos: number; + letters: string; +} + +interface Word { + segments: Segment[]; + forceCap: boolean; +} + +const findAtoms = (text: string): Atom[] => { + const atoms: Atom[] = []; + for (const m of text.matchAll(ATOM_RE)) { + atoms.push({ + start: m.index!, + end: m.index! + m[0].length, + value: m[0], + }); + } + return atoms; }; -const CURLY_QUOTE_REGEX = /[\u201C\u201D\u2018\u2019]/g; - -// Matches a hyphenated compound like "Three-Way" or "Up-To-Date" so the -// second-and-later segments can be lowercased (AP-style: "Three-way"). 
-const HYPHENATED_COMPOUND_REGEX = /[A-Za-z][A-Za-z']*(?:-[A-Za-z][A-Za-z']*)+/g; - -const lowercaseHyphenatedTails = (text: string, special: string[]) => - text.replace(HYPHENATED_COMPOUND_REGEX, (match) => { - if (special.includes(match)) return match; - const parts = match.split("-"); - return parts - .map((part, index) => { - if (index === 0) return part; - if (special.includes(part)) return part; - return part.charAt(0).toLowerCase() + part.slice(1); - }) - .join("-"); + +type GapKind = "JOIN" | "COMPOUND" | "BOUNDARY"; + +const classifyGap = (gap: string): { kind: GapKind; forceCap: boolean } => { + if (gap.length === 0) return { kind: "JOIN", forceCap: false }; + let allTransparent = true; + for (const c of gap) { + if (!TRANSPARENT.has(c)) { + allTransparent = false; + break; + } + } + if (allTransparent) return { kind: "JOIN", forceCap: false }; + if (gap.length === 1 && COMPOUND_JOINERS.has(gap)) { + return { kind: "COMPOUND", forceCap: false }; + } + for (const c of gap) { + if (HARD_CAP.has(c)) return { kind: "BOUNDARY", forceCap: true }; + } + return { kind: "BOUNDARY", forceCap: false }; +}; + +const buildWords = (text: string, atoms: Atom[]): Word[] => { + const [first, ...rest] = atoms; + if (!first) return []; + const newSegment = (a: Atom): Segment => ({ + startPos: a.start, + endPos: a.end, + letters: a.value, }); + const words: Word[] = []; + let currSeg = newSegment(first); + let currWord: Word = { segments: [currSeg], forceCap: false }; + let prev = first; + for (const curr of rest) { + const gap = classifyGap(text.slice(prev.end, curr.start)); + if (gap.kind === "JOIN") { + currSeg.endPos = curr.end; + currSeg.letters += curr.value; + } else if (gap.kind === "COMPOUND") { + currSeg = newSegment(curr); + currWord.segments.push(currSeg); + } else { + words.push(currWord); + currSeg = newSegment(curr); + currWord = { segments: [currSeg], forceCap: gap.forceCap }; + } + prev = curr; + } + words.push(currWord); + return words; +}; -const title = 
(...params: Parameters) => { - const [text, options] = params; - const curlyPositions: Array<[number, string]> = []; - const normalized = text.replace( - CURLY_QUOTE_REGEX, - (match, offset: number) => { - curlyPositions.push([offset, match]); - return CURLY_TO_STRAIGHT[match] ?? match; - }, - ); - const textChunks = normalized.split(")"); - const titleCased = textChunks - .map((chunk) => libraryTitle(chunk, options)) - .join(")"); - const intermediateTitle = lowercaseHyphenatedTails( - titleCased, - options?.special ?? [], - ); - if (curlyPositions.length === 0) return intermediateTitle; - const chars = intermediateTitle.split(""); - for (const [offset, original] of curlyPositions) { - chars[offset] = original; +// Expand each word's outer segments to absorb adjacent transparent +// characters so `cat(s)`, `~~don't~~`, and `**only**` ride along as one +// unit during reassembly. +const isTransparentAt = (text: string, idx: number): boolean => { + const c = text[idx]; + return c !== undefined && TRANSPARENT.has(c); +}; + +const firstSegmentOf = (word: Word): Segment => word.segments[0]!; +const lastSegmentOf = (word: Word): Segment => + word.segments[word.segments.length - 1]!; + +const attachTransparent = (text: string, words: Word[]): void => { + const firstWord = words[0]; + if (!firstWord) return; + const firstSeg = firstSegmentOf(firstWord); + let i = firstSeg.startPos - 1; + while (i >= 0 && isTransparentAt(text, i)) i--; + firstSeg.startPos = i + 1; + const lastSeg = lastSegmentOf(words[words.length - 1]!); + let j = lastSeg.endPos; + while (j < text.length && isTransparentAt(text, j)) j++; + lastSeg.endPos = j; + for (let w = 0; w < words.length - 1; w++) { + const leftSeg = lastSegmentOf(words[w]!); + const rightSeg = firstSegmentOf(words[w + 1]!); + let k = leftSeg.endPos; + while (k < rightSeg.startPos && isTransparentAt(text, k)) k++; + leftSeg.endPos = k; + let l = rightSeg.startPos - 1; + while (l >= leftSeg.endPos && isTransparentAt(text, l)) l--; + 
rightSeg.startPos = l + 1; + } +}; + +const capFirstLetter = (s: string): string => { + for (let i = 0; i < s.length; i++) { + const c = s[i]; + if (c && /\p{L}/u.test(c)) { + return s.slice(0, i) + c.toUpperCase() + s.slice(i + 1); + } + } + return s; +}; + +const escapeRegExp = (s: string): string => + s.replace(/[.*+?^${}()|[\]\\]/g, "\\$&"); + +const applySpecials = (text: string, specials: string[]): string => { + for (const s of specials) { + text = text.replace(new RegExp(`\\b${escapeRegExp(s)}\\b`, "gi"), s); } - return chars.join(""); + return text; +}; + +interface TitleOptions { + special: string[]; + isFirstTextNode: boolean; + isLastTextNode: boolean; +} + +const titleCase = (input: string, options: TitleOptions): string => { + const text = input.toLowerCase(); + const atoms = findAtoms(text); + const firstAtom = atoms[0]; + if (!firstAtom) return applySpecials(text, options.special); + const words = buildWords(text, atoms); + attachTransparent(text, words); + + const leadingText = text.slice(0, firstAtom.start); + // A leading ellipsis (`...and more`) signals a continuation, so the + // first-word force-cap is suppressed and small-word rules apply normally. 
+ const leadingHasMultiDot = /\.{2,}/.test(leadingText); + const leadingHasHardCap = Array.from(HARD_CAP).some((c) => + leadingText.includes(c), + ); + + const out: string[] = []; + let cursor = 0; + words.forEach((word, idx) => { + const isFirst = idx === 0; + const isLast = idx === words.length - 1; + let shouldCap: boolean; + if (word.segments.length > 1) { + shouldCap = true; + } else if (word.forceCap) { + shouldCap = true; + } else if (isFirst && leadingHasHardCap) { + shouldCap = true; + } else if (isFirst && options.isFirstTextNode && !leadingHasMultiDot) { + shouldCap = true; + } else if (isLast && options.isLastTextNode) { + shouldCap = true; + } else if (SMALL_WORDS.has(firstSegmentOf(word).letters)) { + shouldCap = false; + } else { + shouldCap = true; + } + + word.segments.forEach((seg, segIdx) => { + out.push(text.slice(cursor, seg.startPos)); + let segStr = text.slice(seg.startPos, seg.endPos); + if (shouldCap && segIdx === 0) segStr = capFirstLetter(segStr); + out.push(segStr); + cursor = seg.endPos; + }); + }); + out.push(text.slice(cursor)); + return applySpecials(out.join(""), options.special); }; type PluginArgs = { special: string[]; componentNames: string[] }; @@ -75,8 +311,16 @@ const plugin: Plugin = ) => (tree) => { visit(tree, "heading", (node) => { + const textNodes: { value?: string }[] = []; visit(node, "text", (textNode) => { - textNode.value = title(textNode.value ?? "", { special }); + textNodes.push(textNode); + }); + textNodes.forEach((textNode, i) => { + textNode.value = titleCase(textNode.value ?? 
"", { + special, + isFirstTextNode: i === 0, + isLastTextNode: i === textNodes.length - 1, + }); }); }); if (componentNames.length > 0) { @@ -94,24 +338,17 @@ const plugin: Plugin = (attribute) => "name" in attribute && attribute.name == "title", ); if (titleAttribute) { - const titleWithSplitCode = (titleAttribute.value as string).split( + const parts = (titleAttribute.value as string).split( new RegExp(CODE_REGEX), ); - titleAttribute.value = titleWithSplitCode - .map((part) => { - if (part.startsWith("`") && part.endsWith("`")) { - return part; - // } else if (index > 0) { - // // If this comes after a code split it will always capitalize the first word, - // // even if it shouldn't be. So, for anything that's not index 0, we add "a" - // // in front of it then remove it so it will treat the first word as it would - // // in the middle of a sentence. - // const intermediateTitle = title("a " + part, { special }); - // console.log(intermediateTitle); - // return intermediateTitle.substring(2); - } else { - return title(part, { special }); - } + titleAttribute.value = parts + .map((part, idx) => { + if (part.startsWith("`") && part.endsWith("`")) return part; + return titleCase(part, { + special, + isFirstTextNode: idx === 0, + isLastTextNode: idx === parts.length - 1, + }); }) .join(""); } diff --git a/remark-capitalize-titles/package.json b/remark-capitalize-titles/package.json index e93977c..d382243 100644 --- a/remark-capitalize-titles/package.json +++ b/remark-capitalize-titles/package.json @@ -42,11 +42,9 @@ "typecheck": "tsc --noEmit" }, "dependencies": { - "title": "^4.0.1", "unist-util-visit": "^5.0.0" }, "devDependencies": { - "@types/title": "^3.4.3", "mdast": "^3.0.0", "mdast-util-mdx-jsx": "^3.1.2", "remark": "^15.0.1", diff --git a/remark-capitalize-titles/tests/index.test.ts b/remark-capitalize-titles/tests/index.test.ts index c31bb73..186f2d8 100644 --- a/remark-capitalize-titles/tests/index.test.ts +++ 
b/remark-capitalize-titles/tests/index.test.ts @@ -85,10 +85,82 @@ describe("Respects tricky punctuations", () => { expect(await processMarkdown("### ...and more!")).toBe("### ...and More!"); }); - test("handles parenthesized possessives", async () => { + test("handles an inner ellipsis with no trailing space", async () => { + expect( + await processMarkdown("### check it out before you...check it out!"), + ).toBe("### Check It Out before You...Check It Out!"); + }); + + test("treats inner ellipsis as transparent for small words", async () => { + expect(await processMarkdown("# wait...and then more")).toBe( + "# Wait...and Then More", + ); + expect(await processMarkdown("# stop...but think first")).toBe( + "# Stop...but Think First", + ); + }); + + // Parens are transparent: title-case as if they weren't there. So small + // words after `)` stay lowercase, content words get capped, and a letter + // inside `(...)` is cased like the rest of its word. + test("treats parens as transparent for title-casing", async () => { expect( await processMarkdown("### traveling through (your code's) history"), ).toBe("### Traveling Through (Your Code's) History"); + + expect( + await processMarkdown("# employment region(s) for my application"), + ).toBe("# Employment Region(s) for My Application"); + + expect(await processMarkdown("# (s)omething or other")).toBe( + "# (S)omething or Other", + ); + + expect(await processMarkdown("# cat(s) can be a pain")).toBe( + "# Cat(s) Can Be a Pain", + ); + + expect(await processMarkdown("# (s)omethin(g)")).toBe("# (S)omethin(g)"); + }); +}); + +describe("Handles markdown inline delimiters", () => { + test("capitalizes a strikethrough leading word", async () => { + expect( + await processMarkdown("### ~~don't~~ forget your github addresses!"), + ).toBe("### ~~Don't~~ Forget Your GitHub Addresses!"); + }); + + test("capitalizes a bold-wrapped word mid-title", async () => { + expect( + await processMarkdown("## the **only** thing you'll ever 
need"), +    ).toBe("## The **Only** Thing You'll Ever Need"); +  }); + +  // remark's stringifier normalizes `_em_` to `*em*`; the leading-letter +  // capitalization is what matters here. +  test("capitalizes an underscore-emphasized leading word", async () => { +    expect(await processMarkdown("# _really_ important changes")).toBe( +      "# *Really* Important Changes", +    ); +  }); + +  test("capitalizes an asterisk-emphasized word", async () => { +    expect(await processMarkdown("## a *very* good idea")).toBe( +      "## A *Very* Good Idea", +    ); +  }); + +  test("capitalizes a word adjacent to an em dash", async () => { +    expect(await processMarkdown("## merging—your final boss")).toBe( +      "## Merging—Your Final Boss", +    ); +  }); + +  test("capitalizes a word adjacent to an en dash", async () => { +    expect(await processMarkdown("## merging–your final boss")).toBe( +      "## Merging–Your Final Boss", +    ); }); }); diff --git a/remark-capitalize-titles/tests/vercel.test.ts b/remark-capitalize-titles/tests/vercel.test.ts new file mode 100644 index 0000000..4bb1d12 --- /dev/null +++ b/remark-capitalize-titles/tests/vercel.test.ts @@ -0,0 +1,107 @@ +import { describe, expect, test } from "vitest"; +import { remark } from "remark"; +import type { Compatible } from "vfile"; +import remarkCapitalizeTitles from "../index.ts"; + +const processMarkdown = async (value: Compatible) => { + const file = await remark().use(remarkCapitalizeTitles).process(value); + return file.toString().slice(0, -1); +}; + +// Test cases lifted verbatim from the upstream `title` package's own suite +// (vercel/title, test/index.test.js) so we can see exactly how our wrapper +// diverges from `title`'s standalone behavior. Some expected outputs have +// been adapted where our wrapper intentionally differs (e.g. +// our compound-word handling keeps later hyphen segments lowercase on a +// hyphenated compound, so `Log-In` becomes `Log-in`). 
+describe("Imported from `title` package's test suite", () => { + const processWith = async (value: string, special: string[]) => { + const file = await remark() + .use(remarkCapitalizeTitles, { special, componentNames: [] }) + .process(value); + return file.toString().slice(0, -1); + }; + + test("capitalizes the first letter of relevant words", async () => { + expect(await processMarkdown("# capitalize your titles")).toBe( + "# Capitalize Your Titles", + ); + }); + + test("lowercases letters in addition to capitalizing them", async () => { + expect( + await processMarkdown("# updates TO hAndLinG of Failed paYMEnts"), + ).toBe("# Updates to Handling of Failed Payments"); + }); + + test("handles titles with special characters", async () => { + expect( + await processMarkdown("# seattle’S BEST coffee & grandma's cookies"), + ).toBe("# Seattle’s Best Coffee & Grandma's Cookies"); + }); + + test("understands Vercel product names (with explicit specials)", async () => { + expect( + await processWith("# noW deSktop and now cLI are prODUCts of zeIt", [ + "CLI", + "ZEIT", + ]), + ).toBe("# Now Desktop and Now CLI Are Products of ZEIT"); + }); + + test("handles Vercel product names with other special characters", async () => { + expect( + await processWith("# aPi 2.0: lOG-in with zeit, new dOCs & more", [ + "API", + "ZEIT", + ]), + // NOTE: upstream `title` produces "Log-In"; our wrapper applies + // AP-style hyphenated-tail lowercasing, so we expect "Log-in". 
+ ).toBe("# API 2.0: Log-in with ZEIT, New Docs & More"); + + expect( + await processWith("# toWArds NEXT.JS 5: Introducing cANaRY Updates", [ + "Next.js", + ]), + ).toBe("# Towards Next.js 5: Introducing Canary Updates"); + }); + + test("modifies custom special words", async () => { + expect( + await processWith("# mY cusToM brand is awesome", ["BRAnD", "awesoMe"]), + ).toBe("# My Custom BRAnD Is awesoMe"); + + expect( + await processWith("# modify speCials like Facebook or microsoft", [ + "facebook", + "Microsoft", + ]), + ).toBe("# Modify Specials like facebook or Microsoft"); + }); + + test("capitalizes the last word regardless of syntax", async () => { + expect(await processMarkdown("# there and beyond")).toBe( + "# There and Beyond", + ); + + expect(await processMarkdown("# be careful what you wish for")).toBe( + "# Be Careful What You Wish For", + ); + + expect(await processWith("# XYZ: what is it good for", ["XYZ"])).toBe( + "# XYZ: What Is It Good For", + ); + }); + + test("supports international characters", async () => { + expect(await processMarkdown("# çeşme city")).toBe("# Çeşme City"); + expect(await processMarkdown("# la niña esta aquí")).toBe( + "# La Niña Esta Aquí", + ); + expect(await processMarkdown("# forhandlingsmøde")).toBe( + "# Forhandlingsmøde", + ); + expect(await processMarkdown("# đội")).toBe("# Đội"); + expect(await processMarkdown("# tuyển")).toBe("# Tuyển"); + }); +});