anno: support shard annotations in annotations-index endpoint
@@ -1,23 +1,81 @@
 // src/pages/annotations-index.json.ts
 import type { APIRoute } from "astro";
-import * as fs from "node:fs/promises";
-import * as path from "node:path";
-import { parse as parseYAML } from "yaml";
+import fs from "node:fs/promises";
+import path from "node:path";
+import YAML from "yaml";
 
 const CWD = process.cwd();
-const ANNO_DIR = path.join(CWD, "src", "annotations");
-
-// Strict in CI (or explicit override)
-const STRICT =
-  process.env.ANNOTATIONS_STRICT === "1" ||
-  process.env.CI === "1" ||
-  process.env.CI === "true";
-
-async function exists(p: string): Promise<boolean> {
-  try {
-    await fs.access(p);
-    return true;
-  } catch {
-    return false;
-  }
-}
+const ANNO_ROOT = path.join(CWD, "src", "annotations");
+
+const isObj = (x: any) => !!x && typeof x === "object" && !Array.isArray(x);
+const isArr = (x: any) => Array.isArray(x);
+
+function normPath(s: string) {
+  return String(s || "").replace(/\\/g, "/").replace(/^\/+|\/+$/g, "");
+}
+function paraNum(pid: string) {
+  const m = String(pid).match(/^p-(\d+)-/i);
+  return m ? Number(m[1]) : Number.POSITIVE_INFINITY;
+}
+function toIso(v: any) {
+  if (v instanceof Date) return v.toISOString();
+  return typeof v === "string" ? v : "";
+}
+function stableSortByTs(arr: any[]) {
+  if (!Array.isArray(arr)) return;
+  arr.sort((a, b) => {
+    const ta = Date.parse(toIso(a?.ts)) || 0;
+    const tb = Date.parse(toIso(b?.ts)) || 0;
+    if (ta !== tb) return ta - tb;
+    return JSON.stringify(a).localeCompare(JSON.stringify(b));
+  });
+}
+
+function keyMedia(x: any) { return String(x?.src || ""); }
+function keyRef(x: any) {
+  return `${x?.url || ""}||${x?.label || ""}||${x?.kind || ""}||${x?.citation || ""}`;
+}
+function keyComment(x: any) { return String(x?.text || "").trim(); }
+
+function uniqUnion(dst: any[], src: any[], keyFn: (x: any) => string) {
+  const out = isArr(dst) ? [...dst] : [];
+  const seen = new Set(out.map((x) => keyFn(x)));
+  for (const it of (isArr(src) ? src : [])) {
+    const k = keyFn(it);
+    if (!k) continue;
+    if (!seen.has(k)) { seen.add(k); out.push(it); }
+  }
+  return out;
+}
+
+function deepMergeEntry(dst: any, src: any) {
+  if (!isObj(dst) || !isObj(src)) return;
+
+  for (const [k, v] of Object.entries(src)) {
+    if (k === "media" && isArr(v)) { dst.media = uniqUnion(dst.media, v, keyMedia); continue; }
+    if (k === "refs" && isArr(v)) { dst.refs = uniqUnion(dst.refs, v, keyRef); continue; }
+    if (k === "comments_editorial" && isArr(v)) { dst.comments_editorial = uniqUnion(dst.comments_editorial, v, keyComment); continue; }
+
+    if (isObj(v)) {
+      if (!isObj(dst[k])) dst[k] = {};
+      deepMergeEntry(dst[k], v);
+      continue;
+    }
+
+    if (isArr(v)) {
+      const cur = isArr(dst[k]) ? dst[k] : [];
+      const seen = new Set(cur.map((x: any) => JSON.stringify(x)));
+      const out = [...cur];
+      for (const it of v) {
+        const s = JSON.stringify(it);
+        if (!seen.has(s)) { seen.add(s); out.push(it); }
+      }
+      dst[k] = out;
+      continue;
+    }
+
+    // scalar: set only if missing/empty
+    if (!(k in dst) || dst[k] == null || dst[k] === "") dst[k] = v;
+  }
+}
@@ -26,154 +84,93 @@ async function walk(dir: string): Promise<string[]> {
   const ents = await fs.readdir(dir, { withFileTypes: true });
   for (const e of ents) {
     const p = path.join(dir, e.name);
-    if (e.isDirectory()) out.push(...(await walk(p)));
-    else out.push(p);
+    if (e.isDirectory()) out.push(...await walk(p));
+    else if (e.isFile() && /\.ya?ml$/i.test(e.name)) out.push(p);
   }
   return out;
 }
 
-function isPlainObject(x: unknown): x is Record<string, unknown> {
-  return !!x && typeof x === "object" && !Array.isArray(x);
-}
-
-function normalizePageKey(s: unknown): string {
-  return String(s ?? "")
-    .replace(/^\/+/, "")
-    .replace(/\/+$/, "")
-    .trim();
-}
-
-function inferPageKeyFromFile(inDirAbs: string, fileAbs: string): string {
-  const rel = path.relative(inDirAbs, fileAbs).replace(/\\/g, "/");
-  return rel.replace(/\.(ya?ml|json)$/i, "");
-}
-
-function parseDoc(raw: string, fileAbs: string): unknown {
-  if (/\.json$/i.test(fileAbs)) return JSON.parse(raw);
-  return parseYAML(raw);
-}
-
-function hardFailOrCollect(errors: string[], msg: string): void {
-  if (STRICT) throw new Error(msg);
-  errors.push(msg);
-}
-
-function sanitizeEntry(
-  fileRel: string,
-  paraId: string,
-  entry: unknown,
-  errors: string[]
-): Record<string, unknown> {
-  if (entry == null) return {};
-
-  if (!isPlainObject(entry)) {
-    hardFailOrCollect(errors, `${fileRel}: paras.${paraId} must be an object`);
-    return {};
-  }
-
-  const e: Record<string, unknown> = { ...entry };
-
-  const arrayFields = [
-    "refs",
-    "authors",
-    "quotes",
-    "media",
-    "comments_editorial",
-  ] as const;
-
-  for (const k of arrayFields) {
-    if (e[k] == null) continue;
-    if (!Array.isArray(e[k])) {
-      errors.push(`${fileRel}: paras.${paraId}.${k} must be an array (coerced to [])`);
-      e[k] = [];
-    }
-  }
-
-  return e;
-}
+function inferExpected(relNoExt: string) {
+  const parts = relNoExt.split("/").filter(Boolean);
+  const last = parts.at(-1) || "";
+  const isShard = /^p-\d+-/i.test(last);
+  const pageKey = isShard ? parts.slice(0, -1).join("/") : relNoExt;
+  const paraId = isShard ? last : null;
+  return { isShard, pageKey, paraId };
+}
 
 export const GET: APIRoute = async () => {
-  if (!(await exists(ANNO_DIR))) {
-    const out = {
-      schema: 1,
-      generatedAt: new Date().toISOString(),
-      pages: {},
-      stats: { pages: 0, paras: 0, errors: 0 },
-      errors: [] as string[],
-    };
-
-    return new Response(JSON.stringify(out), {
-      headers: {
-        "Content-Type": "application/json; charset=utf-8",
-        "Cache-Control": "no-store",
-      },
-    });
-  }
-
-  const files = (await walk(ANNO_DIR)).filter((p) => /\.(ya?ml|json)$/i.test(p));
-
-  const pages: Record<string, { paras: Record<string, Record<string, unknown>> }> =
-    Object.create(null);
-
-  const errors: string[] = [];
-  let paraCount = 0;
-
-  for (const f of files) {
-    const fileRel = path.relative(CWD, f).replace(/\\/g, "/");
-    const pageKey = normalizePageKey(inferPageKeyFromFile(ANNO_DIR, f));
-
-    if (!pageKey) {
-      hardFailOrCollect(errors, `${fileRel}: cannot infer page key`);
-      continue;
-    }
-
-    let doc: unknown;
-    try {
-      const raw = await fs.readFile(f, "utf8");
-      doc = parseDoc(raw, f);
-    } catch (e) {
-      hardFailOrCollect(errors, `${fileRel}: parse failed: ${String((e as any)?.message ?? e)}`);
-      continue;
-    }
-
-    if (!isPlainObject(doc) || (doc as any).schema !== 1) {
-      hardFailOrCollect(errors, `${fileRel}: schema must be 1`);
-      continue;
-    }
-
-    if ((doc as any).page != null) {
-      const declared = normalizePageKey((doc as any).page);
-      if (declared !== pageKey) {
-        hardFailOrCollect(
-          errors,
-          `${fileRel}: page mismatch (page="${declared}" vs path="${pageKey}")`
-        );
-        continue;
-      }
-    }
-
-    const parasAny = (doc as any).paras;
-    if (!isPlainObject(parasAny)) {
-      hardFailOrCollect(errors, `${fileRel}: missing object key "paras"`);
-      continue;
-    }
-
-    if (pages[pageKey]) {
-      hardFailOrCollect(errors, `${fileRel}: duplicate page "${pageKey}" (only one file per page)`);
-      continue;
-    }
-
-    const parasOut: Record<string, Record<string, unknown>> = Object.create(null);
-
-    for (const [paraId, entry] of Object.entries(parasAny)) {
-      if (!/^p-\d+-/i.test(paraId)) {
-        hardFailOrCollect(errors, `${fileRel}: invalid para id "${paraId}"`);
-        continue;
-      }
-      parasOut[paraId] = sanitizeEntry(fileRel, paraId, entry, errors);
-    }
-
-    pages[pageKey] = { paras: parasOut };
-    paraCount += Object.keys(parasOut).length;
-  }
+  const pages: Record<string, { paras: Record<string, any> }> = {};
+  const errors: Array<{ file: string; error: string }> = [];
+
+  let files: string[] = [];
+  try {
+    files = await walk(ANNO_ROOT);
+  } catch (e: any) {
+    throw new Error(`Missing annotations root: ${ANNO_ROOT} (${e?.message || e})`);
+  }
+
+  for (const fp of files) {
+    const rel = normPath(path.relative(ANNO_ROOT, fp));
+    const relNoExt = rel.replace(/\.ya?ml$/i, "");
+    const { isShard, pageKey, paraId } = inferExpected(relNoExt);
+
+    try {
+      const raw = await fs.readFile(fp, "utf8");
+      const doc = YAML.parse(raw) || {};
+
+      if (!isObj(doc) || doc.schema !== 1) continue;
+
+      const docPage = normPath(doc.page || "");
+      if (docPage && docPage !== pageKey) {
+        throw new Error(`page mismatch (page="${doc.page}" vs path="${pageKey}")`);
+      }
+      if (!doc.page) doc.page = pageKey;
+
+      if (!isObj(doc.paras)) throw new Error(`missing object key "paras"`);
+
+      const pg = pages[pageKey] ??= { paras: {} };
+
+      if (isShard) {
+        if (!paraId) throw new Error("internal: missing paraId");
+        if (!(paraId in doc.paras)) {
+          throw new Error(`shard mismatch: file must contain paras["${paraId}"]`);
+        }
+        const entry = doc.paras[paraId];
+        if (!isObj(pg.paras[paraId])) pg.paras[paraId] = {};
+        if (isObj(entry)) deepMergeEntry(pg.paras[paraId], entry);
+
+        stableSortByTs(pg.paras[paraId].media);
+        stableSortByTs(pg.paras[paraId].refs);
+        stableSortByTs(pg.paras[paraId].comments_editorial);
+      } else {
+        for (const [pid, entry] of Object.entries(doc.paras)) {
+          const p = String(pid);
+          if (!isObj(pg.paras[p])) pg.paras[p] = {};
+          if (isObj(entry)) deepMergeEntry(pg.paras[p], entry);
+
+          stableSortByTs(pg.paras[p].media);
+          stableSortByTs(pg.paras[p].refs);
+          stableSortByTs(pg.paras[p].comments_editorial);
+        }
+      }
+    } catch (e: any) {
+      errors.push({ file: `src/annotations/${rel}`, error: String(e?.message || e) });
+    }
+  }
 
+  // sort paras: numeric p-<n>- order first, then lexicographic
+  for (const pg of Object.values(pages)) {
+    const keys = Object.keys(pg.paras || {});
+    keys.sort((a, b) => {
+      const ia = paraNum(a);
+      const ib = paraNum(b);
+      if (Number.isFinite(ia) && Number.isFinite(ib) && ia !== ib) return ia - ib;
+      return String(a).localeCompare(String(b));
+    });
+    const next: Record<string, any> = {};
+    for (const k of keys) next[k] = pg.paras[k];
+    pg.paras = next;
+  }
 
   const out = {
@@ -182,16 +179,18 @@ export const GET: APIRoute = async () => {
     pages,
     stats: {
       pages: Object.keys(pages).length,
-      paras: paraCount,
+      paras: Object.values(pages).reduce((n, p) => n + Object.keys(p.paras || {}).length, 0),
       errors: errors.length,
     },
     errors,
   };
 
+  // 🔥 CI-grade behavior: if there are errors, fail the build
+  if (errors.length) {
+    throw new Error(`${errors[0].file}: ${errors[0].error}`);
+  }
+
   return new Response(JSON.stringify(out), {
-    headers: {
-      "Content-Type": "application/json; charset=utf-8",
-      "Cache-Control": "no-store",
-    },
+    headers: { "Content-Type": "application/json; charset=utf-8" },
   });
 };
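
For readers skimming the diff, a minimal sketch of how the new shard inference behaves. The function body is copied verbatim from the hunk above; the page key "essays/intro" and the paragraph id "p-3-thesis" are hypothetical examples, not paths from this repository.

// shard-inference sketch (standalone TypeScript)
function inferExpected(relNoExt: string) {
  const parts = relNoExt.split("/").filter(Boolean);
  const last = parts.at(-1) || "";
  const isShard = /^p-\d+-/i.test(last);
  const pageKey = isShard ? parts.slice(0, -1).join("/") : relNoExt;
  const paraId = isShard ? last : null;
  return { isShard, pageKey, paraId };
}

// A whole-page file keeps its extension-stripped path as the page key:
console.log(inferExpected("essays/intro"));
// => { isShard: false, pageKey: "essays/intro", paraId: null }

// A file whose name starts with p-<n>- is a shard: it contributes one
// paragraph to the page named by its parent directory:
console.log(inferExpected("essays/intro/p-3-thesis"));
// => { isShard: true, pageKey: "essays/intro", paraId: "p-3-thesis" }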
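A shard file itself stays an ordinary annotations document. This sketch shows the minimal shape that passes the endpoint's checks: schema must be 1, a shard must carry its own paragraph id under paras, and page, if present, must match the path. The path and ids below are made up; YAML.parse is the same "yaml" API the endpoint imports.

// document-shape sketch
import YAML from "yaml";

// e.g. src/annotations/essays/intro/p-3-thesis.yaml (hypothetical path)
const raw = `
schema: 1
page: essays/intro
paras:
  p-3-thesis:
    refs:
      - url: https://example.org/a
        label: A
`;

const doc = YAML.parse(raw);
console.log(doc.schema === 1);          // true: required by the endpoint
console.log("p-3-thesis" in doc.paras); // true: a shard must contain its own para id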
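Finally, the rule that lets a shard and a whole-page file annotate the same paragraph without duplicating entries: arrays like refs are unioned by a content key. uniqUnion and keyRef below are exactly as the diff defines them; the sample refs are invented.

// de-duplication sketch (standalone TypeScript)
const isArr = (x: any) => Array.isArray(x);

function keyRef(x: any) {
  return `${x?.url || ""}||${x?.label || ""}||${x?.kind || ""}||${x?.citation || ""}`;
}

function uniqUnion(dst: any[], src: any[], keyFn: (x: any) => string) {
  const out = isArr(dst) ? [...dst] : [];
  const seen = new Set(out.map((x) => keyFn(x)));
  for (const it of (isArr(src) ? src : [])) {
    const k = keyFn(it);
    if (!k) continue;
    if (!seen.has(k)) { seen.add(k); out.push(it); }
  }
  return out;
}

// A page file and a shard both mention ref "A"; the union keeps one copy
// of the duplicate and appends the shard-only ref "B".
const fromPage = [{ url: "https://example.org/a", label: "A" }];
const fromShard = [
  { url: "https://example.org/a", label: "A" },
  { url: "https://example.org/b", label: "B" },
];
console.log(uniqUnion(fromPage, fromShard, keyRef).length); // 2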