Seed from NAS prod snapshot 20260130-190531
All checks were successful
CI / build-and-anchors (push) Successful in 1m25s
SMOKE / smoke (push) Successful in 11s
CI / build-and-anchors (pull_request) Successful in 1m20s

This commit is contained in:
archicratia
2026-01-31 10:51:38 +00:00
commit 60d88939b0
142 changed files with 33443 additions and 0 deletions

View File

@@ -0,0 +1 @@
[]

733
scripts/apply-ticket.mjs Normal file
View File

@@ -0,0 +1,733 @@
#!/usr/bin/env node
import fs from "node:fs/promises";
import path from "node:path";
import process from "node:process";
import { spawnSync } from "node:child_process";
/**
* apply-ticket — applique une proposition de correction depuis un ticket Gitea
*
* Conçu pour:
* - prendre un ticket [Correction]/[Fact-check] (issue) avec Chemin + Ancre + Proposition
* - retrouver le bon paragraphe dans le .mdx
* - remplacer proprement
* - optionnel: écrire un alias d'ancre old->new (build-time) dans src/anchors/anchor-aliases.json
* - optionnel: committer automatiquement
* - optionnel: fermer le ticket (après commit)
*/
/**
 * Prints the CLI usage/help text and terminates the process.
 * @param {number} [exitCode=0] - Exit status (0 = help requested, 1 = bad invocation).
 */
function usage(exitCode = 0) {
console.log(`
apply-ticket — applique une proposition de correction depuis un ticket Gitea (robuste)
Usage:
node scripts/apply-ticket.mjs <issue_number> [--dry-run] [--no-build] [--alias] [--commit] [--close]
Flags:
--dry-run : ne modifie rien, affiche BEFORE/AFTER
--no-build : n'exécute pas "npm run build" (INCOMPATIBLE avec --alias)
--alias : après application, ajoute l'alias d'ancre (old -> new) dans src/anchors/anchor-aliases.json
--commit : git add + git commit automatiquement (inclut alias si --alias)
--close : ferme automatiquement le ticket après commit (+ commentaire avec SHA)
Env (recommandé):
FORGE_API = base API (LAN) ex: http://192.168.1.20:3000
FORGE_BASE = base web ex: https://gitea.xxx.tld (fallback si FORGE_API absent)
FORGE_TOKEN = PAT (accès repo + issues)
GITEA_OWNER = owner (optionnel si auto-détecté depuis git remote)
GITEA_REPO = repo (optionnel si auto-détecté depuis git remote)
Notes:
- Si dist/<chemin>/index.html est absent, le script lance "npm run build" sauf si --no-build.
- Sauvegarde automatique: <fichier>.bak.issue-<N> (uniquement si on écrit)
- Avec --alias : le script rebuild pour identifier le NOUVEL id, puis écrit l'alias old->new.
- Refuse automatiquement les Pull Requests (PR) : ce ne sont pas des tickets éditoriaux.
`);
process.exit(exitCode);
}
// ---------------------------------------------------------------------------
// CLI parsing + flag validation (runs at import time; exits early on misuse).
// ---------------------------------------------------------------------------
const argv = process.argv.slice(2);
if (argv.length === 0 || argv.includes("--help") || argv.includes("-h")) usage(0);
const issueNum = Number(argv[0]);
if (!Number.isFinite(issueNum) || issueNum <= 0) {
  console.error("❌ Numéro de ticket invalide.");
  usage(1);
}
// Behaviour flags — see usage() for the user-facing description of each.
const DRY_RUN = argv.includes("--dry-run");
const NO_BUILD = argv.includes("--no-build");
const DO_ALIAS = argv.includes("--alias");
const DO_COMMIT = argv.includes("--commit");
const DO_CLOSE = argv.includes("--close");
// --alias must rebuild to discover the NEW anchor id; with --no-build the
// computed alias could map to a stale id.
if (DO_ALIAS && NO_BUILD) {
  console.error("❌ --alias est incompatible avec --no-build (risque d'alias faux).");
  console.error("➡️ Relance sans --no-build.");
  process.exit(1);
}
if (DRY_RUN && (DO_ALIAS || DO_COMMIT || DO_CLOSE)) {
  console.warn(" --dry-run : --alias/--commit/--close sont ignorés (aucune écriture).");
}
if (DO_CLOSE && DRY_RUN) {
  console.error("❌ --close est incompatible avec --dry-run.");
  process.exit(1);
}
// Never close a ticket unless the change was actually committed.
if (DO_CLOSE && !DO_COMMIT) {
  console.error("❌ --close nécessite --commit (on ne ferme jamais un ticket sans commit).");
  process.exit(1);
}
// Global fetch() is required for the Gitea API calls (Node 18+).
if (typeof fetch !== "function") {
  console.error("❌ fetch() indisponible dans ce Node. Utilise Node 18+ (ou plus).");
  process.exit(1);
}
// Project layout anchors (all relative to the invocation directory).
const CWD = process.cwd();
const CONTENT_ROOT = path.join(CWD, "src", "content");
const DIST_ROOT = path.join(CWD, "dist");
const ALIASES_FILE = path.join(CWD, "src", "anchors", "anchor-aliases.json");
/* -------------------------- utils texte / matching -------------------------- */
/**
 * Normalizes a string for fuzzy comparison: NFKD + diacritic stripping,
 * typographic apostrophes/quotes -> ASCII, long dashes -> "-", ellipsis ->
 * "...", whitespace collapsed, lowercased.
 * @param {unknown} s
 * @returns {string}
 */
function normalizeText(s) {
  return String(s ?? "")
    .normalize("NFKD")
    .replace(/\p{Diacritic}/gu, "")
    // FIX: this class was empty (mojibake'd curly quotes), so typographic
    // apostrophes were never normalized and fuzzy matching missed them.
    .replace(/[\u2018\u2019\u201A\u2032]/g, "'")
    .replace(/[“”]/g, '"')
    .replace(/[–—]/g, "-")
    .replace(/…/g, "...")
    .replace(/\s+/g, " ")
    .trim()
    .toLowerCase();
}
// Very pragmatic Markdown/HTML stripper: drops inline code, images, links,
// emphasis markers and tags, then collapses whitespace.
function stripMd(mdx) {
  const passes = [
    [/`[^`]*`/g, " "],              // inline code
    [/!\[[^\]]*\]\([^)]+\)/g, " "], // images
    [/\[[^\]]*\]\([^)]+\)/g, " "],  // links
    [/[*_~]/g, " "],                // emphasis-ish markers
    [/<[^>]+>/g, " "],              // html tags
  ];
  let text = String(mdx ?? "");
  for (const [pattern, replacement] of passes) {
    text = text.replace(pattern, replacement);
  }
  return text.replace(/\s+/g, " ").trim();
}
// Splits text into normalized word tokens; short words (< 4 chars) are
// dropped because they add noise to the similarity score.
function tokenize(s) {
  const normalized = normalizeText(stripMd(s));
  const words = normalized.replace(/[^a-z0-9'\- ]+/g, " ").split(" ");
  return words.filter((word) => word.length >= 4);
}
/**
 * Similarity score between a candidate block and the target text:
 * shared-token count + a 0..100 coverage-ratio bonus + 1000 when the
 * candidate contains a long (<=180 chars) normalized prefix of the target.
 */
function scoreText(candidate, targetText) {
  const targetWords = new Set(tokenize(targetText));
  const candidateWords = new Set(tokenize(candidate));
  if (targetWords.size === 0 || candidateWords.size === 0) return 0;
  let hits = 0;
  for (const word of targetWords) {
    if (candidateWords.has(word)) hits++;
  }
  // Strong bonus when a long normalized prefix of the target appears verbatim.
  const targetNorm = normalizeText(stripMd(targetText));
  const candidateNorm = normalizeText(stripMd(candidate));
  const prefix = targetNorm.slice(0, Math.min(180, targetNorm.length));
  const prefixBonus = prefix && candidateNorm.includes(prefix) ? 1000 : 0;
  // Coverage ratio bonus (0..100).
  const ratioBonus = Math.round((hits / Math.max(1, targetWords.size)) * 100);
  return prefixBonus + hits + ratioBonus;
}
// Returns { i, score } for the block with the highest similarity score
// (first winner kept on ties; { i: -1, score: -1 } for an empty list).
function bestBlockMatchIndex(blocks, targetText) {
  let bestIndex = -1;
  let bestScore = -1;
  blocks.forEach((block, index) => {
    const score = scoreText(block, targetText);
    if (score > bestScore) {
      bestScore = score;
      bestIndex = index;
    }
  });
  return { i: bestIndex, score: bestScore };
}
// Splits MDX source into paragraph-level blocks separated by blank lines
// (CRLF normalized to LF first).
function splitParagraphBlocks(mdxText) {
  const unixText = String(mdxText ?? "").replace(/\r\n/g, "\n");
  return unixText.split(/\n{2,}/);
}
// Heuristic: does this look like a truncated excerpt rather than a full
// paragraph? (empty, short, ends with "." or "…", or mentions "tronqu…").
function isLikelyExcerpt(s) {
  const trimmed = String(s || "").trim();
  if (!trimmed) return true;
  if (trimmed.length < 120) return true;
  if (/[.…]$/.test(trimmed)) return true;
  return normalizeText(trimmed).includes("tronqu");
}
/* ------------------------------ utils système ------------------------------ */
// Runs a command with inherited stdio; throws on spawn error or non-zero exit.
function run(cmd, args, opts = {}) {
  const result = spawnSync(cmd, args, { stdio: "inherit", ...opts });
  if (result.error) throw result.error;
  if (result.status !== 0) {
    throw new Error(`Command failed: ${cmd} ${args.join(" ")}`);
  }
}
// Runs a command with captured output; returns stdout, throws (including the
// captured output) on spawn error or non-zero exit.
function runQuiet(cmd, args, opts = {}) {
  const result = spawnSync(cmd, args, { encoding: "utf8", stdio: "pipe", ...opts });
  if (result.error) throw result.error;
  if (result.status !== 0) {
    const output = (result.stdout || "") + (result.stderr || "");
    throw new Error(`Command failed: ${cmd} ${args.join(" ")}\n${output}`);
  }
  return result.stdout || "";
}
// True when the path is accessible (any fs.access failure counts as absent).
async function fileExists(p) {
  return fs.access(p).then(
    () => true,
    () => false
  );
}
// Reads an env var (trimmed), falling back when the variable is unset.
function getEnv(name, fallback = "") {
  const raw = process.env[name] ?? fallback;
  return raw.trim();
}
// Derives { owner, repo } from the "origin" git remote URL (SSH or HTTPS);
// returns null when git fails or the URL does not match.
function inferOwnerRepoFromGit() {
  const result = spawnSync("git", ["remote", "get-url", "origin"], { encoding: "utf-8" });
  if (result.status !== 0) return null;
  const url = (result.stdout || "").trim();
  const match = url.match(/[:/](?<owner>[^/]+)\/(?<repo>[^/]+?)(?:\.git)?$/);
  if (!match?.groups) return null;
  return { owner: match.groups.owner, repo: match.groups.repo };
}
// `git diff --cached --quiet` exits 1 exactly when the index has changes.
function gitHasStagedChanges() {
  const { status } = spawnSync("git", ["diff", "--cached", "--quiet"]);
  return status === 1;
}
/* ------------------------------ parsing ticket ----------------------------- */
// Escapes all RegExp metacharacters so the string matches itself literally.
function escapeRegExp(s) {
  return String(s).replace(/[.*+?^${}()|[\]\\]/g, (ch) => `\\${ch}`);
}
// Extracts the value of a "Key: value" line anywhere in the body
// (case-insensitive, multiline); "" when absent.
function pickLine(body, key) {
  const pattern = new RegExp(`^\\s*${escapeRegExp(key)}\\s*:\\s*([^\\n\\r]+)`, "mi");
  const match = String(body || "").match(pattern);
  return match ? match[1].trim() : "";
}
// Finds a "## Heading" section and returns its first non-empty, non-comment
// line, normalized to start with a single "/"; "" when the section is absent.
function pickHeadingValue(body, headingKey) {
  const pattern = new RegExp(
    `^##\\s*${escapeRegExp(headingKey)}[^\\n]*\\n([\\s\\S]*?)(?=\\n##\\s|\\n\\s*$)`,
    "mi"
  );
  const match = String(body || "").match(pattern);
  if (!match) return "";
  for (const rawLine of match[1].split(/\r?\n/)) {
    const line = rawLine.trim();
    if (!line) continue;
    if (line.startsWith("<!--")) continue;
    return line.replace(/^\/?/, "/").trim();
  }
  return "";
}
/**
 * Returns the text that follows the earliest-occurring marker, cut at the
 * first known section delimiter (case-insensitive for both markers and stops).
 */
function pickSection(body, markers) {
  const text = String(body || "").replace(/\r\n/g, "\n");
  const lowered = text.toLowerCase();
  // Pick whichever marker appears first in the body.
  let found = null;
  for (const marker of markers) {
    const at = lowered.indexOf(marker.toLowerCase());
    if (at < 0) continue;
    if (!found || at < found.at) found = { marker, at };
  }
  if (!found) return "";
  const tail = text.slice(found.at + found.marker.length);
  const stops = [
    "\n## ",
    "\nJustification",
    "\n---",
    "\n## Justification",
    "\n## Sources",
    "\nProblème identifié",
    "\nSources proposées",
    "\n## Proposition",
    "\n## Problème",
  ];
  const tailLower = tail.toLowerCase();
  let end = tail.length;
  for (const stop of stops) {
    const j = tailLower.indexOf(stop.toLowerCase());
    if (j >= 0 && j < end) end = j;
  }
  return tail.slice(0, end).trim();
}
// Strips Markdown blockquote prefixes ("> ") from every line, then trims.
function unquoteBlock(s) {
  const lines = String(s ?? "").split(/\r?\n/);
  const stripped = lines.map((line) => line.replace(/^\s*>\s?/, ""));
  return stripped.join("\n").trim();
}
// Canonicalizes a route to the "/segment/segment/" form ("" stays "").
function normalizeChemin(chemin) {
  const trimmed = String(chemin || "").trim();
  if (!trimmed) return "";
  const withLead = trimmed.startsWith("/") ? trimmed : `/${trimmed}`;
  return withLead.endsWith("/") ? withLead : `${withLead}/`;
}
// Finds the first "p-<n>-<8 hex>" anchor id anywhere in the text ("#" prefix
// tolerated); "" when none.
function extractAnchorIdAnywhere(text) {
  const match = String(text || "").match(/#?(p-\d+-[0-9a-f]{8})/i);
  return match?.[1] ?? "";
}
// Extracts the "/collection/slug/" route from any URL (or bare path) that
// ends with an anchor fragment, e.g.
//   http://localhost:4321/archicratie/prologue/#p-3-xxxx
//   /archicratie/prologue/#p-3-xxxx
function extractCheminFromAnyUrl(text) {
  const match = String(text || "").match(/(\/[a-z0-9\-]+\/[a-z0-9\-\/]+\/)#p-\d+-[0-9a-f]{8}/i);
  return match?.[1] ?? "";
}
/* --------------------------- lecture HTML paragraphe ------------------------ */
/**
 * Extracts plain text from a paragraph's inner HTML: removes the injected
 * "para-tools" widget, all remaining tags, and the widget's UI words.
 */
function cleanHtmlInner(inner) {
  let s = String(inner ?? "");
  // Drop the injected tools widget (¶ / Citer / Proposer buttons).
  s = s.replace(
    /<span[^>]*class=["'][^"']*para-tools[^"']*["'][^>]*>[\s\S]*?<\/span>/gi,
    " "
  );
  s = s.replace(/<[^>]+>/g, " ");
  s = s.replace(/\s+/g, " ").trim();
  // FIX: "¶" is not a word character, so the former \b¶\b could never match
  // and the pilcrow was left in the extracted text; match it bare instead.
  // NOTE(review): \bCopié\b likewise fails after the accented "é" when the
  // next char is a space — confirm whether "Copié" residue is still seen.
  s = s.replace(/¶|\b(Citer|Proposer|Copié)\b/gi, "").replace(/\s+/g, " ").trim();
  return s;
}
// Reads the built HTML page and returns the cleaned text of the <p> whose
// id equals anchorId; "" when the paragraph is not found.
async function readHtmlParagraphText(htmlPath, anchorId) {
  const html = await fs.readFile(htmlPath, "utf-8");
  const pattern = new RegExp(
    `<p[^>]*\\bid=["']${escapeRegExp(anchorId)}["'][^>]*>([\\s\\S]*?)<\\/p>`,
    "i"
  );
  const match = html.match(pattern);
  return match ? cleanHtmlInner(match[1]) : "";
}
// Returns every anchored paragraph of the built page as { id, text } pairs,
// in document order.
async function readAllHtmlParagraphs(htmlPath) {
  const html = await fs.readFile(htmlPath, "utf-8");
  const re = /<p\b[^>]*\sid=["'](p-\d+-[0-9a-f]{8})["'][^>]*>([\s\S]*?)<\/p>/gi;
  const paragraphs = [];
  for (const match of html.matchAll(re)) {
    paragraphs.push({ id: match[1], text: cleanHtmlInner(match[2]) });
  }
  return paragraphs;
}
/* --------------------------- localisation fichier contenu ------------------- */
/**
 * Maps a site route "/collection/slug-path/" to its source file under
 * src/content/<collection>/**.{mdx,md}; null when nothing matches.
 */
async function findContentFileFromChemin(chemin) {
  const clean = normalizeChemin(chemin).replace(/^\/+|\/+$/g, "");
  const parts = clean.split("/").filter(Boolean);
  if (parts.length < 2) return null;
  const [collection, ...slugParts] = parts;
  const slugPath = slugParts.join("/");
  const root = path.join(CONTENT_ROOT, collection);
  if (!(await fileExists(root))) return null;
  const extensions = [".mdx", ".md"];
  // True when the file's root-relative path (minus extension) equals the slug.
  const matchesSlug = (fullPath) => {
    const rel = path.relative(root, fullPath).replace(/\\/g, "/");
    for (const ext of extensions) {
      if (rel.endsWith(ext) && rel.slice(0, -ext.length) === slugPath) return true;
    }
    return false;
  };
  async function search(dir) {
    for (const entry of await fs.readdir(dir, { withFileTypes: true })) {
      const full = path.join(dir, entry.name);
      if (entry.isDirectory()) {
        const hit = await search(full);
        if (hit) return hit;
      } else if (matchesSlug(full)) {
        return full;
      }
    }
    return null;
  }
  return search(root);
}
/* -------------------------------- build helper ----------------------------- */
// Runs "npm run build" when the page's dist HTML is missing (unless
// --no-build); throws if the file is still absent after the build.
async function ensureBuildIfNeeded(distHtmlPath) {
  if (NO_BUILD || (await fileExists(distHtmlPath))) return;
  console.log(" dist manquant pour cette page → build (npm run build) …");
  run("npm", ["run", "build"], { cwd: CWD });
  if (!(await fileExists(distHtmlPath))) {
    throw new Error(`dist toujours introuvable après build: ${distHtmlPath}`);
  }
}
/* ----------------------------- API Gitea helpers --------------------------- */
// Fetches the issue JSON from the Gitea API; throws with the HTTP status and
// response body on failure.
async function fetchIssue({ forgeApiBase, owner, repo, token, issueNum }) {
  const base = forgeApiBase.replace(/\/+$/, "");
  const url = `${base}/api/v1/repos/${owner}/${repo}/issues/${issueNum}`;
  const res = await fetch(url, {
    headers: {
      Authorization: `token ${token}`,
      Accept: "application/json",
      "User-Agent": "archicratie-apply-ticket/2.0",
    },
  });
  if (res.ok) return res.json();
  const detail = await res.text().catch(() => "");
  throw new Error(`HTTP ${res.status} fetching issue: ${url}\n${detail}`);
}
// Optionally posts a comment (best-effort, response not checked), then
// PATCHes the issue to state "closed"; throws when the close itself fails.
async function closeIssue({ forgeApiBase, owner, repo, token, issueNum, comment }) {
  const base = forgeApiBase.replace(/\/+$/, "");
  const headers = {
    Authorization: `token ${token}`,
    Accept: "application/json",
    "Content-Type": "application/json",
    "User-Agent": "archicratie-apply-ticket/2.0",
  };
  if (comment) {
    const commentsUrl = `${base}/api/v1/repos/${owner}/${repo}/issues/${issueNum}/comments`;
    await fetch(commentsUrl, { method: "POST", headers, body: JSON.stringify({ body: comment }) });
  }
  const issueUrl = `${base}/api/v1/repos/${owner}/${repo}/issues/${issueNum}`;
  const res = await fetch(issueUrl, {
    method: "PATCH",
    headers,
    body: JSON.stringify({ state: "closed" }),
  });
  if (!res.ok) {
    const detail = await res.text().catch(() => "");
    throw new Error(`HTTP ${res.status} closing issue: ${issueUrl}\n${detail}`);
  }
}
/* ------------------------------ Aliases helpers ---------------------------- */
// Loads the alias map from disk; any read/parse problem (or a non-object
// payload) yields an empty map.
async function loadAliases() {
  try {
    const parsed = JSON.parse(await fs.readFile(ALIASES_FILE, "utf8"));
    return parsed && typeof parsed === "object" ? parsed : {};
  } catch {
    return {};
  }
}
// Returns a shallow copy of obj with its keys in sorted order (stable JSON
// output between runs).
function sortObjectKeys(obj) {
  const sorted = {};
  for (const key of Object.keys(obj).sort()) {
    sorted[key] = obj[key];
  }
  return sorted;
}
// Persists the alias map with deterministically sorted keys (outer routes and
// inner id mappings) so diffs stay minimal.
async function saveAliases(obj) {
  let out = obj || {};
  for (const key of Object.keys(out)) {
    const value = out[key];
    if (value && typeof value === "object") out[key] = sortObjectKeys(value);
  }
  out = sortObjectKeys(out);
  await fs.mkdir(path.dirname(ALIASES_FILE), { recursive: true });
  await fs.writeFile(ALIASES_FILE, JSON.stringify(out, null, 2) + "\n", "utf8");
}
/**
 * Records the old->new anchor alias for a route. No-op when the alias already
 * exists or old == new; throws on conflicting prior mapping.
 * @returns {{ changed: boolean, reason: string }}
 */
async function upsertAlias({ chemin, oldId, newId }) {
  const route = normalizeChemin(chemin);
  if (!oldId || !newId) throw new Error("Alias: oldId/newId requis");
  if (oldId === newId) return { changed: false, reason: "same" };
  const data = await loadAliases();
  const mapping = data[route] ?? (data[route] = {});
  const prev = mapping[oldId];
  if (prev === newId) return { changed: false, reason: "already" };
  if (prev) {
    // Refuse to silently re-point an existing alias elsewhere.
    throw new Error(
      `Alias conflict: ${route}${oldId} already mapped to ${prev} (new=${newId})`
    );
  }
  mapping[oldId] = newId;
  await saveAliases(data);
  return { changed: true, reason: "written" };
}
/**
 * After a rebuild, identifies the new anchor id of the replaced paragraph by
 * content similarity against every paragraph of the built page. Throws when
 * no paragraph scores at least 60.
 */
async function computeNewIdFromDistByContent(distHtmlPath, afterBlock) {
  const paras = await readAllHtmlParagraphs(distHtmlPath);
  if (!paras.length) throw new Error(`Aucun <p id="p-..."> trouvé dans ${distHtmlPath}`);
  const target = stripMd(afterBlock).slice(0, 1200);
  let bestId = null;
  let bestScore = -1;
  for (const para of paras) {
    const score = scoreText(para.text, target);
    if (score > bestScore) {
      bestScore = score;
      bestId = para.id;
    }
  }
  if (!bestId || bestScore < 60) {
    throw new Error(
      `Impossible d'identifier le nouvel id dans dist (score trop faible: ${bestScore}).\n` +
      `➡️ Vérifie que la proposition correspond bien à UN paragraphe.`
    );
  }
  return bestId;
}
/* ----------------------------------- MAIN ---------------------------------- */
/**
 * Entry point: fetches the Gitea issue, parses the editorial ticket fields
 * (Chemin / Ancre / Proposition), locates the matching paragraph in the
 * source .mdx, replaces it, then optionally writes an anchor alias, commits,
 * and closes the ticket.
 */
async function main() {
  // --- Credentials / repo coordinates -------------------------------------
  const token = getEnv("FORGE_TOKEN");
  if (!token) {
    console.error("❌ FORGE_TOKEN manquant (PAT). Ex: export FORGE_TOKEN='...'");
    process.exit(1);
  }
  const inferred = inferOwnerRepoFromGit() || {};
  const owner = getEnv("GITEA_OWNER", inferred.owner || "");
  const repo = getEnv("GITEA_REPO", inferred.repo || "");
  if (!owner || !repo) {
    console.error("❌ Impossible de déterminer owner/repo. Fix: export GITEA_OWNER=... GITEA_REPO=...");
    process.exit(1);
  }
  const forgeApiBase = getEnv("FORGE_API") || getEnv("FORGE_BASE");
  if (!forgeApiBase) {
    console.error("❌ FORGE_API ou FORGE_BASE manquant. Ex: export FORGE_API='http://192.168.1.20:3000'");
    process.exit(1);
  }
  console.log(`🔎 Fetch ticket #${issueNum} from ${owner}/${repo}`);
  const issue = await fetchIssue({ forgeApiBase, owner, repo, token, issueNum });
  // Guard: Pull Requests ("Demande d'ajout") are not editorial tickets.
  if (issue?.pull_request) {
    console.error(`❌ #${issueNum} est une Pull Request (demande dajout), pas un ticket éditorial.`);
    console.error(`➡️ Ouvre un ticket [Correction]/[Fact-check] depuis le site (Proposer), puis relance apply-ticket sur ce numéro.`);
    process.exit(2);
  }
  // --- Parse the ticket body (several tolerated formats) ------------------
  const body = String(issue.body || "").replace(/\r\n/g, "\n");
  const title = String(issue.title || "");
  let chemin =
    pickLine(body, "Chemin") ||
    pickHeadingValue(body, "Chemin") ||
    extractCheminFromAnyUrl(body) ||
    extractCheminFromAnyUrl(title);
  let ancre =
    pickLine(body, "Ancre") ||
    pickHeadingValue(body, "Ancre paragraphe") ||
    pickHeadingValue(body, "Ancre");
  ancre = (ancre || "").trim();
  if (ancre.startsWith("#")) ancre = ancre.slice(1);
  // Fallback for malformed tickets: scan title then body for any anchor id.
  if (!ancre) ancre = extractAnchorIdAnywhere(title) || extractAnchorIdAnywhere(body);
  chemin = normalizeChemin(chemin);
  const currentFull = pickSection(body, [
    "Texte actuel (copie exacte du paragraphe)",
    "## Texte actuel (copie exacte du paragraphe)",
  ]);
  const currentEx = pickSection(body, [
    "Texte actuel (extrait)",
    "## Assertion / passage à vérifier",
    "Assertion / passage à vérifier",
  ]);
  const texteActuel = unquoteBlock(currentFull || currentEx);
  const prop1 = pickSection(body, [
    "Proposition (texte corrigé complet)",
    "## Proposition (texte corrigé complet)",
  ]);
  const prop2 = pickSection(body, [
    "Proposition (remplacer par):",
    "## Proposition (remplacer par)",
  ]);
  const proposition = (prop1 || prop2).trim();
  if (!chemin) throw new Error("Ticket: Chemin introuvable dans le body.");
  if (!ancre) throw new Error("Ticket: Ancre introuvable dans le body.");
  if (!proposition) throw new Error("Ticket: Proposition introuvable dans le body.");
  console.log("✅ Parsed:", { chemin, ancre: `#${ancre}`, hasTexteActuel: Boolean(texteActuel) });
  // --- Locate the source file and (if needed) the built HTML --------------
  const contentFile = await findContentFileFromChemin(chemin);
  if (!contentFile) throw new Error(`Fichier contenu introuvable pour Chemin=${chemin}`);
  console.log(`📄 Target content file: ${path.relative(CWD, contentFile)}`);
  const distHtmlPath = path.join(DIST_ROOT, chemin.replace(/^\/+|\/+$/g, ""), "index.html");
  await ensureBuildIfNeeded(distHtmlPath);
  // Target text: prefer the full text from the ticket; fall back to the dist
  // HTML when the ticket only carries a (probably truncated) excerpt.
  let targetText = texteActuel;
  let distText = "";
  if (await fileExists(distHtmlPath)) {
    distText = await readHtmlParagraphText(distHtmlPath, ancre);
  }
  if (!targetText && distText) targetText = distText;
  if (targetText && distText && isLikelyExcerpt(targetText) && distText.length > targetText.length) {
    targetText = distText;
  }
  if (!targetText) {
    throw new Error("Impossible de reconstruire le texte du paragraphe (ni texte actuel, ni dist html).");
  }
  // --- Fuzzy-match the paragraph block inside the .mdx ---------------------
  const original = await fs.readFile(contentFile, "utf-8");
  const blocks = splitParagraphBlocks(original);
  const best = bestBlockMatchIndex(blocks, targetText);
  // Safety threshold: refuse low-confidence matches and list the candidates.
  if (best.i < 0 || best.score < 40) {
    console.error("❌ Match trop faible: je refuse de remplacer automatiquement.");
    console.error(`➡️ Score=${best.score}. Recommandation: ticket avec 'Texte actuel (copie exacte du paragraphe)'.`);
    const ranked = blocks
      .map((b, i) => ({ i, score: scoreText(b, targetText), excerpt: stripMd(b).slice(0, 140) }))
      .sort((a, b) => b.score - a.score)
      .slice(0, 5);
    console.error("Top candidates:");
    for (const r of ranked) {
      console.error(` #${r.i + 1} score=${r.score} ${r.excerpt}${r.excerpt.length >= 140 ? "…" : ""}`);
    }
    process.exit(2);
  }
  const beforeBlock = blocks[best.i];
  const afterBlock = proposition.trim();
  const nextBlocks = blocks.slice();
  nextBlocks[best.i] = afterBlock;
  const updated = nextBlocks.join("\n\n");
  console.log(`🧩 Matched block #${best.i + 1}/${blocks.length} score=${best.score}`);
  if (DRY_RUN) {
    console.log("\n--- DRY RUN (no write, no backup) ---\n");
    console.log("=== BEFORE (excerpt) ===");
    console.log(beforeBlock.slice(0, 400) + (beforeBlock.length > 400 ? "…" : ""));
    console.log("\n=== AFTER (excerpt) ===");
    console.log(afterBlock.slice(0, 400) + (afterBlock.length > 400 ? "…" : ""));
    console.log("\n✅ Dry-run terminé.");
    return;
  }
  // Backup only when actually writing; never overwrite an existing backup.
  const bakPath = `${contentFile}.bak.issue-${issueNum}`;
  if (!(await fileExists(bakPath))) {
    await fs.writeFile(bakPath, original, "utf-8");
  }
  await fs.writeFile(contentFile, updated, "utf-8");
  console.log("✅ Applied.");
  // --- Optional: record the old -> new anchor alias ------------------------
  let aliasChanged = false;
  let newId = null;
  if (DO_ALIAS) {
    console.log("🔁 Rebuild to compute new anchor ids (npm run build) …");
    run("npm", ["run", "build"], { cwd: CWD });
    if (!(await fileExists(distHtmlPath))) {
      throw new Error(`dist introuvable après build: ${distHtmlPath}`);
    }
    newId = await computeNewIdFromDistByContent(distHtmlPath, afterBlock);
    const res = await upsertAlias({ chemin, oldId: ancre, newId });
    aliasChanged = res.changed;
    if (aliasChanged) {
      console.log(`✅ Alias ajouté: ${chemin} ${ancre} -> ${newId}`);
      // Refresh dist without a full rebuild (alias injection only).
      run("node", ["scripts/inject-anchor-aliases.mjs"], { cwd: CWD });
    } else {
      console.log(` Alias déjà présent ou inutile (${ancre} -> ${newId}).`);
    }
    // Quick guard-rails before anything gets committed.
    run("npm", ["run", "test:anchors"], { cwd: CWD });
    run("node", ["scripts/check-inline-js.mjs"], { cwd: CWD });
  }
  // --- Optional: commit (and possibly close the ticket) --------------------
  if (DO_COMMIT) {
    const files = [path.relative(CWD, contentFile)];
    if (DO_ALIAS && aliasChanged) files.push(path.relative(CWD, ALIASES_FILE));
    run("git", ["add", ...files], { cwd: CWD });
    if (!gitHasStagedChanges()) {
      console.log(" Nothing to commit (aucun changement staged).");
      return;
    }
    const msg = `edit: apply ticket #${issueNum} (${chemin}#${ancre})`;
    run("git", ["commit", "-m", msg], { cwd: CWD });
    const sha = runQuiet("git", ["rev-parse", "--short", "HEAD"], { cwd: CWD }).trim();
    console.log(`✅ Committed: ${msg} (${sha})`);
    if (DO_CLOSE) {
      const comment = `✅ Appliqué par apply-ticket.\nCommit: ${sha}`;
      await closeIssue({ forgeApiBase, owner, repo, token, issueNum, comment });
      console.log(`✅ Ticket #${issueNum} fermé.`);
    }
    return;
  }
  // Manual mode: print the follow-up git commands for the operator.
  console.log("Next (manuel) :");
  console.log(` git diff -- ${path.relative(CWD, contentFile)}`);
  console.log(
    ` git add ${path.relative(CWD, contentFile)}${
      DO_ALIAS ? " src/anchors/anchor-aliases.json" : ""
    }`
  );
  console.log(` git commit -m "edit: apply ticket #${issueNum} (${chemin}#${ancre})"`);
  if (DO_CLOSE) {
    console.log(" (puis relance avec --commit --close pour fermer automatiquement)");
  }
}
// Top-level runner: any unhandled error prints its message and exits non-zero.
main().catch((e) => {
  console.error("💥", e?.message || e);
  process.exit(1);
});

85
scripts/audit-dist.mjs Normal file
View File

@@ -0,0 +1,85 @@
#!/usr/bin/env node
import fs from "node:fs/promises";
import path from "node:path";
const DIST = path.join(process.cwd(), "dist"); // built site output directory to audit
// Removes regions whose contents are not real DOM (comments, scripts, styles,
// templates) to avoid false-positive id matches.
function stripNonDomRegions(html) {
  let out = html;
  out = out.replace(/<!--[\s\S]*?-->/g, "");
  out = out.replace(/<script\b[^>]*>[\s\S]*?<\/script>/gi, "");
  out = out.replace(/<style\b[^>]*>[\s\S]*?<\/style>/gi, "");
  out = out.replace(/<template\b[^>]*>[\s\S]*?<\/template>/gi, "");
  return out;
}
// Collects every real DOM id attribute. The leading \s requirement matches
// " id=" but not "data-id="; only attribute positions inside tags are scanned.
function extractRealIds(html) {
  const cleaned = stripNonDomRegions(html);
  const re = /<[^>]*\sid\s*=\s*(["'])(.*?)\1/gi;
  const ids = [];
  for (const match of cleaned.matchAll(re)) {
    const id = (match[2] || "").trim();
    if (id) ids.push(id);
  }
  return ids;
}
// Recursively lists every *.html file under dir (depth-first, in readdir
// order).
async function listHtmlFiles(dir) {
  const found = [];
  async function visit(current) {
    for (const entry of await fs.readdir(current, { withFileTypes: true })) {
      const full = path.join(current, entry.name);
      if (entry.isDirectory()) {
        await visit(full);
      } else if (entry.isFile() && full.endsWith(".html")) {
        found.push(full);
      }
    }
  }
  await visit(dir);
  return found;
}
// Returns [id, count] pairs for ids seen more than once, ordered by
// descending count then id.
function countDuplicates(ids) {
  const counts = new Map();
  for (const id of ids) {
    counts.set(id, (counts.get(id) ?? 0) + 1);
  }
  const dups = Array.from(counts.entries()).filter(([, count]) => count > 1);
  return dups.sort((a, b) => b[1] - a[1] || a[0].localeCompare(b[0]));
}
// ---------------------------------------------------------------------------
// Main (top-level await): scan every built HTML page and fail the run if any
// page contains duplicate DOM ids.
// ---------------------------------------------------------------------------
let failures = 0;
let files = 0;
let htmlFiles = [];
try {
  htmlFiles = await listHtmlFiles(DIST);
} catch {
  console.error(`❌ dist introuvable: ${DIST} (as-tu fait 'npm run build' ?)`);
  process.exit(1);
}
for (const file of htmlFiles) {
  files++;
  const html = await fs.readFile(file, "utf8");
  const ids = extractRealIds(html);
  const dups = countDuplicates(ids);
  if (dups.length) {
    failures++;
    console.error(`❌ DUP IDS: ${file}`);
    for (const [id, c] of dups) {
      console.error(` - ${id} x${c}`);
    }
  }
}
if (failures) {
  console.error(`\n❌ audit-dist FAILED: files=${files} failures=${failures}`);
  process.exit(1);
}
console.log(`✅ audit-dist OK: files=${files}`);

View File

@@ -0,0 +1,63 @@
import fs from "node:fs";
import path from "node:path";
// Validates src/anchors/anchor-aliases.json.
// Expected shape: { "/route/": { "old-id": "new-id", ... }, ... }
// Exits 0 on success (or when the file is absent), 1 on the first violation.
const ALIASES_PATH = path.join(process.cwd(), "src", "anchors", "anchor-aliases.json");
if (!fs.existsSync(ALIASES_PATH)) {
  console.log(" Aucun fichier d'aliases (src/anchors/anchor-aliases.json). Skip.");
  process.exit(0);
}
let data;
try {
  data = JSON.parse(fs.readFileSync(ALIASES_PATH, "utf8"));
} catch (e) {
  console.error("❌ JSON invalide dans src/anchors/anchor-aliases.json");
  console.error(e?.message || e);
  process.exit(1);
}
if (!data || typeof data !== "object" || Array.isArray(data)) {
  console.error("❌ Le JSON doit être un objet { route: { oldId: newId } }");
  process.exit(1);
}
let pages = 0;
let aliases = 0;
for (const [route, mapping] of Object.entries(data)) {
  pages++;
  if (typeof route !== "string" || !route.trim()) {
    console.error("❌ Route invalide (clé): doit être une string non vide", { route });
    process.exit(1);
  }
  // Optional but healthy: routes must look like "/xxx/yyy/".
  if (!route.startsWith("/") || !route.endsWith("/")) {
    console.error("❌ Route invalide: doit commencer et finir par '/'", { route });
    process.exit(1);
  }
  if (!mapping || typeof mapping !== "object" || Array.isArray(mapping)) {
    console.error("❌ Mapping invalide: doit être un objet { oldId: newId }", { route });
    process.exit(1);
  }
  for (const [oldId, newId] of Object.entries(mapping)) {
    if (typeof oldId !== "string" || typeof newId !== "string") {
      console.error("❌ oldId/newId doivent être des strings", { route, oldId, newId });
      process.exit(1);
    }
    if (!oldId.trim() || !newId.trim()) {
      console.error("❌ oldId/newId ne doivent pas être vides", { route, oldId, newId });
      process.exit(1);
    }
    // A self-alias would be useless and hints at a generation bug.
    if (oldId === newId) {
      console.error("❌ oldId doit différer de newId", { route, oldId });
      process.exit(1);
    }
    aliases++;
  }
}
console.log(`✅ anchor-aliases.json OK: pages=${pages} aliases=${aliases}`);

195
scripts/check-anchors.mjs Normal file
View File

@@ -0,0 +1,195 @@
#!/usr/bin/env node
import fs from "node:fs/promises";
import fssync from "node:fs";
import path from "node:path";
// --- CLI / env configuration -----------------------------------------------
const args = new Set(process.argv.slice(2));
// Returns the value that follows a "--name" flag, or `fallback` when absent.
const getArg = (name, fallback = null) => {
  const i = process.argv.indexOf(name);
  if (i >= 0 && process.argv[i + 1]) return process.argv[i + 1];
  return fallback;
};
const DIST_DIR = getArg("--dist", "dist");
const BASELINE = getArg("--baseline", path.join("tests", "anchors-baseline.json"));
const UPDATE = args.has("--update");
// Churn tolerance, as a fraction of a page's previous id count. Ex: 0.2 => 20%
const THRESHOLD = Number(getArg("--threshold", process.env.ANCHORS_THRESHOLD ?? "0.2"));
// Pages with fewer prior ids than this never fail the run (too noisy).
const MIN_PREV = Number(getArg("--min-prev", process.env.ANCHORS_MIN_PREV ?? "10"));
// Formats a 0..1 fraction as a one-decimal percentage string.
const pct = (x) => (Math.round(x * 1000) / 10).toFixed(1) + "%";
// Recursively collects every *.html file path under dir.
async function walk(dir) {
  const found = [];
  for (const entry of await fs.readdir(dir, { withFileTypes: true })) {
    const p = path.join(dir, entry.name);
    if (entry.isDirectory()) {
      found.push(...(await walk(p)));
    } else if (entry.isFile() && entry.name.endsWith(".html")) {
      found.push(p);
    }
  }
  return found;
}
// Contrat :
// - paragraphes citables : .reading p[id^="p-"]
// - alias web-natifs : .reading span.para-alias[id^="p-"]
// Contract:
// - citable paragraphs : .reading p[id^="p-"]
// - web-native aliases : .reading span.para-alias[id^="p-"]
// Returns the ids (paragraphs first, then alias spans with id-before-class,
// then class-before-id), de-duplicated with stable order. Pages without a
// "reading" container are skipped entirely.
function extractIds(html) {
  if (!html.includes('class="reading"')) return [];
  const patterns = [
    // 1) main paragraph ids
    /<p\b[^>]*\sid="(p-[^"]+)"/g,
    // 2a) injected alias spans: id="..." before class="...para-alias..."
    /<span\b[^>]*\bid="(p-[^"]+)"[^>]*\bclass="[^"]*\bpara-alias\b[^"]*"/g,
    // 2b) injected alias spans: class="...para-alias..." before id="..."
    /<span\b[^>]*\bclass="[^"]*\bpara-alias\b[^"]*"[^>]*\bid="(p-[^"]+)"/g,
  ];
  const seen = new Set();
  const uniq = [];
  for (const re of patterns) {
    for (const match of html.matchAll(re)) {
      const id = match[1];
      if (seen.has(id)) continue;
      seen.add(id);
      uniq.push(id);
    }
  }
  return uniq;
}
// Loads the allow-list of pages that may legitimately disappear from dist;
// absent or empty file means "allow nothing".
function loadAllowMissing() {
  const allowPath = path.resolve("scripts/anchors-allow-missing.json");
  if (!fssync.existsSync(allowPath)) return new Set();
  const raw = fssync.readFileSync(allowPath, "utf8").trim();
  if (!raw) return new Set();
  const parsed = JSON.parse(raw);
  if (!Array.isArray(parsed)) throw new Error("anchors-allow-missing.json must be an array");
  return new Set(parsed.map(String));
}
const ALLOW_MISSING = loadAllowMissing();
// Builds { "relative/page.html": [ids...] } for every dist page that exposes
// at least one anchor id, with keys in sorted order for stable JSON diffs.
async function buildSnapshot() {
  const absDist = path.resolve(DIST_DIR);
  const snapshot = {};
  for (const file of await walk(absDist)) {
    const html = await fs.readFile(file, "utf8");
    const ids = extractIds(html);
    if (!ids.length) continue;
    const rel = path.relative(absDist, file).replace(/\\/g, "/");
    snapshot[rel] = ids;
  }
  const ordered = {};
  for (const key of Object.keys(snapshot).sort()) {
    ordered[key] = snapshot[key];
  }
  return ordered;
}
// Reads and parses a UTF-8 JSON file (propagates read/parse errors).
async function readJson(p) {
  return JSON.parse(await fs.readFile(p, "utf8"));
}
// Serializes obj as pretty JSON (+ trailing newline), creating parent
// directories as needed.
async function writeJson(p, obj) {
  await fs.mkdir(path.dirname(p), { recursive: true });
  const payload = JSON.stringify(obj, null, 2) + "\n";
  await fs.writeFile(p, payload, "utf8");
}
// Set difference in both directions, preserving input order within each list.
function diffPage(prevIds, curIds) {
  const prevSet = new Set(prevIds);
  const curSet = new Set(curIds);
  return {
    added: curIds.filter((id) => !prevSet.has(id)),
    removed: prevIds.filter((id) => !curSet.has(id)),
  };
}
// Main: build the current snapshot, then either refresh the baseline
// (--update) or diff against it and fail on excessive anchor churn.
(async () => {
  const snap = await buildSnapshot();
  if (UPDATE) {
    await writeJson(BASELINE, snap);
    const pages = Object.keys(snap).length;
    const total = Object.values(snap).reduce((a, xs) => a + xs.length, 0);
    console.log(`OK baseline updated -> ${BASELINE}`);
    console.log(`Pages: ${pages}, Total paragraph IDs: ${total}`);
    process.exit(0);
  }
  let base;
  try {
    base = await readJson(BASELINE);
  } catch {
    console.error(`Baseline missing: ${BASELINE}`);
    console.error(`Run: node scripts/check-anchors.mjs --update`);
    process.exit(2);
  }
  const allPages = new Set([...Object.keys(base), ...Object.keys(snap)]);
  const pages = Array.from(allPages).sort();
  let failed = false;
  let changedPages = 0;
  for (const p of pages) {
    const prevIds = base[p] || null;
    const curIds = snap[p] || null;
    // New page: informational only, never a failure.
    if (!prevIds && curIds) {
      console.log(`+ PAGE ${p} (new) ids=${curIds.length}`);
      continue;
    }
    // Page gone from dist: failure unless explicitly allow-listed.
    if (prevIds && !curIds) {
      if (ALLOW_MISSING.has(p)) {
        console.log(`~ PAGE ${p} (missing now) ✅ allowed prevIds=${prevIds.length}`);
        continue;
      }
      console.log(`- PAGE ${p} (missing now) prevIds=${prevIds.length}`);
      failed = true;
      continue;
    }
    // (safety) nothing before / nothing now
    if (!prevIds && !curIds) continue;
    const { added, removed } = diffPage(prevIds, curIds);
    if (added.length === 0 && removed.length === 0) continue;
    changedPages += 1;
    const prevN = prevIds.length || 1;
    const churn = (added.length + removed.length) / prevN;
    const removedRatio = removed.length / prevN;
    console.log(
      `~ ${p} prev=${prevIds.length} now=${curIds.length}` +
      ` +${added.length} -${removed.length} churn=${pct(churn)}`
    );
    if (removed.length) {
      console.log(` removed: ${removed.slice(0, 20).join(", ")}${removed.length > 20 ? " …" : ""}`);
    }
    // Fail when a well-populated page churns or loses too many anchors.
    if (prevIds.length >= MIN_PREV && churn > THRESHOLD) failed = true;
    if (prevIds.length >= MIN_PREV && removedRatio > THRESHOLD) failed = true;
  }
  console.log(`\nSummary: pages compared=${pages.length}, pages changed=${changedPages}`);
  if (failed) {
    console.error(`FAIL: anchor churn above threshold (threshold=${pct(THRESHOLD)} minPrev=${MIN_PREV})`);
    process.exit(1);
  }
  console.log("OK: anchors stable within threshold");
})();

View File

@@ -0,0 +1,70 @@
#!/usr/bin/env node
import fs from "node:fs/promises";
import path from "node:path";
import os from "node:os";
import { spawnSync } from "node:child_process";
// Repo root and the src/ tree that will be scanned for .astro files.
const ROOT = process.cwd();
const SRC = path.join(ROOT, "src");
async function* walk(dir) {
  // Depth-first traversal yielding the absolute path of every file under `dir`.
  for (const entry of await fs.readdir(dir, { withFileTypes: true })) {
    const resolved = path.join(dir, entry.name);
    if (entry.isDirectory()) {
      yield* walk(resolved);
    } else {
      yield resolved;
    }
  }
}
function extractInlineScripts(astroText) {
  // Collect the body of every <script … is:inline …>…</script> block.
  const pattern = /<script\b[^>]*\bis:inline\b[^>]*>([\s\S]*?)<\/script>/gi;
  const bodies = [];
  for (const match of astroText.matchAll(pattern)) {
    bodies.push(match[1] ?? "");
  }
  return bodies;
}
async function checkSyntax(js, label) {
  // Validate a JS snippet by writing it to a temp .mjs file and running
  // `node --check` on it. Rejects with `label` + node's stderr on a syntax
  // error; resolves (undefined) when the snippet parses.
  // FIX: rewritten from a mixed .then() chain with a fire-and-forget unlink
  // to async/await with a try/finally so the temp file is always removed,
  // including when the check fails or writeFile throws.
  const tmp = path.join(
    os.tmpdir(),
    `inline-js-check-${Date.now()}-${Math.random().toString(16).slice(2)}.mjs`
  );
  try {
    await fs.writeFile(tmp, `// ${label}\n${js}\n`, "utf-8");
    const r = spawnSync(process.execPath, ["--check", tmp], { encoding: "utf-8" });
    if (r.status !== 0) {
      const msg = (r.stderr || r.stdout || "").trim();
      throw new Error(`${label}\n${msg}`);
    }
  } finally {
    // Best-effort cleanup: never mask the real error with an unlink failure.
    await fs.unlink(tmp).catch(() => {});
  }
}
async function main() {
  // Scan every .astro file under src/ and syntax-check each inline script.
  const targets = [];
  for await (const f of walk(SRC)) {
    if (f.endsWith(".astro")) targets.push(f);
  }
  let checked = 0;
  for (const file of targets) {
    const txt = await fs.readFile(file, "utf-8");
    const scripts = extractInlineScripts(txt);
    if (!scripts.length) continue;
    for (let i = 0; i < scripts.length; i++) {
      const js = (scripts[i] || "").trim();
      if (!js) continue; // skip empty <script is:inline></script> blocks
      const label = `${path.relative(ROOT, file)} :: <script is:inline> #${i + 1}`;
      // checkSyntax throws on the first syntax error; the top-level
      // .catch() reports it and fails the process.
      await checkSyntax(js, label);
      checked++;
    }
  }
  console.log(`OK inline-js: scripts checked=${checked}`);
}
// Entry point: exit non-zero so CI fails on any syntax error.
main().catch((e) => {
  console.error("❌ inline-js syntax check failed");
  console.error(e?.message || e);
  process.exit(1);
});

View File

@@ -0,0 +1,75 @@
#!/bin/sh
# clean-macos-artifacts.sh — list (dry-run) or delete macOS filesystem litter:
#   - .DS_Store files
#   - AppleDouble files: ._*
#   - archive folders: __MACOSX, PaxHeader
# Excluded areas: .git/, node_modules/, dist/.
# Default mode is DRY-RUN; pass --apply to actually delete.
set -eu
ROOT="$(CDPATH= cd -- "$(dirname -- "$0")/.." && pwd)"
MODE="${1:-}"
if [ "$MODE" = "--help" ] || [ "$MODE" = "-h" ]; then
  echo "Usage:"
  echo " $0 # DRY-RUN (liste ce qui serait supprimé)"
  echo " $0 --apply # supprime réellement"
  exit 0
fi
if [ "$MODE" = "--apply" ]; then
  DO_DELETE=1
  echo "[clean-macos] MODE=APPLY (suppression réelle)"
else
  DO_DELETE=0
  echo "[clean-macos] MODE=DRY-RUN (aucune suppression)"
  echo " Passe --apply pour supprimer réellement."
fi
cd "$ROOT"
# Temp file holding the list of targets.
# FIX: trap guarantees the temp file is removed even on error or interrupt
# (the original leaked /tmp/macos_artifacts.$$ whenever set -e aborted early).
LIST="/tmp/macos_artifacts.$$"
trap 'rm -f "$LIST"' EXIT INT TERM
echo ""
echo "[clean-macos] Recherche d'artefacts… (hors .git/, node_modules/, dist/)"
# Target list (BusyBox-safe: no -print0/-0)
find . \
  \( -path "./.git" -o -path "./.git/*" -o -path "./node_modules" -o -path "./node_modules/*" -o -path "./dist" -o -path "./dist/*" \) -prune -o \
  \( -name ".DS_Store" -o -name "._*" -o -name "__MACOSX" -o -name "PaxHeader" \) \
  -print > "$LIST" || true
COUNT="$(wc -l < "$LIST" | tr -d ' ')"
if [ "${COUNT:-0}" = "0" ]; then
  echo "[clean-macos] OK: aucun artefact détecté."
  exit 0
fi
echo "[clean-macos] Trouvé: $COUNT"
sed 's|^\./||' "$LIST" | head -n 200
if [ "$COUNT" -gt 200 ]; then
  echo "[clean-macos] … (liste tronquée à 200)"
fi
if [ "$DO_DELETE" -ne 1 ]; then
  echo ""
  echo "[clean-macos] DRY-RUN terminé. Pour supprimer:"
  echo " $0 --apply"
  exit 0
fi
echo ""
echo "[clean-macos] Suppression…"
# Re-read the list (avoids re-running find): rm -rf for dirs, rm -f for files.
while IFS= read -r p; do
  [ -z "$p" ] && continue
  if [ -d "$p" ]; then
    rm -rf "$p"
  else
    rm -f "$p"
  fi
done < "$LIST"
echo "[clean-macos] OK: suppression terminée."

119
scripts/deploy-slot.sh Normal file
View File

@@ -0,0 +1,119 @@
#!/bin/sh
# deploy-slot.sh — build + start + health-wait + smoke-test ONE blue/green slot.
# The actual production switch (and 10s rollback) is done manually in the DSM
# reverse proxy; instructions are printed at the end.
set -eu
SLOT="${1:-}"
FLAG="${2:-}"
if [ -z "$SLOT" ] || [ "$SLOT" = "--help" ] || [ "$SLOT" = "-h" ]; then
  echo "Usage:"
  echo " $0 green [--no-cache] # build+up+health+smoke sur green"
  echo " $0 blue [--no-cache] # build+up+health+smoke sur blue"
  echo ""
  echo "Options:"
  echo " --no-cache build sans cache (recommandé si vars .env changent)"
  echo " --clean DRY-RUN nettoyage artefacts macOS"
  echo " --clean-apply nettoyage APPLY (supprime réellement)"
  exit 0
fi
# Map slot -> compose service, host port and fixed container name.
case "$SLOT" in
  blue) SERVICE="web_blue"; PORT="8081"; CNAME="archicratie-web-blue" ;;
  green) SERVICE="web_green"; PORT="8082"; CNAME="archicratie-web-green" ;;
  *) echo "Slot invalide: $SLOT (attendu: blue|green)"; exit 1 ;;
esac
ROOT="$(CDPATH= cd -- "$(dirname -- "$0")/.." && pwd)"
cd "$ROOT"
# Flags
NO_CACHE=0
DO_CLEAN=0
CLEAN_APPLY=0
# parse second arg only (simple & cockpit)
case "${FLAG:-}" in
  --no-cache) NO_CACHE=1 ;;
  --clean) DO_CLEAN=1 ;;
  --clean-apply) DO_CLEAN=1; CLEAN_APPLY=1 ;;
  "") ;;
  *) echo "Flag invalide: $FLAG"; exit 1 ;;
esac
echo "== deploy-slot =="
echo "slot : $SLOT"
echo "service : $SERVICE"
echo "port : $PORT"
echo "no-cache : $NO_CACHE"
echo "clean : $DO_CLEAN (apply=$CLEAN_APPLY)"
echo ""
if [ ! -f "docker-compose.yml" ]; then
  echo "ERREUR: docker-compose.yml introuvable dans $ROOT"
  exit 1
fi
# Optional macOS artefact cleanup before building (dry-run or apply).
if [ "$DO_CLEAN" -eq 1 ]; then
  if [ "$CLEAN_APPLY" -eq 1 ]; then
    ./scripts/clean-macos-artifacts.sh --apply
  else
    ./scripts/clean-macos-artifacts.sh
  fi
  echo ""
fi
# Enable BuildKit (works better on DSM).
export DOCKER_BUILDKIT=1
export COMPOSE_DOCKER_CLI_BUILD=1
echo "== 1) Build $SERVICE =="
if [ "$NO_CACHE" -eq 1 ]; then
  docker compose build --no-cache "$SERVICE"
else
  docker compose build "$SERVICE"
fi
echo ""
echo "== 2) Up (recreate) $SERVICE =="
docker compose up -d --force-recreate "$SERVICE"
echo ""
echo "== 3) Wait health ($CNAME) =="
# Max wait ~120s (24 polls x 5s); dumps recent logs on timeout.
i=0
while :; do
  STATUS="$(docker inspect -f '{{.State.Health.Status}}' "$CNAME" 2>/dev/null || echo "unknown")"
  echo "health: $STATUS"
  if [ "$STATUS" = "healthy" ]; then
    break
  fi
  i=$((i+1))
  if [ "$i" -ge 24 ]; then
    echo "ERREUR: health n'est pas devenu healthy (timeout)."
    echo "Logs:"
    docker compose logs --tail=120 "$SERVICE" || true
    exit 2
  fi
  sleep 5
done
echo ""
echo "== 4) Smoke test (port $PORT) =="
# Prefer the repo smoke script; otherwise fall back to two basic curl probes.
if [ -x "./scripts/smoke.sh" ]; then
  ./scripts/smoke.sh "$PORT"
else
  echo "WARN: scripts/smoke.sh absent ou non exécutable."
  echo "Test minimal:"
  curl -I "http://127.0.0.1:$PORT/" | head -n 8 || true
  curl -I "http://127.0.0.1:$PORT/pagefind/pagefind.js" | head -n 8 || true
fi
echo ""
echo "== OK =="
echo "Slot $SLOT est prêt."
echo ""
echo "ACTION DSM (bascule prod / rollback 10s) :"
echo "DSM → Portail des applications → Proxy inversé → règle archicratie.trans-hands.synology.me"
echo "Destination: http://127.0.0.1:$PORT"
echo ""
echo "Vérif immédiate:"
echo "curl -kI https://archicratie.trans-hands.synology.me/ | head"

View File

@@ -0,0 +1,182 @@
// scripts/fix-pandoc-false-ordered-lists.mjs
// Convertit les "fausses listes" Pandoc (1. gros paragraphe / 2. gros paragraphe / ...)
// en paragraphes normaux, sans toucher aux petites listes légitimes.
// Sécurités :
// - ignore les blocs code fences
// - n'agit que sur des listes top-level
// - heuristique (taille/volume) + backup .bak
import fs from "node:fs/promises";
import path from "node:path";
// CLI parsing: flags start with "--", everything else is a target .mdx file.
const args = process.argv.slice(2);
const DRY = args.includes("--dry-run");
const files = args.filter((a) => !a.startsWith("--"));
if (files.length === 0) {
  console.error("Usage: node scripts/fix-pandoc-false-ordered-lists.mjs <file1.mdx> [file2.mdx ...] [--dry-run]");
  process.exit(1);
}
// Heuristic thresholds separating Pandoc "false lists" (prose) from real lists.
const MIN_ITEMS = 6; // below this -> leave the list alone
const MIN_AVG_LEN = 140; // average chars/item above this -> reads as prose
const MAX_SHORT_RATIO = 0.25; // too many short items -> it's a real list
function splitFrontmatter(txt) {
  // Separate a leading "---\n…\n---\n" YAML frontmatter from the body.
  const match = /^---\n[\s\S]*?\n---\n/.exec(txt);
  if (match === null) {
    return { front: "", body: txt };
  }
  const front = match[0];
  return { front, body: txt.slice(front.length) };
}
function isFence(line) {
  // Code-fence delimiter: ``` or ~~~ (leading whitespace allowed).
  const trimmed = line.trim();
  return ["```", "~~~"].some((fence) => trimmed.startsWith(fence));
}
function isOlItemStart(line) {
  // Top-level ordered-list item: "12. text" or "12) text", no leading indent.
  return /^\d{1,3}[.)]\s+/.test(line);
}
function stripOlMarker(line) {
  // Drop the leading "N." / "N)" marker and the spaces after it.
  const m = /^\d{1,3}[.)]\s+/.exec(line);
  return m ? line.slice(m[0].length) : line;
}
function visibleLen(s) {
  // Approximate visible text length: drop backticks, unwrap [text](url)
  // markdown links, collapse whitespace, then count characters.
  let text = String(s);
  text = text.replace(/`+/g, "");
  text = text.replace(/\[([^\]]+)\]\([^)]+\)/g, "$1");
  text = text.replace(/\s+/g, " ").trim();
  return text.length;
}
function looksLikeFalseList(items) {
  // Heuristic: many items whose average prose length is high and with few
  // short entries => almost certainly a Pandoc artefact, not a real list.
  if (items.length < MIN_ITEMS) return false;
  const lengths = items.map((item) => visibleLen(item.join("\n")));
  const count = Math.max(1, lengths.length);
  const avg = lengths.reduce((sum, n) => sum + n, 0) / count;
  const shortRatio = lengths.filter((n) => n < 60).length / count;
  // Mostly short items look like genuine bullet points: leave them alone.
  if (shortRatio > MAX_SHORT_RATIO) return false;
  return avg >= MIN_AVG_LEN;
}
function rewriteFalseList(items) {
  // Emit each item as a plain paragraph followed by a blank line.
  const parts = [];
  for (const item of items) {
    const paragraph = item.join("\n").trimEnd();
    if (paragraph) {
      parts.push(paragraph);
      parts.push("");
    }
  }
  return parts.join("\n");
}
function processBody(body) {
  // Scan the markdown body line by line. Each top-level ordered list is
  // parsed into items, then either flattened into paragraphs (false list)
  // or re-emitted as a list. Code fences pass through untouched.
  const lines = body.split(/\r?\n/);
  let inFence = false;
  let changed = false; // true once at least one list has been rewritten
  const out = [];
  for (let i = 0; i < lines.length; i++) {
    const line = lines[i];
    if (isFence(line)) {
      inFence = !inFence;
      out.push(line);
      continue;
    }
    if (inFence) {
      out.push(line);
      continue;
    }
    if (!isOlItemStart(line)) {
      out.push(line);
      continue;
    }
    // Parse one top-level ordered-list block.
    const items = [];
    let cur = [stripOlMarker(line)];
    i++;
    for (; i < lines.length; i++) {
      const l = lines[i];
      if (isFence(l)) {
        // End of list block (safe).
        i--; // reprocess fence in outer loop
        break;
      }
      if (isOlItemStart(l)) {
        items.push(cur);
        cur = [stripOlMarker(l)];
        continue;
      }
      // Continuation: an indented line belongs to the current item.
      if (/^\s{2,}\S/.test(l)) {
        cur.push(l.replace(/^\s{2}/, ""));
        continue;
      }
      // Blank line: kept inside the item (rare) — does not end the list.
      if (l.trim() === "") {
        cur.push("");
        continue;
      }
      // Non-indented line => end of the list block.
      i--; // reprocess this line outside
      break;
    }
    items.push(cur);
    // Heuristic decision: prose disguised as a list, or a genuine list?
    if (looksLikeFalseList(items)) {
      changed = true;
      out.push(rewriteFalseList(items));
    } else {
      // Put it back as-is (rebuilt as a markdown list, renumbered 1..n).
      for (let k = 0; k < items.length; k++) {
        out.push(`${k + 1}. ${items[k][0]}`);
        for (const extra of items[k].slice(1)) out.push(extra === "" ? "" : ` ${extra}`);
      }
    }
  }
  return { text: out.join("\n"), changed };
}
// Process each target file: split frontmatter, rewrite the body, and —
// unless --dry-run — write the result back after saving a .bak backup.
for (const file of files) {
  const p = path.resolve(file);
  const raw = await fs.readFile(p, "utf8");
  const { front, body } = splitFrontmatter(raw);
  const res = processBody(body);
  if (!res.changed) {
    console.log(`${file}: no false ordered-lists detected`);
    continue;
  }
  if (DRY) {
    console.log(`🟡 ${file}: would rewrite false ordered-lists (dry-run)`);
    continue;
  }
  // Keep a backup next to the file before rewriting it in place.
  const bak = `${p}.bak`;
  await fs.writeFile(bak, raw, "utf8");
  await fs.writeFile(p, front + res.text, "utf8");
  console.log(`${file}: rewritten (backup -> ${path.basename(bak)})`);
}

285
scripts/import-docx.mjs Normal file
View File

@@ -0,0 +1,285 @@
// scripts/import-docx.mjs
// Import DOCX -> MDX (manifest-driven), preferring pandoc if available.
// Usage examples:
// node scripts/import-docx.mjs --manifest sources/manifest.yml --only archicrat-ia/prologue,archicrat-ia/chapitre-1
// node scripts/import-docx.mjs --manifest sources/manifest.yml --all
//
// Output: src/content/<collection>/<slug>.mdx
// Assets (images): public/imported/<collection>/<slug>/...
import fs from "node:fs/promises";
import path from "node:path";
import { spawnSync } from "node:child_process";
import process from "node:process";
import { parse as parseYAML } from "yaml";
import mammoth from "mammoth";
function parseArgs(argv) {
  // Parse CLI options from a raw process.argv array. Either --all or a
  // non-empty --only list is mandatory; otherwise an Error is thrown.
  const out = { manifest: "sources/manifest.yml", only: null, all: false, dryRun: false, force: false };
  for (let i = 2; i < argv.length; i++) {
    switch (argv[i]) {
      case "--manifest":
        out.manifest = argv[++i];
        break;
      case "--only":
        out.only = (argv[++i] || "").split(",").map(s => s.trim()).filter(Boolean);
        break;
      case "--all":
        out.all = true;
        break;
      case "--dry-run":
        out.dryRun = true;
        break;
      case "--force":
        out.force = true;
        break;
      case "--help":
      case "-h":
        console.log(`DOCX importer
--manifest <path> (default: sources/manifest.yml)
--only <slug,slug> import only those slugs
--all import all entries in manifest
--dry-run show what would be done, write nothing
--force overwrite existing mdx files
`);
        process.exit(0);
    }
  }
  if (!out.all && (!out.only || out.only.length === 0)) {
    throw new Error("Missing --only or --all. Example: --only archicrat-ia/prologue,archicrat-ia/chapitre-1");
  }
  return out;
}
async function readManifest(filePath) {
  // Load and normalise the YAML manifest. Each returned entry has
  // { source, collection, slug, title, order }; missing mandatory fields
  // (source+collection+slug) abort the import with a descriptive error.
  const raw = await fs.readFile(filePath, "utf8");
  const data = parseYAML(raw);
  // Accept either:
  // - root array
  // - { items: [...] }  (or { docs: [...] })
  const items = Array.isArray(data) ? data : (data?.items || data?.docs || []);
  if (!Array.isArray(items) || items.length === 0) {
    throw new Error(`Manifest has no items: ${filePath}`);
  }
  // Normalize keys (accepts French aliases: titre/ordre, and src/path for source)
  return items.map((it, idx) => {
    const source = it.source || it.src || it.path;
    const collection = it.collection;
    const slug = it.slug;
    const title = it.title || it.titre || slug;
    const order = it.order ?? it.ordre ?? idx; // ?? keeps an explicit order of 0
    if (!source || !collection || !slug) {
      throw new Error(`Manifest item missing fields (need source+collection+slug). Item: ${JSON.stringify(it)}`);
    }
    return { source, collection, slug, title, order };
  });
}
function havePandoc() {
  // True when a `pandoc` binary is on PATH and runs successfully.
  const result = spawnSync("pandoc", ["--version"], { stdio: "ignore" });
  return result.status === 0;
}
function runPandoc(docxPath, assetsOutDir) {
  // Convert a DOCX to GitHub-flavored markdown via the pandoc CLI.
  // Extract media into assetsOutDir (pandoc will create subfolders).
  // We output GitHub-flavored markdown (good enough for MDX).
  const args = [
    docxPath,
    "-f", "docx",
    "-t", "gfm",
    "--wrap=none", // keep each paragraph on a single line
    "--extract-media", assetsOutDir,
  ];
  const r = spawnSync("pandoc", args, { encoding: "utf8" });
  if (r.status !== 0) {
    throw new Error(`pandoc failed for ${docxPath}\n${r.stderr || ""}`);
  }
  return r.stdout || "";
}
async function runMammoth(docxPath, assetsOutDirWebRoot) {
  // Fallback converter used when pandoc is unavailable.
  // Mammoth -> HTML; we keep HTML inside MDX.
  // Images are saved into public/... so they can be referenced.
  const assetsDiskDir = path.resolve(assetsOutDirWebRoot);
  await fs.mkdir(assetsDiskDir, { recursive: true });
  let imgCount = 0;
  const result = await mammoth.convertToHtml(
    { path: docxPath },
    {
      // Persist each embedded image as image-NN.<ext> on disk and reference
      // it by bare filename; rewriteLocalImageLinks() adds the prefix later.
      convertImage: mammoth.images.imgElement(async (image) => {
        imgCount++;
        const ext = image.contentType?.split("/")?.[1] || "png";
        const fileName = `image-${String(imgCount).padStart(2, "0")}.${ext}`;
        const buf = await image.read();
        await fs.writeFile(path.join(assetsDiskDir, fileName), buf);
        // Return public URL path (we'll set correct prefix outside)
        return { src: fileName };
      }),
    }
  );
  // NOTE(review): `html` is never reassigned — could be const.
  let html = result.value || "";
  // Mammoth gives relative src="image-xx.png" ; we will prefix later
  return html;
}
function escapeFrontmatterString(s) {
  // Escape a value for use inside a double-quoted YAML scalar.
  // FIX: backslashes must be escaped too (and before the quotes), otherwise
  // a literal "\" in a title/path produces an invalid YAML escape sequence.
  return String(s).replace(/\\/g, "\\\\").replace(/"/g, '\\"');
}
function stripDuplicateTitle(markdownOrHtml, title) {
  // Remove a leading "# Title" (markdown) or "<h1>Title</h1>" (HTML) when it
  // duplicates the frontmatter title; any other heading is left untouched.
  const wanted = String(title || "").trim();
  if (!wanted) {
    return markdownOrHtml;
  }
  const withoutMdH1 = markdownOrHtml.replace(
    /^\s*#\s+([^\n]+)\n+/,
    (full, heading) => (heading.trim() === wanted ? "" : full)
  );
  return withoutMdH1.replace(/^\s*<h1[^>]*>(.*?)<\/h1>\s*/i, (full, heading) => {
    const plain = String(heading).replace(/<[^>]+>/g, "").trim();
    return plain === wanted ? "" : full;
  });
}
function stripWordToc(markdownOrHtml) {
  // Remove a Word-generated TOC block near the start of the document.
  // Heuristic: in the first ~4000 chars, detect a "Sommaire" / "Table des matières" heading
  // followed by a run of links/list items, and cut until the next real heading.
  const text = String(markdownOrHtml || "");
  const head = text.slice(0, 4000);
  const m = head.match(/(^|\n)\s{0,3}(#{1,6}\s*)?(Sommaire|Table des matières)\s*\n/i);
  if (!m) return markdownOrHtml; // no TOC heading found: input returned untouched
  const startIdx = m.index ?? 0;
  const rest = text.slice(startIdx);
  // Cut until next heading (markdown-style). If none, drop everything from startIdx.
  // `rest.slice(1)` skips the TOC heading's own first character so its line
  // is not matched as the "next" heading.
  const nextHeading = rest.slice(1).match(/\n#{1,6}\s+[^\n]+\n/);
  if (!nextHeading) return (text.slice(0, startIdx)).trim() + "\n";
  const endIdx = startIdx + 1 + (nextHeading.index ?? 0);
  return (text.slice(0, startIdx) + text.slice(endIdx)).trim() + "\n";
}
function rewriteLocalImageLinks(text, publicPrefix) {
  // Point image references at the public asset folder:
  // - pandoc markdown:  ](media/…            -> ](<prefix>/media/…
  // - mammoth HTML:     src="image-NN.ext"   -> src="<prefix>/image-NN.ext"
  return text
    .replace(/\]\(\s*media\//g, `](${publicPrefix}/media/`)
    .replace(/src="(image-\d+\.[a-z0-9]+)"/gi, `src="${publicPrefix}/$1"`);
}
function stripHtmlComments(text) {
  // Remove every HTML comment block, including multi-line ones.
  const source = String(text || "");
  return source.replace(/<!--[\s\S]*?-->/g, "");
}
async function exists(p) {
  // True when `p` is accessible on disk (any file type); never throws.
  return fs.access(p).then(() => true, () => false);
}
async function main() {
  // Orchestrates the import: read manifest, select entries (--all/--only),
  // convert each DOCX (pandoc preferred, mammoth fallback), clean the output
  // and write an .mdx file with generated frontmatter.
  const args = parseArgs(process.argv);
  const manifestPath = path.resolve(args.manifest);
  const items = await readManifest(manifestPath);
  const selected = args.all ? items : items.filter(it => args.only.includes(it.slug));
  if (!args.all && selected.length !== args.only.length) {
    const found = new Set(selected.map(s => s.slug));
    const missing = args.only.filter(s => !found.has(s));
    throw new Error(`Some --only slugs not found in manifest: ${missing.join(", ")}`);
  }
  const pandocOk = havePandoc();
  console.log(`Manifest: ${manifestPath}`);
  console.log(`Pandoc: ${pandocOk ? "YES (preferred)" : "NO (fallback to mammoth/html)"}`);
  console.log(`Items: ${selected.length}`);
  for (const it of selected) {
    const docxPath = path.resolve(it.source);
    const outFile = path.resolve("src/content", it.collection, `${it.slug}.mdx`);
    const outDir = path.dirname(outFile);
    // Web URL path (posix, embedded in the HTML) vs on-disk path.
    const assetsPublicDir = path.posix.join("/imported", it.collection, it.slug);
    const assetsDiskDir = path.resolve("public", "imported", it.collection, it.slug);
    if (!(await exists(docxPath))) {
      throw new Error(`Missing source docx: ${docxPath}`);
    }
    if ((await exists(outFile)) && !args.force) {
      throw new Error(`Refusing to overwrite existing: ${outFile} (use --force)`);
    }
    console.log(`\n${it.slug}`);
    console.log(` source: ${it.source}`);
    console.log(` out: ${path.relative(process.cwd(), outFile)}`);
    if (args.dryRun) continue; // report only, write nothing
    await fs.mkdir(outDir, { recursive: true });
    await fs.mkdir(assetsDiskDir, { recursive: true });
    let body = "";
    if (pandocOk) {
      // pandoc extract-media wants a directory; it will create media/ inside.
      body = runPandoc(docxPath, assetsDiskDir);
      body = stripDuplicateTitle(body, it.title);
      body = stripWordToc(body);
      body = stripHtmlComments(body);
      body = rewriteLocalImageLinks(body, assetsPublicDir);
    } else {
      let html = await runMammoth(docxPath, assetsDiskDir);
      html = stripDuplicateTitle(html, it.title);
      html = stripWordToc(html);
      html = stripHtmlComments(html);
      html = rewriteLocalImageLinks(html, assetsPublicDir);
      body = html.trim() ? html : "<p>(Import vide)</p>";
    }
    const defaultVersion = process.env.PUBLIC_RELEASE || "0.1.0";
    // Per-collection frontmatter defaults; unknown collections fall back to draft.
    const schemaDefaultsByCollection = {
      archicratie: { edition: "archicratie", status: "modele_sociopolitique", level: 1 },
      ia: { edition: "ia", status: "cas_pratique", level: 1 },
      traite: { edition: "traite", status: "ontodynamique", level: 1 },
      glossaire: { edition: "glossaire", status: "lexique", level: 1 },
      atlas: { edition: "atlas", status: "atlas", level: 1 },
    };
    const defaults = schemaDefaultsByCollection[it.collection] || { edition: it.collection, status: "draft", level: 1 };
    const fm = [
      "---",
      `title: "${escapeFrontmatterString(it.title)}"`,
      `edition: "${escapeFrontmatterString(defaults.edition)}"`,
      `status: "${escapeFrontmatterString(defaults.status)}"`,
      `level: ${Number(defaults.level)}`,
      `version: "${escapeFrontmatterString(defaultVersion)}"`,
      `concepts: []`,
      `links: []`,
      `order: ${Number(it.order)}`,
      `summary: ""`,
      `source:`,
      ` kind: docx`,
      ` path: "${escapeFrontmatterString(it.source)}"`,
      "---",
      "",
    ].join("\n");
    const mdx = fm + body.trim() + "\n";
    await fs.writeFile(outFile, mdx, "utf8");
  }
  console.log("\nDone.");
}
// Entry point: any failure is reported and the process exits non-zero.
main().catch((e) => {
  console.error("\nERROR:", e?.message || e);
  process.exit(1);
});

View File

@@ -0,0 +1,214 @@
#!/usr/bin/env node
import fs from "node:fs/promises";
import path from "node:path";
import process from "node:process";
// Paths and CLI/CI switches.
const CWD = process.cwd();
const DIST_ROOT = path.join(CWD, "dist"); // built site patched in place
const ALIASES_PATH = path.join(CWD, "src", "anchors", "anchor-aliases.json");
const argv = process.argv.slice(2);
const DRY_RUN = argv.includes("--dry-run");
// STRICT: explicit flag, or CI env set to "1"/"true" => warnings become errors.
const STRICT = argv.includes("--strict") || process.env.CI === "1" || process.env.CI === "true";
function escRe(s) {
  // Backslash-escape every regex metacharacter so `s` matches literally.
  return String(s).replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
}
function countIdAttr(html, id) {
  // Count id="<id>" / id='<id>' attribute occurrences in the HTML.
  const pattern = new RegExp(`\\bid=(["'])${escRe(id)}\\1`, "gi");
  const hits = html.match(pattern);
  return hits === null ? 0 : hits.length;
}
function findStartTagWithId(html, id) {
  // First start tag carrying id="<id>"; returns its tag name and full markup,
  // or null when no element has that id.
  const pattern = new RegExp(
    `<([a-zA-Z0-9:-]+)\\b[^>]*\\bid=(["'])${escRe(id)}\\2[^>]*>`,
    "i"
  );
  const match = pattern.exec(html);
  if (match === null) return null;
  const [tag, rawName] = match;
  return { tagName: String(rawName).toLowerCase(), tag };
}
function isInjectedAliasSpan(html, id) {
  // True when the element carrying `id` is one of our injected
  // <span class="… para-alias …"> markers.
  const found = findStartTagWithId(html, id);
  if (!found || found.tagName !== "span") {
    return false;
  }
  // class attribute must contain the "para-alias" token.
  return /\bclass=(["'])(?:(?!\1).)*\bpara-alias\b(?:(?!\1).)*\1/i.test(found.tag);
}
function normalizeRoute(route) {
  // Canonical route form: leading slash, trailing slash, no duplicate slashes.
  let result = String(route || "").trim();
  if (!result.startsWith("/")) {
    result = `/${result}`;
  }
  if (!result.endsWith("/")) {
    result = `${result}/`;
  }
  return result.replace(/\/{2,}/g, "/");
}
async function exists(p) {
  // fs.access-based existence probe; resolves true/false and never rejects.
  return fs.access(p).then(
    () => true,
    () => false
  );
}
function hasId(html, id) {
  // Does any element in the page carry id="<id>" (or single-quoted)?
  const pattern = new RegExp(`\\bid=(["'])${escRe(id)}\\1`, "i");
  return pattern.test(html);
}
function injectBeforeId(html, newId, injectHtml) {
  // Insert `injectHtml` (plus a newline) immediately before the start tag
  // carrying id="<newId>". Reports whether an injection point was found.
  const pattern = new RegExp(`(<[^>]+\\bid=(["'])${escRe(newId)}\\2[^>]*>)`, "i");
  const match = pattern.exec(html);
  if (!match || match.index == null) {
    return { html, injected: false };
  }
  const at = match.index;
  return {
    html: `${html.slice(0, at)}${injectHtml}\n${html.slice(at)}`,
    injected: true,
  };
}
async function main() {
  // Post-build step: for every alias oldId -> newId declared in
  // anchor-aliases.json, inject an empty <span id="oldId" class="para-alias">
  // just before the element carrying newId in the built dist/ HTML, so old
  // deep links keep resolving. Idempotent; STRICT turns warnings into errors.
  if (!(await exists(ALIASES_PATH))) {
    console.log(" Aucun fichier d'aliases (src/anchors/anchor-aliases.json). Skip.");
    return;
  }
  const raw = await fs.readFile(ALIASES_PATH, "utf-8");
  /** @type {Record<string, Record<string,string>>} */
  let aliases;
  try {
    aliases = JSON.parse(raw);
  } catch (e) {
    throw new Error(`JSON invalide: ${ALIASES_PATH} (${e?.message || e})`);
  }
  if (!aliases || typeof aliases !== "object" || Array.isArray(aliases)) {
    throw new Error(`Format invalide: attendu { route: { oldId: newId } } dans ${ALIASES_PATH}`);
  }
  const routes = Object.keys(aliases || {});
  if (routes.length === 0) {
    console.log(" Aliases vides. Rien à injecter.");
    return;
  }
  let changedFiles = 0;
  let injectedCount = 0;
  let warnCount = 0;
  for (const routeKey of routes) {
    const route = normalizeRoute(routeKey);
    const map = aliases[routeKey] || {};
    const entries = Object.entries(map);
    // Keys should already be normalised in the JSON; warn (or fail) otherwise.
    if (route !== routeKey) {
      const msg = `⚠️ routeKey non normalisée: "${routeKey}" → "${route}" (corrige anchor-aliases.json)`;
      if (STRICT) throw new Error(msg);
      console.log(msg);
      warnCount++;
    }
    if (entries.length === 0) continue;
    const rel = route.replace(/^\/+|\/+$/g, ""); // route without slashes -> dist subdir
    const htmlPath = path.join(DIST_ROOT, rel, "index.html");
    if (!(await exists(htmlPath))) {
      const msg = `⚠️ dist introuvable pour route=${route} (${htmlPath})`;
      if (STRICT) throw new Error(msg);
      console.log(msg);
      warnCount++;
      continue;
    }
    let html = await fs.readFile(htmlPath, "utf-8");
    let fileChanged = false;
    for (const [oldId, newId] of entries) {
      if (!oldId || !newId) continue;
      const oldCount = countIdAttr(html, oldId);
      if (oldCount > 0) {
        // Already injected on a previous run => idempotent no-op.
        if (isInjectedAliasSpan(html, oldId)) continue;
        // oldId already exists "for real" (e.g. <p id="oldId">):
        // the alias is useless, inverted or stale.
        const found = findStartTagWithId(html, oldId);
        const where = found ? `<${found.tagName} … id="${oldId}" …>` : `id="${oldId}"`;
        const msg =
          `⚠️ alias inutile/inversé: oldId déjà présent dans la page (${where}). ` +
          `Supprime l'alias ${oldId} -> ${newId} (ou corrige le sens) pour route=${route}`;
        if (STRICT) throw new Error(msg);
        console.log(msg);
        warnCount++;
        continue;
      }
      // FIX: two unreachable checks removed here. A second
      // `if (oldCount > 0 && isInjectedAliasSpan(...))` block could never run
      // (every oldCount > 0 path above continues or throws), and a
      // `if (!hasId(html, newId))` check was dead because the newCount !== 1
      // guard below already rejects newCount === 0.
      const newCount = countIdAttr(html, newId);
      if (newCount !== 1) {
        const msg = `⚠️ newId non-unique (${newCount}) : ${route} new=${newId} (injection ambiguë)`;
        if (STRICT) throw new Error(msg);
        console.log(msg);
        warnCount++;
        continue;
      }
      const aliasSpan = `<span id="${oldId}" class="para-alias" aria-hidden="true"></span>`;
      const r = injectBeforeId(html, newId, aliasSpan);
      if (!r.injected) {
        const msg = `⚠️ injection impossible (pattern non trouvé) : ${route} new=${newId}`;
        if (STRICT) throw new Error(msg);
        console.log(msg);
        warnCount++;
        continue;
      }
      html = r.html;
      fileChanged = true;
      injectedCount++;
    }
    if (fileChanged) {
      changedFiles++;
      if (!DRY_RUN) await fs.writeFile(htmlPath, html, "utf-8");
    }
  }
  console.log(
    `✅ inject-anchor-aliases: files_changed=${changedFiles} aliases_injected=${injectedCount} warnings=${warnCount}` +
    (DRY_RUN ? " (dry-run)" : "")
  );
  if (STRICT && warnCount > 0) process.exit(2);
}
// Entry point: report and exit non-zero on any unexpected failure.
main().catch((e) => {
  console.error("💥 inject-anchor-aliases:", e?.message || e);
  process.exit(1);
});

View File

@@ -0,0 +1,76 @@
#!/bin/sh
# inspect-slot.sh — debugging helper: shows the build-time Gitea settings
# (.env values and the constants baked into the container's HTML) for one
# blue/green slot, then probes the Gitea URLs over HTTP from the NAS.
set -eu
SLOT="${1:-}"
if [ -z "$SLOT" ] || [ "$SLOT" = "--help" ] || [ "$SLOT" = "-h" ]; then
  echo "Usage:"
  echo " $0 blue # inspecte web_blue"
  echo " $0 green # inspecte web_green"
  exit 0
fi
# Map slot -> compose service and fixed container name.
case "$SLOT" in
  blue) SERVICE="web_blue"; CNAME="archicratie-web-blue" ;;
  green) SERVICE="web_green"; CNAME="archicratie-web-green" ;;
  *) echo "Slot invalide: $SLOT (attendu: blue|green)"; exit 1 ;;
esac
ROOT="$(CDPATH= cd -- "$(dirname -- "$0")/.." && pwd)"
cd "$ROOT"
echo "== .env (valeurs build) =="
if [ -f ".env" ]; then
  cat .env
else
  echo "(absent)"
fi
echo ""
# Resolve the container id via compose when possible; fall back to the
# fixed container name otherwise.
CID="$(docker compose ps -q "$SERVICE" 2>/dev/null || true)"
if [ -n "$CID" ]; then
  TARGET="$CID"
else
  TARGET="$CNAME"
fi
echo "== Extraction dans le conteneur ($SLOT) =="
if ! docker inspect "$TARGET" >/dev/null 2>&1; then
  echo "Conteneur introuvable: $TARGET"
  echo "Astuce: docker compose up -d $SERVICE"
  exit 1
fi
# Look for the JS constants injected into the built HTML
# (observed format: const GITEA_BASE = "...").
docker exec -it "$TARGET" sh -lc '
set -eu
F="$(grep -Ril "const GITEA_BASE" /usr/share/nginx/html 2>/dev/null | head -n 1 || true)"
if [ -z "$F" ]; then
echo "ERREUR: impossible de trouver GITEA_BASE dans le HTML"
exit 2
fi
echo "Fichier exemple: $F"
LINE="$(grep -Rin "const GITEA_BASE" "$F" | head -n 1)"
echo "$LINE"
echo ""
echo "Snippet (GITEA_BASE/OWNER/REPO) :"
grep -Rin "const GITEA_BASE\|const GITEA_OWNER\|const GITEA_REPO" "$F" | head -n 20
' || true
echo ""
echo "== Tests HTTP (depuis le NAS) =="
# Parse .env safely (no source / no bashisms).
BASE="$(grep -E '^PUBLIC_GITEA_BASE=' .env 2>/dev/null | sed 's/^PUBLIC_GITEA_BASE=//' | tr -d '\r' || true)"
OWNER="$(grep -E '^PUBLIC_GITEA_OWNER=' .env 2>/dev/null | sed 's/^PUBLIC_GITEA_OWNER=//' | tr -d '\r' || true)"
REPO="$(grep -E '^PUBLIC_GITEA_REPO=' .env 2>/dev/null | sed 's/^PUBLIC_GITEA_REPO=//' | tr -d '\r' || true)"
if [ -z "$BASE" ] || [ -z "$OWNER" ] || [ -z "$REPO" ]; then
  echo "WARN: .env incomplet (BASE/OWNER/REPO)."
  exit 0
fi
echo "Repo: $BASE/$OWNER/$REPO/"
curl -kI "$BASE/$OWNER/$REPO/" | head -n 12 || true
echo ""
echo "Issue new: $BASE/$OWNER/$REPO/issues/new"
curl -kI "$BASE/$OWNER/$REPO/issues/new" | head -n 15 || true

View File

@@ -0,0 +1,164 @@
// scripts/rehype-details-sections.mjs
// Rehype plugin: regroupe chaque section H2 en <details>/<summary> (accordion).
// FIX: anti-récursion / anti-boucle infinie -> post-order traversal (children first, then transform)
// et on ne retraverse pas les <details> insérés.
function isElement(node) {
  // hast "element" node with a proper tag name (falsy input passes through).
  if (!node) return node;
  return node.type === "element" && typeof node.tagName === "string";
}
function headingLevel(node) {
  // 1–6 for h1…h6 elements (case-insensitive), null for anything else.
  if (!isElement(node)) return null;
  const match = /^h([1-6])$/i.exec(node.tagName);
  return match ? Number(match[1]) : null;
}
function nodeText(node) {
  // Concatenated text content of a hast subtree (like DOM textContent).
  if (!node) return "";
  if (node.type === "text") return String(node.value || "");
  if (!isElement(node) || !Array.isArray(node.children)) return "";
  let acc = "";
  for (const child of node.children) {
    acc += nodeText(child);
  }
  return acc;
}
function classList(v) {
  // Normalise a hast className property (array | string | nullish) to string[].
  if (Array.isArray(v)) return v.map(String);
  if (typeof v === "string") return v.split(/\s+/).filter(Boolean);
  return [];
}
export default function rehypeDetailsSections(options = {}) {
  // Rehype plugin factory: wraps each heading section (H2 by default) into a
  // <details>/<summary> accordion. Anti-recursion: traversal is post-order and
  // never descends into <details> nodes this plugin already produced.
  const {
    // Default: only H2 headings start a section.
    minDepth = 2,
    maxDepth = 2,
    openByDefault = false,
    detailsClass = "details-section",
    summaryClass = "details-summary",
    bodyClass = "details-body",
    anchorClass = "details-anchor",
  } = options;
  // Sentinel attribute stamped on generated <details> so walk() can skip them.
  const DETAILS_SENTINEL_ATTR = "data-details-sections";
  function transformChildren(parent) {
    // Rewrite parent.children: a qualifying heading plus everything up to the
    // next heading of equal/higher rank becomes one <details> element.
    if (!parent || !Array.isArray(parent.children) || parent.children.length === 0) return;
    const children = parent.children;
    const out = [];
    for (let i = 0; i < children.length; i++) {
      const n = children[i];
      const lvl = headingLevel(n);
      // Only wrap headings within [minDepth, maxDepth].
      if (lvl && lvl >= minDepth && lvl <= maxDepth) {
        const heading = n;
        const props = heading.properties || {};
        const id = typeof props.id === "string" ? props.id : "";
        const title = nodeText(heading).trim() || "Section";
        // Capture siblings until the next heading of level <= lvl.
        const bodyNodes = [];
        i++;
        for (; i < children.length; i++) {
          const nxt = children[i];
          const nlvl = headingLevel(nxt);
          if (nlvl && nlvl <= lvl) {
            i--; // hand control back to the outer loop
            break;
          }
          bodyNodes.push(nxt);
        }
        // Keep the heading inside the body but strip its id:
        // the anchor is carried by a <span id=…> in the summary instead.
        const headingClone = {
          ...heading,
          properties: { ...(heading.properties || {}) },
        };
        if (headingClone.properties) delete headingClone.properties.id;
        const details = {
          type: "element",
          tagName: "details",
          properties: {
            [DETAILS_SENTINEL_ATTR]: "1",
            className: [detailsClass],
            ...(openByDefault ? { open: true } : {}),
            "data-section-level": String(lvl),
          },
          children: [
            {
              type: "element",
              tagName: "summary",
              properties: { className: [summaryClass] },
              children: [
                // Invisible anchor target, then the clickable title
                // (a plain text node when the heading had no id).
                ...(id ? [{
                  type: "element",
                  tagName: "span",
                  properties: { id, className: [anchorClass], "aria-hidden": "true" },
                  children: [],
                }] : []),
                (id
                  ? {
                      type: "element",
                      tagName: "a",
                      properties: { href: `#${id}` },
                      children: [{ type: "text", value: title }],
                    }
                  : { type: "text", value: title }),
              ],
            },
            {
              type: "element",
              tagName: "div",
              properties: { className: [bodyClass] },
              children: [headingClone, ...bodyNodes],
            },
          ],
        };
        out.push(details);
        continue;
      }
      out.push(n);
    }
    parent.children = out;
  }
  function walk(node) {
    if (!node) return;
    // IMPORTANT: never descend into a <details> we already produced
    // (otherwise the traversal loops forever).
    if (isElement(node) && node.tagName === "details") {
      const props = node.properties || {};
      if (props[DETAILS_SENTINEL_ATTR] === "1") return;
      // Also skip a <details> someone hand-wrote with the same class.
      const cls = classList(props.className);
      if (cls.includes(detailsClass)) return;
    }
    // Post-order: children first…
    if (node.type === "root" || isElement(node)) {
      const kids = node.children || [];
      for (const k of kids) walk(k);
    }
    // …then transform this parent.
    transformChildren(node);
  }
  return function transformer(tree) {
    walk(tree);
  };
}

View File

@@ -0,0 +1,261 @@
import fs from "node:fs/promises";
import path from "node:path";
function escapeRegExp(s) {
  // Backslash-escape every RegExp metacharacter so `s` matches literally
  // when interpolated into a pattern.
  return s.replaceAll(/[.*+?^${}()|[\]\\]/g, "\\$&");
}
function routeToHtmlPath(distDir, route) {
  // Map a site route ("/a/b/" or "/") to the built file it corresponds to:
  // "<dist>/a/b/index.html" (or "<dist>/index.html" for the root route).
  if (typeof route !== "string") {
    throw new Error(`Route must be a string, got ${typeof route}`);
  }
  // Normalise to the canonical "/seg1/seg2/" shape before splitting.
  let normalized = route.trim();
  if (!normalized.startsWith("/")) normalized = `/${normalized}`;
  if (normalized !== "/" && !normalized.endsWith("/")) normalized = `${normalized}/`;
  const segments = normalized.split("/").filter(Boolean);
  return segments.length === 0
    ? path.join(distDir, "index.html")
    : path.join(distDir, ...segments, "index.html");
}
function countIdAttr(html, id) {
  // Count how many tags in the raw HTML carry id="<id>" or id='<id>'
  // (case-insensitive, quote style must be consistent per attribute).
  const re = new RegExp(`\\bid=(["'])${escapeRegExp(id)}\\1`, "gi");
  return (html.match(re) || []).length;
}
function findStartTagWithId(html, id) {
  // Locate the first start tag whose id attribute equals `id`.
  // Returns { tagName (lowercased), tag (raw text), index } or null.
  const re = new RegExp(
    `<([a-zA-Z0-9:-]+)\\b[^>]*\\bid=(["'])${escapeRegExp(id)}\\2[^>]*>`,
    "i"
  );
  const m = html.match(re);
  if (!m) return null;
  return {
    tagName: String(m[1]).toLowerCase(),
    tag: m[0],
    index: m.index ?? -1,
  };
}
function isAliasSpanTag(tagName, tagHtml) {
  // True only for a <span> start tag whose class attribute contains the
  // "para-alias" token (the marker our injector puts on alias anchors).
  return (
    tagName === "span" &&
    /\bclass=(["'])(?:(?!\1).)*\bpara-alias\b(?:(?!\1).)*\1/i.test(tagHtml)
  );
}
function extractTrailingAliasBlock(beforeTargetHtml) {
  // Collect the contiguous run of empty <span class="para-alias">…</span>
  // elements that sit immediately before the target, returned in document
  // order (closest-to-target last).
  const collected = [];
  let rest = beforeTargetHtml;
  for (;;) {
    rest = rest.replace(/\s+$/g, "");
    const m = rest.match(/<span\b[^>]*\bclass=(["'])(?:(?!\1).)*\bpara-alias\b(?:(?!\1).)*\1[^>]*>\s*<\/span>$/i);
    if (!m) break;
    // Prepend so the final array reads in source order without a reverse().
    collected.unshift(m[0]);
    rest = rest.slice(0, rest.length - m[0].length);
  }
  return collected;
}
function snippetAround(html, idx, beforeLines = 2, afterLines = 4) {
  // Render a numbered excerpt of `html` centred on the line that contains
  // character offset `idx` (line numbers are 1-based).
  const allLines = html.split("\n");
  // Line number of the offset: count newlines up to (but excluding) idx.
  const lineNo = html.slice(0, Math.max(0, idx)).split("\n").length;
  const first = Math.max(1, lineNo - beforeLines);
  const last = Math.min(allLines.length, lineNo + afterLines);
  const rows = [];
  for (let ln = first; ln <= last; ln++) {
    rows.push(`${String(ln).padStart(5, " ")}| ${allLines[ln - 1]}`);
  }
  return rows.join("\n");
}
function parseArgs(argv) {
  // Parse CLI flags into { dist, aliases, strict }. A value-taking flag
  // given without a value, or any unknown flag, aborts with exit code 2;
  // -h/--help prints usage and exits 0.
  const opts = {
    dist: "dist",
    aliases: path.join("src", "anchors", "anchor-aliases.json"),
    strict: true,
  };
  for (let i = 2; i < argv.length; i++) {
    const flag = argv[i];
    const hasValue = Boolean(argv[i + 1]);
    if (flag === "--dist" && hasValue) {
      opts.dist = argv[++i];
    } else if (flag === "--aliases" && hasValue) {
      opts.aliases = argv[++i];
    } else if (flag === "--non-strict") {
      opts.strict = false;
    } else if (flag === "-h" || flag === "--help") {
      console.log(`Usage:
node scripts/verify-anchor-aliases-in-dist.mjs [--dist dist] [--aliases src/anchors/anchor-aliases.json] [--non-strict]
Checks that every (route, oldId->newId) alias is injected into the built HTML in dist.`);
      process.exit(0);
    } else {
      console.error("Unknown arg:", flag);
      process.exit(2);
    }
  }
  return opts;
}
// ---- CLI inputs & alias manifest -------------------------------------------
const { dist, aliases, strict } = parseArgs(process.argv);
const CWD = process.cwd();
// Resolve both paths against the working directory unless already absolute.
const distDir = path.isAbsolute(dist) ? dist : path.join(CWD, dist);
const aliasesPath = path.isAbsolute(aliases) ? aliases : path.join(CWD, aliases);
// Load the manifest; expected shape: { "/route/": { oldId: newId, ... }, ... }.
let data;
try {
  data = JSON.parse(await fs.readFile(aliasesPath, "utf8"));
} catch (e) {
  // Unreadable or malformed JSON is fatal — nothing can be verified.
  console.error(`❌ Cannot read/parse aliases JSON: ${aliasesPath}`);
  console.error(e?.message || e);
  process.exit(1);
}
// Top-level value must be a plain object keyed by route (arrays rejected).
if (!data || typeof data !== "object" || Array.isArray(data)) {
  console.error("❌ anchor-aliases.json must be an object of { route: { oldId: newId } }");
  process.exit(1);
}
// Counters for the final report; `failures` accumulates every broken pair.
let pages = 0;
let aliasesCount = 0;
let checked = 0;
const failures = [];
// For each route mapping, verify every oldId->newId pair against the built
// page: both ids must exist (uniquely, in strict mode), oldId must be carried
// by an injected <span class="para-alias">, and that span must sit in the
// contiguous alias block immediately before the newId target.
for (const [route, mapping] of Object.entries(data)) {
  pages++;
  if (!mapping || typeof mapping !== "object" || Array.isArray(mapping)) {
    failures.push({ route, msg: "Mapping must be an object oldId->newId." });
    continue;
  }
  const htmlPath = routeToHtmlPath(distDir, route);
  let html;
  try {
    html = await fs.readFile(htmlPath, "utf8");
  } catch {
    // Page not built at all: every alias of this route is unverifiable.
    failures.push({
      route,
      msg: `Missing built page: ${htmlPath}. Did you run 'npm run build'?`,
    });
    continue;
  }
  for (const [oldId, newId] of Object.entries(mapping)) {
    aliasesCount++;
    checked++;
    // oldId is always a string (JSON object key); newId comes from a JSON
    // value and can be any type — reject non-strings early.
    if (typeof oldId !== "string" || typeof newId !== "string") {
      failures.push({ route, oldId, newId, htmlPath, msg: "oldId/newId must be strings." });
      continue;
    }
    const oldCount = countIdAttr(html, oldId);
    const newCount = countIdAttr(html, newId);
    if (oldCount === 0) {
      failures.push({
        route,
        oldId,
        newId,
        htmlPath,
        msg: `oldId not found in HTML (expected injected alias span).`,
      });
      continue;
    }
    if (newCount === 0) {
      failures.push({
        route,
        oldId,
        newId,
        htmlPath,
        msg: `newId not found in HTML (target missing).`,
      });
      continue;
    }
    // Strict mode: both ids must be unique in the page (duplicate ids break
    // fragment navigation).
    if (strict && oldCount !== 1) {
      failures.push({
        route,
        oldId,
        newId,
        htmlPath,
        msg: `oldId occurs ${oldCount} times (expected exactly 1).`,
      });
      continue;
    }
    if (strict && newCount !== 1) {
      failures.push({
        route,
        oldId,
        newId,
        htmlPath,
        msg: `newId occurs ${newCount} times (expected exactly 1).`,
      });
      continue;
    }
    // Contract: oldId must be carried by an injected alias span, not by
    // arbitrary markup that happens to reuse the id.
    const oldStart = findStartTagWithId(html, oldId);
    if (!oldStart || !isAliasSpanTag(oldStart.tagName, oldStart.tag)) {
      const seen = oldStart ? oldStart.tag.slice(0, 140) : "(not found)";
      failures.push({
        route,
        oldId,
        newId,
        htmlPath,
        msg:
          // BUGFIX: this message previously embedded the literal text "</n"
          // instead of a newline escape, garbling the diagnostic output.
          `oldId present but is NOT an injected alias span (<span class="para-alias">).\n` +
          `Saw: ${seen}`,
      });
      continue;
    }
    // Adjacency (robust): oldId must live inside the contiguous block of
    // alias spans sitting right before the newId target tag.
    const newStart = findStartTagWithId(html, newId);
    if (!newStart || newStart.index < 0) {
      failures.push({ route, oldId, newId, htmlPath, msg: `newId tag not found (unexpected).` });
      continue;
    }
    const before = html.slice(0, newStart.index);
    const block = extractTrailingAliasBlock(before);
    const reOld = new RegExp(`\\bid=(["'])${escapeRegExp(oldId)}\\1`, "i");
    const ok = block.some((span) => reOld.test(span));
    if (!ok) {
      // Include approximate line-context excerpts around both ids so the
      // failure can be diagnosed without opening the built HTML.
      const oldIdx = html.search(new RegExp(`\\bid=(["'])${escapeRegExp(oldId)}\\1`, "i"));
      const newIdx = html.search(new RegExp(`\\bid=(["'])${escapeRegExp(newId)}\\1`, "i"));
      failures.push({
        route,
        oldId,
        newId,
        htmlPath,
        msg:
          `oldId & newId are present, but oldId is NOT in the contiguous alias block right before target.\n` +
          `--- Context around oldId (line approx) ---\n${snippetAround(html, oldIdx)}\n\n` +
          `--- Context around newId (line approx) ---\n${snippetAround(html, newIdx)}\n`,
      });
      continue;
    }
  }
}
// Final report: dump every failure with its context, exit 1 on any problem,
// otherwise print a one-line success summary.
if (failures.length) {
  console.error(`❌ Alias injection verification FAILED.`);
  console.error(`Checked: pages=${pages}, aliases=${aliasesCount}, verified_pairs=${checked}, strict=${strict}`);
  console.error("");
  const separator = "------------------------------------------------------------";
  for (const { route, htmlPath, oldId, newId, msg } of failures) {
    console.error(separator);
    console.error(`Route: ${route}`);
    if (htmlPath) console.error(`HTML: ${htmlPath}`);
    if (oldId) console.error(`oldId: ${oldId}`);
    if (newId) console.error(`newId: ${newId}`);
    console.error(`Reason: ${msg}`);
  }
  process.exit(1);
}
console.log(`✅ verify-anchor-aliases-in-dist OK: pages=${pages} aliases=${aliasesCount} strict=${strict}`);