#!/usr/bin/env node
// scripts/apply-annotation-ticket.mjs
//
// Applies a Gitea ticket "type/media | type/reference | type/comment" to:
//
//   ✅ src/annotations/<page>/<ancre>.yml     (sharded per paragraph)
//   ✅ public/media/<page>/<ancre>/<file>
//
// Robust, idempotent, non destructive.
//
// DRY RUN with --dry-run
// Options: --dry-run --no-download --verify --strict --commit --close
//
// Required env:
//   FORGE_API   = Gitea API base (LAN) e.g. http://192.168.1.20:3000
//   FORGE_TOKEN = Gitea PAT (repo + issues)
//
// Optional env:
//   GITEA_OWNER / GITEA_REPO (otherwise auto-detected from git remote)
//   ANNO_DIR   (default: src/annotations)
//   PUBLIC_DIR (default: public)
//   MEDIA_ROOT (default URL: /media)
//
// Expected ticket body:
//   Chemin: /archicrat-ia/chapitre-4/
//   Ancre: #p-0-xxxxxxxx
//   Type: type/media | type/reference | type/comment
//
// Exit codes:
//   0 ok
//   1 fatal error
//   2 refusal (strict/verify/usage)

import fs from "node:fs/promises";
import path from "node:path";
import process from "node:process";
import { spawnSync } from "node:child_process";
import YAML from "yaml";

/* ---------------------------------- usage --------------------------------- */

// Print CLI help (French, user-facing) and exit with the given code.
function usage(exitCode = 0) {
  console.log(`
apply-annotation-ticket — applique un ticket SidePanel (media/ref/comment)
vers src/annotations/<page>/<ancre>.yml (shard par paragraphe)

Usage:
  node scripts/apply-annotation-ticket.mjs <issue> [--dry-run] [--no-download] [--verify] [--strict] [--commit] [--close]

Flags:
  --dry-run     : n'écrit rien (affiche un aperçu)
  --no-download : n'essaie pas de télécharger les pièces jointes (media)
  --verify      : vérifie que (page, ancre) existent (dist/para-index.json si dispo, sinon baseline)
  --strict      : refuse si URL ref invalide (http/https) OU caption media vide
  --commit      : git add + git commit (le script commit dans la branche courante)
  --close       : ferme le ticket (nécessite --commit)

Env requis:
  FORGE_API   = base API Gitea (LAN) ex: http://192.168.1.20:3000
  FORGE_TOKEN = PAT Gitea (repo + issues)

Env optionnel:
  GITEA_OWNER / GITEA_REPO (sinon auto-détecté via git remote)
  ANNO_DIR   (défaut: src/annotations)
  PUBLIC_DIR (défaut: public)
  MEDIA_ROOT (défaut URL: /media)

Exit codes:
  0 ok
  1 erreur fatale
  2 refus (strict/verify/close sans commit / incohérence)
`);
  process.exit(exitCode);
}

/* ---------------------------------- args ---------------------------------- */

const argv = process.argv.slice(2);
if (argv.length === 0 || argv.includes("--help") || argv.includes("-h")) usage(0);

const issueNum = Number(argv[0]);
if (!Number.isFinite(issueNum) || issueNum <= 0) {
  console.error("❌ Numéro de ticket invalide.");
  usage(2);
}

const DRY_RUN = argv.includes("--dry-run");
const NO_DOWNLOAD = argv.includes("--no-download");
const DO_VERIFY = argv.includes("--verify");
const STRICT = argv.includes("--strict");
const DO_COMMIT = argv.includes("--commit");
const DO_CLOSE = argv.includes("--close");

// Closing the ticket references the commit SHA, so it requires --commit.
if (DO_CLOSE && !DO_COMMIT) {
  console.error("❌ --close nécessite --commit.");
  process.exit(2);
}

// Global fetch() ships with Node 18+.
if (typeof fetch !== "function") {
  console.error("❌ fetch() indisponible. Utilise Node 18+.");
  process.exit(1);
}

/* --------------------------------- config --------------------------------- */

const CWD = process.cwd();

// FIX: ANNO_DIR is documented as the full annotations directory (default
// "src/annotations"). The previous code did
// path.join(CWD, process.env.ANNO_DIR || "src", "annotations"), which
// appended "annotations" to any user-supplied value (ANNO_DIR=foo -> foo/annotations).
const ANNO_DIR = process.env.ANNO_DIR
  ? path.resolve(CWD, process.env.ANNO_DIR)
  : path.join(CWD, "src", "annotations");
const PUBLIC_DIR = path.resolve(CWD, process.env.PUBLIC_DIR || "public");
// URL prefix (not a filesystem path) used when recording media entries.
const MEDIA_URL_ROOT = String(process.env.MEDIA_ROOT || "/media").replace(/\/+$/, "");

/* --------------------------------- helpers -------------------------------- */

// Read an env var, falling back when unset; always returns a trimmed string.
function getEnv(name, fallback = "") {
  return (process.env[name] ?? fallback).trim();
}

// Run a command with inherited stdio; throws on spawn error or non-zero exit.
function run(cmd, args, opts = {}) {
  const r = spawnSync(cmd, args, { stdio: "inherit", ...opts });
  if (r.error) throw r.error;
  if (r.status !== 0) throw new Error(`Command failed: ${cmd} ${args.join(" ")}`);
}

// Run a command capturing output; throws (with captured output) on failure.
// Returns stdout as a string.
function runQuiet(cmd, args, opts = {}) {
  const r = spawnSync(cmd, args, { encoding: "utf8", stdio: "pipe", ...opts });
  if (r.error) throw r.error;
  if (r.status !== 0) {
    const out = (r.stdout || "") + (r.stderr || "");
    throw new Error(`Command failed: ${cmd} ${args.join(" ")}\n${out}`);
  }
  return r.stdout || "";
}

// True if the path exists (any kind of entry).
async function exists(p) {
  try {
    await fs.access(p);
    return true;
  } catch {
    return false;
  }
}

// Derive { owner, repo } from the "origin" git remote URL.
// Handles both SSH (git@host:owner/repo.git) and HTTP(S) forms.
// Returns null when the remote is missing or unparsable.
function inferOwnerRepoFromGit() {
  const r = spawnSync("git", ["remote", "get-url", "origin"], { encoding: "utf-8" });
  if (r.status !== 0) return null;
  const u = (r.stdout || "").trim();
  // FIX: the named capture groups had lost their names ("(?[^/]+)" is a
  // regex SyntaxError); the code below reads m.groups.owner / m.groups.repo.
  const m = u.match(/[:/](?<owner>[^/]+)\/(?<repo>[^/]+?)(?:\.git)?$/);
  if (!m?.groups) return null;
  return { owner: m.groups.owner, repo: m.groups.repo };
}

// True when the git index differs from HEAD ("git diff --cached --quiet"
// exits 1 exactly when there are staged changes).
function gitHasStagedChanges() {
  const r = spawnSync("git", ["diff", "--cached", "--quiet"]);
  return r.status === 1;
}

// Escape a string for literal use inside a RegExp.
function escapeRegExp(s) {
  return String(s).replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
}

// Extract the value of a "Key: value" line from the ticket body
// (case-insensitive, multiline). Returns "" when absent.
function pickLine(body, key) {
  const re = new RegExp(`^\\s*${escapeRegExp(key)}\\s*:\\s*([^\\n\\r]+)`, "mi");
  const m = String(body || "").match(re);
  return m ? m[1].trim() : "";
}

// Extract the free-text section that follows the first of the given markers,
// stopping at the next known structural delimiter. Returns "" when no marker
// is found.
function pickSection(body, markers) {
  const text = String(body || "").replace(/\r\n/g, "\n");
  const idx = markers
    .map((m) => ({ m, i: text.toLowerCase().indexOf(m.toLowerCase()) }))
    .filter((x) => x.i >= 0)
    .sort((a, b) => a.i - b.i)[0];
  if (!idx) return "";
  const start = idx.i + idx.m.length;
  const tail = text.slice(start);
  const stops = ["\n## ", "\n---", "\nJustification", "\nProposition", "\nSources"];
  let end = tail.length;
  for (const s of stops) {
    const j = tail.toLowerCase().indexOf(s.toLowerCase());
    if (j >= 0 && j < end) end = j;
  }
  return tail.slice(0, end).trim();
}

// Normalize a "Chemin" value to the canonical "/a/b/" form
// (leading + trailing slash, no duplicate slashes).
function normalizeChemin(chemin) {
  let c = String(chemin || "").trim();
  if (!c) return "";
  if (!c.startsWith("/")) c = "/" + c;
  if (!c.endsWith("/")) c = c + "/";
  c = c.replace(/\/{2,}/g, "/");
  return c;
}

// pageKey is the chemin without surrounding slashes, e.g. "archicrat-ia/chapitre-4".
function normalizePageKeyFromChemin(chemin) {
  return normalizeChemin(chemin).replace(/^\/+|\/+$/g, "");
}

// Strip a leading "#" from an anchor id.
function normalizeAnchorId(s) {
  let a = String(s || "").trim();
  if (a.startsWith("#")) a = a.slice(1);
  return a;
}

// Throw an Error carrying __exitCode (read by main().catch) when cond is falsy.
function assert(cond, msg, code = 1) {
  if (!cond) {
    const e = new Error(msg);
    e.__exitCode = code;
    throw e;
  }
}

// Plain object check (excludes arrays and null).
function isPlainObject(x) {
  return !!x && typeof x === "object" && !Array.isArray(x);
}

// Extract the numeric paragraph index from an id like "p-12-abc" -> 12.
// Returns NaN when the id does not match.
function paraIndexFromId(id) {
  const m = String(id).match(/^p-(\d+)-/i);
  return m ? Number(m[1]) : Number.NaN;
}

// True only for well-formed http:// or https:// URLs.
function isHttpUrl(u) {
  try {
    const x = new URL(String(u));
    return x.protocol === "http:" || x.protocol === "https:";
  } catch {
    return false;
  }
}

/* ------------------------------ para-index (verify + sort) ------------------------------ */

// Load the ordered list of paragraph ids for a page from dist/para-index.json.
// Returns null when the file is absent, unparsable, or yields nothing for
// this page. Several index shapes are supported (see A/B/C below).
async function loadParaOrderFromDist(pageKey) {
  const distIdx = path.join(CWD, "dist", "para-index.json");
  if (!(await exists(distIdx))) return null;
  let j;
  try {
    j = JSON.parse(await fs.readFile(distIdx, "utf8"));
  } catch {
    return null;
  }
  // Support several shapes:
  // A) { items:[{id,page,...}, ...] }
  if (Array.isArray(j?.items)) {
    const ids = [];
    for (const it of j.items) {
      const p = String(it?.page || it?.pageKey || "");
      const id = String(it?.id || it?.paraId || "");
      if (p === pageKey && id) ids.push(id);
    }
    if (ids.length) return ids;
  }
  // B) { byId: { "p-...": { page:"archicrat-ia/chapitre-4", ... }, ... } }
  if (j?.byId && typeof j.byId === "object") {
    // cannot rebuild full order; but can verify existence
    // return a pseudo-order map from known ids sorted by "p-<n>-" then alpha
    const ids = Object.keys(j.byId).filter((id) => String(j.byId[id]?.page || "") === pageKey);
    if (ids.length) {
      ids.sort((a, b) => {
        const ia = paraIndexFromId(a);
        const ib = paraIndexFromId(b);
        if (Number.isFinite(ia) && Number.isFinite(ib) && ia !== ib) return ia - ib;
        return String(a).localeCompare(String(b));
      });
      return ids;
    }
  }
  // C) { pages: { "archicrat-ia/chapitre-4": { ids:[...] } } }
  if (j?.pages && typeof j.pages === "object") {
    const pg = j.pages[pageKey];
    if (Array.isArray(pg?.ids)) return pg.ids.map(String);
    if (Array.isArray(pg?.paras)) return pg.paras.map(String);
  }
  return null;
}

// Check that (pageKey, anchorId) exists.
// Returns true/false when verifiable, or null when no index/baseline is
// available (caller decides whether that is fatal under --strict).
async function tryVerifyAnchor(pageKey, anchorId) {
  // 1) dist/para-index.json
  const order = await loadParaOrderFromDist(pageKey);
  if (order) return order.includes(anchorId);
  // 2) tests/anchors-baseline.json (fallback)
  const base = path.join(CWD, "tests", "anchors-baseline.json");
  if (await exists(base)) {
    try {
      const j = JSON.parse(await fs.readFile(base, "utf8"));
      const candidates = [];
      if (j?.pages && typeof j.pages === "object") {
        for (const [k, v] of Object.entries(j.pages)) {
          if (!Array.isArray(v)) continue;
          if (String(k).includes(pageKey)) candidates.push(...v);
        }
      }
      if (Array.isArray(j?.entries)) {
        for (const it of j.entries) {
          const p = String(it?.page || "");
          const ids = it?.ids;
          if (Array.isArray(ids) && p.includes(pageKey)) candidates.push(...ids);
        }
      }
      if (candidates.length) return candidates.some((x) => String(x) === anchorId);
    } catch {
      // ignore
    }
  }
  return null; // cannot verify
}

/* ----------------------------- deep merge helpers (non destructive) ----------------------------- */

// Dedup keys for the three known list kinds. An empty key means "skip item".
function keyMedia(x) {
  return String(x?.src || "");
}
function keyRef(x) {
  return `${x?.url || ""}||${x?.label || ""}||${x?.kind || ""}||${x?.citation || ""}`;
}
function keyComment(x) {
  return String(x?.text || "").trim();
}

// Union of two arrays, deduplicated by keyFn; preserves dst order and never
// removes existing items. Items whose key is falsy are skipped.
function uniqUnion(dstArr, srcArr, keyFn) {
  const out = Array.isArray(dstArr) ? [...dstArr] : [];
  const seen = new Set(out.map((x) => keyFn(x)));
  for (const it of (Array.isArray(srcArr) ? srcArr : [])) {
    const k = keyFn(it);
    if (!k) continue;
    if (!seen.has(k)) {
      seen.add(k);
      out.push(it);
    }
  }
  return out;
}

// Merge src into dst in place, non-destructively:
// - known lists (media/refs/comments_editorial) are unioned with their key fn
// - nested objects are merged recursively
// - other arrays are unioned by JSON representation
// - scalars are only written when missing/empty in dst
function deepMergeEntry(dst, src) {
  if (!isPlainObject(dst) || !isPlainObject(src)) return;
  for (const [k, v] of Object.entries(src)) {
    if (k === "media" && Array.isArray(v)) {
      dst.media = uniqUnion(dst.media, v, keyMedia);
      continue;
    }
    if (k === "refs" && Array.isArray(v)) {
      dst.refs = uniqUnion(dst.refs, v, keyRef);
      continue;
    }
    if (k === "comments_editorial" && Array.isArray(v)) {
      dst.comments_editorial = uniqUnion(dst.comments_editorial, v, keyComment);
      continue;
    }
    if (isPlainObject(v)) {
      if (!isPlainObject(dst[k])) dst[k] = {};
      deepMergeEntry(dst[k], v);
      continue;
    }
    if (Array.isArray(v)) {
      // fallback: union by JSON string
      const cur = Array.isArray(dst[k]) ? dst[k] : [];
      const seen = new Set(cur.map((x) => JSON.stringify(x)));
      const out = [...cur];
      for (const it of v) {
        const s = JSON.stringify(it);
        if (!seen.has(s)) {
          seen.add(s);
          out.push(it);
        }
      }
      dst[k] = out;
      continue;
    }
    // scalar: set only if missing/empty
    if (!(k in dst) || dst[k] == null || dst[k] === "") {
      dst[k] = v;
    }
  }
}

// Sort a list in place by its "ts" field (unparsable dates sort first),
// tie-broken by JSON representation for deterministic diffs.
function stableSortByTs(arr) {
  if (!Array.isArray(arr)) return;
  arr.sort((a, b) => {
    const ta = Date.parse(a?.ts || "") || 0;
    const tb = Date.parse(b?.ts || "") || 0;
    if (ta !== tb) return ta - tb;
    return JSON.stringify(a).localeCompare(JSON.stringify(b));
  });
}

/* ----------------------------- annotations I/O ----------------------------- */

// Load an annotation YAML document, validating its minimal schema
// ({ schema: 1, page, paras }). A missing file yields a fresh empty doc.
// Throws with exit code 2 on schema/page mismatches.
async function loadAnnoDocYaml(fileAbs, pageKey) {
  if (!(await exists(fileAbs))) {
    return { schema: 1, page: pageKey, paras: {} };
  }
  const raw = await fs.readFile(fileAbs, "utf8");
  let doc;
  try {
    doc = YAML.parse(raw);
  } catch (e) {
    throw new Error(`${path.relative(CWD, fileAbs)}: parse failed: ${String(e?.message ?? e)}`);
  }
  assert(isPlainObject(doc), `${path.relative(CWD, fileAbs)}: doc must be an object`, 2);
  assert(doc.schema === 1, `${path.relative(CWD, fileAbs)}: schema must be 1`, 2);
  assert(isPlainObject(doc.paras), `${path.relative(CWD, fileAbs)}: missing object key "paras"`, 2);
  if (doc.page != null) {
    const got = String(doc.page).replace(/^\/+/, "").replace(/\/+$/, "");
    assert(got === pageKey, `${path.relative(CWD, fileAbs)}: page mismatch (page="${doc.page}" vs path="${pageKey}")`, 2);
  } else {
    doc.page = pageKey;
  }
  return doc;
}

// Return a copy of paras with keys sorted: first by position in the optional
// page order, then by numeric paragraph index, then alphabetically.
function sortParasObject(paras, order) {
  const keys = Object.keys(paras || {});
  const idx = new Map();
  if (Array.isArray(order)) {
    order.forEach((id, i) => idx.set(String(id), i));
  }
  keys.sort((a, b) => {
    const ha = idx.has(a);
    const hb = idx.has(b);
    if (ha && hb) return idx.get(a) - idx.get(b);
    if (ha && !hb) return -1;
    if (!ha && hb) return 1;
    const ia = paraIndexFromId(a);
    const ib = paraIndexFromId(b);
    if (Number.isFinite(ia) && Number.isFinite(ib) && ia !== ib) return ia - ib;
    return String(a).localeCompare(String(b));
  });
  const out = {};
  for (const k of keys) out[k] = paras[k];
  return out;
}

// Write the annotation doc back to disk, sorting paras and the known lists
// inside each para for stable diffs. Creates parent directories as needed.
async function saveAnnoDocYaml(fileAbs, doc, order = null) {
  await fs.mkdir(path.dirname(fileAbs), { recursive: true });
  doc.paras = sortParasObject(doc.paras, order);
  // also sort known lists inside each para for stable diffs
  for (const e of Object.values(doc.paras || {})) {
    if (!isPlainObject(e)) continue;
    stableSortByTs(e.media);
    stableSortByTs(e.refs);
    stableSortByTs(e.comments_editorial);
  }
  const out = YAML.stringify(doc);
  await fs.writeFile(fileAbs, out, "utf8");
}

/* ------------------------------ gitea helpers ------------------------------ */

// Strip trailing slashes from the API base URL.
function apiBaseNorm(forgeApiBase) {
  return forgeApiBase.replace(/\/+$/, "");
}

// Authenticated GET returning parsed JSON; throws with status + body on error.
async function giteaGET(url, token) {
  const res = await fetch(url, {
    headers: {
      Authorization: `token ${token}`,
      Accept: "application/json",
      "User-Agent": "archicratie-apply-annotation/1.0",
    },
  });
  if (!res.ok) {
    const t = await res.text().catch(() => "");
    throw new Error(`HTTP ${res.status} GET ${url}\n${t}`);
  }
  return await res.json();
}

// Fetch one issue by index.
async function fetchIssue({ forgeApiBase, owner, repo, token, issueNum }) {
  const url = `${apiBaseNorm(forgeApiBase)}/api/v1/repos/${owner}/${repo}/issues/${issueNum}`;
  return await giteaGET(url, token);
}

// Fetch issue attachments; best-effort — returns [] on any error
// (older Gitea versions may not expose this endpoint).
async function fetchIssueAssets({ forgeApiBase, owner, repo, token, issueNum }) {
  // Gitea: /issues/{index}/assets
  const url = `${apiBaseNorm(forgeApiBase)}/api/v1/repos/${owner}/${repo}/issues/${issueNum}/assets`;
  try {
    const json = await giteaGET(url, token);
    return Array.isArray(json) ? json : [];
  } catch {
    return [];
  }
}

// Post a comment on the issue; throws on HTTP error.
async function postIssueComment({ forgeApiBase, owner, repo, token, issueNum, comment }) {
  const url = `${apiBaseNorm(forgeApiBase)}/api/v1/repos/${owner}/${repo}/issues/${issueNum}/comments`;
  const res = await fetch(url, {
    method: "POST",
    headers: {
      Authorization: `token ${token}`,
      Accept: "application/json",
      "Content-Type": "application/json",
      "User-Agent": "archicratie-apply-annotation/1.0",
    },
    body: JSON.stringify({ body: comment }),
  });
  if (!res.ok) {
    const t = await res.text().catch(() => "");
    throw new Error(`HTTP ${res.status} POST comment ${url}\n${t}`);
  }
}

// Optionally comment, then PATCH the issue state to "closed".
async function closeIssue({ forgeApiBase, owner, repo, token, issueNum, comment }) {
  if (comment) await postIssueComment({ forgeApiBase, owner, repo, token, issueNum, comment });
  const url = `${apiBaseNorm(forgeApiBase)}/api/v1/repos/${owner}/${repo}/issues/${issueNum}`;
  const res = await fetch(url, {
    method: "PATCH",
    headers: {
      Authorization: `token ${token}`,
      Accept: "application/json",
      "Content-Type": "application/json",
      "User-Agent": "archicratie-apply-annotation/1.0",
    },
    body: JSON.stringify({ state: "closed" }),
  });
  if (!res.ok) {
    const t = await res.text().catch(() => "");
    throw new Error(`HTTP ${res.status} closing issue: ${url}\n${t}`);
  }
}

/* ------------------------------ media helpers ------------------------------ */

// Classify an attachment by its filename extension.
function inferMediaTypeFromFilename(name) {
  const n = String(name || "").toLowerCase();
  if (/\.(png|jpe?g|webp|gif|svg)$/.test(n)) return "image";
  if (/\.(mp4|webm|mov|m4v)$/.test(n)) return "video";
  if (/\.(mp3|wav|ogg|m4a)$/.test(n)) return "audio";
  return "link";
}

// Make a filename safe for the filesystem and URLs (no separators,
// conservative character set, capped length).
function sanitizeFilename(name) {
  return String(name || "file")
    .replace(/[\/\\]/g, "_")
    .replace(/[^\w.\-]+/g, "_")
    .replace(/_+/g, "_")
    .slice(0, 180);
}

// Download url to destAbs (creating parent dirs); returns the byte count.
// Throws on HTTP error.
async function downloadToFile(url, token, destAbs) {
  const res = await fetch(url, {
    headers: {
      Authorization: `token ${token}`,
      "User-Agent": "archicratie-apply-annotation/1.0",
    },
    redirect: "follow",
  });
  if (!res.ok) {
    const t = await res.text().catch(() => "");
    throw new Error(`download failed HTTP ${res.status}: ${url}\n${t}`);
  }
  const buf = Buffer.from(await res.arrayBuffer());
  await fs.mkdir(path.dirname(destAbs), { recursive: true });
  await fs.writeFile(destAbs, buf);
  return buf.length;
}

/* ------------------------------ type parsers ------------------------------ */

// Parse the "- URL:", "- Label:", "- Kind:", "- Citation:" bullet lines out of
// the reference section of the ticket body. Missing fields come back as "".
function parseReferenceBlock(body) {
  const block =
    pickSection(body, ["Référence (à compléter):", "Reference (à compléter):"]) ||
    pickSection(body, ["Référence:", "Reference:"]);
  const lines = String(block || "").split(/\r?\n/).map((l) => l.trim());
  const get = (k) => {
    const re = new RegExp(`^[-*]\\s*${escapeRegExp(k)}\\s*:\\s*(.*)$`, "i");
    const m = lines.map((l) => l.match(re)).find(Boolean);
    return (m?.[1] ?? "").trim();
  };
  return {
    url: get("URL") || "",
    label: get("Label") || "",
    kind: get("Kind") || "",
    citation: get("Citation") || get("Passage") || get("Extrait") || "",
    rawBlock: block || "",
  };
}

/* ----------------------------------- main ---------------------------------- */

async function main() {
  const token = getEnv("FORGE_TOKEN");
  assert(token, "❌ FORGE_TOKEN manquant.", 2);
  const forgeApiBase = getEnv("FORGE_API") || getEnv("FORGE_BASE");
  assert(forgeApiBase, "❌ FORGE_API (ou FORGE_BASE) manquant.", 2);

  const inferred = inferOwnerRepoFromGit() || {};
  const owner = getEnv("GITEA_OWNER", inferred.owner || "");
  const repo = getEnv("GITEA_REPO", inferred.repo || "");
  assert(owner && repo, "❌ Impossible de déterminer owner/repo. Fix: export GITEA_OWNER=... GITEA_REPO=...", 2);

  console.log(`🔎 Fetch ticket #${issueNum} from ${owner}/${repo} …`);
  const issue = await fetchIssue({ forgeApiBase, owner, repo, token, issueNum });
  if (issue?.pull_request) {
    console.error(`❌ #${issueNum} est une Pull Request, pas un ticket annotations.`);
    process.exit(2);
  }

  const body = String(issue.body || "").replace(/\r\n/g, "\n");
  const title = String(issue.title || "");
  const type = pickLine(body, "Type").toLowerCase();
  const chemin = normalizeChemin(pickLine(body, "Chemin"));
  const ancre = normalizeAnchorId(pickLine(body, "Ancre"));
  assert(chemin, "Ticket: Chemin manquant.", 2);
  assert(ancre && /^p-\d+-/i.test(ancre), `Ticket: Ancre invalide ("${ancre}")`, 2);
  assert(type, "Ticket: Type manquant.", 2);
  const pageKey = normalizePageKeyFromChemin(chemin);
  assert(pageKey, "Ticket: impossible de dériver pageKey.", 2);

  // para order (used for verify + sorting)
  const paraOrder = DO_VERIFY ? await loadParaOrderFromDist(pageKey) : null;
  if (DO_VERIFY) {
    const ok = await tryVerifyAnchor(pageKey, ancre);
    if (ok === false) {
      throw Object.assign(new Error(`Ticket verify: ancre introuvable pour page "${pageKey}" => ${ancre}`), { __exitCode: 2 });
    }
    if (ok === null) {
      if (STRICT) throw Object.assign(new Error(`Ticket verify (strict): impossible de vérifier (pas de dist/para-index.json ou baseline)`), { __exitCode: 2 });
      console.warn("⚠️ verify: impossible de vérifier (pas de dist/para-index.json ou baseline) — on continue.");
    }
  }

  // ✅ SHARD FILE: src/annotations/<pageKey>/<ancre>.yml
  const annoShardFileAbs = path.join(ANNO_DIR, pageKey, `${ancre}.yml`);
  const annoShardFileRel = path.relative(CWD, annoShardFileAbs).replace(/\\/g, "/");
  // legacy file src/annotations/<pageKey>.yml (read-only, used as base to
  // avoid losing previously stored data)
  const annoLegacyFileAbs = path.join(ANNO_DIR, `${pageKey}.yml`);

  console.log("✅ Parsed:", {
    type,
    chemin,
    ancre: `#${ancre}`,
    pageKey,
    annoFile: annoShardFileRel,
  });

  // load shard doc
  const doc = await loadAnnoDocYaml(annoShardFileAbs, pageKey);

  // merge legacy para into shard as base (non destructive)
  if (await exists(annoLegacyFileAbs)) {
    try {
      const legacy = await loadAnnoDocYaml(annoLegacyFileAbs, pageKey);
      const legacyEntry = legacy?.paras?.[ancre];
      if (isPlainObject(legacyEntry)) {
        if (!isPlainObject(doc.paras[ancre])) doc.paras[ancre] = {};
        deepMergeEntry(doc.paras[ancre], legacyEntry);
      }
    } catch {
      // ignore legacy parse issues (shard still works)
    }
  }

  if (!isPlainObject(doc.paras[ancre])) doc.paras[ancre] = {};
  const entry = doc.paras[ancre];

  const touchedFiles = [];
  const notes = [];
  let changed = false;
  const nowIso = new Date().toISOString();

  if (type === "type/comment") {
    // Editorial comment: the text comes from the "Commentaire:" section.
    const comment = pickSection(body, ["Commentaire:", "Comment:", "Commentaires:"]) || "";
    const text = comment.trim();
    assert(text.length >= 3, "Ticket comment: bloc 'Commentaire:' introuvable ou trop court.", 2);
    if (!Array.isArray(entry.comments_editorial)) entry.comments_editorial = [];
    const item = { text, status: "new", ts: nowIso, fromIssue: issueNum };
    const before = entry.comments_editorial.length;
    entry.comments_editorial = uniqUnion(entry.comments_editorial, [item], keyComment);
    changed = changed || entry.comments_editorial.length !== before;
    stableSortByTs(entry.comments_editorial);
    notes.push(changed ? `+ comment added (len=${text.length})` : `~ comment already present (dedup)`);
  } else if (type === "type/reference") {
    const ref = parseReferenceBlock(body);
    assert(ref.url || ref.label, "Ticket reference: renseigne au moins - URL: ou - Label: dans le ticket.", 2);
    if (STRICT && ref.url && !isHttpUrl(ref.url)) {
      throw Object.assign(new Error(`Ticket reference (strict): URL invalide (http/https requis): "${ref.url}"`), { __exitCode: 2 });
    }
    if (!Array.isArray(entry.refs)) entry.refs = [];
    const item = {
      url: ref.url || "",
      label: ref.label || (ref.url ? ref.url : "Référence"),
      kind: ref.kind || "",
      ts: nowIso,
      fromIssue: issueNum,
    };
    if (ref.citation) item.citation = ref.citation;
    const before = entry.refs.length;
    entry.refs = uniqUnion(entry.refs, [item], keyRef);
    changed = changed || entry.refs.length !== before;
    stableSortByTs(entry.refs);
    notes.push(changed ? `+ reference added (${item.url ? "url" : "label"})` : `~ reference already present (dedup)`);
  } else if (type === "type/media") {
    if (!Array.isArray(entry.media)) entry.media = [];
    const atts = NO_DOWNLOAD ? [] : await fetchIssueAssets({ forgeApiBase, owner, repo, token, issueNum });
    if (!atts.length) notes.push("! no assets found (nothing to download).");
    for (const a of atts) {
      const name = sanitizeFilename(a?.name || `asset-${a?.id || "x"}`);
      const dl = a?.browser_download_url || a?.download_url || "";
      if (!dl) {
        notes.push(`! asset missing download url: ${name}`);
        continue;
      }
      // The ticket title doubles as the media caption.
      const caption = (title || "").trim();
      if (STRICT && !caption) {
        throw Object.assign(new Error("Ticket media (strict): caption vide (titre de ticket requis)."), { __exitCode: 2 });
      }
      const captionFinal = caption || ".";
      const mediaDirAbs = path.join(PUBLIC_DIR, "media", pageKey, ancre);
      const destAbs = path.join(mediaDirAbs, name);
      const urlPath = `${MEDIA_URL_ROOT}/${pageKey}/${ancre}/${name}`.replace(/\/{2,}/g, "/");
      if (await exists(destAbs)) {
        notes.push(`~ media already exists: ${urlPath}`);
      } else if (!DRY_RUN) {
        const bytes = await downloadToFile(dl, token, destAbs);
        notes.push(`+ downloaded ${name} (${bytes} bytes) -> ${urlPath}`);
        touchedFiles.push(path.relative(CWD, destAbs).replace(/\\/g, "/"));
        changed = true;
      } else {
        notes.push(`(dry) would download ${name} -> ${urlPath}`);
        changed = true;
      }
      const item = {
        type: inferMediaTypeFromFilename(name),
        src: urlPath,
        caption: captionFinal,
        credit: "",
        ts: nowIso,
        fromIssue: issueNum,
      };
      const before = entry.media.length;
      entry.media = uniqUnion(entry.media, [item], keyMedia);
      if (entry.media.length !== before) changed = true;
    }
    stableSortByTs(entry.media);
  } else {
    throw Object.assign(new Error(`Type non supporté: "${type}"`), { __exitCode: 2 });
  }

  if (!changed) {
    console.log("ℹ️ No changes to apply.");
    for (const n of notes) console.log(" ", n);
    return;
  }

  if (DRY_RUN) {
    console.log("\n--- DRY RUN (no write) ---");
    console.log(`Would update: ${annoShardFileRel}`);
    for (const n of notes) console.log(" ", n);
    console.log("\nExcerpt (resulting entry):");
    console.log(YAML.stringify({ [ancre]: doc.paras[ancre] }).trimEnd());
    console.log("\n✅ Dry-run terminé.");
    return;
  }

  await saveAnnoDocYaml(annoShardFileAbs, doc, paraOrder);
  touchedFiles.unshift(annoShardFileRel);
  console.log(`✅ Updated: ${annoShardFileRel}`);
  for (const n of notes) console.log(" ", n);

  if (DO_COMMIT) {
    run("git", ["add", ...touchedFiles], { cwd: CWD });
    if (!gitHasStagedChanges()) {
      console.log("ℹ️ Nothing to commit (aucun changement staged).");
      return;
    }
    const msg = `anno: apply ticket #${issueNum} (${pageKey}#${ancre} ${type})`;
    run("git", ["commit", "-m", msg], { cwd: CWD });
    const sha = runQuiet("git", ["rev-parse", "--short", "HEAD"], { cwd: CWD }).trim();
    console.log(`✅ Committed: ${msg} (${sha})`);
    if (DO_CLOSE) {
      const comment = `✅ Appliqué par apply-annotation-ticket.\nCommit: ${sha}`;
      await closeIssue({ forgeApiBase, owner, repo, token, issueNum, comment });
      console.log(`✅ Ticket #${issueNum} fermé.`);
    }
  } else {
    console.log("\nNext (manuel) :");
    console.log(`  git diff -- ${touchedFiles[0]}`);
    console.log(`  git add ${touchedFiles.join(" ")}`);
    console.log(`  git commit -m "anno: apply ticket #${issueNum} (${pageKey}#${ancre} ${type})"`);
  }
}

main().catch((e) => {
  // __exitCode distinguishes refusals (2) from fatal errors (1).
  const code = e?.__exitCode || 1;
  console.error("💥", e?.message || e);
  process.exit(code);
});