899 lines
28 KiB
JavaScript
899 lines
28 KiB
JavaScript
#!/usr/bin/env node
|
||
// scripts/apply-annotation-ticket.mjs
|
||
//
|
||
// Applique un ticket Gitea "type/media | type/reference | type/comment" vers:
|
||
//
|
||
// ✅ src/annotations/<oeuvre>/<chapitre>/<paraId>.yml (sharding par paragraphe)
|
||
// ✅ public/media/<oeuvre>/<chapitre>/<paraId>/<file>
|
||
//
|
||
// Compat rétro : lit (si présent) l'ancien monolithe:
|
||
// src/annotations/<oeuvre>/<chapitre>.yml
|
||
// et deep-merge NON destructif dans le shard lors d'une nouvelle application,
|
||
// pour permettre une migration progressive sans perte.
|
||
//
|
||
// Robuste, idempotent, non destructif.
|
||
// DRY RUN si --dry-run
|
||
// Options: --dry-run --no-download --verify --strict --commit --close
|
||
//
|
||
// Env requis:
|
||
// FORGE_API = base API Gitea (LAN) ex: http://192.168.1.20:3000
|
||
// FORGE_TOKEN = PAT Gitea (repo + issues)
|
||
//
|
||
// Env optionnel:
|
||
// GITEA_OWNER / GITEA_REPO (sinon auto-détecté via git remote)
|
||
// ANNO_DIR (défaut: src/annotations)
|
||
// PUBLIC_DIR (défaut: public)
|
||
// MEDIA_ROOT (défaut URL: /media)
|
||
//
|
||
// Ticket attendu (body):
|
||
// Chemin: /archicrat-ia/chapitre-4/
|
||
// Ancre: #p-0-xxxxxxxx
|
||
// Type: type/media | type/reference | type/comment
|
||
//
|
||
// Exit codes:
|
||
// 0 ok
|
||
// 1 erreur fatale
|
||
// 2 refus (strict/verify/usage)
|
||
|
||
import fs from "node:fs/promises";
|
||
import path from "node:path";
|
||
import process from "node:process";
|
||
import { spawnSync } from "node:child_process";
|
||
import YAML from "yaml";
|
||
|
||
/* ---------------------------------- usage --------------------------------- */
|
||
|
||
// Print the CLI help text and terminate the process with `exitCode`.
// Called with 0 for --help/-h, 2 for usage errors (see exit-code contract
// in the file header).
function usage(exitCode = 0) {
  console.log(`
apply-annotation-ticket — applique un ticket SidePanel (media/ref/comment) vers src/annotations/ (shard par paragraphe)

Usage:
node scripts/apply-annotation-ticket.mjs <issue_number> [--dry-run] [--no-download] [--verify] [--strict] [--commit] [--close]

Flags:
--dry-run : n'écrit rien (affiche un aperçu)
--no-download : n'essaie pas de télécharger les pièces jointes (media)
--verify : vérifie que (page, ancre) existent (dist/para-index.json si dispo, sinon baseline)
--strict : refuse si URL ref invalide (http/https) OU caption media vide OU verify impossible
--commit : git add + git commit (commit dans la branche courante)
--close : ferme le ticket (nécessite --commit)

Env requis:
FORGE_API = base API Gitea (LAN) ex: http://192.168.1.20:3000
FORGE_TOKEN = PAT Gitea (repo + issues)

Env optionnel:
GITEA_OWNER / GITEA_REPO (sinon auto-détecté via git remote)
ANNO_DIR (défaut: src/annotations)
PUBLIC_DIR (défaut: public)
MEDIA_ROOT (défaut URL: /media)

Exit codes:
0 ok
1 erreur fatale
2 refus (strict/verify/close sans commit / incohérence)
`);
  process.exit(exitCode);
}
|
||
|
||
/* ---------------------------------- args ---------------------------------- */
|
||
|
||
const argv = process.argv.slice(2);
if (argv.length === 0 || argv.includes("--help") || argv.includes("-h")) usage(0);

// The first positional argument is the Gitea issue index; it must be a
// positive integer. Number.isInteger also rejects fractional input such as
// "12.5" (Number.isFinite alone would accept it and build a bogus API URL).
const issueNum = Number(argv[0]);
if (!Number.isInteger(issueNum) || issueNum <= 0) {
  console.error("❌ Numéro de ticket invalide.");
  usage(2);
}

// Behavior flags — see usage() for their semantics.
const DRY_RUN = argv.includes("--dry-run");
const NO_DOWNLOAD = argv.includes("--no-download");
const DO_VERIFY = argv.includes("--verify");
const STRICT = argv.includes("--strict");
const DO_COMMIT = argv.includes("--commit");
const DO_CLOSE = argv.includes("--close");

// --close references the commit SHA in the forge comment, so it is
// meaningless without --commit; reject the combination early.
if (DO_CLOSE && !DO_COMMIT) {
  console.error("❌ --close nécessite --commit.");
  process.exit(2);
}

// Global fetch() ships with Node 18+; bail out on older runtimes.
if (typeof fetch !== "function") {
  console.error("❌ fetch() indisponible. Utilise Node 18+.");
  process.exit(1);
}
|
||
|
||
/* --------------------------------- config --------------------------------- */
|
||
|
||
const CWD = process.cwd();
// ANNO_DIR (env) is the full relative path of the annotations root, e.g.
// "src/annotations" — previously the code appended an extra "annotations"
// segment to whatever the user supplied, producing e.g.
// "src/annotations/annotations" when ANNO_DIR=src/annotations was set,
// contradicting the documented default.
const ANNO_DIR = path.join(CWD, process.env.ANNO_DIR || "src/annotations");
// Root directory of statically served files (media goes under <PUBLIC_DIR>/media).
const PUBLIC_DIR = path.join(CWD, process.env.PUBLIC_DIR || "public");
// URL prefix under which media files are served; trailing slashes stripped so
// later concatenation cannot produce "//".
const MEDIA_URL_ROOT = String(process.env.MEDIA_ROOT || "/media").replace(/\/+$/, "");
|
||
|
||
/* --------------------------------- helpers -------------------------------- */
|
||
|
||
// Read an environment variable, falling back to `fallback` when it is unset,
// and strip surrounding whitespace from the result.
function getEnv(name, fallback = "") {
  const raw = process.env[name] ?? fallback;
  return raw.trim();
}
|
||
|
||
// Run a command with stdio inherited from this process; throw if the command
// could not be spawned or exited non-zero.
function run(cmd, args, opts = {}) {
  const result = spawnSync(cmd, args, { stdio: "inherit", ...opts });
  if (result.error) throw result.error;
  if (result.status !== 0) {
    throw new Error(`Command failed: ${cmd} ${args.join(" ")}`);
  }
}
|
||
|
||
// Run a command capturing its output; return stdout on success, throw an
// Error embedding combined stdout+stderr on spawn failure or non-zero exit.
function runQuiet(cmd, args, opts = {}) {
  const result = spawnSync(cmd, args, { encoding: "utf8", stdio: "pipe", ...opts });
  if (result.error) throw result.error;
  if (result.status !== 0) {
    const combined = `${result.stdout || ""}${result.stderr || ""}`;
    throw new Error(`Command failed: ${cmd} ${args.join(" ")}\n${combined}`);
  }
  return result.stdout || "";
}
|
||
|
||
// True iff path `p` is accessible on disk (fs.access resolves).
async function exists(p) {
  return fs.access(p).then(
    () => true,
    () => false
  );
}
|
||
|
||
// Derive { owner, repo } from `git remote get-url origin`, or null when the
// remote is unavailable or unparsable. Handles both SSH
// (git@host:owner/repo.git) and HTTP(S) remote URL shapes.
function inferOwnerRepoFromGit() {
  const res = spawnSync("git", ["remote", "get-url", "origin"], { encoding: "utf-8" });
  if (res.status !== 0) return null;
  const remoteUrl = (res.stdout || "").trim();
  const match = remoteUrl.match(/[:/](?<owner>[^/]+)\/(?<repo>[^/]+?)(?:\.git)?$/);
  if (!match?.groups) return null;
  return { owner: match.groups.owner, repo: match.groups.repo };
}
|
||
|
||
// True iff the git index differs from HEAD: `git diff --cached --quiet`
// exits 1 when there are staged changes (other statuses, including spawn
// errors, count as "nothing staged").
function gitHasStagedChanges() {
  const { status } = spawnSync("git", ["diff", "--cached", "--quiet"]);
  return status === 1;
}
|
||
|
||
// Escape all regex metacharacters so `s` can be embedded literally in a RegExp.
function escapeRegExp(s) {
  const META = /[.*+?^${}()|[\]\\]/g;
  return String(s).replace(META, (ch) => `\\${ch}`);
}
|
||
|
||
// Extract the value of a "Key: value" line anywhere in the ticket body
// (case-insensitive, line-anchored); returns "" when the key is absent.
function pickLine(body, key) {
  const pattern = new RegExp(`^\\s*${escapeRegExp(key)}\\s*:\\s*([^\\n\\r]+)`, "mi");
  const match = String(body || "").match(pattern);
  if (!match) return "";
  return match[1].trim();
}
|
||
|
||
// Return the text that follows the earliest occurring marker
// (case-insensitive), truncated at the nearest known section delimiter and
// trimmed; "" when no marker is present in the body.
function pickSection(body, markers) {
  const text = String(body || "").replace(/\r\n/g, "\n");
  const lower = text.toLowerCase();

  // Earliest marker wins; first listed wins ties.
  let best = null;
  for (const marker of markers) {
    const at = lower.indexOf(marker.toLowerCase());
    if (at < 0) continue;
    if (best === null || at < best.at) best = { marker, at };
  }
  if (best === null) return "";

  const tail = text.slice(best.at + best.marker.length);
  const tailLower = tail.toLowerCase();

  // Cut at the first occurrence of any known stop token.
  const stops = ["\n## ", "\n---", "\nJustification", "\nProposition", "\nSources"];
  let cut = tail.length;
  for (const stop of stops) {
    const at = tailLower.indexOf(stop.toLowerCase());
    if (at >= 0 && at < cut) cut = at;
  }
  return tail.slice(0, cut).trim();
}
|
||
|
||
// Normalize a ticket "Chemin" into the canonical "/a/b/" shape: guaranteed
// leading and trailing slash, duplicate slashes collapsed; "" stays "".
function normalizeChemin(chemin) {
  const trimmed = String(chemin || "").trim();
  if (!trimmed) return "";
  return `/${trimmed}/`.replace(/\/{2,}/g, "/");
}
|
||
|
||
// "/archicrat-ia/chapitre-4/" => "archicrat-ia/chapitre-4"
// (normalize first, then strip the edge slashes).
function normalizePageKeyFromChemin(chemin) {
  const canonical = normalizeChemin(chemin);
  return canonical.replace(/^\/+|\/+$/g, "");
}
|
||
|
||
// Trim an anchor reference and strip a single leading "#".
function normalizeAnchorId(s) {
  const anchor = String(s || "").trim();
  return anchor.startsWith("#") ? anchor.slice(1) : anchor;
}
|
||
|
||
// Throw an Error carrying a custom process exit code when `cond` is falsy.
// The `__exitCode` property is read by the main().catch handler at the bottom
// of the file; default 1 = fatal error, pass 2 for refusals (strict/verify).
function assert(cond, msg, code = 1) {
  if (!cond) {
    const e = new Error(msg);
    e.__exitCode = code;
    throw e;
  }
}
|
||
|
||
// True for non-null objects that are not arrays (i.e. "record-like" values).
function isPlainObject(x) {
  if (!x) return false;
  if (Array.isArray(x)) return false;
  return typeof x === "object";
}
|
||
|
||
// Numeric paragraph index embedded in an id: "p-12-abcdef" => 12.
// Returns NaN when the id does not match the "p-<n>-" pattern.
function paraIndexFromId(id) {
  const match = /^p-(\d+)-/i.exec(String(id));
  if (!match) return Number.NaN;
  return Number(match[1]);
}
|
||
|
||
// True iff `u` parses as an absolute URL with an http: or https: scheme.
function isHttpUrl(u) {
  let parsed;
  try {
    parsed = new URL(String(u));
  } catch {
    return false;
  }
  return ["http:", "https:"].includes(parsed.protocol);
}
|
||
|
||
// In-place chronological sort by the `.ts` ISO timestamp; missing or
// unparsable timestamps sort as epoch 0. Ties break on the JSON form of the
// items so the output is fully deterministic. No-op on non-arrays.
function stableSortByTs(arr) {
  if (!Array.isArray(arr)) return;
  const epoch = (x) => Date.parse(x?.ts || "") || 0;
  arr.sort((a, b) => {
    const diff = epoch(a) - epoch(b);
    if (diff !== 0) return diff;
    return JSON.stringify(a).localeCompare(JSON.stringify(b));
  });
}
|
||
|
||
// Normalize any page reference — full URL, absolute path, with query/hash or
// a trailing index.html — down to a bare "a/b" page key for comparisons.
function normPage(s) {
  let page = String(s || "").trim();
  if (!page) return "";
  return page
    .replace(/^https?:\/\/[^/]+/i, "") // drop scheme + host if a full URL
    .split("#")[0]                     // drop fragment
    .split("?")[0]                     // drop query string
    .replace(/index\.html$/i, "")      // drop trailing index.html
    .replace(/^\/+/, "")               // strip leading slashes
    .replace(/\/+$/, "");              // strip trailing slashes
}
|
||
|
||
/* ------------------------------ para-index (verify + order) ------------------------------ */
|
||
|
||
// Resolve the ordered list of paragraph IDs for `pageKey` from the build
// artifact dist/para-index.json, tolerating several historical schemas.
// Returns an array of IDs, or null when the file is missing, unparsable, or
// has no entry for this page.
async function loadParaOrderFromDist(pageKey) {
  const distIdx = path.join(CWD, "dist", "para-index.json");
  if (!(await exists(distIdx))) return null;

  let j;
  try {
    j = JSON.parse(await fs.readFile(distIdx, "utf8"));
  } catch {
    // Corrupt/unreadable index: behave as if there were no index at all.
    return null;
  }

  const want = normPage(pageKey);

  // Schema A) { items:[{id,page,...}, ...] } (possibly nested under j.index)
  const items = Array.isArray(j?.items)
    ? j.items
    : Array.isArray(j?.index?.items)
    ? j.index.items
    : null;

  if (items) {
    const ids = [];
    for (const it of items) {
      // The page key may live under several property names depending on the
      // index generator version; try them all and normalize.
      const pageCand = normPage(
        it?.page ??
        it?.pageKey ??
        it?.path ??
        it?.route ??
        it?.href ??
        it?.url ??
        ""
      );

      // Same for the paragraph id; strip a leading "#" if stored as an anchor.
      let id = String(it?.id ?? it?.paraId ?? it?.anchorId ?? it?.anchor ?? "");
      if (id.startsWith("#")) id = id.slice(1);

      if (pageCand === want && id) ids.push(id);
    }
    // NOTE(review): items[] is assumed to already be in document order —
    // confirm against the index generator.
    if (ids.length) return ids;
  }

  // Schema B) { byId: { "p-...": { page:"...", ... }, ... } }
  if (j?.byId && typeof j.byId === "object") {
    const ids = Object.keys(j.byId)
      .filter((id) => {
        const meta = j.byId[id] || {};
        const pageCand = normPage(meta.page ?? meta.pageKey ?? meta.path ?? meta.route ?? meta.url ?? "");
        return pageCand === want;
      });

    if (ids.length) {
      // Object keys carry no reliable order: sort by the numeric index
      // embedded in the id ("p-<n>-..."), lexicographic as tie-breaker.
      ids.sort((a, b) => {
        const ia = paraIndexFromId(a);
        const ib = paraIndexFromId(b);
        if (Number.isFinite(ia) && Number.isFinite(ib) && ia !== ib) return ia - ib;
        return String(a).localeCompare(String(b));
      });
      return ids;
    }
  }

  // Schema C) { pages: { "archicrat-ia/chapitre-4": { ids:[...] } } } (or variants)
  if (j?.pages && typeof j.pages === "object") {
    // Keys may be stored as "/.../" or ".../index.html"; compare normalized.
    const keys = Object.keys(j.pages);
    const hit = keys.find((k) => normPage(k) === want);
    if (hit) {
      const pg = j.pages[hit];
      if (Array.isArray(pg?.ids)) return pg.ids.map(String);
      if (Array.isArray(pg?.paras)) return pg.paras.map(String);
    }
  }

  return null;
}
|
||
|
||
// Check that `anchorId` exists on page `pageKey`.
// Returns true/false when a verdict is possible, or null when neither the
// dist index nor the baseline file is available — the caller decides whether
// "unverifiable" is fatal (--strict) or just a warning.
async function tryVerifyAnchor(pageKey, anchorId) {
  // 1) dist/para-index.json: authoritative when a full page order is derivable.
  const order = await loadParaOrderFromDist(pageKey);
  if (order) return order.includes(anchorId);

  // 1bis) dist/para-index.json best-effort fallback: raw substring search.
  // Paragraph IDs are hash-suffixed ("p-<n>-xxxxxxxx") and quasi unique, so a
  // quoted hit anywhere in the file is strong evidence the anchor exists.
  const distIdx = path.join(CWD, "dist", "para-index.json");
  if (await exists(distIdx)) {
    try {
      const raw = await fs.readFile(distIdx, "utf8");
      if (raw.includes(`"${anchorId}"`) || raw.includes(`"#${anchorId}"`)) {
        return true;
      }
    } catch {
      // ignore — fall through to the baseline file
    }
  }

  // 2) tests/anchors-baseline.json fallback; supports both
  //    { pages: { "<page>": [ids...] } } and { entries: [{page, ids}, ...] }.
  const base = path.join(CWD, "tests", "anchors-baseline.json");
  if (await exists(base)) {
    try {
      const j = JSON.parse(await fs.readFile(base, "utf8"));
      const candidates = [];
      if (j?.pages && typeof j.pages === "object") {
        for (const [k, v] of Object.entries(j.pages)) {
          if (!Array.isArray(v)) continue;
          // Loose containment match: baseline keys may carry extra segments.
          if (normPage(k).includes(normPage(pageKey))) candidates.push(...v);
        }
      }
      if (Array.isArray(j?.entries)) {
        for (const it of j.entries) {
          const p = String(it?.page || "");
          const ids = it?.ids;
          if (Array.isArray(ids) && normPage(p).includes(normPage(pageKey))) candidates.push(...ids);
        }
      }
      if (candidates.length) return candidates.some((x) => String(x) === anchorId);
    } catch {
      // ignore — unverifiable rather than fatal
    }
  }

  return null; // cannot verify
}
|
||
|
||
/* ----------------------------- deep merge helpers (non destructive) ----------------------------- */
|
||
|
||
// Dedup key for a media item: its src URL (empty string when absent).
function keyMedia(x) {
  return `${x?.src || ""}`;
}
|
||
// Dedup key for a reference: the url/label/kind/citation tuple, "||"-joined.
function keyRef(x) {
  const parts = [x?.url || "", x?.label || "", x?.kind || "", x?.citation || ""];
  return parts.join("||");
}
|
||
// Dedup key for an editorial comment: its trimmed text.
function keyComment(x) {
  return `${x?.text || ""}`.trim();
}
|
||
|
||
// Deduplicated union of two arrays, keyed by `keyFn`. Items already in
// `dstArr` always survive; items from `srcArr` are appended only when their
// key is non-empty and unseen. Non-array inputs are treated as empty.
function uniqUnion(dstArr, srcArr, keyFn) {
  const merged = Array.isArray(dstArr) ? [...dstArr] : [];
  const seen = new Set(merged.map((x) => keyFn(x)));
  const incoming = Array.isArray(srcArr) ? srcArr : [];
  for (const item of incoming) {
    const key = keyFn(item);
    if (!key || seen.has(key)) continue;
    seen.add(key);
    merged.push(item);
  }
  return merged;
}
|
||
|
||
// Non-destructive deep merge of a legacy annotation entry (`src`) into the
// shard entry (`dst`), in place:
//  - media / refs / comments_editorial arrays: deduplicated union using their
//    domain-specific keys (existing dst items are never replaced);
//  - nested plain objects: recursive merge;
//  - any other array: union deduplicated by JSON identity;
//  - scalars: copied only when dst has no value (missing, null, or "").
function deepMergeEntry(dst, src) {
  if (!isPlainObject(dst) || !isPlainObject(src)) return;

  for (const [k, v] of Object.entries(src)) {
    if (k === "media" && Array.isArray(v)) {
      dst.media = uniqUnion(dst.media, v, keyMedia);
      continue;
    }
    if (k === "refs" && Array.isArray(v)) {
      dst.refs = uniqUnion(dst.refs, v, keyRef);
      continue;
    }
    if (k === "comments_editorial" && Array.isArray(v)) {
      dst.comments_editorial = uniqUnion(dst.comments_editorial, v, keyComment);
      continue;
    }

    if (isPlainObject(v)) {
      // Create the destination object if absent/not mergeable, then recurse.
      if (!isPlainObject(dst[k])) dst[k] = {};
      deepMergeEntry(dst[k], v);
      continue;
    }

    if (Array.isArray(v)) {
      // Generic array union: JSON stringification serves as identity key.
      const cur = Array.isArray(dst[k]) ? dst[k] : [];
      const seen = new Set(cur.map((x) => JSON.stringify(x)));
      const out = [...cur];
      for (const it of v) {
        const s = JSON.stringify(it);
        if (!seen.has(s)) {
          seen.add(s);
          out.push(it);
        }
      }
      dst[k] = out;
      continue;
    }

    // scalar: set only if missing/empty
    if (!(k in dst) || dst[k] == null || dst[k] === "") {
      dst[k] = v;
    }
  }
}
|
||
|
||
/* ----------------------------- annotations I/O ----------------------------- */
|
||
|
||
// Load (or initialize) a shard/legacy annotation YAML document and validate
// its invariants: schema === 1, `paras` is an object, and `page` matches
// `pageKey`. A missing file yields a fresh empty document; invariant
// violations throw with exit code 2 (refusal) via assert().
async function loadAnnoDocYaml(fileAbs, pageKey) {
  if (!(await exists(fileAbs))) {
    return { schema: 1, page: pageKey, paras: {} };
  }

  const raw = await fs.readFile(fileAbs, "utf8");
  let doc;
  try {
    doc = YAML.parse(raw);
  } catch (e) {
    throw new Error(`${path.relative(CWD, fileAbs)}: parse failed: ${String(e?.message ?? e)}`);
  }

  assert(isPlainObject(doc), `${path.relative(CWD, fileAbs)}: doc must be an object`, 2);
  assert(doc.schema === 1, `${path.relative(CWD, fileAbs)}: schema must be 1`, 2);
  assert(isPlainObject(doc.paras), `${path.relative(CWD, fileAbs)}: missing object key "paras"`, 2);

  if (doc.page != null) {
    // Compare with edge slashes stripped so "/a/b/" and "a/b" both match.
    const got = String(doc.page).replace(/^\/+/, "").replace(/\/+$/, "");
    assert(got === pageKey, `${path.relative(CWD, fileAbs)}: page mismatch (page="${doc.page}" vs path="${pageKey}")`, 2);
  } else {
    // Older shards may lack `page`; stamp it for forward consistency.
    doc.page = pageKey;
  }

  return doc;
}
|
||
|
||
// Return a new paras object whose keys follow the page's paragraph `order`
// first (when provided), then fall back to the numeric index embedded in the
// id ("p-<n>-..."), then lexicographic comparison.
function sortParasObject(paras, order) {
  const rank = new Map();
  if (Array.isArray(order)) {
    order.forEach((id, i) => rank.set(String(id), i));
  }

  const cmp = (a, b) => {
    const ra = rank.has(a);
    const rb = rank.has(b);
    if (ra && rb) return rank.get(a) - rank.get(b);
    if (ra) return -1; // ordered ids come before unordered ones
    if (rb) return 1;

    const na = paraIndexFromId(a);
    const nb = paraIndexFromId(b);
    if (Number.isFinite(na) && Number.isFinite(nb) && na !== nb) return na - nb;
    return String(a).localeCompare(String(b));
  };

  const sortedEntries = Object.keys(paras || {})
    .sort(cmp)
    .map((k) => [k, paras[k]]);
  return Object.fromEntries(sortedEntries);
}
|
||
|
||
// Serialize an annotation document to YAML at `fileAbs`, creating parent
// directories. The document is normalized first — paragraphs ordered (by
// `order` when given) and each entry's media/refs/comments sorted
// chronologically — so repeated runs produce stable, diff-friendly output.
async function saveAnnoDocYaml(fileAbs, doc, order = null) {
  await fs.mkdir(path.dirname(fileAbs), { recursive: true });

  doc.paras = sortParasObject(doc.paras, order);

  for (const e of Object.values(doc.paras || {})) {
    if (!isPlainObject(e)) continue;
    stableSortByTs(e.media);
    stableSortByTs(e.refs);
    stableSortByTs(e.comments_editorial);
  }

  const out = YAML.stringify(doc);
  await fs.writeFile(fileAbs, out, "utf8");
}
|
||
|
||
/* ------------------------------ gitea helpers ------------------------------ */
|
||
|
||
// Strip trailing slashes from the Gitea API base URL so path concatenation
// never produces "//".
function apiBaseNorm(forgeApiBase) {
  let base = forgeApiBase;
  while (base.endsWith("/")) base = base.slice(0, -1);
  return base;
}
|
||
|
||
// Authenticated GET against the Gitea API. Resolves with the parsed JSON
// body; rejects on any non-2xx status, embedding the response text for
// easier debugging.
async function giteaGET(url, token) {
  const headers = {
    Authorization: `token ${token}`,
    Accept: "application/json",
    "User-Agent": "archicratie-apply-annotation/1.0",
  };
  const res = await fetch(url, { headers });
  if (res.ok) return await res.json();

  const detail = await res.text().catch(() => "");
  throw new Error(`HTTP ${res.status} GET ${url}\n${detail}`);
}
|
||
|
||
// Fetch a single issue by index from the Gitea API; rejects on HTTP errors.
async function fetchIssue({ forgeApiBase, owner, repo, token, issueNum }) {
  const url = `${apiBaseNorm(forgeApiBase)}/api/v1/repos/${owner}/${repo}/issues/${issueNum}`;
  return await giteaGET(url, token);
}
|
||
|
||
// List an issue's attachments (Gitea endpoint: /issues/{index}/assets).
// Best effort: any API failure (e.g. an older Gitea without this endpoint)
// yields an empty list instead of aborting the whole run.
async function fetchIssueAssets({ forgeApiBase, owner, repo, token, issueNum }) {
  const url = `${apiBaseNorm(forgeApiBase)}/api/v1/repos/${owner}/${repo}/issues/${issueNum}/assets`;
  try {
    const json = await giteaGET(url, token);
    return Array.isArray(json) ? json : [];
  } catch {
    return [];
  }
}
|
||
|
||
// Post a comment on an issue via the Gitea API; throws on any non-2xx
// response, embedding the response text for debugging.
async function postIssueComment({ forgeApiBase, owner, repo, token, issueNum, comment }) {
  const url = `${apiBaseNorm(forgeApiBase)}/api/v1/repos/${owner}/${repo}/issues/${issueNum}/comments`;
  const res = await fetch(url, {
    method: "POST",
    headers: {
      Authorization: `token ${token}`,
      Accept: "application/json",
      "Content-Type": "application/json",
      "User-Agent": "archicratie-apply-annotation/1.0",
    },
    body: JSON.stringify({ body: comment }),
  });
  if (!res.ok) {
    const t = await res.text().catch(() => "");
    throw new Error(`HTTP ${res.status} POST comment ${url}\n${t}`);
  }
}
|
||
|
||
// Close an issue, optionally posting a trailing comment first (used to record
// the applied commit SHA). Throws on any non-2xx response from either call.
async function closeIssue({ forgeApiBase, owner, repo, token, issueNum, comment }) {
  if (comment) await postIssueComment({ forgeApiBase, owner, repo, token, issueNum, comment });

  const url = `${apiBaseNorm(forgeApiBase)}/api/v1/repos/${owner}/${repo}/issues/${issueNum}`;
  const res = await fetch(url, {
    method: "PATCH",
    headers: {
      Authorization: `token ${token}`,
      Accept: "application/json",
      "Content-Type": "application/json",
      "User-Agent": "archicratie-apply-annotation/1.0",
    },
    body: JSON.stringify({ state: "closed" }),
  });
  if (!res.ok) {
    const t = await res.text().catch(() => "");
    throw new Error(`HTTP ${res.status} closing issue: ${url}\n${t}`);
  }
}
|
||
|
||
/* ------------------------------ media helpers ------------------------------ */
|
||
|
||
// Classify an attachment by file extension (case-insensitive); anything
// unrecognized is treated as a plain "link".
function inferMediaTypeFromFilename(name) {
  const lower = String(name || "").toLowerCase();
  const table = [
    ["image", /\.(png|jpe?g|webp|gif|svg)$/],
    ["video", /\.(mp4|webm|mov|m4v)$/],
    ["audio", /\.(mp3|wav|ogg|m4a)$/],
  ];
  for (const [kind, pattern] of table) {
    if (pattern.test(lower)) return kind;
  }
  return "link";
}
|
||
|
||
// Make an attachment name filesystem- and URL-safe: path separators and runs
// of characters outside [word . -] become "_", underscore runs collapse, and
// the result is capped at 180 characters. Empty input becomes "file".
function sanitizeFilename(name) {
  let safe = String(name || "file");
  safe = safe.replace(/[\/\\]/g, "_");    // path separators
  safe = safe.replace(/[^\w.\-]+/g, "_"); // anything exotic
  safe = safe.replace(/_+/g, "_");        // collapse runs
  return safe.slice(0, 180);              // keep names bounded
}
|
||
|
||
// Download `url` (with Gitea token auth, following redirects) to `destAbs`,
// creating parent directories as needed. Returns the number of bytes written.
// Note: buffers the whole body in memory — acceptable for ticket attachments.
async function downloadToFile(url, token, destAbs) {
  const res = await fetch(url, {
    headers: {
      Authorization: `token ${token}`,
      "User-Agent": "archicratie-apply-annotation/1.0",
    },
    redirect: "follow",
  });
  if (!res.ok) {
    const t = await res.text().catch(() => "");
    throw new Error(`download failed HTTP ${res.status}: ${url}\n${t}`);
  }
  const buf = Buffer.from(await res.arrayBuffer());
  await fs.mkdir(path.dirname(destAbs), { recursive: true });
  await fs.writeFile(destAbs, buf);
  return buf.length;
}
|
||
|
||
/* ------------------------------ type parsers ------------------------------ */
|
||
|
||
// Parse the "Référence" section of a ticket body into
// { url, label, kind, citation, rawBlock }. Fields come from bullet lines of
// the form "- URL: ..." / "* Label: ..."; every field defaults to "".
function parseReferenceBlock(body) {
  // Prefer the template heading "Référence (à compléter):", falling back to
  // the bare "Référence:" form (accent-less spellings accepted).
  const block =
    pickSection(body, ["Référence (à compléter):", "Reference (à compléter):"]) ||
    pickSection(body, ["Référence:", "Reference:"]);

  const lines = String(block || "").split(/\r?\n/).map((l) => l.trim());
  // Value of the first "- <k>: value" (or "* <k>: value") bullet, "" if none.
  const get = (k) => {
    const re = new RegExp(`^[-*]\\s*${escapeRegExp(k)}\\s*:\\s*(.*)$`, "i");
    const m = lines.map((l) => l.match(re)).find(Boolean);
    return (m?.[1] ?? "").trim();
  };

  return {
    url: get("URL") || "",
    label: get("Label") || "",
    kind: get("Kind") || "",
    // Several bullet names are accepted for the quoted passage.
    citation: get("Citation") || get("Passage") || get("Extrait") || "",
    rawBlock: block || "",
  };
}
|
||
|
||
/* ----------------------------------- main ---------------------------------- */
|
||
|
||
// Orchestrate one ticket application:
//  1) resolve credentials + owner/repo, fetch the issue;
//  2) parse Chemin/Ancre/Type from its body and optionally verify the anchor;
//  3) load the per-paragraph shard (merging any legacy monolith entry);
//  4) apply the ticket payload (comment / reference / media) idempotently;
//  5) write the shard, then optionally commit and close the ticket.
// Throws Errors carrying __exitCode (1 fatal, 2 refusal) handled by the
// bottom-of-file catch.
async function main() {
  const token = getEnv("FORGE_TOKEN");
  assert(token, "❌ FORGE_TOKEN manquant.", 2);

  // FORGE_BASE is accepted as a legacy alias for FORGE_API.
  const forgeApiBase = getEnv("FORGE_API") || getEnv("FORGE_BASE");
  assert(forgeApiBase, "❌ FORGE_API (ou FORGE_BASE) manquant.", 2);

  // Explicit env vars win; otherwise derive owner/repo from the git remote.
  const inferred = inferOwnerRepoFromGit() || {};
  const owner = getEnv("GITEA_OWNER", inferred.owner || "");
  const repo = getEnv("GITEA_REPO", inferred.repo || "");
  assert(owner && repo, "❌ Impossible de déterminer owner/repo. Fix: export GITEA_OWNER=... GITEA_REPO=...", 2);

  console.log(`🔎 Fetch ticket #${issueNum} from ${owner}/${repo} …`);
  const issue = await fetchIssue({ forgeApiBase, owner, repo, token, issueNum });

  // PRs share the issue index space on Gitea; refuse them explicitly.
  if (issue?.pull_request) {
    console.error(`❌ #${issueNum} est une Pull Request, pas un ticket annotations.`);
    process.exit(2);
  }

  const body = String(issue.body || "").replace(/\r\n/g, "\n");
  const title = String(issue.title || "");

  // Required "Key: value" lines of the ticket template.
  const type = pickLine(body, "Type").toLowerCase();
  const chemin = normalizeChemin(pickLine(body, "Chemin"));
  const ancre = normalizeAnchorId(pickLine(body, "Ancre"));

  assert(chemin, "Ticket: Chemin manquant.", 2);
  assert(ancre && /^p-\d+-/i.test(ancre), `Ticket: Ancre invalide ("${ancre}")`, 2);
  assert(type, "Ticket: Type manquant.", 2);

  const pageKey = normalizePageKeyFromChemin(chemin);
  assert(pageKey, "Ticket: impossible de dériver pageKey.", 2);

  // Paragraph order (when derivable) is reused later to sort the shard.
  const paraOrder = DO_VERIFY ? await loadParaOrderFromDist(pageKey) : null;

  if (DO_VERIFY) {
    const ok = await tryVerifyAnchor(pageKey, ancre);
    if (ok === false) {
      throw Object.assign(new Error(`Ticket verify: ancre introuvable pour page "${pageKey}" => ${ancre}`), { __exitCode: 2 });
    }
    if (ok === null) {
      // Unverifiable (no index/baseline): fatal only under --strict.
      if (STRICT) {
        throw Object.assign(
          new Error(`Ticket verify (strict): impossible de vérifier (pas de dist/para-index.json ou baseline)`),
          { __exitCode: 2 }
        );
      }
      console.warn("⚠️ verify: impossible de vérifier (pas de dist/para-index.json ou baseline) — on continue.");
    }
  }

  // Shard path: src/annotations/<pageKey>/<paraId>.yml
  const shardAbs = path.join(ANNO_DIR, ...pageKey.split("/"), `${ancre}.yml`);
  const shardRel = path.relative(CWD, shardAbs).replace(/\\/g, "/");

  // Legacy monolith: src/annotations/<pageKey>.yml (read-only, for migration)
  const legacyAbs = path.join(ANNO_DIR, `${pageKey}.yml`);

  console.log("✅ Parsed:", { type, chemin, ancre: `#${ancre}`, pageKey, annoFile: shardRel });

  // Load the shard and get (or create) the entry for this paragraph.
  const doc = await loadAnnoDocYaml(shardAbs, pageKey);
  if (!isPlainObject(doc.paras[ancre])) doc.paras[ancre] = {};
  const entry = doc.paras[ancre];

  // Merge the legacy entry into the shard in-memory (non destructive) to keep
  // compatibility and enable progressive migration off the monolith.
  if (await exists(legacyAbs)) {
    try {
      const legacy = await loadAnnoDocYaml(legacyAbs, pageKey);
      const legacyEntry = legacy?.paras?.[ancre];
      if (isPlainObject(legacyEntry)) {
        deepMergeEntry(entry, legacyEntry);
      }
    } catch {
      // ignore legacy parse issues; shard still applies new data
    }
  }

  const touchedFiles = [];   // paths to `git add` (shard + downloaded media)
  const notes = [];          // human-readable log of what happened
  let changed = false;       // idempotence: skip write/commit when false
  const nowIso = new Date().toISOString();

  if (type === "type/comment") {
    // Editorial comment: free-text section appended to comments_editorial.
    const comment = pickSection(body, ["Commentaire:", "Comment:", "Commentaires:"]) || "";
    const text = comment.trim();
    assert(text.length >= 3, "Ticket comment: bloc 'Commentaire:' introuvable ou trop court.", 2);

    if (!Array.isArray(entry.comments_editorial)) entry.comments_editorial = [];
    const item = { text, status: "new", ts: nowIso, fromIssue: issueNum };

    // Dedup by trimmed text; length change tells us whether anything was added.
    const before = entry.comments_editorial.length;
    entry.comments_editorial = uniqUnion(entry.comments_editorial, [item], keyComment);
    if (entry.comments_editorial.length !== before) {
      changed = true;
      notes.push(`+ comment added (len=${text.length})`);
    } else {
      notes.push(`~ comment already present (dedup)`);
    }
    stableSortByTs(entry.comments_editorial);
  }

  else if (type === "type/reference") {
    const ref = parseReferenceBlock(body);
    assert(ref.url || ref.label, "Ticket reference: renseigne au moins - URL: ou - Label: dans le ticket.", 2);

    // Under --strict, a present-but-malformed URL is a refusal.
    if (STRICT && ref.url && !isHttpUrl(ref.url)) {
      throw Object.assign(new Error(`Ticket reference (strict): URL invalide (http/https requis): "${ref.url}"`), { __exitCode: 2 });
    }

    if (!Array.isArray(entry.refs)) entry.refs = [];
    const item = {
      url: ref.url || "",
      label: ref.label || (ref.url ? ref.url : "Référence"),
      kind: ref.kind || "",
      ts: nowIso,
      fromIssue: issueNum,
    };
    if (ref.citation) item.citation = ref.citation;

    const before = entry.refs.length;
    entry.refs = uniqUnion(entry.refs, [item], keyRef);
    if (entry.refs.length !== before) {
      changed = true;
      notes.push(`+ reference added (${item.url ? "url" : "label"})`);
    } else {
      notes.push(`~ reference already present (dedup)`);
    }
    stableSortByTs(entry.refs);
  }

  else if (type === "type/media") {
    if (!Array.isArray(entry.media)) entry.media = [];

    // The issue title doubles as the media caption.
    const caption = (title || "").trim();
    if (STRICT && !caption) {
      throw Object.assign(new Error("Ticket media (strict): caption vide (titre de ticket requis)."), { __exitCode: 2 });
    }
    const captionFinal = caption || ".";

    const atts = NO_DOWNLOAD ? [] : await fetchIssueAssets({ forgeApiBase, owner, repo, token, issueNum });
    if (!atts.length) notes.push("! no assets found (nothing to download).");

    for (const a of atts) {
      const name = sanitizeFilename(a?.name || `asset-${a?.id || "x"}`);
      const dl = a?.browser_download_url || a?.download_url || "";
      if (!dl) { notes.push(`! asset missing download url: ${name}`); continue; }

      // Mirror the public URL layout on disk: public/media/<page>/<para>/<file>
      const mediaDirAbs = path.join(PUBLIC_DIR, "media", ...pageKey.split("/"), ancre);
      const destAbs = path.join(mediaDirAbs, name);
      const urlPath = `${MEDIA_URL_ROOT}/${pageKey}/${ancre}/${name}`.replace(/\/{2,}/g, "/");

      if (await exists(destAbs)) {
        // Idempotence: never re-download or overwrite an existing file.
        notes.push(`~ media already exists: ${urlPath}`);
      } else if (!DRY_RUN) {
        const bytes = await downloadToFile(dl, token, destAbs);
        notes.push(`+ downloaded ${name} (${bytes} bytes) -> ${urlPath}`);
        touchedFiles.push(path.relative(CWD, destAbs).replace(/\\/g, "/"));
        changed = true;
      } else {
        notes.push(`(dry) would download ${name} -> ${urlPath}`);
        changed = true;
      }

      const item = {
        type: inferMediaTypeFromFilename(name),
        src: urlPath,
        caption: captionFinal,
        credit: "",
        ts: nowIso,
        fromIssue: issueNum,
      };

      // Dedup by src URL, so re-running the same ticket is a no-op here.
      const before = entry.media.length;
      entry.media = uniqUnion(entry.media, [item], keyMedia);
      if (entry.media.length !== before) changed = true;
    }

    stableSortByTs(entry.media);
  }

  else {
    throw Object.assign(new Error(`Type non supporté: "${type}"`), { __exitCode: 2 });
  }

  if (!changed) {
    console.log("ℹ️ No changes to apply.");
    for (const n of notes) console.log(" ", n);
    return;
  }

  if (DRY_RUN) {
    // Preview only: show the notes and the would-be resulting entry.
    console.log("\n--- DRY RUN (no write) ---");
    console.log(`Would update: ${shardRel}`);
    for (const n of notes) console.log(" ", n);
    console.log("\nExcerpt (resulting entry):");
    console.log(YAML.stringify({ [ancre]: doc.paras[ancre] }).trimEnd());
    console.log("\n✅ Dry-run terminé.");
    return;
  }

  await saveAnnoDocYaml(shardAbs, doc, paraOrder);
  // Shard goes first so the manual-instructions fallback can `git diff` it.
  touchedFiles.unshift(shardRel);

  console.log(`✅ Updated: ${shardRel}`);
  for (const n of notes) console.log(" ", n);

  if (DO_COMMIT) {
    run("git", ["add", ...touchedFiles], { cwd: CWD });

    if (!gitHasStagedChanges()) {
      console.log("ℹ️ Nothing to commit (aucun changement staged).");
      return;
    }

    const msg = `anno: apply ticket #${issueNum} (${pageKey}#${ancre} ${type})`;
    run("git", ["commit", "-m", msg], { cwd: CWD });

    const sha = runQuiet("git", ["rev-parse", "--short", "HEAD"], { cwd: CWD }).trim();
    console.log(`✅ Committed: ${msg} (${sha})`);

    if (DO_CLOSE) {
      // Record the commit on the ticket, then close it.
      const comment = `✅ Appliqué par apply-annotation-ticket.\nCommit: ${sha}`;
      await closeIssue({ forgeApiBase, owner, repo, token, issueNum, comment });
      console.log(`✅ Ticket #${issueNum} fermé.`);
    }
  } else {
    // Without --commit, print the manual follow-up commands.
    console.log("\nNext (manuel) :");
    console.log(` git diff -- ${touchedFiles[0]}`);
    console.log(` git add ${touchedFiles.join(" ")}`);
    console.log(` git commit -m "anno: apply ticket #${issueNum} (${pageKey}#${ancre} ${type})"`);
  }
}
|
||
|
||
// Entry point: run main() and translate thrown errors into process exit codes
// (err.__exitCode when set by assert()/refusals, otherwise 1 = fatal).
main().catch((e) => {
  const code = e?.__exitCode || 1;
  console.error("💥", e?.message || e);
  process.exit(code);
});