#!/usr/bin/env node
// scripts/apply-annotation-ticket.mjs
// Applique un ticket Gitea "type/media | type/reference | type/comment" vers src/annotations + public/media
// Robuste, idempotent, non destructif
//
// Mode DRY RUN (aucune écriture) si --dry-run
// Options: --dry-run --no-download --verify --strict --commit --close
//
// Env requis:
//   FORGE_API   = base API Gitea (LAN) ex: http://192.168.1.20:3000
//   FORGE_TOKEN = PAT Gitea (repo + issues)
//
// Env optionnel:
//   GITEA_OWNER / GITEA_REPO (sinon auto-détecté via git remote)
//   ANNO_DIR   (défaut: src/annotations)
//   PUBLIC_DIR (défaut: public)
//   MEDIA_ROOT (défaut URL: /media)
//
// Ticket attendu (body):
//   Chemin: /archicrat-ia/chapitre-4/
//   Ancre: #p-0-xxxxxxxx
//   Type: type/media | type/reference | type/comment
//
// Exit codes:
//   0 ok
//   1 erreur fatale
//   2 refus (strict/verify/usage)
import { spawnSync } from "node:child_process";
import fs from "node:fs/promises";
import path from "node:path";
import process from "node:process";

import YAML from "yaml";
/* ---------------------------------- usage --------------------------------- */

/**
 * Print the CLI help text and terminate the process.
 * @param {number} [exitCode=0] - Process exit status.
 */
function usage(exitCode = 0) {
  const helpText = `
apply-annotation-ticket — applique un ticket SidePanel (media/ref/comment) vers src/annotations/

Usage:
node scripts/apply-annotation-ticket.mjs <issue_number> [--dry-run] [--no-download] [--verify] [--strict] [--commit] [--close]

Flags:
--dry-run : n'écrit rien (affiche un aperçu)
--no-download : n'essaie pas de télécharger les pièces jointes (media)
--verify : tente de vérifier que (page, ancre) existent (baseline/dist si dispo)
--strict : refuse si URL ref invalide (http/https) OU caption media vide
--commit : git add + git commit (le script commit dans la branche courante)
--close : ferme le ticket (nécessite --commit)

Env requis:
FORGE_API = base API Gitea (LAN) ex: http://192.168.1.20:3000
FORGE_TOKEN = PAT Gitea (repo + issues)

Env optionnel:
GITEA_OWNER / GITEA_REPO (sinon auto-détecté via git remote)
ANNO_DIR (défaut: src/annotations)
PUBLIC_DIR (défaut: public)
MEDIA_ROOT (défaut URL: /media) -> écrit dans public/media/...

Exit codes:
0 ok
1 erreur fatale
2 refus (strict/verify/close sans commit / incohérence)
`;
  console.log(helpText);
  process.exit(exitCode);
}
/* ---------------------------------- args ---------------------------------- */

// CLI arguments: first positional = issue number, the rest are boolean flags.
const argv = process.argv.slice(2);
if (argv.length === 0 || argv.includes("--help") || argv.includes("-h")) usage(0);

const issueNum = Number(argv[0]);
if (!Number.isFinite(issueNum) || issueNum <= 0) {
  console.error("❌ Numéro de ticket invalide.");
  usage(2);
}

const hasFlag = (name) => argv.includes(name);
const DRY_RUN = hasFlag("--dry-run");
const NO_DOWNLOAD = hasFlag("--no-download");
const DO_VERIFY = hasFlag("--verify");
const STRICT = hasFlag("--strict");
const DO_COMMIT = hasFlag("--commit");
const DO_CLOSE = hasFlag("--close");

// --close implies committing: refuse the inconsistent combination up front.
if (DO_CLOSE && !DO_COMMIT) {
  console.error("❌ --close nécessite --commit.");
  process.exit(2);
}

// Global fetch() is required (Node >= 18).
if (typeof fetch !== "function") {
  console.error("❌ fetch() indisponible. Utilise Node 18+.");
  process.exit(1);
}
/* --------------------------------- config --------------------------------- */

const CWD = process.cwd();

// Annotations directory.
// Fix: honour ANNO_DIR as the *full* relative path (documented default:
// "src/annotations"). The previous code appended "annotations" to the env
// value, so ANNO_DIR=src/annotations resolved to src/annotations/annotations,
// contradicting the header/usage documentation.
const ANNO_DIR = path.join(CWD, process.env.ANNO_DIR || path.join("src", "annotations"));

// Root of the public assets directory (media files are written below it).
const PUBLIC_DIR = path.join(CWD, process.env.PUBLIC_DIR || "public");

// URL prefix under which media files are served; trailing slashes stripped.
const MEDIA_URL_ROOT = String(process.env.MEDIA_ROOT || "/media").replace(/\/+$/, "");
/**
 * Read an environment variable and return its trimmed value.
 * @param {string} name - Variable name.
 * @param {string} [fallback=""] - Used when the variable is null/undefined (also trimmed).
 * @returns {string} Trimmed value.
 */
function getEnv(name, fallback = "") {
  const raw = process.env[name];
  const value = raw ?? fallback;
  return value.trim();
}
/**
 * Run a command with inherited stdio; throws when the command cannot be
 * spawned or exits with a non-zero status.
 */
function run(cmd, args, opts = {}) {
  const result = spawnSync(cmd, args, { stdio: "inherit", ...opts });
  if (result.error) throw result.error;
  if (result.status === 0) return;
  throw new Error(`Command failed: ${cmd} ${args.join(" ")}`);
}
/**
 * Run a command silently and return its stdout as a string; throws (with the
 * captured stdout+stderr appended) on spawn error or non-zero exit.
 */
function runQuiet(cmd, args, opts = {}) {
  const result = spawnSync(cmd, args, { encoding: "utf8", stdio: "pipe", ...opts });
  if (result.error) throw result.error;
  if (result.status !== 0) {
    const combined = (result.stdout || "") + (result.stderr || "");
    throw new Error(`Command failed: ${cmd} ${args.join(" ")}\n${combined}`);
  }
  return result.stdout || "";
}
/** True when the path is accessible on disk; any access failure counts as absent. */
async function exists(p) {
  try {
    await fs.access(p);
    return true;
  } catch {
    return false;
  }
}
/**
 * Derive { owner, repo } from the "origin" git remote URL, or null when no
 * usable remote exists. Handles both ssh (git@host:owner/repo.git) and
 * http(s) remote forms via the trailing "owner/repo[.git]" segment.
 */
function inferOwnerRepoFromGit() {
  const probe = spawnSync("git", ["remote", "get-url", "origin"], { encoding: "utf-8" });
  if (probe.status !== 0) return null;
  const remoteUrl = (probe.stdout || "").trim();
  const match = remoteUrl.match(/[:/](?<owner>[^/]+)\/(?<repo>[^/]+?)(?:\.git)?$/);
  if (!match?.groups) return null;
  const { owner, repo } = match.groups;
  return { owner, repo };
}
/**
 * True when the git index contains staged changes.
 * `git diff --cached --quiet` exits 1 when differences exist; any other
 * status (including command failure) is treated as "nothing staged".
 */
function gitHasStagedChanges() {
  const probe = spawnSync("git", ["diff", "--cached", "--quiet"]);
  return probe.status === 1;
}
/* ------------------------------ gitea helpers ------------------------------ */

/** Strip any trailing slashes from the Gitea API base URL. */
function apiBaseNorm(forgeApiBase) {
  let base = forgeApiBase;
  while (base.endsWith("/")) base = base.slice(0, -1);
  return base;
}
/**
 * Authenticated GET against the Gitea API; resolves with the parsed JSON
 * body, rejects with status + response text on any non-2xx answer.
 */
async function giteaGET(url, token) {
  const headers = {
    Authorization: `token ${token}`,
    Accept: "application/json",
    "User-Agent": "archicratie-apply-annotation/1.0",
  };
  const res = await fetch(url, { headers });
  if (res.ok) return await res.json();
  const detail = await res.text().catch(() => "");
  throw new Error(`HTTP ${res.status} GET ${url}\n${detail}`);
}
/** Fetch a single issue by index from the Gitea API. */
async function fetchIssue({ forgeApiBase, owner, repo, token, issueNum }) {
  const base = apiBaseNorm(forgeApiBase);
  const url = `${base}/api/v1/repos/${owner}/${repo}/issues/${issueNum}`;
  return giteaGET(url, token);
}
/**
 * List the attachments ("assets") of an issue via the Gitea endpoint
 * /issues/{index}/assets. Returns [] when the request fails or the payload
 * is not an array (older Gitea versions).
 */
async function fetchIssueAssets({ forgeApiBase, owner, repo, token, issueNum }) {
  const base = apiBaseNorm(forgeApiBase);
  const url = `${base}/api/v1/repos/${owner}/${repo}/issues/${issueNum}/assets`;
  try {
    const payload = await giteaGET(url, token);
    if (Array.isArray(payload)) return payload;
    return [];
  } catch {
    return [];
  }
}
/** Post a comment on an issue; throws on any non-2xx response. */
async function postIssueComment({ forgeApiBase, owner, repo, token, issueNum, comment }) {
  const base = apiBaseNorm(forgeApiBase);
  const url = `${base}/api/v1/repos/${owner}/${repo}/issues/${issueNum}/comments`;
  const res = await fetch(url, {
    method: "POST",
    headers: {
      Authorization: `token ${token}`,
      Accept: "application/json",
      "Content-Type": "application/json",
      "User-Agent": "archicratie-apply-annotation/1.0",
    },
    body: JSON.stringify({ body: comment }),
  });
  if (res.ok) return;
  const detail = await res.text().catch(() => "");
  throw new Error(`HTTP ${res.status} POST comment ${url}\n${detail}`);
}
/**
 * Close an issue, optionally posting a final comment first.
 * Throws on any non-2xx response.
 */
async function closeIssue({ forgeApiBase, owner, repo, token, issueNum, comment }) {
  if (comment) {
    await postIssueComment({ forgeApiBase, owner, repo, token, issueNum, comment });
  }

  const base = apiBaseNorm(forgeApiBase);
  const url = `${base}/api/v1/repos/${owner}/${repo}/issues/${issueNum}`;
  const res = await fetch(url, {
    method: "PATCH",
    headers: {
      Authorization: `token ${token}`,
      Accept: "application/json",
      "Content-Type": "application/json",
      "User-Agent": "archicratie-apply-annotation/1.0",
    },
    body: JSON.stringify({ state: "closed" }),
  });
  if (res.ok) return;
  const detail = await res.text().catch(() => "");
  throw new Error(`HTTP ${res.status} closing issue: ${url}\n${detail}`);
}
/* ------------------------------ parsing helpers ---------------------------- */

/** Escape all regex metacharacters so `s` can be embedded literally in a RegExp. */
function escapeRegExp(s) {
  const text = String(s);
  return text.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
}
/**
 * Extract the value of a "Key: value" line from the ticket body
 * (case-insensitive, multiline); returns "" when the key is absent.
 */
function pickLine(body, key) {
  const pattern = `^\\s*${escapeRegExp(key)}\\s*:\\s*([^\\n\\r]+)`;
  const match = String(body || "").match(new RegExp(pattern, "mi"));
  if (!match) return "";
  return match[1].trim();
}
/**
 * Extract the free-form text that follows the earliest of `markers` found in
 * `body` (case-insensitive), stopping at the next section boundary
 * ("## " heading, "---" rule, or a known section keyword).
 * Returns "" when no marker is present.
 */
function pickSection(body, markers) {
  const text = String(body || "").replace(/\r\n/g, "\n");
  const lower = text.toLowerCase();

  // Earliest marker occurrence wins; ties go to the first marker listed.
  let best = null;
  for (const marker of markers) {
    const at = lower.indexOf(marker.toLowerCase());
    if (at < 0) continue;
    if (best === null || at < best.at) best = { marker, at };
  }
  if (best === null) return "";

  const tail = text.slice(best.at + best.marker.length);
  const tailLower = tail.toLowerCase();

  const stops = ["\n## ", "\n---", "\nJustification", "\nProposition", "\nSources"];
  let end = tail.length;
  for (const stop of stops) {
    const at = tailLower.indexOf(stop.toLowerCase());
    if (at >= 0 && at < end) end = at;
  }
  return tail.slice(0, end).trim();
}
/**
 * Normalize a ticket path: ensure exactly one leading and one trailing slash
 * and collapse duplicate slashes. Empty/blank input yields "".
 */
function normalizeChemin(chemin) {
  const trimmed = String(chemin || "").trim();
  if (!trimmed) return "";
  const withLead = trimmed.startsWith("/") ? trimmed : `/${trimmed}`;
  const withTail = withLead.endsWith("/") ? withLead : `${withLead}/`;
  return withTail.replace(/\/{2,}/g, "/");
}
/** Page key = normalized path with its leading/trailing slashes stripped. */
function normalizePageKeyFromChemin(chemin) {
  const normalized = normalizeChemin(chemin);
  return normalized.replace(/^\/+|\/+$/g, "");
}
/** Trim an anchor id and drop a single leading "#" if present. */
function normalizeAnchorId(s) {
  const trimmed = String(s || "").trim();
  return trimmed.startsWith("#") ? trimmed.slice(1) : trimmed;
}
/**
 * Throw an Error tagged with `__exitCode` (consumed by the top-level catch
 * handler to choose the process exit status) when `cond` is falsy.
 * @param {*} cond - Condition to check.
 * @param {string} msg - Error message when the condition fails.
 * @param {number} [code=1] - Exit code attached to the thrown error.
 */
function assert(cond, msg, code = 1) {
  if (cond) return;
  const err = new Error(msg);
  err.__exitCode = code;
  throw err;
}
/** True for non-null objects that are not arrays. */
function isPlainObject(x) {
  if (!x) return false;
  if (typeof x !== "object") return false;
  return !Array.isArray(x);
}
/* ----------------------------- verify helpers ------------------------------ */

/** Numeric paragraph index from an id like "p-12-abcd"; NaN when unparsable. */
function paraIndexFromId(id) {
  const match = String(id).match(/^p-(\d+)-/i);
  if (!match) return Number.NaN;
  return Number(match[1]);
}
/**
 * Best-effort check that `anchorId` exists on page `pageKey`.
 * Returns true/false when a source of truth is available, or null when
 * neither dist/para-index.json nor tests/anchors-baseline.json can answer
 * (caller decides whether "unverifiable" is acceptable).
 */
async function tryVerifyAnchor(pageKey, anchorId) {
  // 1) dist/para-index.json (present when a build has already run)
  const distIdx = path.join(CWD, "dist", "para-index.json");
  if (await exists(distIdx)) {
    const raw = await fs.readFile(distIdx, "utf8");
    const idx = JSON.parse(raw);
    const byId = idx?.byId;
    // Any non-null byId entry for the anchor counts as "found".
    if (byId && typeof byId === "object" && byId[anchorId] != null) return true;
  }

  // 2) tests/anchors-baseline.json (if available)
  const base = path.join(CWD, "tests", "anchors-baseline.json");
  if (await exists(base)) {
    const raw = await fs.readFile(base, "utf8");
    const j = JSON.parse(raw);

    // Tolerant lookup: collect every array of ids associated with the page.
    const candidates = [];

    // shape 1: j.pages = { "<path>": [ids...] }
    if (j?.pages && typeof j.pages === "object") {
      for (const [k, v] of Object.entries(j.pages)) {
        if (!Array.isArray(v)) continue;
        // loose match: pageKey contained anywhere in the recorded path
        if (String(k).includes(pageKey)) candidates.push(...v);
      }
    }

    // shape 2: j.entries = [{ page, ids }]
    if (Array.isArray(j?.entries)) {
      for (const it of j.entries) {
        const p = String(it?.page || "");
        const ids = it?.ids;
        if (Array.isArray(ids) && p.includes(pageKey)) candidates.push(...ids);
      }
    }

    // Only answer definitively when the baseline knew about this page at all.
    if (candidates.length) {
      return candidates.some((x) => String(x) === anchorId);
    }
  }

  // No source of truth available — cannot verify.
  return null;
}
/* ----------------------------- annotations I/O ----------------------------- */

/**
 * Load the YAML annotation document at `fileAbs`, or return a fresh skeleton
 * ({ schema: 1, page, paras: {} }) when the file does not exist yet.
 * Validates schema version, object shape, and that the stored `page` field
 * matches `pageKey` (filling it in when absent). Throws (with a path-relative
 * message) on YAML parse failure or validation failure.
 */
async function loadAnnoDoc(fileAbs, pageKey) {
  if (!(await exists(fileAbs))) {
    return { schema: 1, page: pageKey, paras: {} };
  }

  const raw = await fs.readFile(fileAbs, "utf8");
  let doc;
  try {
    doc = YAML.parse(raw);
  } catch (e) {
    throw new Error(`${path.relative(CWD, fileAbs)}: parse failed: ${String(e?.message ?? e)}`);
  }

  assert(isPlainObject(doc), `${path.relative(CWD, fileAbs)}: doc must be an object`);
  assert(doc.schema === 1, `${path.relative(CWD, fileAbs)}: schema must be 1`);
  assert(isPlainObject(doc.paras), `${path.relative(CWD, fileAbs)}: missing object key "paras"`);

  if (doc.page != null) {
    // Compare with leading/trailing slashes stripped so "/a/b/" equals "a/b".
    const got = String(doc.page).replace(/^\/+/, "").replace(/\/+$/, "");
    assert(got === pageKey, `${path.relative(CWD, fileAbs)}: page mismatch (page="${doc.page}" vs path="${pageKey}")`);
  } else {
    doc.page = pageKey;
  }

  return doc;
}
/**
 * Return a copy of `paras` with keys ordered by paragraph index, falling
 * back to locale comparison for equal/unparsable indices.
 */
function sortParasObject(paras) {
  const byParaOrder = (a, b) => {
    const ia = paraIndexFromId(a);
    const ib = paraIndexFromId(b);
    if (Number.isFinite(ia) && Number.isFinite(ib) && ia !== ib) return ia - ib;
    return String(a).localeCompare(String(b));
  };
  const sorted = {};
  for (const key of Object.keys(paras || {}).sort(byParaOrder)) {
    sorted[key] = paras[key];
  }
  return sorted;
}
/**
 * Serialize `doc` to YAML at `fileAbs`, creating parent directories and
 * normalizing paragraph order first (note: reassigns doc.paras in place).
 */
async function saveAnnoDocYaml(fileAbs, doc) {
  await fs.mkdir(path.dirname(fileAbs), { recursive: true });
  doc.paras = sortParasObject(doc.paras);
  const serialized = YAML.stringify(doc);
  await fs.writeFile(fileAbs, serialized, "utf8");
}
/* ------------------------------ apply per type ----------------------------- */

/** Get (creating an empty object if needed) the annotation entry for a paragraph id. */
function ensureEntry(doc, paraId) {
  const current = doc.paras[paraId];
  if (!current || !isPlainObject(current)) {
    doc.paras[paraId] = {};
  }
  return doc.paras[paraId];
}
/**
 * Push `item` into `arr` unless an element with the same key (per `keyFn`)
 * is already present.
 * @returns {boolean} True when the item was actually added.
 */
function uniqPush(arr, item, keyFn) {
  const key = keyFn(item);
  const alreadyThere = arr.some((x) => keyFn(x) === key);
  if (alreadyThere) return false;
  arr.push(item);
  return true;
}
/**
 * Sort an array of annotation items in place by their `ts` timestamp
 * (unparsable/missing timestamps sort first as epoch 0); ties are broken by
 * JSON serialization so the order is deterministic. No-op on non-arrays.
 */
function stableSortByTs(arr) {
  if (!Array.isArray(arr)) return;
  const epoch = (x) => Date.parse(x?.ts || "") || 0;
  arr.sort((a, b) => {
    const delta = epoch(a) - epoch(b);
    if (delta !== 0) return delta;
    return JSON.stringify(a).localeCompare(JSON.stringify(b));
  });
}
/**
 * Parse the "Référence" bullet-list block of a ticket body into
 * { url, label, kind, citation, rawBlock } (each "" when absent).
 * Accepts both the template form ("Référence (à compléter):") and the short
 * form ("Référence:"), with or without accents.
 */
function parseReferenceBlock(body) {
  const block =
    pickSection(body, ["Référence (à compléter):", "Reference (à compléter):"]) ||
    pickSection(body, ["Référence:", "Reference:"]);

  const lines = String(block || "").split(/\r?\n/).map((l) => l.trim());

  // Value of the first "- Key: value" bullet matching `k` (case-insensitive).
  const get = (k) => {
    const re = new RegExp(`^[-*]\\s*${escapeRegExp(k)}\\s*:\\s*(.*)$`, "i");
    for (const line of lines) {
      const m = line.match(re);
      if (m) return (m[1] ?? "").trim();
    }
    return "";
  };

  return {
    url: get("URL") || "",
    label: get("Label") || "",
    kind: get("Kind") || "",
    citation: get("Citation") || get("Passage") || get("Extrait") || "",
    rawBlock: block || "",
  };
}
/** Classify a filename by extension: "image" | "video" | "audio" | "link" (default). */
function inferMediaTypeFromFilename(name) {
  const lower = String(name || "").toLowerCase();
  const kinds = [
    ["image", /\.(png|jpe?g|webp|gif|svg)$/],
    ["video", /\.(mp4|webm|mov|m4v)$/],
    ["audio", /\.(mp3|wav|ogg|m4a)$/],
  ];
  for (const [kind, re] of kinds) {
    if (re.test(lower)) return kind;
  }
  return "link";
}
/**
 * Make a filename safe for disk/URL use: path separators and runs of
 * characters outside [word . -] become "_", repeated "_" collapse, and the
 * result is capped at 180 characters. Empty input yields "file".
 */
function sanitizeFilename(name) {
  let safe = String(name || "file");
  safe = safe.replace(/[\/\\]/g, "_");
  safe = safe.replace(/[^\w.\-]+/g, "_");
  safe = safe.replace(/_+/g, "_");
  return safe.slice(0, 180);
}
/** True when `u` parses as an absolute http:// or https:// URL. */
function isHttpUrl(u) {
  let parsed;
  try {
    parsed = new URL(String(u));
  } catch {
    return false;
  }
  return parsed.protocol === "http:" || parsed.protocol === "https:";
}
/**
 * Download `url` to `destAbs` (creating parent directories) and return the
 * number of bytes written. The Gitea token is always sent — most
 * /attachments are public, but some instances require auth.
 * NOTE(review): with redirect:"follow" the Authorization header may be
 * forwarded across origins on redirect; acceptable on a trusted LAN, worth
 * confirming before pointing this at external URLs.
 */
async function downloadToFile(url, token, destAbs) {
  const res = await fetch(url, {
    headers: {
      Authorization: `token ${token}`,
      "User-Agent": "archicratie-apply-annotation/1.0",
    },
    redirect: "follow",
  });
  if (!res.ok) {
    const detail = await res.text().catch(() => "");
    throw new Error(`download failed HTTP ${res.status}: ${url}\n${detail}`);
  }
  const payload = Buffer.from(await res.arrayBuffer());
  await fs.mkdir(path.dirname(destAbs), { recursive: true });
  await fs.writeFile(destAbs, payload);
  return payload.length;
}
/* ----------------------------------- main ---------------------------------- */

/**
 * Entry point: fetch the ticket, parse Type/Chemin/Ancre from its body,
 * apply the change to src/annotations/<pageKey>.yml (plus public/media for
 * type/media attachments), then optionally commit and close the ticket.
 * Errors thrown with `__exitCode` are mapped to the process exit status by
 * the top-level catch handler.
 */
async function main() {
  const token = getEnv("FORGE_TOKEN");
  assert(token, "❌ FORGE_TOKEN manquant.", 2);

  const forgeApiBase = getEnv("FORGE_API") || getEnv("FORGE_BASE");
  assert(forgeApiBase, "❌ FORGE_API (ou FORGE_BASE) manquant.", 2);

  // owner/repo: env vars win, otherwise inferred from the git "origin" remote.
  const inferred = inferOwnerRepoFromGit() || {};
  const owner = getEnv("GITEA_OWNER", inferred.owner || "");
  const repo = getEnv("GITEA_REPO", inferred.repo || "");
  assert(owner && repo, "❌ Impossible de déterminer owner/repo. Fix: export GITEA_OWNER=... GITEA_REPO=...", 2);

  console.log(`🔎 Fetch ticket #${issueNum} from ${owner}/${repo} …`);
  const issue = await fetchIssue({ forgeApiBase, owner, repo, token, issueNum });

  // Pull requests share the issue index space but are not annotation tickets.
  if (issue?.pull_request) {
    console.error(`❌ #${issueNum} est une Pull Request, pas un ticket annotations.`);
    process.exit(2);
  }

  const body = String(issue.body || "").replace(/\r\n/g, "\n");
  const title = String(issue.title || "");

  // Required header lines of the ticket body (see file header for the format).
  const type = pickLine(body, "Type").toLowerCase();
  const chemin = normalizeChemin(pickLine(body, "Chemin"));
  const ancre = normalizeAnchorId(pickLine(body, "Ancre"));

  assert(chemin, "Ticket: Chemin manquant.", 2);
  assert(ancre && /^p-\d+-/i.test(ancre), `Ticket: Ancre invalide ("${ancre}")`, 2);
  assert(type, "Ticket: Type manquant.", 2);

  const pageKey = normalizePageKeyFromChemin(chemin);
  assert(pageKey, "Ticket: impossible de dériver pageKey.", 2);

  // Optional verification that the (page, anchor) pair actually exists.
  if (DO_VERIFY) {
    const ok = await tryVerifyAnchor(pageKey, ancre);
    if (ok === false) {
      throw Object.assign(new Error(`Ticket verify: ancre introuvable pour page "${pageKey}" => ${ancre}`), { __exitCode: 2 });
    }
    if (ok === null) {
      // No source of truth available (neither dist/ nor tests/ baseline).
      if (STRICT) throw Object.assign(new Error(`Ticket verify (strict): impossible de vérifier (pas de baseline/dist)`), { __exitCode: 2 });
      console.warn("⚠️ verify: impossible de vérifier (pas de baseline/dist) — on continue.");
    }
  }

  const annoFileAbs = path.join(ANNO_DIR, `${pageKey}.yml`);
  const annoFileRel = path.relative(CWD, annoFileAbs).replace(/\\/g, "/");

  console.log("✅ Parsed:", { type, chemin, ancre: `#${ancre}`, pageKey, annoFile: annoFileRel });

  const doc = await loadAnnoDoc(annoFileAbs, pageKey);
  const entry = ensureEntry(doc, ancre);

  const touchedFiles = [];
  const notes = [];

  let changed = false;
  const nowIso = new Date().toISOString();

  if (type === "type/comment") {
    // Editorial comment: deduplicated on the trimmed comment text.
    const comment = pickSection(body, ["Commentaire:", "Comment:", "Commentaires:"]) || "";
    const text = comment.trim();
    assert(text.length >= 3, "Ticket comment: bloc 'Commentaire:' introuvable ou trop court.", 2);

    if (!Array.isArray(entry.comments_editorial)) entry.comments_editorial = [];
    const item = { text, status: "new", ts: nowIso, fromIssue: issueNum };

    const added = uniqPush(entry.comments_editorial, item, (x) => `${(x?.text || "").trim()}`);
    if (added) { changed = true; notes.push(`+ comment added (len=${text.length})`); }
    else notes.push(`~ comment already present (dedup)`);

    stableSortByTs(entry.comments_editorial);
  }

  else if (type === "type/reference") {
    // Reference: requires at least a URL or a label; deduplicated on all fields.
    const ref = parseReferenceBlock(body);

    assert(ref.url || ref.label, "Ticket reference: renseigne au moins - URL: ou - Label: dans le ticket.", 2);

    if (STRICT && ref.url && !isHttpUrl(ref.url)) {
      throw Object.assign(new Error(`Ticket reference (strict): URL invalide (http/https requis): "${ref.url}"`), { __exitCode: 2 });
    }

    if (!Array.isArray(entry.refs)) entry.refs = [];
    const item = {
      url: ref.url || "",
      label: ref.label || (ref.url ? ref.url : "Référence"),
      kind: ref.kind || "",
      ts: nowIso,
      fromIssue: issueNum,
    };
    if (ref.citation) item.citation = ref.citation;

    const added = uniqPush(entry.refs, item, (x) => `${x?.url || ""}||${x?.label || ""}||${x?.kind || ""}||${x?.citation || ""}`);
    if (added) { changed = true; notes.push(`+ reference added (${item.url ? "url" : "label"})`); }
    else notes.push(`~ reference already present (dedup)`);

    stableSortByTs(entry.refs);
  }

  else if (type === "type/media") {
    // Media: download each attachment under public/media/<page>/<anchor>/ and
    // record an entry pointing at its public URL; deduplicated on src.
    if (!Array.isArray(entry.media)) entry.media = [];

    const atts = NO_DOWNLOAD ? [] : await fetchIssueAssets({ forgeApiBase, owner, repo, token, issueNum });

    if (!atts.length) {
      notes.push("! no assets found (nothing to download).");
    }

    for (const a of atts) {
      const name = sanitizeFilename(a?.name || `asset-${a?.id || "x"}`);
      const dl = a?.browser_download_url || a?.download_url || "";
      if (!dl) { notes.push(`! asset missing download url: ${name}`); continue; }

      // Caption = ticket title (fallback ".").
      // NOTE(review): because of the "." fallback, `caption` is never empty,
      // so the STRICT empty-caption check below can never fire — confirm
      // whether STRICT should instead reject an empty ticket title.
      const caption = (title || "").trim() || ".";
      if (STRICT && !caption.trim()) {
        throw Object.assign(new Error("Ticket media (strict): caption vide."), { __exitCode: 2 });
      }

      const mediaDirAbs = path.join(PUBLIC_DIR, "media", pageKey, ancre);
      const destAbs = path.join(mediaDirAbs, name);
      const urlPath = `${MEDIA_URL_ROOT}/${pageKey}/${ancre}/${name}`.replace(/\/{2,}/g, "/");

      if (await exists(destAbs)) {
        // Idempotent: never re-download or overwrite an existing file.
        notes.push(`~ media already exists: ${urlPath}`);
      } else if (!DRY_RUN) {
        const bytes = await downloadToFile(dl, token, destAbs);
        notes.push(`+ downloaded ${name} (${bytes} bytes) -> ${urlPath}`);
        touchedFiles.push(path.relative(CWD, destAbs).replace(/\\/g, "/"));
      } else {
        notes.push(`(dry) would download ${name} -> ${urlPath}`);
      }

      const item = {
        type: inferMediaTypeFromFilename(name),
        src: urlPath,
        caption,
        credit: "",
        ts: nowIso,
        fromIssue: issueNum,
      };

      const added = uniqPush(entry.media, item, (x) => String(x?.src || ""));
      if (added) changed = true;
    }

    stableSortByTs(entry.media);
  }

  else {
    throw Object.assign(new Error(`Type non supporté: "${type}"`), { __exitCode: 2 });
  }

  if (!changed) {
    console.log("ℹ️ No changes to apply.");
    for (const n of notes) console.log(" ", n);
    return;
  }

  if (DRY_RUN) {
    // Preview only: nothing is written, committed, or closed.
    console.log("\n--- DRY RUN (no write) ---");
    console.log(`Would update: ${annoFileRel}`);
    for (const n of notes) console.log(" ", n);
    console.log("\nExcerpt (resulting entry):");
    console.log(YAML.stringify({ [ancre]: doc.paras[ancre] }).trimEnd());
    console.log("\n✅ Dry-run terminé.");
    return;
  }

  await saveAnnoDocYaml(annoFileAbs, doc);
  touchedFiles.unshift(annoFileRel);

  console.log(`✅ Updated: ${annoFileRel}`);
  for (const n of notes) console.log(" ", n);

  if (DO_COMMIT) {
    run("git", ["add", ...touchedFiles], { cwd: CWD });

    if (!gitHasStagedChanges()) {
      console.log("ℹ️ Nothing to commit (aucun changement staged).");
      return;
    }

    const msg = `anno: apply ticket #${issueNum} (${pageKey}#${ancre} ${type})`;
    run("git", ["commit", "-m", msg], { cwd: CWD });

    const sha = runQuiet("git", ["rev-parse", "--short", "HEAD"], { cwd: CWD }).trim();
    console.log(`✅ Committed: ${msg} (${sha})`);

    // Closing is only allowed after a successful commit (enforced at startup).
    if (DO_CLOSE) {
      const comment = `✅ Appliqué par apply-annotation-ticket.\nCommit: ${sha}`;
      await closeIssue({ forgeApiBase, owner, repo, token, issueNum, comment });
      console.log(`✅ Ticket #${issueNum} fermé.`);
    }
  } else {
    // No --commit: print the manual follow-up commands instead.
    console.log("\nNext (manuel) :");
    console.log(` git diff -- ${touchedFiles[0]}`);
    console.log(` git add ${touchedFiles.join(" ")}`);
    console.log(` git commit -m "anno: apply ticket #${issueNum} (${pageKey}#${ancre} ${type})"`);
  }
}
// Top-level runner: map thrown errors to their attached exit code (default 1).
main().catch((err) => {
  const exitCode = err?.__exitCode || 1;
  console.error("💥", err?.message || err);
  process.exit(exitCode);
});