Compare commits

1 Commit

Author: archicratie-bot
SHA1: 6068819834
Message: anno: apply ticket #129 (archicrat-ia/chapitre-4#p-11-67c14c09 type/media)
Date: 2026-02-26 13:15:16 +00:00
Checks: CI / build-and-anchors (push) successful in 1m39s; SMOKE / smoke (push) successful in 18s

33 changed files with 760 additions and 2171 deletions

View File

@@ -16,13 +16,9 @@ defaults:
run:
shell: bash
concurrency:
group: anno-apply-${{ github.event.issue.number || github.event.issue.index || inputs.issue || 'manual' }}
cancel-in-progress: true
jobs:
apply-approved:
runs-on: mac-ci
runs-on: ubuntu-latest
container:
image: mcr.microsoft.com/devcontainers/javascript-node:22-bookworm
@@ -33,11 +29,12 @@ jobs:
git --version
node --version
npm --version
npm ping --registry=https://registry.npmjs.org
- name: Derive context (event.json / workflow_dispatch)
env:
INPUT_ISSUE: ${{ inputs.issue }}
FORGE_API: ${{ vars.FORGE_API || vars.FORGE_BASE || vars.FORGE_BASE_URL }}
FORGE_API: ${{ vars.FORGE_API || vars.FORGE_BASE }}
run: |
set -euo pipefail
export EVENT_JSON="/var/run/act/workflow/event.json"
@@ -81,12 +78,10 @@ jobs:
throw new Error("No issue number in event.json or workflow_dispatch input");
}
// label name: best-effort (non-blocking)
let labelName = "workflow_dispatch";
const lab = ev?.label;
if (typeof lab === "string") labelName = lab;
else if (lab && typeof lab === "object" && typeof lab.name === "string") labelName = lab.name;
else if (ev?.label?.name) labelName = ev.label.name;
const labelName =
ev?.label?.name ||
ev?.label ||
"workflow_dispatch";
const u = new URL(cloneUrl);
const origin = u.origin;
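
Both variants above answer the same problem: act/Gitea deliver the `label` field inconsistently, sometimes as an object, sometimes as a bare string, sometimes not at all. A standalone sketch of the tolerant normalization (sample payloads invented):

```js
// Sketch: tolerant label-name extraction from a Gitea-style "labeled" event payload.
function labelNameOf(ev, fallback = "workflow_dispatch") {
  const lab = ev?.label;
  if (typeof lab === "string") return lab;            // bare-string payload
  if (lab && typeof lab === "object" && typeof lab.name === "string") return lab.name;
  return fallback;                                    // absent or malformed label
}

console.log(labelNameOf({ label: "state/approved" }));           // "state/approved"
console.log(labelNameOf({ label: { name: "state/approved" } })); // "state/approved"
console.log(labelNameOf({}));                                    // "workflow_dispatch"
```
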
@@ -111,129 +106,16 @@ jobs:
echo "✅ context:"
sed -n '1,120p' /tmp/anno.env
- name: Early gate (label event fast-skip, but tolerant)
- name: Gate on label state/approved
run: |
set -euo pipefail
source /tmp/anno.env
echo " event label = $LABEL_NAME"
# Fast skip on obvious non-approved label events (avoid noise),
# BUT do NOT skip if label payload is weird/unknown.
if [[ "$LABEL_NAME" != "state/approved" && "$LABEL_NAME" != "workflow_dispatch" && "$LABEL_NAME" != "" && "$LABEL_NAME" != "[object Object]" ]]; then
echo " label=$LABEL_NAME => skip early"
if [[ "$LABEL_NAME" != "state/approved" && "$LABEL_NAME" != "workflow_dispatch" ]]; then
echo " label=$LABEL_NAME => skip"
echo "SKIP=1" >> /tmp/anno.env
echo "SKIP_REASON=\"label_not_approved_event\"" >> /tmp/anno.env
exit 0
fi
echo "✅ continue to API gating (issue=$ISSUE_NUMBER)"
- name: Fetch issue + hard gate on labels + Type
env:
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
run: |
set -euo pipefail
source /tmp/anno.env
[[ "${SKIP:-0}" != "1" ]] || { echo " skipped"; exit 0; }
test -n "${FORGE_TOKEN:-}" || { echo "❌ Missing secret FORGE_TOKEN"; exit 1; }
curl -fsS \
-H "Authorization: token $FORGE_TOKEN" \
-H "Accept: application/json" \
"$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE_NUMBER" \
-o /tmp/issue.json
node --input-type=module - <<'NODE' >> /tmp/anno.env
import fs from "node:fs";
const issue = JSON.parse(fs.readFileSync("/tmp/issue.json","utf8"));
const title = String(issue.title || "");
const body = String(issue.body || "").replace(/\r\n/g, "\n");
const labels = Array.isArray(issue.labels) ? issue.labels.map(l => String(l.name || "")).filter(Boolean) : [];
const hasApproved = labels.includes("state/approved");
function pickLine(key) {
const re = new RegExp(`^\\s*${key}\\s*:\\s*([^\\n\\r]+)`, "mi");
const m = body.match(re);
return m ? m[1].trim() : "";
}
const typeRaw = pickLine("Type");
const type = String(typeRaw || "").trim().toLowerCase();
const allowed = new Set(["type/media","type/reference","type/comment"]);
const proposer = new Set(["type/correction","type/fact-check"]);
const out = [];
out.push(`ISSUE_TITLE=${JSON.stringify(title)}`);
out.push(`ISSUE_TYPE=${JSON.stringify(type)}`);
// HARD gate: must currently have state/approved (avoids depending on event payload)
if (!hasApproved) {
out.push(`SKIP=1`);
out.push(`SKIP_REASON=${JSON.stringify("not_approved_label_present")}`);
process.stdout.write(out.join("\n") + "\n");
process.exit(0);
}
if (!type) {
out.push(`SKIP=1`);
out.push(`SKIP_REASON=${JSON.stringify("missing_type")}`);
} else if (allowed.has(type)) {
// proceed
} else if (proposer.has(type)) {
out.push(`SKIP=1`);
out.push(`SKIP_REASON=${JSON.stringify("proposer_type:"+type)}`);
} else {
out.push(`SKIP=1`);
out.push(`SKIP_REASON=${JSON.stringify("unsupported_type:"+type)}`);
}
process.stdout.write(out.join("\n") + "\n");
NODE
echo "✅ gating result:"
grep -E '^(ISSUE_TYPE|SKIP|SKIP_REASON)=' /tmp/anno.env || true
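
The gate above is self-contained enough to test offline: one regex pulls the `Type:` line out of the issue body, and two sets split types into bot-applied versus editor-handled. A minimal sketch (sample bodies invented):

```js
// Sketch: parse "Type:" from an issue body and classify it like the gate above.
const allowed = new Set(["type/media", "type/reference", "type/comment"]);
const proposer = new Set(["type/correction", "type/fact-check"]);

function pickLine(body, key) {
  const re = new RegExp(`^\\s*${key}\\s*:\\s*([^\\n\\r]+)`, "mi");
  const m = String(body || "").replace(/\r\n/g, "\n").match(re);
  return m ? m[1].trim() : "";
}

function classify(body) {
  const type = pickLine(body, "Type").toLowerCase();
  if (!type) return { skip: true, reason: "missing_type" };
  if (allowed.has(type)) return { skip: false, type };
  if (proposer.has(type)) return { skip: true, reason: "proposer_type:" + type };
  return { skip: true, reason: "unsupported_type:" + type };
}

console.log(classify("Page: /archicrat-ia/chapitre-4/\nType: type/media")); // applied by the bot
console.log(classify("Type: type/correction"));                            // left to the editors
```
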
- name: Comment issue if skipped (Proposer / unsupported / missing Type)
if: ${{ always() }}
env:
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
run: |
set -euo pipefail
source /tmp/anno.env || true
[[ "${SKIP:-0}" == "1" ]] || exit 0
# IMPORTANT: do NOT comment for "not_approved_label_present" (avoid spam on other label events)
if [[ "${SKIP_REASON:-}" == "not_approved_label_present" || "${SKIP_REASON:-}" == "label_not_approved_event" ]]; then
echo " skip reason=${SKIP_REASON} -> no comment"
exit 0
fi
test -n "${FORGE_TOKEN:-}" || exit 0
REASON="${SKIP_REASON:-}"
TYPE="${ISSUE_TYPE:-}"
if [[ "$REASON" == proposer_type:* ]]; then
MSG=" Ticket #${ISSUE_NUMBER} détecté comme **Proposer** (${TYPE}).\n\n- Ce type est **traité manuellement par les editors**.\n✅ Aucun traitement automatique."
elif [[ "$REASON" == unsupported_type:* ]]; then
MSG=" Ticket #${ISSUE_NUMBER} ignoré : Type non supporté par le bot (${TYPE}).\n\nTypes supportés : type/media, type/reference, type/comment."
else
MSG=" Ticket #${ISSUE_NUMBER} ignoré : champ 'Type:' manquant ou illisible.\n\nAjoute : Type: type/media|type/reference|type/comment"
fi
PAYLOAD="$(node --input-type=module -e 'console.log(JSON.stringify({body: process.argv[1]||""}))' "$MSG")"
curl -fsS -X POST \
-H "Authorization: token $FORGE_TOKEN" \
-H "Content-Type: application/json" \
"$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE_NUMBER/comments" \
--data-binary "$PAYLOAD"
echo "✅ proceed (issue=$ISSUE_NUMBER)"
- name: Checkout default branch
run: |
@@ -253,7 +135,7 @@ jobs:
set -euo pipefail
source /tmp/anno.env
[[ "${SKIP:-0}" != "1" ]] || { echo " skipped"; exit 0; }
npm ci --no-audit --no-fund
npm ci
- name: Check apply script exists
run: |
@@ -272,7 +154,7 @@ jobs:
source /tmp/anno.env
[[ "${SKIP:-0}" != "1" ]] || { echo " skipped"; exit 0; }
npm run build
npm run build:clean
test -f dist/para-index.json || {
echo "❌ missing dist/para-index.json after build"
@@ -291,7 +173,6 @@ jobs:
set -euo pipefail
source /tmp/anno.env
[[ "${SKIP:-0}" != "1" ]] || { echo " skipped"; exit 0; }
test -d .git || { echo "❌ not a git repo (checkout failed)"; echo "APPLY_RC=90" >> /tmp/anno.env; exit 0; }
test -n "${FORGE_TOKEN:-}" || { echo "❌ Missing secret FORGE_TOKEN"; exit 1; }
@@ -339,7 +220,7 @@ jobs:
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
run: |
set -euo pipefail
source /tmp/anno.env || true
source /tmp/anno.env
[[ "${SKIP:-0}" != "1" ]] || { echo " skipped"; exit 0; }
RC="${APPLY_RC:-0}"
@@ -348,15 +229,30 @@ jobs:
exit 0
fi
test -n "${FORGE_TOKEN:-}" || exit 0
if [[ -f /tmp/apply.log ]]; then
BODY="$(tail -n 160 /tmp/apply.log | sed 's/\r$//')"
else
BODY="(no apply log found)"
fi
BODY="$(tail -n 160 /tmp/apply.log | sed 's/\r$//')"
MSG="❌ apply-annotation-ticket a échoué (rc=${RC}).\n\n\`\`\`\n${BODY}\n\`\`\`\n"
PAYLOAD="$(node --input-type=module -e 'console.log(JSON.stringify({body: process.argv[1]||""}))' "$MSG")"
curl -fsS -X POST \
-H "Authorization: token $FORGE_TOKEN" \
-H "Content-Type: application/json" \
"$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE_NUMBER/comments" \
--data-binary "$PAYLOAD"
- name: Comment issue if no-op (already applied)
if: ${{ always() }}
env:
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
run: |
set -euo pipefail
source /tmp/anno.env
[[ "${SKIP:-0}" != "1" ]] || exit 0
[[ "${APPLY_RC:-0}" == "0" ]] || exit 0
[[ "${NOOP:-0}" == "1" ]] || exit 0
MSG=" Ticket #${ISSUE_NUMBER} : rien à appliquer (déjà présent / dédupliqué)."
PAYLOAD="$(node --input-type=module -e 'console.log(JSON.stringify({body: process.argv[1]||""}))' "$MSG")"
curl -fsS -X POST \
@@ -371,12 +267,11 @@ jobs:
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
run: |
set -euo pipefail
source /tmp/anno.env || true
source /tmp/anno.env
[[ "${SKIP:-0}" != "1" ]] || exit 0
[[ "${APPLY_RC:-0}" == "0" ]] || { echo " apply failed -> skip push"; exit 0; }
[[ "${NOOP:-0}" == "0" ]] || { echo " no-op -> skip push"; exit 0; }
test -d .git || { echo " no git repo -> skip push"; exit 0; }
AUTH_URL="$(node --input-type=module -e '
const [clone, tok] = process.argv.slice(1);
@@ -395,7 +290,7 @@ jobs:
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
run: |
set -euo pipefail
source /tmp/anno.env || true
source /tmp/anno.env
[[ "${SKIP:-0}" != "1" ]] || exit 0
[[ "${APPLY_RC:-0}" == "0" ]] || { echo " apply failed -> skip PR"; exit 0; }
@@ -438,7 +333,6 @@ jobs:
run: |
set -euo pipefail
source /tmp/anno.env || true
[[ "${SKIP:-0}" != "1" ]] || { echo " skipped"; exit 0; }
RC="${APPLY_RC:-0}"

View File

@@ -1,13 +1,8 @@
name: Anno Reject (close issue)
name: Anno Reject
on:
issues:
types: [labeled]
workflow_dispatch:
inputs:
issue:
description: "Issue number to reject/close"
required: true
env:
NODE_OPTIONS: --dns-result-order=ipv4first
@@ -16,26 +11,14 @@ defaults:
run:
shell: bash
concurrency:
group: anno-reject-${{ github.event.issue.number || github.event.issue.index || inputs.issue || 'manual' }}
cancel-in-progress: true
jobs:
reject:
runs-on: mac-ci
runs-on: ubuntu-latest
container:
image: mcr.microsoft.com/devcontainers/javascript-node:22-bookworm
steps:
- name: Tools sanity
run: |
set -euo pipefail
node --version
- name: Derive context (event.json / workflow_dispatch)
env:
INPUT_ISSUE: ${{ inputs.issue }}
FORGE_API: ${{ vars.FORGE_API || vars.FORGE_BASE || vars.FORGE_BASE_URL }}
- name: Derive context
run: |
set -euo pipefail
export EVENT_JSON="/var/run/act/workflow/event.json"
@@ -46,122 +29,58 @@ jobs:
const ev = JSON.parse(fs.readFileSync(process.env.EVENT_JSON, "utf8"));
const repoObj = ev?.repository || {};
const cloneUrl =
repoObj?.clone_url ||
(repoObj?.html_url ? (repoObj.html_url.replace(/\/$/,"") + ".git") : "");
if (!cloneUrl) throw new Error("No repository url");
let owner =
repoObj?.owner?.login ||
repoObj?.owner?.username ||
(repoObj?.full_name ? repoObj.full_name.split("/")[0] : "");
let repo =
repoObj?.name ||
(repoObj?.full_name ? repoObj.full_name.split("/")[1] : "");
if ((!owner || !repo) && cloneUrl) {
if (!owner || !repo) {
const m = cloneUrl.match(/[:/](?<o>[^/]+)\/(?<r>[^/]+?)(?:\.git)?$/);
if (m?.groups) { owner = owner || m.groups.o; repo = repo || m.groups.r; }
}
if (!owner || !repo) throw new Error("Cannot infer owner/repo");
const issueNumber =
ev?.issue?.number ||
ev?.issue?.index ||
(process.env.INPUT_ISSUE ? Number(process.env.INPUT_ISSUE) : 0);
const issueNumber = ev?.issue?.number || ev?.issue?.index;
if (!issueNumber) throw new Error("No issue number");
if (!issueNumber || !Number.isFinite(Number(issueNumber))) {
throw new Error("No issue number in event.json or workflow_dispatch input");
}
// label name: best-effort (non-blocking)
let labelName = "workflow_dispatch";
const lab = ev?.label;
if (typeof lab === "string") labelName = lab;
else if (lab && typeof lab === "object" && typeof lab.name === "string") labelName = lab.name;
let apiBase = "";
if (process.env.FORGE_API && String(process.env.FORGE_API).trim()) {
apiBase = String(process.env.FORGE_API).trim().replace(/\/+$/,"");
} else if (cloneUrl) {
apiBase = new URL(cloneUrl).origin;
} else {
apiBase = "";
}
const labelName = ev?.label?.name || ev?.label || "";
const u = new URL(cloneUrl);
function sh(s){ return JSON.stringify(String(s)); }
process.stdout.write([
`OWNER=${sh(owner)}`,
`REPO=${sh(repo)}`,
`ISSUE_NUMBER=${sh(issueNumber)}`,
`LABEL_NAME=${sh(labelName)}`,
`API_BASE=${sh(apiBase)}`
`API_BASE=${sh(u.origin)}`
].join("\n") + "\n");
NODE
echo "✅ context:"
sed -n '1,120p' /tmp/reject.env
- name: Early gate (fast-skip, tolerant)
- name: Gate on label state/rejected
run: |
set -euo pipefail
source /tmp/reject.env
echo " event label = $LABEL_NAME"
if [[ "$LABEL_NAME" != "state/rejected" && "$LABEL_NAME" != "workflow_dispatch" && "$LABEL_NAME" != "" && "$LABEL_NAME" != "[object Object]" ]]; then
echo " label=$LABEL_NAME => skip early"
echo "SKIP=1" >> /tmp/reject.env
echo "SKIP_REASON=\"label_not_rejected_event\"" >> /tmp/reject.env
if [[ "$LABEL_NAME" != "state/rejected" ]]; then
echo " label=$LABEL_NAME => skip"
exit 0
fi
echo "✅ reject issue=$ISSUE_NUMBER"
- name: Comment + close (only if label state/rejected is PRESENT now, and no conflict)
- name: Comment + close issue
env:
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
run: |
set -euo pipefail
source /tmp/reject.env
[[ "${SKIP:-0}" != "1" ]] || { echo " skipped"; exit 0; }
test -n "${FORGE_TOKEN:-}" || { echo "❌ Missing secret FORGE_TOKEN"; exit 1; }
test -n "${API_BASE:-}" || { echo "❌ Missing API_BASE"; exit 1; }
curl -fsS \
-H "Authorization: token $FORGE_TOKEN" \
-H "Accept: application/json" \
"$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE_NUMBER" \
-o /tmp/reject.issue.json
node --input-type=module - <<'NODE' > /tmp/reject.flags
import fs from "node:fs";
const issue = JSON.parse(fs.readFileSync("/tmp/reject.issue.json","utf8"));
const labels = Array.isArray(issue.labels) ? issue.labels.map(l => String(l.name || "")).filter(Boolean) : [];
const hasApproved = labels.includes("state/approved");
const hasRejected = labels.includes("state/rejected");
process.stdout.write(`HAS_APPROVED=${hasApproved ? "1":"0"}\nHAS_REJECTED=${hasRejected ? "1":"0"}\n`);
NODE
source /tmp/reject.flags
# Do nothing unless state/rejected is truly present right now (guards against weird payloads)
if [[ "${HAS_REJECTED:-0}" != "1" ]]; then
echo " state/rejected not present -> skip"
exit 0
fi
if [[ "${HAS_APPROVED:-0}" == "1" && "${HAS_REJECTED:-0}" == "1" ]]; then
MSG="⚠️ Conflit d'état sur le ticket #${ISSUE_NUMBER} : labels **state/approved** et **state/rejected** présents.\n\n➡ Action manuelle requise : retirer l'un des deux labels avant relance."
PAYLOAD="$(node --input-type=module -e 'console.log(JSON.stringify({body: process.argv[1]||""}))' "$MSG")"
curl -fsS -X POST \
-H "Authorization: token $FORGE_TOKEN" \
-H "Content-Type: application/json" \
"$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE_NUMBER/comments" \
--data-binary "$PAYLOAD"
echo " conflict => stop"
exit 0
fi
MSG="❌ Ticket #${ISSUE_NUMBER} refusé (label state/rejected)."
PAYLOAD="$(node --input-type=module -e 'console.log(JSON.stringify({body: process.argv[1]||""}))' "$MSG")"
@@ -176,6 +95,4 @@ jobs:
-H "Authorization: token $FORGE_TOKEN" \
-H "Content-Type: application/json" \
"$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE_NUMBER" \
--data-binary '{"state":"closed"}'
echo "✅ rejected+closed"
--data-binary '{"state":"closed"}'

View File

@@ -4,37 +4,22 @@ on:
issues:
types: [opened, edited]
concurrency:
group: auto-label-${{ github.event.issue.number || github.event.issue.index || 'manual' }}
cancel-in-progress: true
jobs:
label:
runs-on: mac-ci
container:
image: mcr.microsoft.com/devcontainers/javascript-node:22-bookworm
runs-on: ubuntu-latest
steps:
- name: Apply labels from Type/State/Category
env:
# IMPORTANT: prefer FORGE_BASE (LAN) if set, otherwise FORGE_API
FORGE_BASE: ${{ vars.FORGE_BASE || vars.FORGE_API || vars.FORGE_API_BASE }}
FORGE_BASE: ${{ vars.FORGE_API || vars.FORGE_BASE }}
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
REPO_FULL: ${{ gitea.repository }}
EVENT_PATH: ${{ github.event_path }}
NODE_OPTIONS: --dns-result-order=ipv4first
run: |
python3 - <<'PY'
import json, os, re, time, urllib.request, urllib.error, socket
forge = (os.environ.get("FORGE_BASE") or "").rstrip("/")
if not forge:
raise SystemExit("Missing FORGE_BASE/FORGE_API repo variable (e.g. http://192.168.1.20:3000)")
token = os.environ.get("FORGE_TOKEN") or ""
if not token:
raise SystemExit("Missing secret FORGE_TOKEN")
import json, os, re, urllib.request, urllib.error
forge = os.environ["FORGE_BASE"].rstrip("/")
token = os.environ["FORGE_TOKEN"]
owner, repo = os.environ["REPO_FULL"].split("/", 1)
event_path = os.environ["EVENT_PATH"]
@@ -61,9 +46,12 @@ jobs:
print("PARSED:", {"Type": t, "State": s, "Category": c})
# 1) explicit from the body
if t: desired.add(t)
if s: desired.add(s)
if c: desired.add(c)
if t:
desired.add(t)
if s:
desired.add(s)
if c:
desired.add(c)
# 2) fallback from the title if Type is absent
if not t:
@@ -88,56 +76,42 @@ jobs:
"Authorization": f"token {token}",
"Accept": "application/json",
"Content-Type": "application/json",
"User-Agent": "archicratie-auto-label/1.1",
"User-Agent": "archicratie-auto-label/1.0",
}
def jreq(method, url, payload=None, timeout=60, retries=4, backoff=2.0):
def jreq(method, url, payload=None):
data = None if payload is None else json.dumps(payload).encode("utf-8")
last_err = None
for i in range(retries):
req = urllib.request.Request(url, data=data, headers=headers, method=method)
try:
with urllib.request.urlopen(req, timeout=timeout) as r:
b = r.read()
return json.loads(b.decode("utf-8")) if b else None
except urllib.error.HTTPError as e:
b = e.read().decode("utf-8", errors="replace")
raise RuntimeError(f"HTTP {e.code} {method} {url}\n{b}") from e
except (TimeoutError, socket.timeout, urllib.error.URLError) as e:
last_err = e
# retry only on network/timeout
time.sleep(backoff * (i + 1))
raise RuntimeError(f"Network/timeout after retries: {method} {url}\n{last_err}")
req = urllib.request.Request(url, data=data, headers=headers, method=method)
try:
with urllib.request.urlopen(req, timeout=20) as r:
b = r.read()
return json.loads(b.decode("utf-8")) if b else None
except urllib.error.HTTPError as e:
b = e.read().decode("utf-8", errors="replace")
raise RuntimeError(f"HTTP {e.code} {method} {url}\n{b}") from e
# repo labels
labels = jreq("GET", f"{api}/repos/{owner}/{repo}/labels?limit=1000", timeout=60) or []
labels = jreq("GET", f"{api}/repos/{owner}/{repo}/labels?limit=1000") or []
name_to_id = {x.get("name"): x.get("id") for x in labels}
missing = [x for x in desired if x not in name_to_id]
if missing:
raise SystemExit("Missing labels in repo: " + ", ".join(sorted(missing)))
wanted_ids = sorted({int(name_to_id[x]) for x in desired})
wanted_ids = [name_to_id[x] for x in desired]
# current labels on the issue
current = jreq("GET", f"{api}/repos/{owner}/{repo}/issues/{number}/labels", timeout=60) or []
current_ids = {int(x.get("id")) for x in current if x.get("id") is not None}
current = jreq("GET", f"{api}/repos/{owner}/{repo}/issues/{number}/labels") or []
current_ids = {x.get("id") for x in current if x.get("id") is not None}
final_ids = sorted(current_ids.union(wanted_ids))
# Replace labels = union (removes nothing)
# set labels = union (removes nothing)
url = f"{api}/repos/{owner}/{repo}/issues/{number}/labels"
# IMPORTANT: NEVER send a raw list here (that is what caused the 422)
jreq("PUT", url, {"labels": final_ids}, timeout=90, retries=4)
# post-apply check (guards against "timed out but applied")
post = jreq("GET", f"{api}/repos/{owner}/{repo}/issues/{number}/labels", timeout=60) or []
post_ids = {int(x.get("id")) for x in post if x.get("id") is not None}
missing_ids = [i for i in wanted_ids if i not in post_ids]
if missing_ids:
raise RuntimeError(f"Labels not applied after PUT (missing ids): {missing_ids}")
try:
jreq("PUT", url, {"labels": final_ids})
except Exception:
jreq("PUT", url, final_ids)
print(f"OK labels #{number}: {sorted(desired)}")
PY
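
Two fixes are packed into the hardened `jreq`: transient failures (timeouts, network errors) are retried with linear backoff while HTTP errors fail fast, and a read-after-write check catches the "timed out but the PUT actually landed" case; the payload is also pinned to `{"labels": ids}`, since the raw-array form is what produced the 422. The retry discipline, sketched with Node's fetch (URL shape illustrative):

```js
// Sketch: retry only transient failures; HTTP errors are deterministic, so fail fast.
async function jreq(method, url, payload,
                    { retries = 4, backoffMs = 2000, timeoutMs = 60000 } = {}) {
  let lastErr;
  for (let i = 0; i < retries; i++) {
    try {
      const res = await fetch(url, {
        method,
        headers: { "Content-Type": "application/json" },
        body: payload === undefined ? undefined : JSON.stringify(payload),
        signal: AbortSignal.timeout(timeoutMs),
      });
      if (!res.ok) throw Object.assign(new Error(`HTTP ${res.status} ${method} ${url}`), { fatal: true });
      const text = await res.text();
      return text ? JSON.parse(text) : null;
    } catch (e) {
      if (e.fatal) throw e;                                        // HTTP error: no retry
      lastErr = e;                                                 // timeout/network error
      await new Promise((r) => setTimeout(r, backoffMs * (i + 1))); // linear backoff
    }
  }
  throw new Error(`network/timeout after retries: ${method} ${url}\n${lastErr}`);
}
```
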

View File

@@ -3,7 +3,7 @@ name: CI
on:
push:
pull_request:
branches: [main]
branches: [master]
workflow_dispatch:
env:
@@ -15,7 +15,7 @@ defaults:
jobs:
build-and-anchors:
runs-on: mac-ci
runs-on: ubuntu-latest
container:
image: mcr.microsoft.com/devcontainers/javascript-node:22-bookworm

View File

@@ -6,7 +6,7 @@ on:
workflow_dispatch:
inputs:
force:
description: "Force FULL deploy (rebuild+restart) even if gate would hotpatch-only (1=yes, 0=no)"
description: "Force deploy even if gate would skip (1=yes, 0=no)"
required: false
default: "0"
@@ -14,7 +14,6 @@ env:
NODE_OPTIONS: --dns-result-order=ipv4first
DOCKER_API_VERSION: "1.43"
COMPOSE_VERSION: "2.29.7"
ASTRO_TELEMETRY_DISABLED: "1"
defaults:
run:
@@ -26,9 +25,9 @@ concurrency:
jobs:
deploy:
runs-on: nas-deploy
runs-on: ubuntu-latest
container:
image: localhost:5000/archicratie/nas-deploy-node22@sha256:fefa8bb307005cebec07796661ab25528dc319c33a8f1e480e1d66f90cd5cff6
image: mcr.microsoft.com/devcontainers/javascript-node:22-bookworm
steps:
- name: Tools sanity
@@ -38,188 +37,92 @@ jobs:
node --version
npm --version
- name: Checkout (push or workflow_dispatch, no external actions)
env:
EVENT_JSON: /var/run/act/workflow/event.json
- name: Checkout (from event.json, no external actions)
run: |
set -euo pipefail
export EVENT_JSON="/var/run/act/workflow/event.json"
test -f "$EVENT_JSON" || { echo "❌ Missing $EVENT_JSON"; exit 1; }
node --input-type=module <<'NODE'
import fs from "node:fs";
eval "$(node --input-type=module -e 'import fs from "node:fs";
const ev = JSON.parse(fs.readFileSync(process.env.EVENT_JSON,"utf8"));
const repo =
ev?.repository?.clone_url ||
(ev?.repository?.html_url ? (ev.repository.html_url.replace(/\/$/,"") + ".git") : "");
const sha =
ev?.after ||
ev?.pull_request?.head?.sha ||
ev?.head_commit?.id ||
ev?.sha ||
"";
if (!repo) throw new Error("No repository url in event.json");
if (!sha) throw new Error("No sha in event.json");
process.stdout.write(`REPO_URL=${JSON.stringify(repo)}\nSHA=${JSON.stringify(sha)}\n`);
')"
const ev = JSON.parse(fs.readFileSync(process.env.EVENT_JSON, "utf8"));
const repoObj = ev?.repository || {};
const cloneUrl =
repoObj?.clone_url ||
(repoObj?.html_url ? (repoObj.html_url.replace(/\/$/,"") + ".git") : "");
if (!cloneUrl) throw new Error("No repository clone_url/html_url in event.json");
const defaultBranch = repoObj?.default_branch || "main";
// Push-range (most reliable for change detection)
const before = String(ev?.before || "").trim();
const after =
(process.env.GITHUB_SHA && String(process.env.GITHUB_SHA).trim()) ||
String(ev?.after || ev?.sha || ev?.head_commit?.id || ev?.pull_request?.head?.sha || "").trim();
const shq = (s) => "'" + String(s).replace(/'/g, "'\\''") + "'";
fs.writeFileSync("/tmp/deploy.env", [
`REPO_URL=${shq(cloneUrl)}`,
`DEFAULT_BRANCH=${shq(defaultBranch)}`,
`BEFORE=${shq(before)}`,
`AFTER=${shq(after)}`
].join("\n") + "\n");
NODE
source /tmp/deploy.env
echo "Repo URL: $REPO_URL"
echo "Default branch: $DEFAULT_BRANCH"
echo "BEFORE: ${BEFORE:-<empty>}"
echo "AFTER: ${AFTER:-<empty>}"
echo "Repo URL: $REPO_URL"
echo "SHA: $SHA"
rm -rf .git
git init -q
git remote add origin "$REPO_URL"
# Checkout AFTER (or default branch if missing)
if [[ -n "${AFTER:-}" ]]; then
git fetch --depth 50 origin "$AFTER"
git -c advice.detachedHead=false checkout -q FETCH_HEAD
else
git fetch --depth 50 origin "$DEFAULT_BRANCH"
git -c advice.detachedHead=false checkout -q "origin/$DEFAULT_BRANCH"
AFTER="$(git rev-parse HEAD)"
echo "AFTER='$AFTER'" >> /tmp/deploy.env
echo "Resolved AFTER: $AFTER"
fi
git fetch --depth 1 origin "$SHA"
git -c advice.detachedHead=false checkout -q FETCH_HEAD
git log -1 --oneline
- name: Gate — decide SKIP vs HOTPATCH vs FULL rebuild
echo "SHA=$SHA" >> /tmp/deploy.env
echo "REPO_URL=$REPO_URL" >> /tmp/deploy.env
- name: Gate — auto deploy only on annotations/media changes
env:
INPUT_FORCE: ${{ inputs.force }}
EVENT_JSON: /var/run/act/workflow/event.json
run: |
set -euo pipefail
source /tmp/deploy.env
source /tmp/deploy.env 2>/dev/null || true
FORCE="${INPUT_FORCE:-0}"
# Read the push before/after from event.json (merge-proof)
node --input-type=module <<'NODE'
import fs from "node:fs";
const ev = JSON.parse(fs.readFileSync(process.env.EVENT_JSON, "utf8"));
const before = ev?.before || "";
const after = ev?.after || ev?.sha || "";
const shq = (s) => "'" + String(s).replace(/'/g, "'\\''") + "'";
fs.writeFileSync("/tmp/gate.env", [
`EV_BEFORE=${shq(before)}`,
`EV_AFTER=${shq(after)}`
].join("\n") + "\n");
NODE
source /tmp/gate.env
BEFORE="${EV_BEFORE:-}"
AFTER="${EV_AFTER:-}"
if [[ -z "${AFTER:-}" ]]; then
AFTER="${SHA:-}"
if [[ "$FORCE" == "1" ]]; then
echo "✅ force=1 -> bypass gate -> deploy allowed"
echo "GO=1" >> /tmp/deploy.env
exit 0
fi
echo "Gate ctx: BEFORE=${BEFORE:-<empty>} AFTER=${AFTER:-<empty>} FORCE=${FORCE}"
# files touched by THIS commit (⚠️ merge commits -> use -m)
CHANGED="$(git diff-tree -m --no-commit-id --name-only -r "$SHA" || true)"
echo "== changed files =="
echo "$CHANGED" | sed -n '1,200p'
# Produce a reliable CHANGED list:
# - if BEFORE/AFTER are valid -> git diff before..after
# - otherwise fall back -> diff parent1..after or show after
CHANGED=""
Z40="0000000000000000000000000000000000000000"
if [[ -n "${BEFORE:-}" && "${BEFORE}" != "${Z40}" ]] \
&& git cat-file -e "${BEFORE}^{commit}" 2>/dev/null \
&& git cat-file -e "${AFTER}^{commit}" 2>/dev/null; then
CHANGED="$(git diff --name-only "${BEFORE}" "${AFTER}" || true)"
else
P1="$(git rev-parse "${AFTER}^" 2>/dev/null || true)"
if [[ -n "${P1:-}" ]] && git cat-file -e "${P1}^{commit}" 2>/dev/null; then
CHANGED="$(git diff --name-only "${P1}" "${AFTER}" || true)"
else
CHANGED="$(git show --name-only --pretty="" "${AFTER}" | sed '/^$/d' || true)"
fi
# Strict gate: annotations + media only
if echo "$CHANGED" | grep -qE '^(src/annotations/|public/media/)'; then
echo "GO=1" >> /tmp/deploy.env
echo "✅ deploy allowed (annotations/media change detected)"
exit 0
fi
printf "%s\n" "${CHANGED}" > /tmp/changed.txt
echo "GO=0" >> /tmp/deploy.env
echo " no annotations/media change -> skip deploy"
exit 0
echo "== changed files (first 200) =="
sed -n '1,200p' /tmp/changed.txt || true
# Flags
HAS_FULL=0
HAS_HOTPATCH=0
# FULL si build-impacting (ce que tu veux : content/anchors/pages/scripts)
if grep -qE '^(src/content/|src/anchors/|src/pages/|scripts/)' /tmp/changed.txt; then
HAS_FULL=1
fi
# HOTPATCH si annotations/media touchés
if grep -qE '^(src/annotations/|public/media/)' /tmp/changed.txt; then
HAS_HOTPATCH=1
fi
echo "Gate flags: HAS_FULL=${HAS_FULL} HAS_HOTPATCH=${HAS_HOTPATCH}"
# Decision
if [[ "${FORCE}" == "1" ]]; then
GO=1
MODE="full"
echo "✅ force=1 -> MODE=full (rebuild+restart)"
elif [[ "${HAS_FULL}" == "1" ]]; then
GO=1
MODE="full"
echo "✅ build-impacting change -> MODE=full (rebuild+restart)"
elif [[ "${HAS_HOTPATCH}" == "1" ]]; then
GO=1
MODE="hotpatch"
echo "✅ annotations/media change -> MODE=hotpatch"
else
GO=0
MODE="skip"
echo " no relevant change -> skip deploy"
fi
echo "GO=${GO}" >> /tmp/deploy.env
echo "MODE='${MODE}'" >> /tmp/deploy.env
- name: Toolchain sanity + resolve COMPOSE_PROJECT_NAME
- name: Install docker client + docker compose plugin (v2)
run: |
set -euo pipefail
source /tmp/deploy.env
[[ "${GO:-0}" == "1" ]] || { echo " skipped"; exit 0; }
# tools are prebaked in the image
git --version
apt-get -o Acquire::Retries=5 -o Acquire::ForceIPv4=true update
apt-get install -y --no-install-recommends ca-certificates curl docker.io
rm -rf /var/lib/apt/lists/*
mkdir -p /usr/local/lib/docker/cli-plugins
curl -fsSL \
"https://github.com/docker/compose/releases/download/v${COMPOSE_VERSION}/docker-compose-linux-x86_64" \
-o /usr/local/lib/docker/cli-plugins/docker-compose
chmod +x /usr/local/lib/docker/cli-plugins/docker-compose
docker version
docker compose version
python3 -c 'import yaml; print("PyYAML OK")'
# Reuse existing compose project name if containers already exist
PROJ="$(docker inspect archicratie-web-blue --format '{{ index .Config.Labels "com.docker.compose.project" }}' 2>/dev/null || true)"
if [[ -z "${PROJ:-}" ]]; then
PROJ="$(docker inspect archicratie-web-green --format '{{ index .Config.Labels "com.docker.compose.project" }}' 2>/dev/null || true)"
fi
if [[ -z "${PROJ:-}" ]]; then PROJ="archicratie-web"; fi
echo "COMPOSE_PROJECT_NAME='$PROJ'" >> /tmp/deploy.env
echo "✅ Using COMPOSE_PROJECT_NAME=$PROJ"
# Assert target containers exist (hotpatch needs them)
for c in archicratie-web-blue archicratie-web-green; do
docker inspect "$c" >/dev/null 2>&1 || { echo "❌ missing container $c"; exit 5; }
done
- name: Assert required vars (PUBLIC_GITEA_*) — only needed for MODE=full
- name: Assert required vars (PUBLIC_GITEA_*)
env:
PUBLIC_GITEA_BASE: ${{ vars.PUBLIC_GITEA_BASE }}
PUBLIC_GITEA_OWNER: ${{ vars.PUBLIC_GITEA_OWNER }}
@@ -228,26 +131,24 @@ jobs:
set -euo pipefail
source /tmp/deploy.env
[[ "${GO:-0}" == "1" ]] || { echo " skipped"; exit 0; }
[[ "${MODE:-hotpatch}" == "full" ]] || { echo " hotpatch mode -> vars not required"; exit 0; }
test -n "${PUBLIC_GITEA_BASE:-}" || { echo "❌ missing repo var PUBLIC_GITEA_BASE"; exit 2; }
test -n "${PUBLIC_GITEA_OWNER:-}" || { echo "❌ missing repo var PUBLIC_GITEA_OWNER"; exit 2; }
test -n "${PUBLIC_GITEA_REPO:-}" || { echo "❌ missing repo var PUBLIC_GITEA_REPO"; exit 2; }
echo "✅ vars OK"
- name: Assert deploy files exist — only needed for MODE=full
- name: Assert deploy files exist
run: |
set -euo pipefail
source /tmp/deploy.env
[[ "${GO:-0}" == "1" ]] || { echo " skipped"; exit 0; }
[[ "${MODE:-hotpatch}" == "full" ]] || { echo " hotpatch mode -> files not required"; exit 0; }
test -f docker-compose.yml
test -f Dockerfile
test -f nginx.conf
echo "✅ deploy files OK"
- name: FULL — Build + deploy staging (blue) then warmup+smoke
- name: Build + deploy staging (blue) then smoke
env:
PUBLIC_GITEA_BASE: ${{ vars.PUBLIC_GITEA_BASE }}
PUBLIC_GITEA_OWNER: ${{ vars.PUBLIC_GITEA_OWNER }}
@@ -256,51 +157,31 @@ jobs:
set -euo pipefail
source /tmp/deploy.env
[[ "${GO:-0}" == "1" ]] || { echo " skipped"; exit 0; }
[[ "${MODE:-hotpatch}" == "full" ]] || { echo " MODE=$MODE -> skip full rebuild"; exit 0; }
PROJ="${COMPOSE_PROJECT_NAME:-archicratie-web}"
wait_url() {
local url="$1"
local label="$2"
local tries="${3:-60}"
for i in $(seq 1 "$tries"); do
if curl -fsS --max-time 4 "$url" >/dev/null; then
echo "✅ $label OK ($url)"
return 0
fi
echo "… warmup $label ($i/$tries)"
sleep 1
done
echo "❌ timeout $label ($url)"
return 1
}
# backup tags (best effort)
TS="$(date -u +%Y%m%d-%H%M%S)"
echo "TS='$TS'" >> /tmp/deploy.env
echo "TS=$TS" >> /tmp/deploy.env
docker image tag archicratie-web:blue "archicratie-web:blue.BAK.${TS}" || true
docker image tag archicratie-web:green "archicratie-web:green.BAK.${TS}" || true
docker compose -p "$PROJ" -f docker-compose.yml build web_blue
docker rm -f archicratie-web-blue || true
docker compose -p "$PROJ" -f docker-compose.yml up -d --force-recreate --remove-orphans web_blue
# build + restart staging (blue=8081)
docker compose build --no-cache web_blue
docker compose up -d --force-recreate web_blue
# warmup endpoints
wait_url "http://127.0.0.1:8081/para-index.json" "blue para-index"
wait_url "http://127.0.0.1:8081/annotations-index.json" "blue annotations-index"
wait_url "http://127.0.0.1:8081/pagefind/pagefind.js" "blue pagefind.js"
# smoke staging (local port)
curl -fsS "http://127.0.0.1:8081/para-index.json" >/dev/null
curl -fsS "http://127.0.0.1:8081/annotations-index.json" >/dev/null
curl -fsS "http://127.0.0.1:8081/pagefind/pagefind.js" >/dev/null
CANON="$(curl -fsS --max-time 6 "http://127.0.0.1:8081/archicrat-ia/chapitre-1/" | grep -oE 'rel="canonical" href="[^"]+"' | head -n1 || true)"
CANON="$(curl -fsS "http://127.0.0.1:8081/archicrat-ia/chapitre-1/" | grep -oE 'rel="canonical" href="[^"]+"' | head -n1 || true)"
echo "canonical(blue)=$CANON"
echo "$CANON" | grep -q 'https://staging\.archicratie\.trans-hands\.synology\.me/' || {
echo "❌ staging canonical mismatch"
docker logs --tail 120 archicratie-web-blue || true
exit 3
echo "❌ staging canonical mismatch"; exit 3;
}
echo "✅ staging OK"
- name: FULL — Build + deploy live (green) then warmup+smoke + rollback if needed
- name: Build + deploy live (green) then smoke + rollback if needed
env:
PUBLIC_GITEA_BASE: ${{ vars.PUBLIC_GITEA_BASE }}
PUBLIC_GITEA_OWNER: ${{ vars.PUBLIC_GITEA_OWNER }}
@@ -309,253 +190,28 @@ jobs:
set -euo pipefail
source /tmp/deploy.env
[[ "${GO:-0}" == "1" ]] || { echo " skipped"; exit 0; }
[[ "${MODE:-hotpatch}" == "full" ]] || { echo " MODE=$MODE -> skip full rebuild"; exit 0; }
PROJ="${COMPOSE_PROJECT_NAME:-archicratie-web}"
TS="${TS:-$(date -u +%Y%m%d-%H%M%S)}"
wait_url() {
local url="$1"
local label="$2"
local tries="${3:-60}"
for i in $(seq 1 "$tries"); do
if curl -fsS --max-time 4 "$url" >/dev/null; then
echo "✅ $label OK ($url)"
return 0
fi
echo "… warmup $label ($i/$tries)"
sleep 1
done
echo "❌ timeout $label ($url)"
return 1
}
rollback() {
echo "⚠️ rollback green -> previous image tag (best effort)"
docker image tag "archicratie-web:green.BAK.${TS}" archicratie-web:green || true
docker rm -f archicratie-web-green || true
docker compose -p "$PROJ" -f docker-compose.yml up -d --force-recreate --remove-orphans web_green || true
docker compose up -d --force-recreate web_green || true
}
# build/restart green
if ! docker compose -p "$PROJ" -f docker-compose.yml build web_green; then
echo "❌ build green failed"; rollback; exit 4
fi
set +e
docker compose build --no-cache web_green
docker compose up -d --force-recreate web_green
docker rm -f archicratie-web-green || true
docker compose -p "$PROJ" -f docker-compose.yml up -d --force-recreate --remove-orphans web_green
curl -fsS "http://127.0.0.1:8082/para-index.json" >/dev/null
curl -fsS "http://127.0.0.1:8082/annotations-index.json" >/dev/null
curl -fsS "http://127.0.0.1:8082/pagefind/pagefind.js" >/dev/null
# warmup endpoints
if ! wait_url "http://127.0.0.1:8082/para-index.json" "green para-index"; then rollback; exit 4; fi
if ! wait_url "http://127.0.0.1:8082/annotations-index.json" "green annotations-index"; then rollback; exit 4; fi
if ! wait_url "http://127.0.0.1:8082/pagefind/pagefind.js" "green pagefind.js"; then rollback; exit 4; fi
CANON="$(curl -fsS --max-time 6 "http://127.0.0.1:8082/archicrat-ia/chapitre-1/" | grep -oE 'rel="canonical" href="[^"]+"' | head -n1 || true)"
CANON="$(curl -fsS "http://127.0.0.1:8082/archicrat-ia/chapitre-1/" | grep -oE 'rel="canonical" href="[^"]+"' | head -n1 || true)"
echo "canonical(green)=$CANON"
echo "$CANON" | grep -q 'https://archicratie\.trans-hands\.synology\.me/' || {
echo "❌ live canonical mismatch"
docker logs --tail 120 archicratie-web-green || true
rollback
exit 4
echo "❌ live canonical mismatch"; rollback; exit 4;
}
echo "✅ live OK"
- name: HOTPATCH — deep merge shards -> annotations-index + copy changed media into blue+green
run: |
set -euo pipefail
source /tmp/deploy.env
[[ "${GO:-0}" == "1" ]] || { echo " skipped"; exit 0; }
python3 - <<'PY'
import os, re, json, glob
import yaml
import datetime as dt
ROOT = os.getcwd()
ANNO_ROOT = os.path.join(ROOT, "src", "annotations")
def is_obj(x): return isinstance(x, dict)
def is_arr(x): return isinstance(x, list)
def iso_dt(x):
if isinstance(x, dt.datetime):
if x.tzinfo is None:
return x.isoformat()
return x.astimezone(dt.timezone.utc).isoformat().replace("+00:00","Z")
if isinstance(x, dt.date):
return x.isoformat()
return None
def normalize(x):
s = iso_dt(x)
if s is not None: return s
if isinstance(x, dict):
return {str(k): normalize(v) for k, v in x.items()}
if isinstance(x, list):
return [normalize(v) for v in x]
return x
def key_media(it): return str((it or {}).get("src",""))
def key_ref(it):
it = it or {}
return "||".join([str(it.get("url","")), str(it.get("label","")), str(it.get("kind","")), str(it.get("citation",""))])
def key_comment(it): return str((it or {}).get("text","")).strip()
def dedup_extend(dst_list, src_list, key_fn):
seen = set(); out = []
for x in (dst_list or []):
x = normalize(x); k = key_fn(x)
if k and k not in seen: seen.add(k); out.append(x)
for x in (src_list or []):
x = normalize(x); k = key_fn(x)
if k and k not in seen: seen.add(k); out.append(x)
return out
def deep_merge(dst, src):
src = normalize(src)
for k, v in (src or {}).items():
if k in ("media","refs","comments_editorial") and is_arr(v):
if k == "media": dst[k] = dedup_extend(dst.get(k, []), v, key_media)
elif k == "refs": dst[k] = dedup_extend(dst.get(k, []), v, key_ref)
else: dst[k] = dedup_extend(dst.get(k, []), v, key_comment)
continue
if is_obj(v):
if not is_obj(dst.get(k)): dst[k] = {}
deep_merge(dst[k], v)
continue
if is_arr(v):
cur = dst.get(k, [])
if not is_arr(cur): cur = []
seen = set(); out = []
for x in cur:
x = normalize(x)
s = json.dumps(x, sort_keys=True, ensure_ascii=False)
if s not in seen: seen.add(s); out.append(x)
for x in v:
x = normalize(x)
s = json.dumps(x, sort_keys=True, ensure_ascii=False)
if s not in seen: seen.add(s); out.append(x)
dst[k] = out
continue
v = normalize(v)
if k not in dst or dst.get(k) in (None, ""):
dst[k] = v
def para_num(pid):
m = re.match(r"^p-(\d+)-", str(pid))
return int(m.group(1)) if m else 10**9
def sort_lists(entry):
for k in ("media","refs","comments_editorial"):
arr = entry.get(k)
if not is_arr(arr): continue
def ts(x):
x = normalize(x)
try:
s = str((x or {}).get("ts",""))
return dt.datetime.fromisoformat(s.replace("Z","+00:00")).timestamp() if s else 0
except Exception:
return 0
arr = [normalize(x) for x in arr]
arr.sort(key=lambda x: (ts(x), json.dumps(x, sort_keys=True, ensure_ascii=False)))
entry[k] = arr
if not os.path.isdir(ANNO_ROOT):
raise SystemExit(f"Missing annotations root: {ANNO_ROOT}")
pages = {}
errors = []
files = sorted(glob.glob(os.path.join(ANNO_ROOT, "**", "*.yml"), recursive=True))
for fp in files:
try:
with open(fp, "r", encoding="utf-8") as f:
doc = yaml.safe_load(f) or {}
doc = normalize(doc)
if not isinstance(doc, dict) or doc.get("schema") != 1:
continue
page = str(doc.get("page","")).strip().strip("/")
paras = doc.get("paras") or {}
if not page or not isinstance(paras, dict):
continue
pg = pages.setdefault(page, {"paras": {}})
for pid, entry in paras.items():
pid = str(pid)
if pid not in pg["paras"] or not isinstance(pg["paras"].get(pid), dict):
pg["paras"][pid] = {}
if isinstance(entry, dict):
deep_merge(pg["paras"][pid], entry)
sort_lists(pg["paras"][pid])
except Exception as e:
errors.append({"file": os.path.relpath(fp, ROOT), "error": str(e)})
for page, obj in pages.items():
keys = list((obj.get("paras") or {}).keys())
keys.sort(key=lambda k: (para_num(k), k))
obj["paras"] = {k: obj["paras"][k] for k in keys}
out = {
"schema": 1,
"generatedAt": dt.datetime.utcnow().replace(tzinfo=dt.timezone.utc).isoformat().replace("+00:00","Z"),
"pages": pages,
"stats": {
"pages": len(pages),
"paras": sum(len(v.get("paras") or {}) for v in pages.values()),
"errors": len(errors),
},
"errors": errors,
}
with open("/tmp/annotations-index.json", "w", encoding="utf-8") as f:
json.dump(out, f, ensure_ascii=False)
print("OK: wrote /tmp/annotations-index.json pages=", out["stats"]["pages"], "paras=", out["stats"]["paras"], "errors=", out["stats"]["errors"])
PY
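
The heart of the merge script above is dedup-by-key per list kind: media by `src`, refs by a composite of url/label/kind/citation, editorial comments by trimmed text; the first occurrence wins, so reruns are idempotent. That step alone, sketched in Node:

```js
// Sketch: order-preserving keyed dedup, as used when merging annotation shards.
function dedupExtend(dst, src, keyFn) {
  const seen = new Set();
  const out = [];
  for (const x of [...(dst || []), ...(src || [])]) {
    const k = keyFn(x);
    if (k && !seen.has(k)) { seen.add(k); out.push(x); } // first occurrence wins
  }
  return out;
}

const keyMedia = (it) => String((it || {}).src || "");
console.log(dedupExtend(
  [{ src: "/media/a.jpg" }],
  [{ src: "/media/a.jpg" }, { src: "/media/b.jpg" }],
  keyMedia,
)); // keeps a.jpg once, appends b.jpg
```
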
# patch JSON into running containers
for c in archicratie-web-blue archicratie-web-green; do
echo "== patch annotations-index.json into $c =="
docker cp /tmp/annotations-index.json "${c}:/usr/share/nginx/html/annotations-index.json"
done
# copy changed media files into containers (so new media appears without rebuild)
if [[ -s /tmp/changed.txt ]]; then
while IFS= read -r f; do
[[ -n "$f" ]] || continue
if [[ "$f" == public/media/* ]]; then
dest="/usr/share/nginx/html/${f#public/}" # => /usr/share/nginx/html/media/...
for c in archicratie-web-blue archicratie-web-green; do
echo "== copy media into $c: $f -> $dest =="
docker exec "$c" sh -lc "mkdir -p \"$(dirname "$dest")\""
docker cp "$f" "$c:$dest"
done
fi
done < /tmp/changed.txt
fi
# smoke after patch
for p in 8081 8082; do
echo "== smoke annotations-index on $p =="
curl -fsS --max-time 6 "http://127.0.0.1:${p}/annotations-index.json" \
| python3 -c 'import sys,json; j=json.load(sys.stdin); print("generatedAt:", j.get("generatedAt")); print("pages:", len(j.get("pages") or {})); print("paras:", j.get("stats",{}).get("paras"))'
done
echo "✅ hotpatch done"
- name: Debug on failure (containers status/logs)
if: ${{ failure() }}
run: |
set -euo pipefail
echo "== docker ps =="
docker ps --format 'table {{.Names}}\t{{.Status}}\t{{.Image}}' | sed -n '1,80p' || true
for c in archicratie-web-blue archicratie-web-green; do
echo "== logs $c (tail 200) =="
docker logs --tail 200 "$c" || true
done
set -e

View File

@@ -1,395 +0,0 @@
name: Proposer Apply (PR)
on:
issues:
types: [labeled]
workflow_dispatch:
inputs:
issue:
description: "Issue number to apply (Proposer: correction/fact-check)"
required: true
env:
NODE_OPTIONS: --dns-result-order=ipv4first
defaults:
run:
shell: bash
concurrency:
group: proposer-apply-${{ github.event.issue.number || inputs.issue || 'manual' }}
cancel-in-progress: true
jobs:
apply-proposer:
runs-on: mac-ci
container:
image: mcr.microsoft.com/devcontainers/javascript-node:22-bookworm
steps:
- name: Tools sanity
run: |
set -euo pipefail
git --version
node --version
npm --version
- name: Derive context (event.json / workflow_dispatch)
env:
INPUT_ISSUE: ${{ inputs.issue }}
FORGE_API: ${{ vars.FORGE_API || vars.FORGE_BASE }}
run: |
set -euo pipefail
export EVENT_JSON="/var/run/act/workflow/event.json"
test -f "$EVENT_JSON" || { echo "❌ Missing $EVENT_JSON"; exit 1; }
node --input-type=module - <<'NODE' > /tmp/proposer.env
import fs from "node:fs";
const ev = JSON.parse(fs.readFileSync(process.env.EVENT_JSON, "utf8"));
const repoObj = ev?.repository || {};
const cloneUrl =
repoObj?.clone_url ||
(repoObj?.html_url ? (repoObj.html_url.replace(/\/$/,"") + ".git") : "");
if (!cloneUrl) throw new Error("No repository clone_url/html_url in event.json");
let owner =
repoObj?.owner?.login ||
repoObj?.owner?.username ||
(repoObj?.full_name ? repoObj.full_name.split("/")[0] : "");
let repo =
repoObj?.name ||
(repoObj?.full_name ? repoObj.full_name.split("/")[1] : "");
if (!owner || !repo) {
const m = cloneUrl.match(/[:/](?<o>[^/]+)\/(?<r>[^/]+?)(?:\.git)?$/);
if (m?.groups) { owner = owner || m.groups.o; repo = repo || m.groups.r; }
}
if (!owner || !repo) throw new Error("Cannot infer owner/repo");
const defaultBranch = repoObj?.default_branch || "main";
const issueNumber =
ev?.issue?.number ||
ev?.issue?.index ||
(process.env.INPUT_ISSUE ? Number(process.env.INPUT_ISSUE) : 0);
if (!issueNumber || !Number.isFinite(Number(issueNumber))) {
throw new Error("No issue number in event.json or workflow_dispatch input");
}
const labelName =
ev?.label?.name ||
ev?.label ||
"workflow_dispatch";
const u = new URL(cloneUrl);
const origin = u.origin;
const apiBase = (process.env.FORGE_API && String(process.env.FORGE_API).trim())
? String(process.env.FORGE_API).trim().replace(/\/+$/,"")
: origin;
function sh(s){ return JSON.stringify(String(s)); }
process.stdout.write([
`CLONE_URL=${sh(cloneUrl)}`,
`OWNER=${sh(owner)}`,
`REPO=${sh(repo)}`,
`DEFAULT_BRANCH=${sh(defaultBranch)}`,
`ISSUE_NUMBER=${sh(issueNumber)}`,
`LABEL_NAME=${sh(labelName)}`,
`API_BASE=${sh(apiBase)}`
].join("\n") + "\n");
NODE
echo "✅ context:"
sed -n '1,120p' /tmp/proposer.env
- name: Gate on label state/approved
run: |
set -euo pipefail
source /tmp/proposer.env
if [[ "$LABEL_NAME" != "state/approved" && "$LABEL_NAME" != "workflow_dispatch" ]]; then
echo " label=$LABEL_NAME => skip"
echo "SKIP=1" >> /tmp/proposer.env
exit 0
fi
echo "✅ proceed (issue=$ISSUE_NUMBER)"
- name: Fetch issue + API-hard gate on (state/approved present + proposer type)
env:
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
run: |
set -euo pipefail
source /tmp/proposer.env
[[ "${SKIP:-0}" != "1" ]] || { echo " skipped"; exit 0; }
test -n "${FORGE_TOKEN:-}" || { echo "❌ Missing secret FORGE_TOKEN"; exit 1; }
curl -fsS \
-H "Authorization: token $FORGE_TOKEN" \
-H "Accept: application/json" \
"$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE_NUMBER" \
-o /tmp/issue.json
node --input-type=module - <<'NODE' >> /tmp/proposer.env
import fs from "node:fs";
const issue = JSON.parse(fs.readFileSync("/tmp/issue.json","utf8"));
const title = String(issue.title || "");
const body = String(issue.body || "").replace(/\r\n/g, "\n");
const labels = Array.isArray(issue.labels) ? issue.labels.map(l => String(l.name||"")).filter(Boolean) : [];
function pickLine(key) {
const re = new RegExp(`^\\s*${key}\\s*:\\s*([^\\n\\r]+)`, "mi");
const m = body.match(re);
return m ? m[1].trim() : "";
}
const typeRaw = pickLine("Type");
const type = String(typeRaw || "").trim().toLowerCase();
const hasApproved = labels.includes("state/approved");
const proposer = new Set(["type/correction","type/fact-check"]);
const out = [];
out.push(`ISSUE_TITLE=${JSON.stringify(title)}`);
out.push(`ISSUE_TYPE=${JSON.stringify(type)}`);
out.push(`HAS_APPROVED=${hasApproved ? "1":"0"}`);
if (!hasApproved) {
out.push(`SKIP=1`);
out.push(`SKIP_REASON=${JSON.stringify("approved_not_present")}`);
} else if (!type) {
out.push(`SKIP=1`);
out.push(`SKIP_REASON=${JSON.stringify("missing_type")}`);
} else if (!proposer.has(type)) {
out.push(`SKIP=1`);
out.push(`SKIP_REASON=${JSON.stringify("not_proposer:"+type)}`);
}
process.stdout.write(out.join("\n") + "\n");
NODE
echo "✅ proposer gating:"
grep -E '^(ISSUE_TYPE|HAS_APPROVED|SKIP|SKIP_REASON)=' /tmp/proposer.env || true
- name: Comment issue if skipped
if: ${{ always() }}
env:
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
run: |
set -euo pipefail
source /tmp/proposer.env || true
[[ "${SKIP:-0}" == "1" ]] || exit 0
[[ "$LABEL_NAME" == "state/approved" || "$LABEL_NAME" == "workflow_dispatch" ]] || exit 0
REASON="${SKIP_REASON:-}"
TYPE="${ISSUE_TYPE:-}"
if [[ "$REASON" == "approved_not_present" ]]; then
MSG=" Proposer Apply: skip — le label **state/approved** n'est pas présent sur le ticket au moment du run (gate API-hard)."
elif [[ "$REASON" == "missing_type" ]]; then
MSG=" Proposer Apply: skip — champ **Type:** manquant/illisible. Attendu: type/correction ou type/fact-check."
else
MSG=" Proposer Apply: skip — Type non-Proposer (${TYPE}). (Ce workflow ne traite que correction/fact-check.)"
fi
PAYLOAD="$(node --input-type=module -e 'console.log(JSON.stringify({body: process.argv[1]||""}))' "$MSG")"
curl -fsS -X POST \
-H "Authorization: token $FORGE_TOKEN" \
-H "Content-Type: application/json" \
"$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE_NUMBER/comments" \
--data-binary "$PAYLOAD" || true
- name: Checkout default branch
run: |
set -euo pipefail
source /tmp/proposer.env
[[ "${SKIP:-0}" != "1" ]] || { echo " skipped"; exit 0; }
rm -rf .git
git init -q
git remote add origin "$CLONE_URL"
git fetch --depth 1 origin "$DEFAULT_BRANCH"
git -c advice.detachedHead=false checkout -q FETCH_HEAD
git log -1 --oneline
echo "✅ workspace:"
ls -la | sed -n '1,120p'
- name: Detect app dir (repo-root vs ./site)
run: |
set -euo pipefail
source /tmp/proposer.env
[[ "${SKIP:-0}" != "1" ]] || { echo " skipped"; exit 0; }
APP_DIR="."
if [[ -d "site" && -f "site/package.json" ]]; then
APP_DIR="site"
fi
echo "APP_DIR=$APP_DIR" >> /tmp/proposer.env
echo "✅ APP_DIR=$APP_DIR"
ls -la "$APP_DIR" | sed -n '1,120p'
test -f "$APP_DIR/package.json" || { echo "❌ package.json missing in APP_DIR=$APP_DIR"; exit 1; }
test -d "$APP_DIR/scripts" || { echo "❌ scripts/ missing in APP_DIR=$APP_DIR"; exit 1; }
- name: NPM harden (reduce flakiness)
run: |
set -euo pipefail
source /tmp/proposer.env
[[ "${SKIP:-0}" != "1" ]] || exit 0
cd "$APP_DIR"
npm config set fetch-retries 5
npm config set fetch-retry-mintimeout 20000
npm config set fetch-retry-maxtimeout 120000
npm config set registry https://registry.npmjs.org
- name: Install deps (APP_DIR)
run: |
set -euo pipefail
source /tmp/proposer.env
[[ "${SKIP:-0}" != "1" ]] || { echo " skipped"; exit 0; }
cd "$APP_DIR"
npm ci --no-audit --no-fund
- name: Build dist baseline (APP_DIR)
run: |
set -euo pipefail
source /tmp/proposer.env
[[ "${SKIP:-0}" != "1" ]] || { echo " skipped"; exit 0; }
cd "$APP_DIR"
npm run build
- name: Apply ticket (alias + commit) on bot branch
continue-on-error: true
env:
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
BOT_GIT_NAME: ${{ secrets.BOT_GIT_NAME }}
BOT_GIT_EMAIL: ${{ secrets.BOT_GIT_EMAIL }}
FORGE_API: ${{ vars.FORGE_API || vars.FORGE_BASE }}
run: |
set -euo pipefail
source /tmp/proposer.env
[[ "${SKIP:-0}" != "1" ]] || { echo " skipped"; exit 0; }
git config user.name "${BOT_GIT_NAME:-archicratie-bot}"
git config user.email "${BOT_GIT_EMAIL:-bot@archicratie.local}"
START_SHA="$(git rev-parse HEAD)"
TS="$(date -u +%Y%m%d-%H%M%S)"
BR="bot/proposer-${ISSUE_NUMBER}-${TS}"
echo "BRANCH=$BR" >> /tmp/proposer.env
git checkout -b "$BR"
export GITEA_OWNER="$OWNER"
export GITEA_REPO="$REPO"
export FORGE_BASE="$API_BASE"
LOG="/tmp/proposer-apply.log"
set +e
(cd "$APP_DIR" && node scripts/apply-ticket.mjs "$ISSUE_NUMBER" --alias --commit) >"$LOG" 2>&1
RC=$?
set -e
echo "APPLY_RC=$RC" >> /tmp/proposer.env
echo "== apply log (tail) =="
tail -n 200 "$LOG" || true
END_SHA="$(git rev-parse HEAD)"
if [[ "$RC" -ne 0 ]]; then
echo "NOOP=0" >> /tmp/proposer.env
exit 0
fi
if [[ "$START_SHA" == "$END_SHA" ]]; then
echo "NOOP=1" >> /tmp/proposer.env
else
echo "NOOP=0" >> /tmp/proposer.env
echo "END_SHA=$END_SHA" >> /tmp/proposer.env
fi
- name: Push bot branch
if: ${{ always() }}
env:
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
run: |
set -euo pipefail
source /tmp/proposer.env || true
[[ "${SKIP:-0}" != "1" ]] || exit 0
[[ "${APPLY_RC:-0}" == "0" ]] || { echo " apply failed -> skip push"; exit 0; }
[[ "${NOOP:-0}" == "0" ]] || { echo " no-op -> skip push"; exit 0; }
[[ -n "${BRANCH:-}" ]] || { echo " BRANCH unset -> skip push"; exit 0; }
AUTH_URL="$(node --input-type=module -e '
const [clone, tok] = process.argv.slice(1);
const u = new URL(clone);
u.username = "oauth2";
u.password = tok;
console.log(u.toString());
' "$CLONE_URL" "$FORGE_TOKEN")"
git remote set-url origin "$AUTH_URL"
git push -u origin "$BRANCH"
- name: Create PR + comment issue
if: ${{ always() }}
env:
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
run: |
set -euo pipefail
source /tmp/proposer.env || true
[[ "${SKIP:-0}" != "1" ]] || exit 0
[[ "${APPLY_RC:-0}" == "0" ]] || exit 0
[[ "${NOOP:-0}" == "0" ]] || exit 0
[[ -n "${BRANCH:-}" ]] || { echo " BRANCH unset -> skip PR"; exit 0; }
PR_TITLE="proposer: apply ticket #${ISSUE_NUMBER}"
PR_BODY="PR auto depuis ticket #${ISSUE_NUMBER} (state/approved).\n\n- Branche: ${BRANCH}\n- Commit: ${END_SHA:-unknown}\n\nMerge si CI OK."
PR_PAYLOAD="$(node --input-type=module -e '
const [title, body, base, head] = process.argv.slice(1);
console.log(JSON.stringify({ title, body, base, head, allow_maintainer_edit: true }));
' "$PR_TITLE" "$PR_BODY" "$DEFAULT_BRANCH" "${OWNER}:${BRANCH}")"
PR_JSON="$(curl -fsS -X POST \
-H "Authorization: token $FORGE_TOKEN" \
-H "Content-Type: application/json" \
"$API_BASE/api/v1/repos/$OWNER/$REPO/pulls" \
--data-binary "$PR_PAYLOAD")"
PR_URL="$(node --input-type=module -e '
const pr = JSON.parse(process.argv[1] || "{}");
console.log(pr.html_url || pr.url || "");
' "$PR_JSON")"
test -n "$PR_URL" || { echo "❌ PR URL missing. Raw: $PR_JSON"; exit 1; }
MSG="✅ PR Proposer créée pour ticket #${ISSUE_NUMBER} : ${PR_URL}"
C_PAYLOAD="$(node --input-type=module -e 'console.log(JSON.stringify({body: process.argv[1]||""}))' "$MSG")"
curl -fsS -X POST \
-H "Authorization: token $FORGE_TOKEN" \
-H "Content-Type: application/json" \
"$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE_NUMBER/comments" \
--data-binary "$C_PAYLOAD"
- name: Finalize (fail job if apply failed)
if: ${{ always() }}
run: |
set -euo pipefail
source /tmp/proposer.env || true
[[ "${SKIP:-0}" != "1" ]] || exit 0
RC="${APPLY_RC:-0}"
if [[ "$RC" != "0" ]]; then
echo "❌ apply failed (rc=$RC)"
exit "$RC"
fi
echo "✅ apply ok"

View File

@@ -3,7 +3,7 @@ on: [push, workflow_dispatch]
jobs:
smoke:
runs-on: mac-ci
runs-on: ubuntu-latest
steps:
- run: node -v && npm -v
- run: echo "runner OK"

Two binary media files deleted (not shown; 61 KiB each).

View File

@@ -1,18 +1,9 @@
#!/usr/bin/env node
// scripts/apply-annotation-ticket.mjs
// Applies a Gitea "type/media | type/reference | type/comment" ticket to src/annotations + public/media
// Robust, idempotent, non-destructive
//
// Applies a Gitea "type/media | type/reference | type/comment" ticket to:
//
// ✅ src/annotations/<oeuvre>/<chapitre>/<paraId>.yml (sharded per paragraph)
// ✅ public/media/<oeuvre>/<chapitre>/<paraId>/<file>
//
// Backward compat: reads (if present) the old monolith:
// src/annotations/<oeuvre>/<chapitre>.yml
// and deep-merges it non-destructively into the shard on a fresh apply,
// to allow progressive migration without loss.
//
// Robust, idempotent, non-destructive.
// DRY RUN if --dry-run
// DRY RUN by default if --dry-run
// Options: --dry-run --no-download --verify --strict --commit --close
//
// Required env:
@@ -45,7 +36,7 @@ import YAML from "yaml";
function usage(exitCode = 0) {
console.log(`
apply-annotation-ticket — applies a SidePanel ticket (media/ref/comment) to src/annotations/ (shard per paragraph)
apply-annotation-ticket — applies a SidePanel ticket (media/ref/comment) to src/annotations/
Usage:
node scripts/apply-annotation-ticket.mjs <issue_number> [--dry-run] [--no-download] [--verify] [--strict] [--commit] [--close]
@@ -53,9 +44,9 @@ Usage:
Flags:
--dry-run : writes nothing (prints a preview)
--no-download : does not try to download the (media) attachments
--verify : checks that (page, anchor) exist (dist/para-index.json if available, otherwise baseline)
--strict : refuses if a ref URL is invalid (http/https) OR a media caption is empty OR verify is impossible
--commit : git add + git commit (commits on the current branch)
--verify : tries to verify that (page, anchor) exist (baseline/dist if available)
--strict : refuses if a ref URL is invalid (http/https) OR a media caption is empty
--commit : git add + git commit (the script commits on the current branch)
--close : closes the ticket (requires --commit)
Required env:
@@ -66,7 +57,7 @@ Env optionnel:
GITEA_OWNER / GITEA_REPO (otherwise auto-detected via git remote)
ANNO_DIR (default: src/annotations)
PUBLIC_DIR (default: public)
MEDIA_ROOT (default URL: /media)
MEDIA_ROOT (default URL: /media) -> written to public/media/...
Exit codes:
0 ok
@@ -111,8 +102,6 @@ const ANNO_DIR = path.join(CWD, process.env.ANNO_DIR || "src", "annotations");
const PUBLIC_DIR = path.join(CWD, process.env.PUBLIC_DIR || "public");
const MEDIA_URL_ROOT = String(process.env.MEDIA_ROOT || "/media").replace(/\/+$/, "");
/* --------------------------------- helpers -------------------------------- */
function getEnv(name, fallback = "") {
return (process.env[name] ?? fallback).trim();
}
@@ -134,12 +123,7 @@ function runQuiet(cmd, args, opts = {}) {
}
async function exists(p) {
try {
await fs.access(p);
return true;
} catch {
return false;
}
try { await fs.access(p); return true; } catch { return false; }
}
function inferOwnerRepoFromGit() {
@@ -156,371 +140,6 @@ function gitHasStagedChanges() {
return r.status === 1;
}
function escapeRegExp(s) {
return String(s).replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
}
function pickLine(body, key) {
const re = new RegExp(`^\\s*${escapeRegExp(key)}\\s*:\\s*([^\\n\\r]+)`, "mi");
const m = String(body || "").match(re);
return m ? m[1].trim() : "";
}
function pickSection(body, markers) {
const text = String(body || "").replace(/\r\n/g, "\n");
const idx = markers
.map((m) => ({ m, i: text.toLowerCase().indexOf(m.toLowerCase()) }))
.filter((x) => x.i >= 0)
.sort((a, b) => a.i - b.i)[0];
if (!idx) return "";
const start = idx.i + idx.m.length;
const tail = text.slice(start);
const stops = ["\n## ", "\n---", "\nJustification", "\nProposition", "\nSources"];
let end = tail.length;
for (const s of stops) {
const j = tail.toLowerCase().indexOf(s.toLowerCase());
if (j >= 0 && j < end) end = j;
}
return tail.slice(0, end).trim();
}
function normalizeChemin(chemin) {
let c = String(chemin || "").trim();
if (!c) return "";
if (!c.startsWith("/")) c = "/" + c;
if (!c.endsWith("/")) c = c + "/";
c = c.replace(/\/{2,}/g, "/");
return c;
}
function normalizePageKeyFromChemin(chemin) {
// e.g. /archicrat-ia/chapitre-4/ => archicrat-ia/chapitre-4
return normalizeChemin(chemin).replace(/^\/+|\/+$/g, "");
}
function normalizeAnchorId(s) {
let a = String(s || "").trim();
if (a.startsWith("#")) a = a.slice(1);
return a;
}
function assert(cond, msg, code = 1) {
if (!cond) {
const e = new Error(msg);
e.__exitCode = code;
throw e;
}
}
function isPlainObject(x) {
return !!x && typeof x === "object" && !Array.isArray(x);
}
function paraIndexFromId(id) {
const m = String(id).match(/^p-(\d+)-/i);
return m ? Number(m[1]) : Number.NaN;
}
function isHttpUrl(u) {
try {
const x = new URL(String(u));
return x.protocol === "http:" || x.protocol === "https:";
} catch {
return false;
}
}
function stableSortByTs(arr) {
if (!Array.isArray(arr)) return;
arr.sort((a, b) => {
const ta = Date.parse(a?.ts || "") || 0;
const tb = Date.parse(b?.ts || "") || 0;
if (ta !== tb) return ta - tb;
return JSON.stringify(a).localeCompare(JSON.stringify(b));
});
}
function normPage(s) {
let x = String(s || "").trim();
if (!x) return "";
// strip the origin when given a full URL
x = x.replace(/^https?:\/\/[^/]+/i, "");
// drop query/hash
x = x.split("#")[0].split("?")[0];
// drop index.html
x = x.replace(/index\.html$/i, "");
// trim edge slashes
x = x.replace(/^\/+/, "").replace(/\/+$/, "");
return x;
}
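// For illustration (example.org is a placeholder host, not from the repo),
// normPage reduces all of these to the bare page key:
normPage("https://example.org/archicrat-ia/chapitre-4/index.html?x=1#p-2-abc"); // => "archicrat-ia/chapitre-4"
normPage("/archicrat-ia/chapitre-4/"); // => "archicrat-ia/chapitre-4"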
/* ------------------------------ para-index (verify + order) ------------------------------ */
async function loadParaOrderFromDist(pageKey) {
const distIdx = path.join(CWD, "dist", "para-index.json");
if (!(await exists(distIdx))) return null;
let j;
try {
j = JSON.parse(await fs.readFile(distIdx, "utf8"));
} catch {
return null;
}
const want = normPage(pageKey);
// Support A) { items:[{id,page,...}, ...] } (or variants)
const items = Array.isArray(j?.items)
? j.items
: Array.isArray(j?.index?.items)
? j.index.items
: null;
if (items) {
const ids = [];
for (const it of items) {
// the page may sit under many different keys
const pageCand = normPage(
it?.page ??
it?.pageKey ??
it?.path ??
it?.route ??
it?.href ??
it?.url ??
""
);
// the id may sit under many different keys
let id = String(it?.id ?? it?.paraId ?? it?.anchorId ?? it?.anchor ?? "");
if (id.startsWith("#")) id = id.slice(1);
if (pageCand === want && id) ids.push(id);
}
if (ids.length) return ids;
}
// Support B) { byId: { "p-...": { page:"...", ... }, ... } }
if (j?.byId && typeof j.byId === "object") {
const ids = Object.keys(j.byId)
.filter((id) => {
const meta = j.byId[id] || {};
const pageCand = normPage(meta.page ?? meta.pageKey ?? meta.path ?? meta.route ?? meta.url ?? "");
return pageCand === want;
});
if (ids.length) {
ids.sort((a, b) => {
const ia = paraIndexFromId(a);
const ib = paraIndexFromId(b);
if (Number.isFinite(ia) && Number.isFinite(ib) && ia !== ib) return ia - ib;
return String(a).localeCompare(String(b));
});
return ids;
}
}
// Support C) { pages: { "archicrat-ia/chapitre-4": { ids:[...] } } } (or variants)
if (j?.pages && typeof j.pages === "object") {
// try to find the right key even when it is /.../ or .../index.html
const keys = Object.keys(j.pages);
const hit = keys.find((k) => normPage(k) === want);
if (hit) {
const pg = j.pages[hit];
if (Array.isArray(pg?.ids)) return pg.ids.map(String);
if (Array.isArray(pg?.paras)) return pg.paras.map(String);
}
}
return null;
}
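// The three para-index.json shapes tolerated above, as minimal sketches
// (key names come from the probes above; the ids here are invented):
// A) flat list, page under any of page/pageKey/path/route/href/url:
// { "items": [{ "id": "p-0-aaaa1111", "page": "archicrat-ia/chapitre-4" }] }
// B) byId map; order is rebuilt from the numeric part of the ids:
// { "byId": { "p-0-aaaa1111": { "page": "archicrat-ia/chapitre-4" } } }
// C) per-page buckets, keys matched through normPage, ids (or paras) arrays:
// { "pages": { "/archicrat-ia/chapitre-4/": { "ids": ["p-0-aaaa1111"] } } }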
async function tryVerifyAnchor(pageKey, anchorId) {
// 1) dist/para-index.json: full order when possible
const order = await loadParaOrderFromDist(pageKey);
if (order) return order.includes(anchorId);
// 1bis) dist/para-index.json: "best effort" fallback => raw search (IDs are near-unique)
const distIdx = path.join(CWD, "dist", "para-index.json");
if (await exists(distIdx)) {
try {
const raw = await fs.readFile(distIdx, "utf8");
if (raw.includes(`"${anchorId}"`) || raw.includes(`"#${anchorId}"`)) {
return true;
}
} catch {
// ignore
}
}
// 2) tests/anchors-baseline.json (fallback)
const base = path.join(CWD, "tests", "anchors-baseline.json");
if (await exists(base)) {
try {
const j = JSON.parse(await fs.readFile(base, "utf8"));
const candidates = [];
if (j?.pages && typeof j.pages === "object") {
for (const [k, v] of Object.entries(j.pages)) {
if (!Array.isArray(v)) continue;
if (normPage(k).includes(normPage(pageKey))) candidates.push(...v);
}
}
if (Array.isArray(j?.entries)) {
for (const it of j.entries) {
const p = String(it?.page || "");
const ids = it?.ids;
if (Array.isArray(ids) && normPage(p).includes(normPage(pageKey))) candidates.push(...ids);
}
}
if (candidates.length) return candidates.some((x) => String(x) === anchorId);
} catch {
// ignore
}
}
return null; // cannot verify
}
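// tryVerifyAnchor is deliberately tri-state; a sketch of the calling contract
// (mirrors how main() consumes it below; STRICT is the module-level flag):
// true => anchor proven present, false => proven absent, null => cannot verify.
const ok = await tryVerifyAnchor("archicrat-ia/chapitre-4", "p-11-67c14c09");
if (ok === false) throw new Error("anchor not found"); // hard fail
if (ok === null && STRICT) throw new Error("cannot verify"); // strict-only fail
// ok === true, or null without --strict: proceed.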
/* ----------------------------- deep merge helpers (non destructive) ----------------------------- */
function keyMedia(x) {
return String(x?.src || "");
}
function keyRef(x) {
return `${x?.url || ""}||${x?.label || ""}||${x?.kind || ""}||${x?.citation || ""}`;
}
function keyComment(x) {
return String(x?.text || "").trim();
}
function uniqUnion(dstArr, srcArr, keyFn) {
const out = Array.isArray(dstArr) ? [...dstArr] : [];
const seen = new Set(out.map((x) => keyFn(x)));
for (const it of (Array.isArray(srcArr) ? srcArr : [])) {
const k = keyFn(it);
if (!k) continue;
if (!seen.has(k)) {
seen.add(k);
out.push(it);
}
}
return out;
}
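// Usage sketch with assumed data: first occurrence wins, items whose key is
// empty are dropped, everything else is appended in order.
const merged = uniqUnion(
[{ src: "/media/a.png" }],
[{ src: "/media/a.png" }, { src: "/media/b.png" }],
keyMedia
);
// merged => [{ src: "/media/a.png" }, { src: "/media/b.png" }]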
function deepMergeEntry(dst, src) {
if (!isPlainObject(dst) || !isPlainObject(src)) return;
for (const [k, v] of Object.entries(src)) {
if (k === "media" && Array.isArray(v)) {
dst.media = uniqUnion(dst.media, v, keyMedia);
continue;
}
if (k === "refs" && Array.isArray(v)) {
dst.refs = uniqUnion(dst.refs, v, keyRef);
continue;
}
if (k === "comments_editorial" && Array.isArray(v)) {
dst.comments_editorial = uniqUnion(dst.comments_editorial, v, keyComment);
continue;
}
if (isPlainObject(v)) {
if (!isPlainObject(dst[k])) dst[k] = {};
deepMergeEntry(dst[k], v);
continue;
}
if (Array.isArray(v)) {
const cur = Array.isArray(dst[k]) ? dst[k] : [];
const seen = new Set(cur.map((x) => JSON.stringify(x)));
const out = [...cur];
for (const it of v) {
const s = JSON.stringify(it);
if (!seen.has(s)) {
seen.add(s);
out.push(it);
}
}
dst[k] = out;
continue;
}
// scalar: set only if missing/empty
if (!(k in dst) || dst[k] == null || dst[k] === "") {
dst[k] = v;
}
}
}
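// Worked example with assumed data: scalars are only filled when missing or
// empty, and the known arrays are unioned by their dedup key.
const dst = { credit: "", media: [{ src: "/media/a.png" }] };
deepMergeEntry(dst, { credit: "CC-BY", media: [{ src: "/media/b.png" }] });
// dst.credit === "CC-BY" (the empty string counts as missing)
// dst.media => both entries, deduplicated by src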
/* ----------------------------- annotations I/O ----------------------------- */
async function loadAnnoDocYaml(fileAbs, pageKey) {
if (!(await exists(fileAbs))) {
return { schema: 1, page: pageKey, paras: {} };
}
const raw = await fs.readFile(fileAbs, "utf8");
let doc;
try {
doc = YAML.parse(raw);
} catch (e) {
throw new Error(`${path.relative(CWD, fileAbs)}: parse failed: ${String(e?.message ?? e)}`);
}
assert(isPlainObject(doc), `${path.relative(CWD, fileAbs)}: doc must be an object`, 2);
assert(doc.schema === 1, `${path.relative(CWD, fileAbs)}: schema must be 1`, 2);
assert(isPlainObject(doc.paras), `${path.relative(CWD, fileAbs)}: missing object key "paras"`, 2);
if (doc.page != null) {
const got = String(doc.page).replace(/^\/+/, "").replace(/\/+$/, "");
assert(got === pageKey, `${path.relative(CWD, fileAbs)}: page mismatch (page="${doc.page}" vs path="${pageKey}")`, 2);
} else {
doc.page = pageKey;
}
return doc;
}
function sortParasObject(paras, order) {
const keys = Object.keys(paras || {});
const idx = new Map();
if (Array.isArray(order)) order.forEach((id, i) => idx.set(String(id), i));
keys.sort((a, b) => {
const ha = idx.has(a);
const hb = idx.has(b);
if (ha && hb) return idx.get(a) - idx.get(b);
if (ha && !hb) return -1;
if (!ha && hb) return 1;
const ia = paraIndexFromId(a);
const ib = paraIndexFromId(b);
if (Number.isFinite(ia) && Number.isFinite(ib) && ia !== ib) return ia - ib;
return String(a).localeCompare(String(b));
});
const out = {};
for (const k of keys) out[k] = paras[k];
return out;
}
async function saveAnnoDocYaml(fileAbs, doc, order = null) {
await fs.mkdir(path.dirname(fileAbs), { recursive: true });
doc.paras = sortParasObject(doc.paras, order);
for (const e of Object.values(doc.paras || {})) {
if (!isPlainObject(e)) continue;
stableSortByTs(e.media);
stableSortByTs(e.refs);
stableSortByTs(e.comments_editorial);
}
const out = YAML.stringify(doc);
await fs.writeFile(fileAbs, out, "utf8");
}
/* ------------------------------ gitea helpers ------------------------------ */
function apiBaseNorm(forgeApiBase) {
@@ -548,7 +167,7 @@ async function fetchIssue({ forgeApiBase, owner, repo, token, issueNum }) {
}
async function fetchIssueAssets({ forgeApiBase, owner, repo, token, issueNum }) {
// Gitea: /issues/{index}/assets
const url = `${apiBaseNorm(forgeApiBase)}/api/v1/repos/${owner}/${repo}/issues/${issueNum}/assets`;
try {
const json = await giteaGET(url, token);
@@ -596,43 +215,200 @@ async function closeIssue({ forgeApiBase, owner, repo, token, issueNum, comment
}
}
/* ------------------------------ media helpers ------------------------------ */
/* ------------------------------ parsing helpers ---------------------------- */
function inferMediaTypeFromFilename(name) {
const n = String(name || "").toLowerCase();
if (/\.(png|jpe?g|webp|gif|svg)$/.test(n)) return "image";
if (/\.(mp4|webm|mov|m4v)$/.test(n)) return "video";
if (/\.(mp3|wav|ogg|m4a)$/.test(n)) return "audio";
return "link";
function escapeRegExp(s) {
return String(s).replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
}
function sanitizeFilename(name) {
return String(name || "file")
.replace(/[\/\\]/g, "_")
.replace(/[^\w.\-]+/g, "_")
.replace(/_+/g, "_")
.slice(0, 180);
function pickLine(body, key) {
const re = new RegExp(`^\\s*${escapeRegExp(key)}\\s*:\\s*([^\\n\\r]+)`, "mi");
const m = String(body || "").match(re);
return m ? m[1].trim() : "";
}
async function downloadToFile(url, token, destAbs) {
const res = await fetch(url, {
headers: {
Authorization: `token ${token}`,
"User-Agent": "archicratie-apply-annotation/1.0",
},
redirect: "follow",
});
if (!res.ok) {
const t = await res.text().catch(() => "");
throw new Error(`download failed HTTP ${res.status}: ${url}\n${t}`);
function pickSection(body, markers) {
const text = String(body || "").replace(/\r\n/g, "\n");
const idx = markers
.map((m) => ({ m, i: text.toLowerCase().indexOf(m.toLowerCase()) }))
.filter((x) => x.i >= 0)
.sort((a, b) => a.i - b.i)[0];
if (!idx) return "";
const start = idx.i + idx.m.length;
const tail = text.slice(start);
const stops = [
"\n## ",
"\n---",
"\nJustification",
"\nProposition",
"\nSources",
];
let end = tail.length;
for (const s of stops) {
const j = tail.toLowerCase().indexOf(s.toLowerCase());
if (j >= 0 && j < end) end = j;
}
const buf = Buffer.from(await res.arrayBuffer());
await fs.mkdir(path.dirname(destAbs), { recursive: true });
await fs.writeFile(destAbs, buf);
return buf.length;
return tail.slice(0, end).trim();
}
/* ------------------------------ type parsers ------------------------------ */
function normalizeChemin(chemin) {
let c = String(chemin || "").trim();
if (!c) return "";
if (!c.startsWith("/")) c = "/" + c;
if (!c.endsWith("/")) c = c + "/";
c = c.replace(/\/{2,}/g, "/");
return c;
}
function normalizePageKeyFromChemin(chemin) {
return normalizeChemin(chemin).replace(/^\/+|\/+$/g, "");
}
function normalizeAnchorId(s) {
let a = String(s || "").trim();
if (a.startsWith("#")) a = a.slice(1);
return a;
}
function assert(cond, msg, code = 1) {
if (!cond) {
const e = new Error(msg);
e.__exitCode = code;
throw e;
}
}
function isPlainObject(x) {
return !!x && typeof x === "object" && !Array.isArray(x);
}
/* ----------------------------- verify helpers ------------------------------ */
function paraIndexFromId(id) {
const m = String(id).match(/^p-(\d+)-/i);
return m ? Number(m[1]) : Number.NaN;
}
async function tryVerifyAnchor(pageKey, anchorId) {
// 1) dist/para-index.json (if a build has already run)
const distIdx = path.join(CWD, "dist", "para-index.json");
if (await exists(distIdx)) {
const raw = await fs.readFile(distIdx, "utf8");
const idx = JSON.parse(raw);
const byId = idx?.byId;
if (byId && typeof byId === "object" && byId[anchorId] != null) return true;
}
// 2) tests/anchors-baseline.json (si dispo)
const base = path.join(CWD, "tests", "anchors-baseline.json");
if (await exists(base)) {
const raw = await fs.readFile(base, "utf8");
const j = JSON.parse(raw);
// tolerant: look for an array of ids associated with the page
const candidates = [];
// case 1: j.pages[...]
if (j?.pages && typeof j.pages === "object") {
for (const [k, v] of Object.entries(j.pages)) {
if (!Array.isArray(v)) continue;
// match loosely: pageKey contained in the path
if (String(k).includes(pageKey)) candidates.push(...v);
}
}
// case 2: j.entries = [{page, ids}]
if (Array.isArray(j?.entries)) {
for (const it of j.entries) {
const p = String(it?.page || "");
const ids = it?.ids;
if (Array.isArray(ids) && p.includes(pageKey)) candidates.push(...ids);
}
}
if (candidates.length) {
return candidates.some((x) => String(x) === anchorId);
}
}
// cannot verify
return null;
}
/* ----------------------------- annotations I/O ----------------------------- */
async function loadAnnoDoc(fileAbs, pageKey) {
if (!(await exists(fileAbs))) {
return { schema: 1, page: pageKey, paras: {} };
}
const raw = await fs.readFile(fileAbs, "utf8");
let doc;
try {
doc = YAML.parse(raw);
} catch (e) {
throw new Error(`${path.relative(CWD, fileAbs)}: parse failed: ${String(e?.message ?? e)}`);
}
assert(isPlainObject(doc), `${path.relative(CWD, fileAbs)}: doc must be an object`);
assert(doc.schema === 1, `${path.relative(CWD, fileAbs)}: schema must be 1`);
assert(isPlainObject(doc.paras), `${path.relative(CWD, fileAbs)}: missing object key "paras"`);
if (doc.page != null) {
const got = String(doc.page).replace(/^\/+/, "").replace(/\/+$/, "");
assert(got === pageKey, `${path.relative(CWD, fileAbs)}: page mismatch (page="${doc.page}" vs path="${pageKey}")`);
} else {
doc.page = pageKey;
}
return doc;
}
function sortParasObject(paras) {
const keys = Object.keys(paras || {});
keys.sort((a, b) => {
const ia = paraIndexFromId(a);
const ib = paraIndexFromId(b);
if (Number.isFinite(ia) && Number.isFinite(ib) && ia !== ib) return ia - ib;
return String(a).localeCompare(String(b));
});
const out = {};
for (const k of keys) out[k] = paras[k];
return out;
}
async function saveAnnoDocYaml(fileAbs, doc) {
await fs.mkdir(path.dirname(fileAbs), { recursive: true });
doc.paras = sortParasObject(doc.paras);
const out = YAML.stringify(doc);
await fs.writeFile(fileAbs, out, "utf8");
}
/* ------------------------------ apply per type ----------------------------- */
function ensureEntry(doc, paraId) {
if (!doc.paras[paraId] || !isPlainObject(doc.paras[paraId])) doc.paras[paraId] = {};
return doc.paras[paraId];
}
function uniqPush(arr, item, keyFn) {
const k = keyFn(item);
const exists = arr.some((x) => keyFn(x) === k);
if (!exists) arr.push(item);
return !exists;
}
function stableSortByTs(arr) {
if (!Array.isArray(arr)) return;
arr.sort((a, b) => {
const ta = Date.parse(a?.ts || "") || 0;
const tb = Date.parse(b?.ts || "") || 0;
if (ta !== tb) return ta - tb;
return JSON.stringify(a).localeCompare(JSON.stringify(b));
});
}
function parseReferenceBlock(body) {
const block =
@@ -655,6 +431,50 @@ function parseReferenceBlock(body) {
};
}
function inferMediaTypeFromFilename(name) {
const n = String(name || "").toLowerCase();
if (/\.(png|jpe?g|webp|gif|svg)$/.test(n)) return "image";
if (/\.(mp4|webm|mov|m4v)$/.test(n)) return "video";
if (/\.(mp3|wav|ogg|m4a)$/.test(n)) return "audio";
return "link";
}
function sanitizeFilename(name) {
return String(name || "file")
.replace(/[\/\\]/g, "_")
.replace(/[^\w.\-]+/g, "_")
.replace(/_+/g, "_")
.slice(0, 180);
}
function isHttpUrl(u) {
try {
const x = new URL(String(u));
return x.protocol === "http:" || x.protocol === "https:";
} catch {
return false;
}
}
async function downloadToFile(url, token, destAbs) {
const res = await fetch(url, {
headers: {
// most /attachments are public, but keep the token just in case
Authorization: `token ${token}`,
"User-Agent": "archicratie-apply-annotation/1.0",
},
redirect: "follow",
});
if (!res.ok) {
const t = await res.text().catch(() => "");
throw new Error(`download failed HTTP ${res.status}: ${url}\n${t}`);
}
const buf = Buffer.from(await res.arrayBuffer());
await fs.mkdir(path.dirname(destAbs), { recursive: true });
await fs.writeFile(destAbs, buf);
return buf.length;
}
/* ----------------------------------- main ---------------------------------- */
async function main() {
@@ -691,53 +511,29 @@ async function main() {
const pageKey = normalizePageKeyFromChemin(chemin);
assert(pageKey, "Ticket: impossible de dériver pageKey.", 2);
const paraOrder = DO_VERIFY ? await loadParaOrderFromDist(pageKey) : null;
if (DO_VERIFY) {
const ok = await tryVerifyAnchor(pageKey, ancre);
if (ok === false) {
throw Object.assign(new Error(`Ticket verify: ancre introuvable pour page "${pageKey}" => ${ancre}`), { __exitCode: 2 });
}
if (ok === null) {
if (STRICT) {
throw Object.assign(
new Error(`Ticket verify (strict): impossible de vérifier (pas de dist/para-index.json ou baseline)`),
{ __exitCode: 2 }
);
}
console.warn("⚠️ verify: impossible de vérifier (pas de dist/para-index.json ou baseline) — on continue.");
// no source of truth available
if (STRICT) throw Object.assign(new Error(`Ticket verify (strict): impossible de vérifier (pas de baseline/dist)`), { __exitCode: 2 });
console.warn("⚠️ verify: impossible de vérifier (pas de baseline/dist) — on continue.");
}
}
// ✅ shard path: src/annotations/<pageKey>/<paraId>.yml
const shardAbs = path.join(ANNO_DIR, ...pageKey.split("/"), `${ancre}.yml`);
const shardRel = path.relative(CWD, shardAbs).replace(/\\/g, "/");
const annoFileAbs = path.join(ANNO_DIR, `${pageKey}.yml`);
const annoFileRel = path.relative(CWD, annoFileAbs).replace(/\\/g, "/");
// legacy monolith: src/annotations/<pageKey>.yml (read-only, for migration)
const legacyAbs = path.join(ANNO_DIR, `${pageKey}.yml`);
console.log("✅ Parsed:", { type, chemin, ancre: `#${ancre}`, pageKey, annoFile: annoFileRel });
console.log("✅ Parsed:", { type, chemin, ancre: `#${ancre}`, pageKey, annoFile: shardRel });
// load shard doc
const doc = await loadAnnoDocYaml(shardAbs, pageKey);
if (!isPlainObject(doc.paras[ancre])) doc.paras[ancre] = {};
const entry = doc.paras[ancre];
// merge legacy entry into shard in-memory (non destructive) to keep compat + enable progressive migration
if (await exists(legacyAbs)) {
try {
const legacy = await loadAnnoDocYaml(legacyAbs, pageKey);
const legacyEntry = legacy?.paras?.[ancre];
if (isPlainObject(legacyEntry)) {
deepMergeEntry(entry, legacyEntry);
}
} catch {
// ignore legacy parse issues; shard still applies new data
}
}
const doc = await loadAnnoDoc(annoFileAbs, pageKey);
const entry = ensureEntry(doc, ancre);
const touchedFiles = [];
const notes = [];
let changed = false;
const nowIso = new Date().toISOString();
@@ -749,19 +545,16 @@ async function main() {
if (!Array.isArray(entry.comments_editorial)) entry.comments_editorial = [];
const item = { text, status: "new", ts: nowIso, fromIssue: issueNum };
const before = entry.comments_editorial.length;
entry.comments_editorial = uniqUnion(entry.comments_editorial, [item], keyComment);
if (entry.comments_editorial.length !== before) {
changed = true;
notes.push(`+ comment added (len=${text.length})`);
} else {
notes.push(`~ comment already present (dedup)`);
}
const added = uniqPush(entry.comments_editorial, item, (x) => `${(x?.text || "").trim()}`);
if (added) { changed = true; notes.push(`+ comment added (len=${text.length})`); }
else notes.push(`~ comment already present (dedup)`);
stableSortByTs(entry.comments_editorial);
}
else if (type === "type/reference") {
const ref = parseReferenceBlock(body);
assert(ref.url || ref.label, "Ticket reference: renseigne au moins - URL: ou - Label: dans le ticket.", 2);
if (STRICT && ref.url && !isHttpUrl(ref.url)) {
@@ -778,35 +571,34 @@ async function main() {
};
if (ref.citation) item.citation = ref.citation;
const before = entry.refs.length;
entry.refs = uniqUnion(entry.refs, [item], keyRef);
if (entry.refs.length !== before) {
changed = true;
notes.push(`+ reference added (${item.url ? "url" : "label"})`);
} else {
notes.push(`~ reference already present (dedup)`);
}
const added = uniqPush(entry.refs, item, (x) => `${x?.url || ""}||${x?.label || ""}||${x?.kind || ""}||${x?.citation || ""}`);
if (added) { changed = true; notes.push(`+ reference added (${item.url ? "url" : "label"})`); }
else notes.push(`~ reference already present (dedup)`);
stableSortByTs(entry.refs);
}
else if (type === "type/media") {
if (!Array.isArray(entry.media)) entry.media = [];
const caption = (title || "").trim();
if (STRICT && !caption) {
throw Object.assign(new Error("Ticket media (strict): caption vide (titre de ticket requis)."), { __exitCode: 2 });
}
const captionFinal = caption || ".";
const atts = NO_DOWNLOAD ? [] : await fetchIssueAssets({ forgeApiBase, owner, repo, token, issueNum });
if (!atts.length) notes.push("! no assets found (nothing to download).");
if (!atts.length) {
notes.push("! no assets found (nothing to download).");
}
for (const a of atts) {
const name = sanitizeFilename(a?.name || `asset-${a?.id || "x"}`);
const dl = a?.browser_download_url || a?.download_url || "";
if (!dl) { notes.push(`! asset missing download url: ${name}`); continue; }
const mediaDirAbs = path.join(PUBLIC_DIR, "media", ...pageKey.split("/"), ancre);
// caption = ticket title (fallback ".")
const caption = (title || "").trim() || ".";
if (STRICT && !caption.trim()) {
throw Object.assign(new Error("Ticket media (strict): caption vide."), { __exitCode: 2 });
}
const mediaDirAbs = path.join(PUBLIC_DIR, "media", pageKey, ancre);
const destAbs = path.join(mediaDirAbs, name);
const urlPath = `${MEDIA_URL_ROOT}/${pageKey}/${ancre}/${name}`.replace(/\/{2,}/g, "/");
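// With the defaults (PUBLIC_DIR=public, MEDIA_ROOT=/media) an asset therefore
// lands as follows (file name shortened for the example):
// pageKey = "archicrat-ia/chapitre-4", ancre = "p-11-67c14c09", name = "Capture.png"
// destAbs => public/media/archicrat-ia/chapitre-4/p-11-67c14c09/Capture.png
// urlPath => /media/archicrat-ia/chapitre-4/p-11-67c14c09/Capture.png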
@@ -816,24 +608,21 @@ async function main() {
const bytes = await downloadToFile(dl, token, destAbs);
notes.push(`+ downloaded ${name} (${bytes} bytes) -> ${urlPath}`);
touchedFiles.push(path.relative(CWD, destAbs).replace(/\\/g, "/"));
changed = true;
} else {
notes.push(`(dry) would download ${name} -> ${urlPath}`);
changed = true;
}
const item = {
type: inferMediaTypeFromFilename(name),
src: urlPath,
caption: captionFinal,
caption,
credit: "",
ts: nowIso,
fromIssue: issueNum,
};
const before = entry.media.length;
entry.media = uniqUnion(entry.media, [item], keyMedia);
if (entry.media.length !== before) changed = true;
const added = uniqPush(entry.media, item, (x) => String(x?.src || ""));
if (added) changed = true;
}
stableSortByTs(entry.media);
@@ -851,7 +640,7 @@ async function main() {
if (DRY_RUN) {
console.log("\n--- DRY RUN (no write) ---");
console.log(`Would update: ${shardRel}`);
console.log(`Would update: ${annoFileRel}`);
for (const n of notes) console.log(" ", n);
console.log("\nExcerpt (resulting entry):");
console.log(YAML.stringify({ [ancre]: doc.paras[ancre] }).trimEnd());
@@ -859,10 +648,10 @@ async function main() {
return;
}
await saveAnnoDocYaml(shardAbs, doc, paraOrder);
touchedFiles.unshift(shardRel);
await saveAnnoDocYaml(annoFileAbs, doc);
touchedFiles.unshift(annoFileRel);
console.log(`✅ Updated: ${shardRel}`);
console.log(`✅ Updated: ${annoFileRel}`);
for (const n of notes) console.log(" ", n);
if (DO_COMMIT) {
@@ -896,4 +685,4 @@ main().catch((e) => {
const code = e?.__exitCode || 1;
console.error("💥", e?.message || e);
process.exit(code);
});
});

View File

@@ -1,106 +1,28 @@
#!/usr/bin/env node
// scripts/build-annotations-index.mjs
// Builds dist/annotations-index.json from src/annotations/**/*.yml
// Supports:
// - monolith : src/annotations/<pageKey>.yml
// - shard : src/annotations/<pageKey>/<paraId>.yml (paraId = p-<n>-...)
// Invariants:
// - doc.schema === 1
// - doc.page (if present) == pageKey derived from the path
// - shard: doc.paras must contain EXACTLY the key paraId (otherwise fail)
//
// Non-destructive deep merge (media/refs/comments deduplicated), stable sort.
import fs from "node:fs/promises";
import path from "node:path";
import YAML from "yaml";
const ROOT = process.cwd();
const ANNO_ROOT = path.join(ROOT, "src", "annotations");
const DIST_DIR = path.join(ROOT, "dist");
const OUT = path.join(DIST_DIR, "annotations-index.json");
function parseArgs(argv) {
const out = {
inDir: "src/annotations",
outFile: "dist/annotations-index.json",
};
function assert(cond, msg) {
if (!cond) throw new Error(msg);
}
for (let i = 0; i < argv.length; i++) {
const a = argv[i];
function isObj(x) {
return !!x && typeof x === "object" && !Array.isArray(x);
}
function isArr(x) {
return Array.isArray(x);
}
if (a === "--in" && argv[i + 1]) out.inDir = argv[++i];
else if (a.startsWith("--in=")) out.inDir = a.slice("--in=".length);
function normPath(s) {
return String(s || "")
.replace(/\\/g, "/")
.replace(/^\/+|\/+$/g, "");
}
function paraNum(pid) {
const m = String(pid).match(/^p-(\d+)-/i);
return m ? Number(m[1]) : Number.POSITIVE_INFINITY;
}
function stableSortByTs(arr) {
if (!Array.isArray(arr)) return;
arr.sort((a, b) => {
const ta = Date.parse(a?.ts || "") || 0;
const tb = Date.parse(b?.ts || "") || 0;
if (ta !== tb) return ta - tb;
return JSON.stringify(a).localeCompare(JSON.stringify(b));
});
}
function keyMedia(x) { return String(x?.src || ""); }
function keyRef(x) {
return `${x?.url || ""}||${x?.label || ""}||${x?.kind || ""}||${x?.citation || ""}`;
}
function keyComment(x) { return String(x?.text || "").trim(); }
function uniqUnion(dst, src, keyFn) {
const out = isArr(dst) ? [...dst] : [];
const seen = new Set(out.map((x) => keyFn(x)));
for (const it of (isArr(src) ? src : [])) {
const k = keyFn(it);
if (!k) continue;
if (!seen.has(k)) {
seen.add(k);
out.push(it);
}
if (a === "--out" && argv[i + 1]) out.outFile = argv[++i];
else if (a.startsWith("--out=")) out.outFile = a.slice("--out=".length);
}
return out;
}
function deepMergeEntry(dst, src) {
if (!isObj(dst) || !isObj(src)) return;
for (const [k, v] of Object.entries(src)) {
if (k === "media" && isArr(v)) { dst.media = uniqUnion(dst.media, v, keyMedia); continue; }
if (k === "refs" && isArr(v)) { dst.refs = uniqUnion(dst.refs, v, keyRef); continue; }
if (k === "comments_editorial" && isArr(v)) { dst.comments_editorial = uniqUnion(dst.comments_editorial, v, keyComment); continue; }
if (isObj(v)) {
if (!isObj(dst[k])) dst[k] = {};
deepMergeEntry(dst[k], v);
continue;
}
if (isArr(v)) {
const cur = isArr(dst[k]) ? dst[k] : [];
const seen = new Set(cur.map((x) => JSON.stringify(x)));
const out = [...cur];
for (const it of v) {
const s = JSON.stringify(it);
if (!seen.has(s)) { seen.add(s); out.push(it); }
}
dst[k] = out;
continue;
}
// scalar: set only if missing/empty
if (!(k in dst) || dst[k] == null || dst[k] === "") dst[k] = v;
}
async function exists(p) {
try { await fs.access(p); return true; } catch { return false; }
}
async function walk(dir) {
@@ -108,116 +30,111 @@ async function walk(dir) {
const ents = await fs.readdir(dir, { withFileTypes: true });
for (const e of ents) {
const p = path.join(dir, e.name);
if (e.isDirectory()) out.push(...await walk(p));
else if (e.isFile() && /\.ya?ml$/i.test(e.name)) out.push(p);
if (e.isDirectory()) out.push(...(await walk(p)));
else out.push(p);
}
return out;
}
function inferExpectedFromRel(relNoExt) {
const parts = relNoExt.split("/").filter(Boolean);
const last = parts.at(-1) || "";
const isShard = parts.length > 1 && /^p-\d+-/i.test(last); // ✅ hardening
const pageKey = isShard ? parts.slice(0, -1).join("/") : relNoExt;
const paraId = isShard ? last : null;
return { isShard, pageKey, paraId };
function inferPageKeyFromFile(inDirAbs, fileAbs) {
// src/annotations/<page>.yml -> "<page>"
const rel = path.relative(inDirAbs, fileAbs).replace(/\\/g, "/");
return rel.replace(/\.(ya?ml|json)$/i, "");
}
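// The shard/monolith split made by the new inferExpectedFromRel, on two
// assumed relative paths (extension already stripped):
inferExpectedFromRel("archicrat-ia/chapitre-4/p-11-67c14c09");
// => { isShard: true, pageKey: "archicrat-ia/chapitre-4", paraId: "p-11-67c14c09" }
inferExpectedFromRel("archicrat-ia/chapitre-4");
// => { isShard: false, pageKey: "archicrat-ia/chapitre-4", paraId: null }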
function validateAndNormalizeDoc(doc, relFile, expectedPageKey, expectedParaId) {
assert(isObj(doc), `${relFile}: doc must be an object`);
assert(doc.schema === 1, `${relFile}: schema must be 1`);
assert(isObj(doc.paras), `${relFile}: missing object key "paras"`);
function assert(cond, msg) {
if (!cond) throw new Error(msg);
}
const gotPage = doc.page != null ? normPath(doc.page) : "";
const expPage = normPath(expectedPageKey);
function isPlainObject(x) {
return !!x && typeof x === "object" && !Array.isArray(x);
}
if (gotPage) {
assert(
gotPage === expPage,
`${relFile}: page mismatch (page="${doc.page}" vs path="${expectedPageKey}")`
);
} else {
doc.page = expPage;
}
function normalizePageKey(s) {
// no leading/trailing /
return String(s || "").replace(/^\/+/, "").replace(/\/+$/, "");
}
if (expectedParaId) {
const keys = Object.keys(doc.paras || {}).map(String);
function validateAndNormalizeDoc(doc, pageKey, fileRel) {
assert(isPlainObject(doc), `${fileRel}: document must be an object`);
assert(doc.schema === 1, `${fileRel}: schema must be 1`);
if (doc.page != null) {
assert(
keys.includes(expectedParaId),
`${relFile}: shard mismatch: must contain paras["${expectedParaId}"]`
);
assert(
keys.length === 1 && keys[0] === expectedParaId,
`${relFile}: shard invariant violated: shard file must contain ONLY paras["${expectedParaId}"] (got: ${keys.join(", ")})`
normalizePageKey(doc.page) === pageKey,
`${fileRel}: page mismatch (page="${doc.page}" vs path="${pageKey}")`
);
}
assert(isPlainObject(doc.paras), `${fileRel}: missing object key "paras"`);
return doc;
const parasOut = Object.create(null);
for (const [paraId, entry] of Object.entries(doc.paras)) {
assert(/^p-\d+-/i.test(paraId), `${fileRel}: invalid para id "${paraId}"`);
// entry may be empty, but must be an object when present
assert(entry == null || isPlainObject(entry), `${fileRel}: paras.${paraId} must be an object`);
const e = entry ? { ...entry } : {};
// Sanity checks (non-destructive: nothing is overwritten, we only check the types)
if (e.refs != null) assert(Array.isArray(e.refs), `${fileRel}: paras.${paraId}.refs must be an array`);
if (e.authors != null) assert(Array.isArray(e.authors), `${fileRel}: paras.${paraId}.authors must be an array`);
if (e.quotes != null) assert(Array.isArray(e.quotes), `${fileRel}: paras.${paraId}.quotes must be an array`);
if (e.media != null) assert(Array.isArray(e.media), `${fileRel}: paras.${paraId}.media must be an array`);
if (e.comments_editorial != null) assert(Array.isArray(e.comments_editorial), `${fileRel}: paras.${paraId}.comments_editorial must be an array`);
parasOut[paraId] = e;
}
return parasOut;
}
async function readDoc(fileAbs) {
const raw = await fs.readFile(fileAbs, "utf8");
if (/\.json$/i.test(fileAbs)) return JSON.parse(raw);
return YAML.parse(raw);
}
async function main() {
const pages = {};
const errors = [];
const { inDir, outFile } = parseArgs(process.argv.slice(2));
const CWD = process.cwd();
await fs.mkdir(DIST_DIR, { recursive: true });
const inDirAbs = path.isAbsolute(inDir) ? inDir : path.join(CWD, inDir);
const outAbs = path.isAbsolute(outFile) ? outFile : path.join(CWD, outFile);
const files = await walk(ANNO_ROOT);
for (const fp of files) {
const rel = normPath(path.relative(ANNO_ROOT, fp));
const relNoExt = rel.replace(/\.ya?ml$/i, "");
const { isShard, pageKey, paraId } = inferExpectedFromRel(relNoExt);
try {
const raw = await fs.readFile(fp, "utf8");
const doc = YAML.parse(raw) || {};
if (!isObj(doc) || doc.schema !== 1) continue;
validateAndNormalizeDoc(
doc,
`src/annotations/${rel}`,
pageKey,
isShard ? paraId : null
);
const pg = (pages[pageKey] ??= { paras: {} });
if (isShard) {
const entry = doc.paras[paraId];
if (!isObj(pg.paras[paraId])) pg.paras[paraId] = {};
if (isObj(entry)) deepMergeEntry(pg.paras[paraId], entry);
stableSortByTs(pg.paras[paraId].media);
stableSortByTs(pg.paras[paraId].refs);
stableSortByTs(pg.paras[paraId].comments_editorial);
} else {
for (const [pid, entry] of Object.entries(doc.paras || {})) {
const p = String(pid);
if (!isObj(pg.paras[p])) pg.paras[p] = {};
if (isObj(entry)) deepMergeEntry(pg.paras[p], entry);
stableSortByTs(pg.paras[p].media);
stableSortByTs(pg.paras[p].refs);
stableSortByTs(pg.paras[p].comments_editorial);
}
}
} catch (e) {
errors.push({ file: `src/annotations/${rel}`, error: String(e?.message || e) });
}
// antifragile
if (!(await exists(inDirAbs))) {
console.log(` annotations-index: skip (input missing): ${inDir}`);
process.exit(0);
}
for (const [pageKey, pg] of Object.entries(pages)) {
const keys = Object.keys(pg.paras || {});
keys.sort((a, b) => {
const ia = paraNum(a);
const ib = paraNum(b);
if (Number.isFinite(ia) && Number.isFinite(ib) && ia !== ib) return ia - ib;
return String(a).localeCompare(String(b));
});
const next = {};
for (const k of keys) next[k] = pg.paras[k];
pg.paras = next;
const files = (await walk(inDirAbs)).filter((p) => /\.(ya?ml|json)$/i.test(p));
if (!files.length) {
console.log(` annotations-index: skip (no .yml/.yaml/.json found in): ${inDir}`);
process.exit(0);
}
const pages = Object.create(null);
let paraCount = 0;
for (const f of files) {
const fileRel = path.relative(CWD, f).replace(/\\/g, "/");
const pageKey = normalizePageKey(inferPageKeyFromFile(inDirAbs, f));
assert(pageKey, `${fileRel}: cannot infer page key`);
let doc;
try {
doc = await readDoc(f);
} catch (e) {
throw new Error(`${fileRel}: parse failed: ${String(e?.message ?? e)}`);
}
const paras = validateAndNormalizeDoc(doc, pageKey, fileRel);
// 1 file = 1 page (canonical)
assert(!pages[pageKey], `${fileRel}: duplicate page "${pageKey}" (only one file per page)`);
pages[pageKey] = { paras };
paraCount += Object.keys(paras).length;
}
const out = {
@@ -226,21 +143,17 @@ async function main() {
pages,
stats: {
pages: Object.keys(pages).length,
paras: Object.values(pages).reduce((n, p) => n + Object.keys(p.paras || {}).length, 0),
errors: errors.length,
paras: paraCount,
},
errors,
};
if (errors.length) {
throw new Error(`${errors[0].file}: ${errors[0].error}`);
}
await fs.mkdir(path.dirname(outAbs), { recursive: true });
await fs.writeFile(outAbs, JSON.stringify(out), "utf8");
await fs.writeFile(OUT, JSON.stringify(out), "utf8");
console.log(`✅ annotations-index: pages=${out.stats.pages} paras=${out.stats.paras} -> dist/annotations-index.json`);
console.log(`✅ annotations-index: pages=${out.stats.pages} paras=${out.stats.paras} -> ${path.relative(CWD, outAbs)}`);
}
main().catch((e) => {
console.error(`FAIL: build-annotations-index crashed: ${e?.stack || e?.message || e}`);
console.error("FAIL: build-annotations-index crashed:", e);
process.exit(1);
});
});

View File

@@ -48,9 +48,6 @@ async function main() {
let missing = 0;
const notes = [];
// Optimization: avoid checking the same media file a hundred times
const seenMedia = new Set(); // src string
for (const f of files) {
const rel = path.relative(CWD, f).replace(/\\/g, "/");
const raw = await fs.readFile(f, "utf8");
@@ -73,10 +70,6 @@ async function main() {
const src = String(m?.src || "");
if (!src.startsWith("/media/")) continue; // externes ok, ou autres conventions futures
// dédupe
if (seenMedia.has(src)) continue;
seenMedia.add(src);
checked++;
const p = toPublicPathFromUrl(src);
if (!p) continue;
@@ -101,4 +94,4 @@ async function main() {
main().catch((e) => {
console.error("FAIL: check-annotations-media crashed:", e);
process.exit(1);
});
});

View File

@@ -27,6 +27,11 @@ function escRe(s) {
return String(s).replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
}
function inferPageKeyFromFile(fileAbs) {
const rel = path.relative(ANNO_DIR, fileAbs).replace(/\\/g, "/");
return rel.replace(/\.(ya?ml|json)$/i, "");
}
function normalizePageKey(s) {
return String(s || "").replace(/^\/+/, "").replace(/\/+$/, "");
}
@@ -35,31 +40,6 @@ function isPlainObject(x) {
return !!x && typeof x === "object" && !Array.isArray(x);
}
function isParaId(s) {
return /^p-\d+-/i.test(String(s || ""));
}
/**
* Supports:
* - monolith: src/annotations/<pageKey>.yml -> pageKey = rel without ext
* - shard : src/annotations/<pageKey>/<paraId>.yml -> pageKey = dirname(rel), paraId = basename
*
* shard only when the file sits in a subdirectory (guards against pathological cases).
*/
function inferFromFile(fileAbs) {
const rel = path.relative(ANNO_DIR, fileAbs).replace(/\\/g, "/");
const relNoExt = rel.replace(/\.(ya?ml|json)$/i, "");
const parts = relNoExt.split("/").filter(Boolean);
const base = parts[parts.length - 1] || "";
const dirParts = parts.slice(0, -1);
const isShard = dirParts.length > 0 && isParaId(base);
const pageKey = isShard ? dirParts.join("/") : relNoExt;
const paraId = isShard ? base : "";
return { pageKey: normalizePageKey(pageKey), paraId };
}
async function loadAliases() {
if (!(await exists(ALIASES_PATH))) return {};
try {
@@ -103,11 +83,7 @@ async function main() {
const aliases = await loadAliases();
const files = (await walk(ANNO_DIR)).filter((p) => /\.(ya?ml|json)$/i.test(p));
// perf: cache HTML per page (shards = many files for 1 page)
const htmlCache = new Map(); // pageKey -> html
const missingDistPage = new Set(); // pageKey
let pagesSeen = new Set();
let pages = 0;
let checked = 0;
let failures = 0;
const notes = [];
@@ -131,7 +107,7 @@ async function main() {
continue;
}
const { pageKey, paraId: shardParaId } = inferFromFile(f);
const pageKey = normalizePageKey(inferPageKeyFromFile(f));
if (doc.page != null && normalizePageKey(doc.page) !== pageKey) {
failures++;
@@ -145,44 +121,20 @@ async function main() {
continue;
}
// shard invariant (strong): must contain paras[paraId]
if (shardParaId) {
if (!Object.prototype.hasOwnProperty.call(doc.paras, shardParaId)) {
failures++;
notes.push(`- SHARD MISMATCH: ${rel} (expected paras["${shardParaId}"] present)`);
continue;
}
// extras -> warning (non-destructive)
const keys = Object.keys(doc.paras);
if (!(keys.length === 1 && keys[0] === shardParaId)) {
notes.push(`- WARN shard has extra paras: ${rel} (expected only "${shardParaId}", got ${keys.join(", ")})`);
}
}
pagesSeen.add(pageKey);
const distFile = path.join(DIST_DIR, pageKey, "index.html");
if (!(await exists(distFile))) {
if (!missingDistPage.has(pageKey)) {
missingDistPage.add(pageKey);
failures++;
notes.push(`- MISSING PAGE: dist/${pageKey}/index.html (from ${rel})`);
} else {
notes.push(`- WARN missing page already reported: dist/${pageKey}/index.html (from ${rel})`);
}
failures++;
notes.push(`- MISSING PAGE: dist/${pageKey}/index.html (from ${rel})`);
continue;
}
let html = htmlCache.get(pageKey);
if (!html) {
html = await fs.readFile(distFile, "utf8");
htmlCache.set(pageKey, html);
}
pages++;
const html = await fs.readFile(distFile, "utf8");
for (const paraId of Object.keys(doc.paras)) {
checked++;
if (!isParaId(paraId)) {
if (!/^p-\d+-/i.test(paraId)) {
failures++;
notes.push(`- INVALID ID: ${rel} (${paraId})`);
continue;
@@ -206,7 +158,6 @@ async function main() {
}
const warns = notes.filter((x) => x.startsWith("- WARN"));
const pages = pagesSeen.size;
if (failures > 0) {
console.error(`FAIL: annotations invalid (pages=${pages} checked=${checked} failures=${failures})`);
@@ -221,4 +172,4 @@ async function main() {
main().catch((e) => {
console.error("FAIL: annotations check crashed:", e);
process.exit(1);
});
});

View File

@@ -114,6 +114,7 @@ async function runMammoth(docxPath, assetsOutDirWebRoot) {
);
let html = result.value || "";
// Mammoth gives relative src="image-xx.png" ; we will prefix later
return html;
}
@@ -181,25 +182,6 @@ async function exists(p) {
try { await fs.access(p); return true; } catch { return false; }
}
/**
* ✅ compat:
* - old : collection="archicratie" + slug="archicrat-ia/chapitre-3"
* - new : collection="archicrat-ia" + slug="chapitre-3"
*
* Goal: always write to src/content/archicrat-ia/<slugSansPrefix>.mdx
*/
function normalizeDest(collection, slug) {
let outCollection = String(collection || "").trim();
let outSlug = String(slug || "").trim().replace(/^\/+|\/+$/g, "");
if (outCollection === "archicratie" && outSlug.startsWith("archicrat-ia/")) {
outCollection = "archicrat-ia";
outSlug = outSlug.replace(/^archicrat-ia\//, "");
}
return { outCollection, outSlug };
}
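// The compat mapping in practice (assumed inputs; only the legacy
// archicratie + archicrat-ia/ prefix case is rewritten):
normalizeDest("archicratie", "archicrat-ia/chapitre-3");
// => { outCollection: "archicrat-ia", outSlug: "chapitre-3" }
normalizeDest("glossaire", "/archicratie/");
// => { outCollection: "glossaire", outSlug: "archicratie" }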
async function main() {
const args = parseArgs(process.argv);
const manifestPath = path.resolve(args.manifest);
@@ -221,14 +203,11 @@ async function main() {
for (const it of selected) {
const docxPath = path.resolve(it.source);
const { outCollection, outSlug } = normalizeDest(it.collection, it.slug);
const outFile = path.resolve("src/content", outCollection, `${outSlug}.mdx`);
const outFile = path.resolve("src/content", it.collection, `${it.slug}.mdx`);
const outDir = path.dirname(outFile);
const assetsPublicDir = path.posix.join("/imported", outCollection, outSlug);
const assetsDiskDir = path.resolve("public", "imported", outCollection, outSlug);
const assetsPublicDir = path.posix.join("/imported", it.collection, it.slug);
const assetsDiskDir = path.resolve("public", "imported", it.collection, it.slug);
if (!(await exists(docxPath))) {
throw new Error(`Missing source docx: ${docxPath}`);
@@ -262,20 +241,18 @@ async function main() {
html = rewriteLocalImageLinks(html, assetsPublicDir);
body = html.trim() ? html : "<p>(Import vide)</p>";
}
const defaultVersion = process.env.PUBLIC_RELEASE || "0.1.0";
// ✅ IMPORTANT: archicrat-ia shares edition/status with archicratie (no frontmatter migration)
const schemaDefaultsByCollection = {
archicratie: { edition: "archicratie", status: "modele_sociopolitique", level: 1 },
"archicrat-ia": { edition: "archicrat-ia", status: "essai_these", level: 1 },
ia: { edition: "ia", status: "cas_pratique", level: 1 },
traite: { edition: "traite", status: "ontodynamique", level: 1 },
glossaire: { edition: "glossaire", status: "lexique", level: 1 },
atlas: { edition: "atlas", status: "atlas", level: 1 },
archicratie: { edition: "archicratie", status: "modele_sociopolitique", level: 1 },
ia: { edition: "ia", status: "cas_pratique", level: 1 },
traite: { edition: "traite", status: "ontodynamique", level: 1 },
glossaire: { edition: "glossaire", status: "lexique", level: 1 },
atlas: { edition: "atlas", status: "atlas", level: 1 },
};
const defaults = schemaDefaultsByCollection[outCollection] || { edition: outCollection, status: "draft", level: 1 };
const defaults = schemaDefaultsByCollection[it.collection] || { edition: it.collection, status: "draft", level: 1 };
const fm = [
"---",
@@ -305,4 +282,4 @@ async function main() {
main().catch((e) => {
console.error("\nERROR:", e?.message || e);
process.exit(1);
});
});

View File

@@ -1,5 +1,2 @@
{
"/archicrat-ia/chapitre-3/": {
"p-1-60c7ea48": "p-1-a21087b0"
}
}
{}

View File

@@ -1,10 +0,0 @@
schema: 1
page: archicrat-ia/chapitre-1
paras:
p-0-8d27a7f5:
refs:
- url: https://auth.archicratie.trans-hands.synology.me/authenticated
label: Lien web
kind: (livre / article / vidéo / site / autre) Site
ts: 2026-02-27T12:34:31.704Z
fromIssue: 142

View File

@@ -1,9 +0,0 @@
schema: 1
page: archicrat-ia/chapitre-1
paras:
p-1-8a6c18bf:
comments_editorial:
- text: Yeaha
status: new
ts: 2026-02-27T12:40:39.462Z
fromIssue: 143

View File

@@ -1,18 +0,0 @@
schema: 1
page: archicrat-ia/chapitre-3
paras:
p-0-ace27175:
media:
- type: image
src: /media/archicrat-ia/chapitre-3/p-0-ace27175/Capture_d_e_cran_2025-05-05_a_19.20.40.png
caption: "[Media] p-0-ace27175 — Chapitre 3 — Philosophies du pouvoir et
archicration"
credit: ""
ts: 2026-02-27T12:43:14.259Z
fromIssue: 144
refs:
- url: https://gitea.archicratie.trans-hands.synology.me
label: Gitea
kind: (livre / article / vidéo / site / autre) Site
ts: 2026-03-02T19:53:21.252Z
fromIssue: 169

View File

@@ -1,11 +0,0 @@
schema: 1
page: archicrat-ia/chapitre-3
paras:
p-1-60c7ea48:
refs:
- url: https://gitea.archicratie.trans-hands.synology.me
label: Gitea
kind: (livre / article / vidéo / site / autre) Site
ts: 2026-03-02T20:01:55.858Z
fromIssue: 172
# testB: hotpatch-auto gate proof

View File

@@ -26,5 +26,5 @@ paras:
caption: "[Media] p-11-67c14c09 — Chapitre 4 — Histoire archicratique des
révolutions industrielles"
credit: ""
ts: 2026-02-26T13:17:41.286Z
ts: 2026-02-26T13:15:14.817Z
fromIssue: 129

View File

@@ -1,19 +0,0 @@
schema: 1
page: archicrat-ia/chapitre-4
paras:
p-11-67c14c09:
media:
- type: image
src: /media/archicrat-ia/chapitre-4/p-11-67c14c09/Capture_d_e_cran_2026-02-16_a_13.07.35.png
caption: "[Media] p-11-67c14c09 — Chapitre 4 — Histoire archicratique des
révolutions industrielles"
credit: ""
ts: 2026-02-26T13:17:41.286Z
fromIssue: 129
- type: image
src: /media/archicrat-ia/chapitre-4/p-11-67c14c09/Capture_d_e_cran_2025-05-05_a_19.20.40.png
caption: "[Media] p-11-67c14c09 — Chapitre 4 — Histoire archicratique des
révolutions industrielles"
credit: ""
ts: 2026-02-27T09:17:04.386Z
fromIssue: 127

View File

@@ -3,11 +3,14 @@ import { getCollection } from "astro:content";
const { currentSlug } = Astro.props;
// ✅ After migration: TOC = collection "archicrat-ia"
const entries = (await getCollection("archicrat-ia"))
const entries = (await getCollection("archicratie"))
.filter((e) => e.slug.startsWith("archicrat-ia/"))
.sort((a, b) => (a.data.order ?? 0) - (b.data.order ?? 0));
const href = (slug) => `/archicrat-ia/${slug}/`;
// ✅ Route the essay-thesis to /archicrat-ia/<slug-sans-prefix>/
// (Astro trailingSlash = always → keep the trailing "/")
const strip = (s) => String(s || "").replace(/^archicrat-ia\//, "");
const href = (slug) => `/archicrat-ia/${strip(slug)}/`;
---
<nav class="toc-global" aria-label="Table des matières — ArchiCraT-IA">
@@ -160,4 +163,4 @@ const href = (slug) => `/archicrat-ia/${slug}/`;
const active = document.querySelector(".toc-global .toc-item.is-active");
if (active) active.scrollIntoView({ block: "nearest" });
})();
</script>
</script>

View File

@@ -14,7 +14,7 @@ source:
---
Ce chapitre se tient à un point nodal de notre essai-thèse : il ouvre un espace d'exploration systématique des formes conceptuelles et philosophiques à travers lesquelles le pouvoir se configure comme régime de régulation. Il ne s'agit pas ici de revenir une nouvelle fois sur les fondements de l'autorité, ni d'interroger la légitimité politique au sens classique du terme, ni même d'enquêter sur la genèse des institutions. L'ambition est autre, structurelle, transversale, morphologique, elle tentera d'arpenter, à même les dispositifs, les pensées, les théorisations et les expériences, les modalités différentiées par lesquelles s'instaurent, s'éprouvent et se disputent les formes de régulation du vivre-ensemble.
Dès lors, ce chapitre ne postule aucun fondement, ne cherche aucun point d'origine, ne prétend restituer aucune ontologie stable du politique. Ce qu'il donne à lire, c'est une cartographie dynamique des régimes de régulation, traversée par des formes irréductibles, non homogènes, souvent conflictuelles, parfois incompatibles, mais toutes pensées comme des configurations singulières, et souvent complémentaires.
Dès lors, ce chapitre ne postule aucun fondement, ne cherche aucun point d'origine, ne prétend restituer aucune ontologie stable du politique. Ce qu'il donne à lire, c'est une cartographie dynamique des régimes de régulation, traversée par des formes irréductibles, non homogènes, souvent conflictuelles, parfois incompatibles, mais toutes pensées comme des configurations singulières.
Ainsi, loin d'être une galerie illustrative de théories politiques juxtaposées, le chapitre s'agence comme une topologie critique, une plongée stratigraphique dans les scènes où s'articule la régulation — entendue ici non comme stabilisation externe ou ajustement technico-fonctionnel, mais comme dispositif instituant, tension structurante, scène traversée de conflictualité et d'exigence normative. Car à nos yeux, la régulation n'est pas ce qui vient après le pouvoir, elle en est la forme même constitutive — son architecture, son rythme, son épaisseur. Elle est ce par quoi le pouvoir ne se contente pas d'être exercé, mais s'institue, se justifie, se dispute, se recompose.

View File

@@ -2,7 +2,7 @@ import { defineCollection, z } from "astro:content";
const linkSchema = z.object({
type: z.enum(["definition", "appui", "transposition"]),
target: z.string().min(1),
target: z.string().min(1), // internal URL (e.g. /glossaire/archicratie/) or slug
note: z.string().optional()
});
@@ -12,6 +12,7 @@ const baseTextSchema = z.object({
version: z.string().min(1),
concepts: z.array(z.string().min(1)).default([]),
links: z.array(linkSchema).default([]),
// optional but already useful
order: z.number().int().nonnegative().optional(),
summary: z.string().optional()
});
@@ -49,31 +50,20 @@ const atlas = defineCollection({
})
});
// ✅ NEW collection: archicrat-ia (essay-thesis)
// NOTE: edition/status "archicratie/modele_sociopolitique" is accepted temporarily,
// for MDX files that have not been normalized yet.
// Once we go "strict", this switches to edition="archicrat-ia" status="essai_these"
// + a frontmatter update of the 7 files.
const archicratIa = defineCollection({
type: "content",
schema: baseTextSchema.extend({
edition: z.union([z.literal("archicrat-ia"), z.literal("archicratie")]),
status: z.union([z.literal("essai_these"), z.literal("modele_sociopolitique")])
})
});
// Glossaire (référentiel terminologique)
const glossaire = defineCollection({
type: "content",
schema: z.object({
title: z.string().min(1),
term: z.string().min(1),
title: z.string().min(1), // public title (often identical to the term)
term: z.string().min(1), // canonical term
aliases: z.array(z.string().min(1)).default([]),
edition: z.literal("glossaire"),
status: z.literal("referentiel"),
version: z.string().min(1),
// Micro-definition displayable in a popover (short, stable)
definitionShort: z.string().min(1),
concepts: z.array(z.string().min(1)).default([]),
// Typed links (to works or other terms)
links: z.array(linkSchema).default([])
})
});
@@ -83,8 +73,5 @@ export const collections = {
archicratie,
ia,
glossaire,
atlas,
// ⚠️ key contains a hyphen => must be quoted
"archicrat-ia": archicratIa
};
atlas
};

View File

@@ -1,80 +1,23 @@
// src/pages/annotations-index.json.ts
import type { APIRoute } from "astro";
import fs from "node:fs/promises";
import path from "node:path";
import YAML from "yaml";
import * as fs from "node:fs/promises";
import * as path from "node:path";
import { parse as parseYAML } from "yaml";
const CWD = process.cwd();
const ANNO_ROOT = path.join(CWD, "src", "annotations");
const ANNO_DIR = path.join(CWD, "src", "annotations");
const isObj = (x: any) => !!x && typeof x === "object" && !Array.isArray(x);
const isArr = (x: any) => Array.isArray(x);
// Strict in CI (or explicit override)
const STRICT =
process.env.ANNOTATIONS_STRICT === "1" ||
process.env.CI === "1" ||
process.env.CI === "true";
function normPath(s: string) {
return String(s || "").replace(/\\/g, "/").replace(/^\/+|\/+$/g, "");
}
function paraNum(pid: string) {
const m = String(pid).match(/^p-(\d+)-/i);
return m ? Number(m[1]) : Number.POSITIVE_INFINITY;
}
function toIso(v: any) {
if (v instanceof Date) return v.toISOString();
return typeof v === "string" ? v : "";
}
function stableSortByTs(arr: any[]) {
if (!Array.isArray(arr)) return;
arr.sort((a, b) => {
const ta = Date.parse(toIso(a?.ts)) || 0;
const tb = Date.parse(toIso(b?.ts)) || 0;
if (ta !== tb) return ta - tb;
return JSON.stringify(a).localeCompare(JSON.stringify(b));
});
}
function keyMedia(x: any) { return String(x?.src || ""); }
function keyRef(x: any) {
return `${x?.url || ""}||${x?.label || ""}||${x?.kind || ""}||${x?.citation || ""}`;
}
function keyComment(x: any) { return String(x?.text || "").trim(); }
function uniqUnion(dst: any[], src: any[], keyFn: (x:any)=>string) {
const out = isArr(dst) ? [...dst] : [];
const seen = new Set(out.map((x) => keyFn(x)));
for (const it of (isArr(src) ? src : [])) {
const k = keyFn(it);
if (!k) continue;
if (!seen.has(k)) { seen.add(k); out.push(it); }
}
return out;
}
function deepMergeEntry(dst: any, src: any) {
if (!isObj(dst) || !isObj(src)) return;
for (const [k, v] of Object.entries(src)) {
if (k === "media" && isArr(v)) { dst.media = uniqUnion(dst.media, v, keyMedia); continue; }
if (k === "refs" && isArr(v)) { dst.refs = uniqUnion(dst.refs, v, keyRef); continue; }
if (k === "comments_editorial" && isArr(v)) { dst.comments_editorial = uniqUnion(dst.comments_editorial, v, keyComment); continue; }
if (isObj(v)) {
if (!isObj((dst as any)[k])) (dst as any)[k] = {};
deepMergeEntry((dst as any)[k], v);
continue;
}
if (isArr(v)) {
const cur = isArr((dst as any)[k]) ? (dst as any)[k] : [];
const seen = new Set(cur.map((x:any) => JSON.stringify(x)));
const out = [...cur];
for (const it of v) {
const s = JSON.stringify(it);
if (!seen.has(s)) { seen.add(s); out.push(it); }
}
(dst as any)[k] = out;
continue;
}
if (!(k in (dst as any)) || (dst as any)[k] == null || (dst as any)[k] === "") (dst as any)[k] = v;
async function exists(p: string): Promise<boolean> {
try {
await fs.access(p);
return true;
} catch {
return false;
}
}
@@ -83,98 +26,154 @@ async function walk(dir: string): Promise<string[]> {
const ents = await fs.readdir(dir, { withFileTypes: true });
for (const e of ents) {
const p = path.join(dir, e.name);
if (e.isDirectory()) out.push(...await walk(p));
else if (e.isFile() && /\.ya?ml$/i.test(e.name)) out.push(p);
if (e.isDirectory()) out.push(...(await walk(p)));
else out.push(p);
}
return out;
}
function inferExpected(relNoExt: string) {
const parts = relNoExt.split("/").filter(Boolean);
const last = parts.at(-1) || "";
const isShard = parts.length > 1 && /^p-\d+-/i.test(last); // ✅ durcissement
const pageKey = isShard ? parts.slice(0, -1).join("/") : relNoExt;
const paraId = isShard ? last : null;
return { isShard, pageKey, paraId };
function isPlainObject(x: unknown): x is Record<string, unknown> {
return !!x && typeof x === "object" && !Array.isArray(x);
}
export const GET: APIRoute = async () => {
const pages: Record<string, { paras: Record<string, any> }> = {};
const errors: Array<{ file: string; error: string }> = [];
function normalizePageKey(s: unknown): string {
return String(s ?? "")
.replace(/^\/+/, "")
.replace(/\/+$/, "")
.trim();
}
let files: string[] = [];
try {
files = await walk(ANNO_ROOT);
} catch (e: any) {
throw new Error(`Missing annotations root: ${ANNO_ROOT} (${e?.message || e})`);
function inferPageKeyFromFile(inDirAbs: string, fileAbs: string): string {
const rel = path.relative(inDirAbs, fileAbs).replace(/\\/g, "/");
return rel.replace(/\.(ya?ml|json)$/i, "");
}
function parseDoc(raw: string, fileAbs: string): unknown {
if (/\.json$/i.test(fileAbs)) return JSON.parse(raw);
return parseYAML(raw);
}
function hardFailOrCollect(errors: string[], msg: string): void {
if (STRICT) throw new Error(msg);
errors.push(msg);
}
function sanitizeEntry(
fileRel: string,
paraId: string,
entry: unknown,
errors: string[]
): Record<string, unknown> {
if (entry == null) return {};
if (!isPlainObject(entry)) {
hardFailOrCollect(errors, `${fileRel}: paras.${paraId} must be an object`);
return {};
}
for (const fp of files) {
const rel = normPath(path.relative(ANNO_ROOT, fp));
const relNoExt = rel.replace(/\.ya?ml$/i, "");
const { isShard, pageKey, paraId } = inferExpected(relNoExt);
const e: Record<string, unknown> = { ...entry };
try {
const raw = await fs.readFile(fp, "utf8");
const doc = YAML.parse(raw) || {};
const arrayFields = [
"refs",
"authors",
"quotes",
"media",
"comments_editorial",
] as const;
if (!isObj(doc) || doc.schema !== 1) continue;
const docPage = normPath(doc.page || "");
if (docPage && docPage !== pageKey) {
throw new Error(`page mismatch (page="${doc.page}" vs path="${pageKey}")`);
}
if (!doc.page) doc.page = pageKey;
if (!isObj(doc.paras)) throw new Error(`missing object key "paras"`);
const pg = pages[pageKey] ??= { paras: {} };
if (isShard) {
if (!paraId) throw new Error("internal: missing paraId");
if (!(paraId in doc.paras)) {
throw new Error(`shard mismatch: file must contain paras["${paraId}"]`);
}
// ✅ invariant aligned with build-annotations-index
const keys = Object.keys(doc.paras).map(String);
if (!(keys.length === 1 && keys[0] === paraId)) {
throw new Error(`shard invariant violated: shard must contain ONLY paras["${paraId}"] (got: ${keys.join(", ")})`);
}
const entry = doc.paras[paraId];
if (!isObj(pg.paras[paraId])) pg.paras[paraId] = {};
if (isObj(entry)) deepMergeEntry(pg.paras[paraId], entry);
stableSortByTs(pg.paras[paraId].media);
stableSortByTs(pg.paras[paraId].refs);
stableSortByTs(pg.paras[paraId].comments_editorial);
} else {
for (const [pid, entry] of Object.entries(doc.paras)) {
const p = String(pid);
if (!isObj(pg.paras[p])) pg.paras[p] = {};
if (isObj(entry)) deepMergeEntry(pg.paras[p], entry);
stableSortByTs(pg.paras[p].media);
stableSortByTs(pg.paras[p].refs);
stableSortByTs(pg.paras[p].comments_editorial);
}
}
} catch (e: any) {
errors.push({ file: `src/annotations/${rel}`, error: String(e?.message || e) });
for (const k of arrayFields) {
if (e[k] == null) continue;
if (!Array.isArray(e[k])) {
errors.push(`${fileRel}: paras.${paraId}.${k} must be an array (coerced to [])`);
e[k] = [];
}
}
for (const [pk, pg] of Object.entries(pages)) {
const keys = Object.keys(pg.paras || {});
keys.sort((a, b) => {
const ia = paraNum(a);
const ib = paraNum(b);
if (Number.isFinite(ia) && Number.isFinite(ib) && ia !== ib) return ia - ib;
return String(a).localeCompare(String(b));
return e;
}
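// Sketch with an assumed malformed entry: the route degrades gracefully
// instead of crashing, and reports what it coerced.
const errs = [];
sanitizeEntry("src/annotations/x.yml", "p-1-abc", { refs: "oops" }, errs);
// => { refs: [] }; errs now holds one "must be an array (coerced to [])" message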
export const GET: APIRoute = async () => {
if (!(await exists(ANNO_DIR))) {
const out = {
schema: 1,
generatedAt: new Date().toISOString(),
pages: {},
stats: { pages: 0, paras: 0, errors: 0 },
errors: [] as string[],
};
return new Response(JSON.stringify(out), {
headers: {
"Content-Type": "application/json; charset=utf-8",
"Cache-Control": "no-store",
},
});
const next: Record<string, any> = {};
for (const k of keys) next[k] = pg.paras[k];
pg.paras = next;
}
const files = (await walk(ANNO_DIR)).filter((p) => /\.(ya?ml|json)$/i.test(p));
const pages: Record<string, { paras: Record<string, Record<string, unknown>> }> =
Object.create(null);
const errors: string[] = [];
let paraCount = 0;
for (const f of files) {
const fileRel = path.relative(CWD, f).replace(/\\/g, "/");
const pageKey = normalizePageKey(inferPageKeyFromFile(ANNO_DIR, f));
if (!pageKey) {
hardFailOrCollect(errors, `${fileRel}: cannot infer page key`);
continue;
}
let doc: unknown;
try {
const raw = await fs.readFile(f, "utf8");
doc = parseDoc(raw, f);
} catch (e) {
hardFailOrCollect(errors, `${fileRel}: parse failed: ${String((e as any)?.message ?? e)}`);
continue;
}
if (!isPlainObject(doc) || (doc as any).schema !== 1) {
hardFailOrCollect(errors, `${fileRel}: schema must be 1`);
continue;
}
if ((doc as any).page != null) {
const declared = normalizePageKey((doc as any).page);
if (declared !== pageKey) {
hardFailOrCollect(
errors,
`${fileRel}: page mismatch (page="${declared}" vs path="${pageKey}")`
);
}
}
const parasAny = (doc as any).paras;
if (!isPlainObject(parasAny)) {
hardFailOrCollect(errors, `${fileRel}: missing object key "paras"`);
continue;
}
if (pages[pageKey]) {
hardFailOrCollect(errors, `${fileRel}: duplicate page "${pageKey}" (only one file per page)`);
continue;
}
const parasOut: Record<string, Record<string, unknown>> = Object.create(null);
for (const [paraId, entry] of Object.entries(parasAny)) {
if (!/^p-\d+-/i.test(paraId)) {
hardFailOrCollect(errors, `${fileRel}: invalid para id "${paraId}"`);
continue;
}
parasOut[paraId] = sanitizeEntry(fileRel, paraId, entry, errors);
}
pages[pageKey] = { paras: parasOut };
paraCount += Object.keys(parasOut).length;
}
const out = {
@@ -183,17 +182,16 @@ export const GET: APIRoute = async () => {
pages,
stats: {
pages: Object.keys(pages).length,
paras: Object.values(pages).reduce((n, p) => n + Object.keys(p.paras || {}).length, 0),
paras: paraCount,
errors: errors.length,
},
errors,
};
if (errors.length) {
throw new Error(`${errors[0].file}: ${errors[0].error}`);
}
return new Response(JSON.stringify(out), {
headers: { "Content-Type": "application/json; charset=utf-8" },
headers: {
"Content-Type": "application/json; charset=utf-8",
"Cache-Control": "no-store",
},
});
};
};

View File

@@ -5,11 +5,12 @@ import EditionToc from "../../components/EditionToc.astro";
import LocalToc from "../../components/LocalToc.astro";
export async function getStaticPaths() {
// ✅ After migration: no more prefix filtering, take the whole collection
const entries = await getCollection("archicrat-ia");
const entries = (await getCollection("archicratie"))
.filter((e) => e.slug.startsWith("archicrat-ia/"));
return entries.map((entry) => ({
params: { slug: entry.slug },
// ✅ inline: never an external helper (avoids "stripPrefix is not defined")
params: { slug: entry.slug.replace(/^archicrat-ia\//, "") },
props: { entry },
}));
}
@@ -34,4 +35,4 @@ const { Content, headings } = await entry.render();
<h1>{entry.data.title}</h1>
<Content />
</EditionLayout>
</EditionLayout>

View File

@@ -2,12 +2,13 @@
import SiteLayout from "../../layouts/SiteLayout.astro";
import { getCollection } from "astro:content";
// ✅ After the physical migration: collection = "archicrat-ia", slug = "chapitre-3" (no prefix)
const entries = await getCollection("archicrat-ia");
const entries = (await getCollection("archicratie"))
.filter((e) => e.slug.startsWith("archicrat-ia/"));
entries.sort((a, b) => (a.data.order ?? 9999) - (b.data.order ?? 9999));
const href = (slug) => `/archicrat-ia/${slug}/`;
const strip = (slug) => slug.replace(/^archicrat-ia\//, "");
const href = (slug) => `/archicrat-ia/${strip(slug)}/`;
---
<SiteLayout title="Essai-thèse — ArchiCraT-IA">
@@ -18,4 +19,4 @@ const href = (slug) => `/archicrat-ia/${slug}/`;
<li><a href={href(e.slug)}>{e.data.title}</a></li>
))}
</ul>
</SiteLayout>
</SiteLayout>