Compare commits

...

31 Commits

Author SHA1 Message Date
497bddd05d ci: fix anno apply/reject JSON parsing + hard gates
All checks were successful
SMOKE / smoke (push) Successful in 8s
CI / build-and-anchors (push) Successful in 48s
CI / build-and-anchors (pull_request) Successful in 40s
2026-03-02 12:47:37 +01:00
7c8e49c1a9 ci: stabilize anno apply/reject (event parsing + strict gating)
Some checks failed
CI / build-and-anchors (push) Successful in 52s
CI / build-and-anchors (pull_request) Successful in 41s
SMOKE / smoke (push) Failing after 12m55s
2026-03-02 11:10:53 +01:00
901d28b89b Merge pull request 'deploy: pin nas-deploy image by digest' (#159) from chore/pin-nas-deploy-image-digest into main
All checks were successful
SMOKE / smoke (push) Successful in 12s
CI / build-and-anchors (push) Successful in 36s
Deploy staging+live (annotations) / deploy (push) Successful in 45s
Reviewed-on: #159
2026-02-28 20:24:46 +01:00
43e2862c89 deploy: pin nas-deploy image by digest
All checks were successful
SMOKE / smoke (push) Successful in 6s
CI / build-and-anchors (push) Successful in 39s
CI / build-and-anchors (pull_request) Successful in 37s
2026-02-28 20:22:17 +01:00
73fb38c4d1 Merge pull request 'deploy: use prebaked nas-deploy image; remove apt-get step' (#158) from chore/deploy-use-prebaked-image into main
All checks were successful
SMOKE / smoke (push) Successful in 9s
CI / build-and-anchors (push) Successful in 36s
Deploy staging+live (annotations) / deploy (push) Successful in 39s
Reviewed-on: #158
2026-02-28 19:51:27 +01:00
a81d206aba deploy: use prebaked nas-deploy image; remove apt-get step
All checks were successful
SMOKE / smoke (push) Successful in 3s
CI / build-and-anchors (push) Successful in 39s
CI / build-and-anchors (pull_request) Successful in 36s
2026-02-28 19:49:25 +01:00
9801ea3cea Merge pull request 'ci: lock deploy workflow to nas-deploy runner' (#157) from chore/lock-nas-deploy into main
All checks were successful
SMOKE / smoke (push) Successful in 16s
CI / build-and-anchors (push) Successful in 44s
Deploy staging+live (annotations) / deploy (push) Successful in 3m40s
Reviewed-on: #157
2026-02-28 17:45:35 +01:00
c11189fe11 ci: lock deploy workflow to nas-deploy runner
All checks were successful
SMOKE / smoke (push) Successful in 7s
CI / build-and-anchors (push) Successful in 44s
CI / build-and-anchors (pull_request) Successful in 40s
2026-02-28 17:43:19 +01:00
b47edb24cf Merge pull request 'ci: fix YAML newlines after runs-on (mac-ci)' (#156) from chore/fix-yaml-runs-on-newlines into main
Some checks failed
CI / build-and-anchors (push) Successful in 41s
SMOKE / smoke (push) Successful in 3s
Deploy staging+live (annotations) / deploy (push) Has been cancelled
Reviewed-on: #156
2026-02-28 15:54:48 +01:00
be191b09a0 ci: fix YAML newlines after runs-on (mac-ci)
All checks were successful
SMOKE / smoke (push) Successful in 25s
CI / build-and-anchors (push) Successful in 1m6s
CI / build-and-anchors (pull_request) Successful in 37s
2026-02-28 15:50:48 +01:00
e06587478d Merge pull request 'ci: route CI/bots to mac runner; keep deploy on NAS' (#155) from chore/route-ci-to-mac-runner into main
All checks were successful
Deploy staging+live (annotations) / deploy (push) Successful in 1m58s
Reviewed-on: #155
2026-02-28 15:29:35 +01:00
402ffb04cd ci: route CI/bots to mac runner; keep deploy on NAS 2026-02-28 15:28:49 +01:00
1cbfc02670 Merge pull request 'ci: harden anno-reject (dispatch + conflict guard) and keep deploy concurrency safe' (#153) from chore/fix-anno-reject-close-guard into main
All checks were successful
CI / build-and-anchors (push) Successful in 2m49s
Deploy staging+live (annotations) / deploy (push) Successful in 3m7s
SMOKE / smoke (push) Successful in 19s
Reviewed-on: #153
2026-02-28 10:05:26 +01:00
28d2fbbd2f ci: harden anno-reject (dispatch + conflict guard) and keep deploy concurrency safe
All checks were successful
CI / build-and-anchors (push) Successful in 2m21s
SMOKE / smoke (push) Successful in 19s
2026-02-28 09:55:37 +01:00
225368a952 Merge pull request 'ci: anno-apply gate on supported types (skip proposer)' (#149) from chore/fix-anno-apply-gate-types into main
All checks were successful
Deploy staging+live (annotations) / deploy (push) Successful in 2m12s
CI / build-and-anchors (push) Successful in 1m45s
SMOKE / smoke (push) Successful in 17s
Reviewed-on: #149
2026-02-27 20:06:57 +01:00
3574695041 ci: anno-apply gate on supported types (skip proposer)
All checks were successful
CI / build-and-anchors (push) Successful in 1m52s
SMOKE / smoke (push) Successful in 15s
2026-02-27 20:03:16 +01:00
ea68025a1d Merge pull request 'anno: apply ticket #144' (#148) from bot/anno-144-20260227-124313 into main
All checks were successful
CI / build-and-anchors (push) Successful in 2m5s
Deploy staging+live (annotations) / deploy (push) Successful in 2m8s
SMOKE / smoke (push) Successful in 13s
Reviewed-on: #148
2026-02-27 15:49:11 +01:00
3a08698003 Merge pull request 'anno: apply ticket #143' (#147) from bot/anno-143-20260227-124037 into main
Some checks failed
CI / build-and-anchors (push) Has been cancelled
Deploy staging+live (annotations) / deploy (push) Has been cancelled
SMOKE / smoke (push) Has been cancelled
Reviewed-on: #147
2026-02-27 15:48:28 +01:00
3d583608c2 Merge pull request 'anno: apply ticket #142' (#146) from bot/anno-142-20260227-123430 into main
Some checks failed
CI / build-and-anchors (push) Has been cancelled
Deploy staging+live (annotations) / deploy (push) Has been cancelled
SMOKE / smoke (push) Successful in 21s
Reviewed-on: #146
2026-02-27 15:47:44 +01:00
archicratie-bot
01ae95ab43 anno: apply ticket #144 (archicrat-ia/chapitre-3#p-0-ace27175 type/media)
All checks were successful
CI / build-and-anchors (push) Successful in 2m0s
SMOKE / smoke (push) Successful in 18s
2026-02-27 12:43:16 +00:00
archicratie-bot
0d5821c640 anno: apply ticket #143 (archicrat-ia/chapitre-1#p-1-8a6c18bf type/comment)
All checks were successful
CI / build-and-anchors (push) Successful in 1m54s
SMOKE / smoke (push) Successful in 16s
2026-02-27 12:40:39 +00:00
archicratie-bot
2bcea39558 anno: apply ticket #142 (archicrat-ia/chapitre-1#p-0-8d27a7f5 type/reference)
All checks were successful
CI / build-and-anchors (push) Successful in 1m53s
SMOKE / smoke (push) Successful in 14s
2026-02-27 12:34:32 +00:00
af85970d4a Merge pull request 'chore/fix-build-annotations-index-shards' (#141) from chore/fix-build-annotations-index-shards into main
All checks were successful
CI / build-and-anchors (push) Successful in 1m45s
Deploy staging+live (annotations) / deploy (push) Successful in 1m53s
SMOKE / smoke (push) Successful in 13s
Reviewed-on: #141
2026-02-27 13:21:43 +01:00
210f621487 ci: support shard annotations in checks + endpoint (pageKey inference)
All checks were successful
CI / build-and-anchors (push) Successful in 1m58s
SMOKE / smoke (push) Successful in 13s
2026-02-27 13:13:31 +01:00
8ad960dc69 anno: build-annotations-index supports shard annotations
Some checks failed
SMOKE / smoke (push) Successful in 16s
CI / build-and-anchors (push) Failing after 1m48s
2026-02-27 12:27:35 +01:00
d45a8b285f anno: support shard annotations in annotations-index endpoint
Some checks failed
CI / build-and-anchors (push) Failing after 1m45s
SMOKE / smoke (push) Successful in 15s
2026-02-27 12:09:40 +01:00
b6e04a9138 anno: robust verify for para-index (normalize page keys)
Some checks failed
SMOKE / smoke (push) Successful in 33s
CI / build-and-anchors (push) Failing after 1m53s
2026-02-27 10:20:49 +01:00
dcf1fc2d0b anno: apply ticket #127 (archicrat-ia/chapitre-4#p-11-67c14c09 type/media) 2026-02-27 10:17:06 +01:00
41b0517c6c Merge pull request 'ci: deploy hotpatch-only + full rebuild warmup' (#139) from chore/fix-deploy-hotpatch-stable into main
All checks were successful
CI / build-and-anchors (push) Successful in 1m50s
Deploy staging+live (annotations) / deploy (push) Successful in 2m12s
SMOKE / smoke (push) Successful in 19s
Reviewed-on: #139
2026-02-26 22:00:20 +01:00
d40f24e92d Merge pull request 'ci: fix hotpatch (yaml datetime -> json safe)' (#138) from chore/fix-hotpatch-json into main
Some checks failed
CI / build-and-anchors (push) Successful in 1m54s
Deploy staging+live (annotations) / deploy (push) Failing after 5m35s
SMOKE / smoke (push) Successful in 16s
Reviewed-on: #138
2026-02-26 21:32:56 +01:00
a5d68d6a7e Merge pull request 'ci: fix deploy workflow (warmup + hotpatch)' (#137) from chore/fix-deploy-warmup into main
Some checks failed
CI / build-and-anchors (push) Successful in 1m38s
Deploy staging+live (annotations) / deploy (push) Failing after 8m16s
SMOKE / smoke (push) Successful in 15s
Reviewed-on: #137
2026-02-26 21:09:21 +01:00
17 changed files with 940 additions and 472 deletions

View File

@@ -16,9 +16,13 @@ defaults:
   run:
     shell: bash

+concurrency:
+  group: anno-apply-${{ github.event.issue.number || inputs.issue || 'manual' }}
+  cancel-in-progress: true
+
 jobs:
   apply-approved:
-    runs-on: ubuntu-latest
+    runs-on: mac-ci
     container:
       image: mcr.microsoft.com/devcontainers/javascript-node:22-bookworm
@@ -29,7 +33,6 @@ jobs:
           git --version
           node --version
          npm --version
-          npm ping --registry=https://registry.npmjs.org

       - name: Derive context (event.json / workflow_dispatch)
         env:
@@ -49,18 +52,15 @@
           const cloneUrl =
             repoObj?.clone_url ||
             (repoObj?.html_url ? (repoObj.html_url.replace(/\/$/,"") + ".git") : "");
           if (!cloneUrl) throw new Error("No repository clone_url/html_url in event.json");
           let owner =
             repoObj?.owner?.login ||
             repoObj?.owner?.username ||
             (repoObj?.full_name ? repoObj.full_name.split("/")[0] : "");
           let repo =
             repoObj?.name ||
             (repoObj?.full_name ? repoObj.full_name.split("/")[1] : "");
           if (!owner || !repo) {
             const m = cloneUrl.match(/[:/](?<o>[^/]+)\/(?<r>[^/]+?)(?:\.git)?$/);
             if (m?.groups) { owner = owner || m.groups.o; repo = repo || m.groups.r; }
@@ -73,15 +73,15 @@
             ev?.issue?.number ||
             ev?.issue?.index ||
             (process.env.INPUT_ISSUE ? Number(process.env.INPUT_ISSUE) : 0);
           if (!issueNumber || !Number.isFinite(Number(issueNumber))) {
             throw new Error("No issue number in event.json or workflow_dispatch input");
           }

+          // label triggered (best effort; may be missing depending on Gitea payload)
           const labelName =
             ev?.label?.name ||
-            ev?.label ||
-            "workflow_dispatch";
+            (typeof ev?.label === "string" ? ev.label : "") ||
+            "";

           const u = new URL(cloneUrl);
           const origin = u.origin;
@@ -90,7 +90,7 @@
             ? String(process.env.FORGE_API).trim().replace(/\/+$/,"")
             : origin;

-          function sh(s){ return JSON.stringify(String(s)); }
+          function sh(s){ return JSON.stringify(String(s ?? "")); }

           process.stdout.write([
@@ -99,23 +99,144 @@
             `DEFAULT_BRANCH=${sh(defaultBranch)}`,
             `ISSUE_NUMBER=${sh(issueNumber)}`,
             `LABEL_NAME=${sh(labelName)}`,
-            `API_BASE=${sh(apiBase)}`
+            `API_BASE=${sh(apiBase)}`,
+            // init safe defaults (avoid "unbound variable" cascades)
+            `SKIP=0`,
+            `SKIP_REASON=${sh("")}`,
+            `ISSUE_TYPE=${sh("")}`,
+            `ISSUE_TITLE=${sh("")}`,
+            `APPLY_RC=${sh("")}`,
+            `NOOP=0`,
+            `BRANCH=${sh("")}`,
+            `END_SHA=${sh("")}`
           ].join("\n") + "\n");
           NODE
           echo "✅ context:"
-          sed -n '1,120p' /tmp/anno.env
+          sed -n '1,160p' /tmp/anno.env

-      - name: Gate on label state/approved
+      - name: Gate fast (only if label is state/approved or workflow_dispatch)
+        env:
+          INPUT_ISSUE: ${{ inputs.issue }}
         run: |
           set -euo pipefail
           source /tmp/anno.env
-          if [[ "$LABEL_NAME" != "state/approved" && "$LABEL_NAME" != "workflow_dispatch" ]]; then
-            echo " label=$LABEL_NAME => skip"
-            echo "SKIP=1" >> /tmp/anno.env
+          # workflow_dispatch => allow
+          if [[ -n "${INPUT_ISSUE:-}" ]]; then
+            echo "✅ workflow_dispatch => proceed"
             exit 0
           fi
-          echo "✅ proceed (issue=$ISSUE_NUMBER)"
+          # if payload provides the triggering label, we can skip without API call
+          if [[ -n "${LABEL_NAME:-}" && "$LABEL_NAME" != "state/approved" ]]; then
+            echo " triggering label='$LABEL_NAME' (not state/approved) => skip"
+            echo "SKIP=1" >> /tmp/anno.env
+            echo "SKIP_REASON=\"trigger_label_not_approved\"" >> /tmp/anno.env
+            exit 0
+          fi
+          echo " label unknown or approved => continue to API gating"
+
+      - name: Fetch issue + gate on state/approved + gate on Type (skip Proposer)
+        env:
+          FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
+        run: |
+          set -euo pipefail
+          source /tmp/anno.env
+          [[ "${SKIP:-0}" != "1" ]] || { echo " skipped"; exit 0; }
+          test -n "${FORGE_TOKEN:-}" || { echo "❌ Missing secret FORGE_TOKEN"; exit 1; }
+          curl -fsS \
+            -H "Authorization: token $FORGE_TOKEN" \
+            -H "Accept: application/json" \
+            "$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE_NUMBER" \
+            > /tmp/issue.json
+          node --input-type=module - <<'NODE' >> /tmp/anno.env
+          import fs from "node:fs";
+          const issue = JSON.parse(fs.readFileSync("/tmp/issue.json","utf8"));
+          const title = String(issue.title || "");
+          const body = String(issue.body || "").replace(/\r\n/g, "\n");
+          const labels = Array.isArray(issue.labels)
+            ? issue.labels.map(l => String(l?.name || "")).filter(Boolean)
+            : [];
+          const hasApproved = labels.includes("state/approved");
+          function pickLine(key) {
+            const re = new RegExp(`^\\s*${key}\\s*:\\s*([^\\n\\r]+)`, "mi");
+            const m = body.match(re);
+            return m ? m[1].trim() : "";
+          }
+          const typeRaw = pickLine("Type");
+          const type = String(typeRaw || "").trim().toLowerCase();
+          const allowed = new Set(["type/media","type/reference","type/comment"]);
+          const proposer = new Set(["type/correction","type/fact-check"]);
+          const out = [];
+          out.push(`ISSUE_TITLE=${JSON.stringify(title)}`);
+          out.push(`ISSUE_TYPE=${JSON.stringify(type)}`);
+          // main gate: only act if state/approved is actually present on the issue
+          if (!hasApproved) {
+            out.push(`SKIP=1`);
+            out.push(`SKIP_REASON=${JSON.stringify("no_state_approved")}`);
+          } else if (!type) {
+            out.push(`SKIP=1`);
+            out.push(`SKIP_REASON=${JSON.stringify("missing_type")}`);
+          } else if (allowed.has(type)) {
+            // proceed
+          } else if (proposer.has(type)) {
+            out.push(`SKIP=1`);
+            out.push(`SKIP_REASON=${JSON.stringify("proposer_type:"+type)}`);
+          } else {
+            out.push(`SKIP=1`);
+            out.push(`SKIP_REASON=${JSON.stringify("unsupported_type:"+type)}`);
+          }
+          process.stdout.write(out.join("\n") + "\n");
+          NODE
+          echo "✅ issue gating:"
+          grep -E '^(ISSUE_TYPE|SKIP|SKIP_REASON)=' /tmp/anno.env || true
+
+      - name: Comment issue if skipped (only when state/approved was present)
+        if: ${{ always() }}
+        env:
+          FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
+        run: |
+          set -euo pipefail
+          source /tmp/anno.env || true
+          [[ "${SKIP:-0}" == "1" ]] || exit 0
+          [[ "${SKIP_REASON:-}" != "no_state_approved" ]] || exit 0  # do not comment on normal label churn
+          test -n "${FORGE_TOKEN:-}" || exit 0
+          REASON="${SKIP_REASON:-}"
+          TYPE="${ISSUE_TYPE:-}"
+          if [[ "$REASON" == proposer_type:* ]]; then
+            MSG=" Ticket #${ISSUE_NUMBER} détecté comme **Proposer** (${TYPE}).\n\n- Ce type est **traité manuellement par les editors** (correction/fact-check + cat/*).\n- Le bot n'applique **jamais** Proposer.\n\n✅ Action : traitement éditorial manuel."
+          elif [[ "$REASON" == unsupported_type:* ]]; then
+            MSG=" Ticket #${ISSUE_NUMBER} ignoré : Type non supporté par le bot (${TYPE}).\n\nTypes supportés : type/media, type/reference, type/comment.\n✅ Action : traitement manuel si nécessaire."
+          else
+            MSG=" Ticket #${ISSUE_NUMBER} ignoré : champ 'Type:' manquant ou illisible.\n\n✅ Action : corriger le ticket (Type: type/media|type/reference|type/comment) ou traiter manuellement."
+          fi
+          PAYLOAD="$(node --input-type=module -e 'console.log(JSON.stringify({body: process.argv[1]||""}))' "$MSG")"
+          curl -fsS -X POST \
+            -H "Authorization: token $FORGE_TOKEN" \
+            -H "Content-Type: application/json" \
+            "$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE_NUMBER/comments" \
+            --data-binary "$PAYLOAD"
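Aside: the Type gate added above is a plain line-oriented parse of the issue body. A standalone replay, with pickLine copied from the new step and the sample issue body invented for illustration:

const body = [
  "Chemin: /archicrat-ia/chapitre-1/",
  "Type: type/media",
  "Ancre: #p-0-8d27a7f5",
].join("\n");

function pickLine(key) {
  // first "Key: value" line, case-insensitive, anywhere in the body
  const re = new RegExp(`^\\s*${key}\\s*:\\s*([^\\n\\r]+)`, "mi");
  const m = body.match(re);
  return m ? m[1].trim() : "";
}

const type = pickLine("Type").toLowerCase();
const allowed = new Set(["type/media", "type/reference", "type/comment"]);
const proposer = new Set(["type/correction", "type/fact-check"]);

// prints "apply" here; Proposer types are deliberately left to human editors
console.log(allowed.has(type) ? "apply" : proposer.has(type) ? "skip (Proposer, manual)" : "skip (unsupported)");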
      - name: Checkout default branch
         run: |
@@ -135,7 +256,7 @@
           set -euo pipefail
           source /tmp/anno.env
           [[ "${SKIP:-0}" != "1" ]] || { echo " skipped"; exit 0; }
-          npm ci
+          npm ci --no-audit --no-fund

       - name: Check apply script exists
         run: |
@@ -154,7 +275,7 @@
           source /tmp/anno.env
           [[ "${SKIP:-0}" != "1" ]] || { echo " skipped"; exit 0; }
-          npm run build:clean
+          npm run build
           test -f dist/para-index.json || {
             echo "❌ missing dist/para-index.json after build"
@@ -182,7 +303,7 @@
           START_SHA="$(git rev-parse HEAD)"
           TS="$(date -u +%Y%m%d-%H%M%S)"
           BR="bot/anno-${ISSUE_NUMBER}-${TS}"
-          echo "BRANCH=$BR" >> /tmp/anno.env
+          echo "BRANCH=\"$BR\"" >> /tmp/anno.env
           git checkout -b "$BR"

           export FORGE_API="$API_BASE"
@@ -195,12 +316,13 @@
           RC=$?
           set -e
-          echo "APPLY_RC=$RC" >> /tmp/anno.env
+          echo "APPLY_RC=\"$RC\"" >> /tmp/anno.env
           echo "== apply log (tail) =="
           tail -n 180 "$LOG" || true

           END_SHA="$(git rev-parse HEAD)"
+          echo "END_SHA=\"$END_SHA\"" >> /tmp/anno.env
           if [[ "$RC" -ne 0 ]]; then
             echo "NOOP=0" >> /tmp/anno.env
@@ -211,7 +333,6 @@
             echo "NOOP=1" >> /tmp/anno.env
           else
             echo "NOOP=0" >> /tmp/anno.env
-            echo "END_SHA=$END_SHA" >> /tmp/anno.env
           fi

       - name: Comment issue on failure (strict/verify/etc)
@@ -220,18 +341,20 @@
           FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
         run: |
           set -euo pipefail
-          source /tmp/anno.env
+          source /tmp/anno.env || true
           [[ "${SKIP:-0}" != "1" ]] || { echo " skipped"; exit 0; }
-          RC="${APPLY_RC:-0}"
-          if [[ "$RC" == "0" ]]; then
-            echo " no failure detected"
-            exit 0
-          fi
-          BODY="$(tail -n 160 /tmp/apply.log | sed 's/\r$//')"
+          RC="${APPLY_RC:-}"
+          [[ -n "$RC" ]] || { echo " apply not executed"; exit 0; }
+          [[ "$RC" != "0" ]] || { echo " no failure detected"; exit 0; }
+          if [[ -f /tmp/apply.log ]]; then
+            BODY="$(tail -n 160 /tmp/apply.log | sed 's/\r$//')"
+          else
+            BODY="(no apply log found)"
+          fi
           MSG="❌ apply-annotation-ticket a échoué (rc=${RC}).\n\n\`\`\`\n${BODY}\n\`\`\`\n"
           PAYLOAD="$(node --input-type=module -e 'console.log(JSON.stringify({body: process.argv[1]||""}))' "$MSG")"
           curl -fsS -X POST \
@@ -246,10 +369,11 @@
           FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
         run: |
           set -euo pipefail
-          source /tmp/anno.env
+          source /tmp/anno.env || true
           [[ "${SKIP:-0}" != "1" ]] || exit 0
-          [[ "${APPLY_RC:-0}" == "0" ]] || exit 0
+          RC="${APPLY_RC:-}"
+          [[ "$RC" == "0" ]] || exit 0
           [[ "${NOOP:-0}" == "1" ]] || exit 0

           MSG=" Ticket #${ISSUE_NUMBER} : rien à appliquer (déjà présent / dédupliqué)."
@@ -267,11 +391,12 @@
           FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
         run: |
           set -euo pipefail
-          source /tmp/anno.env
+          source /tmp/anno.env || true
           [[ "${SKIP:-0}" != "1" ]] || exit 0
-          [[ "${APPLY_RC:-0}" == "0" ]] || { echo " apply failed -> skip push"; exit 0; }
+          [[ "${APPLY_RC:-}" == "0" ]] || { echo " apply not ok -> skip push"; exit 0; }
           [[ "${NOOP:-0}" == "0" ]] || { echo " no-op -> skip push"; exit 0; }
+          [[ -n "${BRANCH:-}" ]] || { echo " missing BRANCH -> skip push"; exit 0; }

           AUTH_URL="$(node --input-type=module -e '
             const [clone, tok] = process.argv.slice(1);
@@ -290,11 +415,13 @@
           FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
         run: |
           set -euo pipefail
-          source /tmp/anno.env
+          source /tmp/anno.env || true
           [[ "${SKIP:-0}" != "1" ]] || exit 0
-          [[ "${APPLY_RC:-0}" == "0" ]] || { echo " apply failed -> skip PR"; exit 0; }
+          [[ "${APPLY_RC:-}" == "0" ]] || { echo " apply not ok -> skip PR"; exit 0; }
           [[ "${NOOP:-0}" == "0" ]] || { echo " no-op -> skip PR"; exit 0; }
+          [[ -n "${BRANCH:-}" ]] || { echo " missing BRANCH -> skip PR"; exit 0; }
+          [[ -n "${END_SHA:-}" ]] || { echo " missing END_SHA -> skip PR"; exit 0; }

           PR_TITLE="anno: apply ticket #${ISSUE_NUMBER}"
           PR_BODY="PR auto depuis ticket #${ISSUE_NUMBER} (state/approved).\n\n- Branche: ${BRANCH}\n- Commit: ${END_SHA}\n\nMerge si CI OK."
@@ -333,9 +460,12 @@
         run: |
           set -euo pipefail
           source /tmp/anno.env || true
           [[ "${SKIP:-0}" != "1" ]] || { echo " skipped"; exit 0; }
-          RC="${APPLY_RC:-0}"
+          RC="${APPLY_RC:-}"
+          [[ -n "$RC" ]] || { echo "❌ apply did not run"; exit 2; }
           if [[ "$RC" != "0" ]]; then
             echo "❌ apply failed (rc=$RC)"
             exit "$RC"

View File

@@ -1,8 +1,13 @@
-name: Anno Reject
+name: Anno Reject (close issue)

 on:
   issues:
     types: [labeled]
+  workflow_dispatch:
+    inputs:
+      issue:
+        description: "Issue number to reject/close"
+        required: true

 env:
   NODE_OPTIONS: --dns-result-order=ipv4first
@@ -11,14 +16,26 @@
   run:
     shell: bash

+concurrency:
+  group: anno-reject-${{ github.event.issue.number || inputs.issue || 'manual' }}
+  cancel-in-progress: true
+
 jobs:
   reject:
-    runs-on: ubuntu-latest
+    runs-on: mac-ci
     container:
       image: mcr.microsoft.com/devcontainers/javascript-node:22-bookworm

     steps:
-      - name: Derive context
+      - name: Tools sanity
+        run: |
+          set -euo pipefail
+          node --version
+
+      - name: Derive context (event.json / workflow_dispatch)
+        env:
+          INPUT_ISSUE: ${{ inputs.issue }}
+          FORGE_API: ${{ vars.FORGE_API || vars.FORGE_BASE }}
         run: |
           set -euo pipefail
           export EVENT_JSON="/var/run/act/workflow/event.json"
@@ -29,58 +46,129 @@
           const ev = JSON.parse(fs.readFileSync(process.env.EVENT_JSON, "utf8"));
           const repoObj = ev?.repository || {};
           const cloneUrl =
             repoObj?.clone_url ||
             (repoObj?.html_url ? (repoObj.html_url.replace(/\/$/,"") + ".git") : "");
-          if (!cloneUrl) throw new Error("No repository url");
           let owner =
             repoObj?.owner?.login ||
             repoObj?.owner?.username ||
             (repoObj?.full_name ? repoObj.full_name.split("/")[0] : "");
           let repo =
             repoObj?.name ||
             (repoObj?.full_name ? repoObj.full_name.split("/")[1] : "");
-          if (!owner || !repo) {
+          if ((!owner || !repo) && cloneUrl) {
             const m = cloneUrl.match(/[:/](?<o>[^/]+)\/(?<r>[^/]+?)(?:\.git)?$/);
             if (m?.groups) { owner = owner || m.groups.o; repo = repo || m.groups.r; }
           }
           if (!owner || !repo) throw new Error("Cannot infer owner/repo");
-          const issueNumber = ev?.issue?.number || ev?.issue?.index;
-          if (!issueNumber) throw new Error("No issue number");
-          const labelName = ev?.label?.name || ev?.label || "";
-          const u = new URL(cloneUrl);
+          const issueNumber =
+            ev?.issue?.number ||
+            ev?.issue?.index ||
+            (process.env.INPUT_ISSUE ? Number(process.env.INPUT_ISSUE) : 0);
+          if (!issueNumber || !Number.isFinite(Number(issueNumber))) {
+            throw new Error("No issue number in event.json or workflow_dispatch input");
+          }
+          const labelName =
+            ev?.label?.name ||
+            (typeof ev?.label === "string" ? ev.label : "") ||
+            "";
+          let apiBase = "";
+          if (process.env.FORGE_API && String(process.env.FORGE_API).trim()) {
+            apiBase = String(process.env.FORGE_API).trim().replace(/\/+$/,"");
+          } else if (cloneUrl) {
+            apiBase = new URL(cloneUrl).origin;
+          } else {
+            apiBase = "";
+          }

-          function sh(s){ return JSON.stringify(String(s)); }
+          function sh(s){ return JSON.stringify(String(s ?? "")); }

           process.stdout.write([
             `OWNER=${sh(owner)}`,
             `REPO=${sh(repo)}`,
             `ISSUE_NUMBER=${sh(issueNumber)}`,
             `LABEL_NAME=${sh(labelName)}`,
-            `API_BASE=${sh(u.origin)}`
+            `API_BASE=${sh(apiBase)}`,
+            `SKIP=0`
           ].join("\n") + "\n");
           NODE
+          echo "✅ context:"
+          sed -n '1,160p' /tmp/reject.env

-      - name: Gate on label state/rejected
+      - name: Gate fast (only if label is state/rejected or workflow_dispatch)
+        env:
+          INPUT_ISSUE: ${{ inputs.issue }}
         run: |
           set -euo pipefail
           source /tmp/reject.env
-          if [[ "$LABEL_NAME" != "state/rejected" ]]; then
-            echo " label=$LABEL_NAME => skip"
+          if [[ -n "${INPUT_ISSUE:-}" ]]; then
+            echo "✅ workflow_dispatch => proceed"
             exit 0
           fi
-          echo "✅ reject issue=$ISSUE_NUMBER"
+          if [[ -n "${LABEL_NAME:-}" && "$LABEL_NAME" != "state/rejected" ]]; then
+            echo " triggering label='$LABEL_NAME' (not state/rejected) => skip"
+            echo "SKIP=1" >> /tmp/reject.env
+            exit 0
+          fi
+          echo " label unknown or rejected => continue to API gating"

-      - name: Comment + close issue
+      - name: Comment + close (only if issue has state/rejected; conflict-guard approved+rejected)
         env:
           FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
         run: |
           set -euo pipefail
           source /tmp/reject.env
+          [[ "${SKIP:-0}" != "1" ]] || { echo " skipped"; exit 0; }
           test -n "${FORGE_TOKEN:-}" || { echo "❌ Missing secret FORGE_TOKEN"; exit 1; }
+          test -n "${API_BASE:-}" || { echo "❌ Missing API_BASE"; exit 1; }
+          curl -fsS \
+            -H "Authorization: token $FORGE_TOKEN" \
+            -H "Accept: application/json" \
+            "$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE_NUMBER" \
+            > /tmp/issue.json
+          node --input-type=module - <<'NODE' > /tmp/reject.flags
+          import fs from "node:fs";
+          const issue = JSON.parse(fs.readFileSync("/tmp/issue.json","utf8"));
+          const labels = Array.isArray(issue.labels) ? issue.labels.map(l => String(l.name || "")).filter(Boolean) : [];
+          const hasApproved = labels.includes("state/approved");
+          const hasRejected = labels.includes("state/rejected");
+          process.stdout.write(`HAS_APPROVED=${hasApproved ? "1":"0"}\nHAS_REJECTED=${hasRejected ? "1":"0"}\n`);
+          NODE
+          source /tmp/reject.flags
+          # If issue does not actually have state/rejected -> do nothing (normal label churn)
+          if [[ "${HAS_REJECTED:-0}" != "1" ]]; then
+            echo " issue has no state/rejected => skip"
+            exit 0
+          fi
+          if [[ "${HAS_APPROVED:-0}" == "1" && "${HAS_REJECTED:-0}" == "1" ]]; then
+            MSG="⚠️ Conflit d'état sur le ticket #${ISSUE_NUMBER} : labels **state/approved** et **state/rejected** présents.\n\n➡ Action manuelle requise : retirer l'un des deux labels avant relance."
+            PAYLOAD="$(node --input-type=module -e 'console.log(JSON.stringify({body: process.argv[1]||""}))' "$MSG")"
+            curl -fsS -X POST \
+              -H "Authorization: token $FORGE_TOKEN" \
+              -H "Content-Type: application/json" \
+              "$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE_NUMBER/comments" \
+              --data-binary "$PAYLOAD"
+            echo " conflict => stop"
+            exit 0
+          fi
           MSG="❌ Ticket #${ISSUE_NUMBER} refusé (label state/rejected)."
           PAYLOAD="$(node --input-type=module -e 'console.log(JSON.stringify({body: process.argv[1]||""}))' "$MSG")"
@@ -95,4 +183,6 @@
             -H "Authorization: token $FORGE_TOKEN" \
             -H "Content-Type: application/json" \
             "$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE_NUMBER" \
             --data-binary '{"state":"closed"}'
+          echo "✅ rejected+closed"

View File

@@ -6,7 +6,7 @@ on:
 jobs:
   label:
-    runs-on: ubuntu-latest
+    runs-on: mac-ci
     steps:
       - name: Apply labels from Type/State/Category
         env:

View File

@@ -3,7 +3,7 @@ name: CI
 on:
   push:
   pull_request:
-    branches: [master]
+    branches: [main]
   workflow_dispatch:

 env:
@@ -15,7 +15,7 @@ defaults:
 jobs:
   build-and-anchors:
-    runs-on: ubuntu-latest
+    runs-on: mac-ci
     container:
       image: mcr.microsoft.com/devcontainers/javascript-node:22-bookworm

View File

@@ -26,9 +26,9 @@ concurrency:
 jobs:
   deploy:
-    runs-on: ubuntu-latest
+    runs-on: nas-deploy
     container:
-      image: mcr.microsoft.com/devcontainers/javascript-node:22-bookworm
+      image: localhost:5000/archicratie/nas-deploy-node22@sha256:fefa8bb307005cebec07796661ab25528dc319c33a8f1e480e1d66f90cd5cff6
     steps:
       - name: Tools sanity
@@ -127,25 +127,17 @@ jobs:
             echo " no annotations/media change -> skip deploy"
           fi

-      - name: Install docker client + docker compose plugin (v2) + python yaml
+      - name: Toolchain sanity + resolve COMPOSE_PROJECT_NAME
         run: |
           set -euo pipefail
           source /tmp/deploy.env
           [[ "${GO:-0}" == "1" ]] || { echo " skipped"; exit 0; }
-          apt-get -o Acquire::Retries=5 -o Acquire::ForceIPv4=true update
-          apt-get install -y --no-install-recommends ca-certificates curl docker.io python3 python3-yaml
-          rm -rf /var/lib/apt/lists/*
-          mkdir -p /usr/local/lib/docker/cli-plugins
-          curl -fsSL \
-            "https://github.com/docker/compose/releases/download/v${COMPOSE_VERSION}/docker-compose-linux-x86_64" \
-            -o /usr/local/lib/docker/cli-plugins/docker-compose
-          chmod +x /usr/local/lib/docker/cli-plugins/docker-compose
+          # tools are prebaked in the image
+          git --version
           docker version
           docker compose version
-          python3 --version
+          python3 -c 'import yaml; print("PyYAML OK")'

           # Reuse existing compose project name if containers already exist
           PROJ="$(docker inspect archicratie-web-blue --format '{{ index .Config.Labels "com.docker.compose.project" }}' 2>/dev/null || true)"

View File

@@ -3,7 +3,7 @@ on: [push, workflow_dispatch]
 jobs:
   smoke:
-    runs-on: ubuntu-latest
+    runs-on: mac-ci
     steps:
       - run: node -v && npm -v
       - run: echo "runner OK"

(Binary file added: image, 61 KiB. Preview not shown.)

(Binary file added: image, 61 KiB. Preview not shown.)

View File

@@ -1,12 +1,17 @@
#!/usr/bin/env node #!/usr/bin/env node
// scripts/apply-annotation-ticket.mjs // scripts/apply-annotation-ticket.mjs
//
// Applique un ticket Gitea "type/media | type/reference | type/comment" vers: // Applique un ticket Gitea "type/media | type/reference | type/comment" vers:
// //
// ✅ src/annotations/<oeuvre>/<chapitre>/<paraId>.yml (sharding par paragraphe) // ✅ src/annotations/<oeuvre>/<chapitre>/<paraId>.yml (sharding par paragraphe)
// ✅ public/media/<oeuvre>/<chapitre>/<paraId>/<file> // ✅ public/media/<oeuvre>/<chapitre>/<paraId>/<file>
// //
// Robuste, idempotent, non destructif. // Compat rétro : lit (si présent) l'ancien monolithe:
// src/annotations/<oeuvre>/<chapitre>.yml
// et deep-merge NON destructif dans le shard lors d'une nouvelle application,
// pour permettre une migration progressive sans perte.
// //
// Robuste, idempotent, non destructif.
// DRY RUN si --dry-run // DRY RUN si --dry-run
// Options: --dry-run --no-download --verify --strict --commit --close // Options: --dry-run --no-download --verify --strict --commit --close
// //
@@ -49,8 +54,8 @@ Flags:
--dry-run : n'écrit rien (affiche un aperçu) --dry-run : n'écrit rien (affiche un aperçu)
--no-download : n'essaie pas de télécharger les pièces jointes (media) --no-download : n'essaie pas de télécharger les pièces jointes (media)
--verify : vérifie que (page, ancre) existent (dist/para-index.json si dispo, sinon baseline) --verify : vérifie que (page, ancre) existent (dist/para-index.json si dispo, sinon baseline)
--strict : refuse si URL ref invalide (http/https) OU caption media vide --strict : refuse si URL ref invalide (http/https) OU caption media vide OU verify impossible
--commit : git add + git commit (le script commit dans la branche courante) --commit : git add + git commit (commit dans la branche courante)
--close : ferme le ticket (nécessite --commit) --close : ferme le ticket (nécessite --commit)
Env requis: Env requis:
@@ -191,6 +196,7 @@ function normalizeChemin(chemin) {
} }
function normalizePageKeyFromChemin(chemin) { function normalizePageKeyFromChemin(chemin) {
// ex: /archicrat-ia/chapitre-4/ => archicrat-ia/chapitre-4
return normalizeChemin(chemin).replace(/^\/+|\/+$/g, ""); return normalizeChemin(chemin).replace(/^\/+|\/+$/g, "");
} }
@@ -226,90 +232,156 @@
   }
 }

-/* ------------------------------ para-index (verify + sort) ------------------------------ */
+function stableSortByTs(arr) {
+  if (!Array.isArray(arr)) return;
+  arr.sort((a, b) => {
+    const ta = Date.parse(a?.ts || "") || 0;
+    const tb = Date.parse(b?.ts || "") || 0;
+    if (ta !== tb) return ta - tb;
+    return JSON.stringify(a).localeCompare(JSON.stringify(b));
+  });
+}
+
+function normPage(s) {
+  let x = String(s || "").trim();
+  if (!x) return "";
+  // retire origin si on a une URL complète
+  x = x.replace(/^https?:\/\/[^/]+/i, "");
+  // enlève query/hash
+  x = x.split("#")[0].split("?")[0];
+  // enlève index.html
+  x = x.replace(/index\.html$/i, "");
+  // enlève slashs de bord
+  x = x.replace(/^\/+/, "").replace(/\/+$/, "");
+  return x;
+}
+
+/* ------------------------------ para-index (verify + order) ------------------------------ */

 async function loadParaOrderFromDist(pageKey) {
   const distIdx = path.join(CWD, "dist", "para-index.json");
   if (!(await exists(distIdx))) return null;

   let j;
   try {
     j = JSON.parse(await fs.readFile(distIdx, "utf8"));
   } catch {
     return null;
   }

-  // Support several shapes:
-  // A) { items:[{id,page,...}, ...] }
-  if (Array.isArray(j?.items)) {
-    const ids = [];
-    for (const it of j.items) {
-      const p = String(it?.page || it?.pageKey || "");
-      const id = String(it?.id || it?.paraId || "");
-      if (p === pageKey && id) ids.push(id);
-    }
-    if (ids.length) return ids;
-  }
+  const want = normPage(pageKey);
+
+  // Support A) { items:[{id,page,...}, ...] } (ou variantes)
+  const items = Array.isArray(j?.items)
+    ? j.items
+    : Array.isArray(j?.index?.items)
+      ? j.index.items
+      : null;
+
+  if (items) {
+    const ids = [];
+    for (const it of items) {
+      // page peut être dans plein de clés différentes
+      const pageCand = normPage(
+        it?.page ??
+        it?.pageKey ??
+        it?.path ??
+        it?.route ??
+        it?.href ??
+        it?.url ??
+        ""
+      );
+      // id peut être dans plein de clés différentes
+      let id = String(it?.id ?? it?.paraId ?? it?.anchorId ?? it?.anchor ?? "");
+      if (id.startsWith("#")) id = id.slice(1);
+      if (pageCand === want && id) ids.push(id);
+    }
+    if (ids.length) return ids;
+  }

-  // B) { byId: { "p-...": { page:"archicrat-ia/chapitre-4", ... }, ... } }
+  // Support B) { byId: { "p-...": { page:"...", ... }, ... } }
   if (j?.byId && typeof j.byId === "object") {
-    // cannot rebuild full order; but can verify existence
-    // return a pseudo-order map from known ids sorted by p-<n>- then alpha
-    const ids = Object.keys(j.byId).filter((id) => String(j.byId[id]?.page || "") === pageKey);
+    const ids = Object.keys(j.byId)
+      .filter((id) => {
+        const meta = j.byId[id] || {};
+        const pageCand = normPage(meta.page ?? meta.pageKey ?? meta.path ?? meta.route ?? meta.url ?? "");
+        return pageCand === want;
+      });
     if (ids.length) {
       ids.sort((a, b) => {
         const ia = paraIndexFromId(a);
         const ib = paraIndexFromId(b);
         if (Number.isFinite(ia) && Number.isFinite(ib) && ia !== ib) return ia - ib;
         return String(a).localeCompare(String(b));
       });
       return ids;
     }
   }

-  // C) { pages: { "archicrat-ia/chapitre-4": { ids:[...]} } }
+  // Support C) { pages: { "archicrat-ia/chapitre-4": { ids:[...] } } } (ou variantes)
   if (j?.pages && typeof j.pages === "object") {
-    const pg = j.pages[pageKey];
-    if (Array.isArray(pg?.ids)) return pg.ids.map(String);
-    if (Array.isArray(pg?.paras)) return pg.paras.map(String);
+    // essaie de trouver la bonne clé même si elle est /.../ ou .../index.html
+    const keys = Object.keys(j.pages);
+    const hit = keys.find((k) => normPage(k) === want);
+    if (hit) {
+      const pg = j.pages[hit];
+      if (Array.isArray(pg?.ids)) return pg.ids.map(String);
+      if (Array.isArray(pg?.paras)) return pg.paras.map(String);
+    }
   }

   return null;
 }

 async function tryVerifyAnchor(pageKey, anchorId) {
-  // 1) dist/para-index.json
+  // 1) dist/para-index.json : order complet si possible
   const order = await loadParaOrderFromDist(pageKey);
   if (order) return order.includes(anchorId);

+  // 1bis) dist/para-index.json : fallback “best effort” => recherche brute (IDs quasi uniques)
+  const distIdx = path.join(CWD, "dist", "para-index.json");
+  if (await exists(distIdx)) {
+    try {
+      const raw = await fs.readFile(distIdx, "utf8");
+      if (raw.includes(`"${anchorId}"`) || raw.includes(`"#${anchorId}"`)) {
+        return true;
+      }
+    } catch {
+      // ignore
+    }
+  }
+
   // 2) tests/anchors-baseline.json (fallback)
   const base = path.join(CWD, "tests", "anchors-baseline.json");
   if (await exists(base)) {
     try {
       const j = JSON.parse(await fs.readFile(base, "utf8"));
       const candidates = [];
       if (j?.pages && typeof j.pages === "object") {
         for (const [k, v] of Object.entries(j.pages)) {
           if (!Array.isArray(v)) continue;
-          if (String(k).includes(pageKey)) candidates.push(...v);
+          if (normPage(k).includes(normPage(pageKey))) candidates.push(...v);
         }
       }
       if (Array.isArray(j?.entries)) {
         for (const it of j.entries) {
           const p = String(it?.page || "");
           const ids = it?.ids;
-          if (Array.isArray(ids) && p.includes(pageKey)) candidates.push(...ids);
+          if (Array.isArray(ids) && normPage(p).includes(normPage(pageKey))) candidates.push(...ids);
         }
       }
       if (candidates.length) return candidates.some((x) => String(x) === anchorId);
     } catch {
       // ignore
     }
   }

   return null; // cannot verify
 }
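normPage is what makes the different para-index shapes comparable: any spelling of a page (absolute URL, leading slash, index.html suffix, query/hash) collapses to one key. A standalone check, with the helper copied from above and the sample URLs invented:

function normPage(s) {
  let x = String(s || "").trim();
  if (!x) return "";
  x = x.replace(/^https?:\/\/[^/]+/i, "");       // drop origin
  x = x.split("#")[0].split("?")[0];             // drop hash/query
  x = x.replace(/index\.html$/i, "");            // drop index.html
  x = x.replace(/^\/+/, "").replace(/\/+$/, ""); // trim edge slashes
  return x;
}

for (const v of [
  "https://example.org/archicrat-ia/chapitre-4/",
  "/archicrat-ia/chapitre-4/index.html?x=1#p-2-abc",
  "archicrat-ia/chapitre-4",
]) console.log(normPage(v)); // all three print "archicrat-ia/chapitre-4"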
return null; // cannot verify
}
 /* ----------------------------- deep merge helpers (non destructive) ----------------------------- */

 function keyMedia(x) {
@@ -360,7 +432,6 @@ function deepMergeEntry(dst, src) {
     }

     if (Array.isArray(v)) {
-      // fallback: union by JSON string
       const cur = Array.isArray(dst[k]) ? dst[k] : [];
       const seen = new Set(cur.map((x) => JSON.stringify(x)));
       const out = [...cur];
@@ -382,16 +453,6 @@
   }
 }

-function stableSortByTs(arr) {
-  if (!Array.isArray(arr)) return;
-  arr.sort((a, b) => {
-    const ta = Date.parse(a?.ts || "") || 0;
-    const tb = Date.parse(b?.ts || "") || 0;
-    if (ta !== tb) return ta - tb;
-    return JSON.stringify(a).localeCompare(JSON.stringify(b));
-  });
-}
-
 /* ----------------------------- annotations I/O ----------------------------- */

 async function loadAnnoDocYaml(fileAbs, pageKey) {
@@ -424,9 +485,7 @@
 function sortParasObject(paras, order) {
   const keys = Object.keys(paras || {});
   const idx = new Map();
-  if (Array.isArray(order)) {
-    order.forEach((id, i) => idx.set(String(id), i));
-  }
+  if (Array.isArray(order)) order.forEach((id, i) => idx.set(String(id), i));

   keys.sort((a, b) => {
     const ha = idx.has(a);
@@ -448,9 +507,9 @@
 async function saveAnnoDocYaml(fileAbs, doc, order = null) {
   await fs.mkdir(path.dirname(fileAbs), { recursive: true });
   doc.paras = sortParasObject(doc.paras, order);

+  // also sort known lists inside each para for stable diffs
   for (const e of Object.values(doc.paras || {})) {
     if (!isPlainObject(e)) continue;
     stableSortByTs(e.media);
@@ -632,7 +691,6 @@
   const pageKey = normalizePageKeyFromChemin(chemin);
   assert(pageKey, "Ticket: impossible de dériver pageKey.", 2);

-  // para order (used for verify + sorting)
   const paraOrder = DO_VERIFY ? await loadParaOrderFromDist(pageKey) : null;

   if (DO_VERIFY) {
@@ -641,46 +699,43 @@
       throw Object.assign(new Error(`Ticket verify: ancre introuvable pour page "${pageKey}" => ${ancre}`), { __exitCode: 2 });
     }
     if (ok === null) {
-      if (STRICT) throw Object.assign(new Error(`Ticket verify (strict): impossible de vérifier (pas de dist/para-index.json ou baseline)`), { __exitCode: 2 });
+      if (STRICT) {
+        throw Object.assign(
+          new Error(`Ticket verify (strict): impossible de vérifier (pas de dist/para-index.json ou baseline)`),
+          { __exitCode: 2 }
+        );
+      }
       console.warn("⚠️ verify: impossible de vérifier (pas de dist/para-index.json ou baseline) — on continue.");
     }
   }

-  // ✅ SHARD FILE: src/annotations/<pageKey>/<paraId>.yml
-  const annoShardFileAbs = path.join(ANNO_DIR, pageKey, `${ancre}.yml`);
-  const annoShardFileRel = path.relative(CWD, annoShardFileAbs).replace(/\\/g, "/");
+  // ✅ shard path: src/annotations/<pageKey>/<paraId>.yml
+  const shardAbs = path.join(ANNO_DIR, ...pageKey.split("/"), `${ancre}.yml`);
+  const shardRel = path.relative(CWD, shardAbs).replace(/\\/g, "/");

-  // legacy (read-only, used as base to avoid losing previously stored data)
-  const annoLegacyFileAbs = path.join(ANNO_DIR, `${pageKey}.yml`);
+  // legacy monolith: src/annotations/<pageKey>.yml (read-only, for migration)
+  const legacyAbs = path.join(ANNO_DIR, `${pageKey}.yml`);

-  console.log("✅ Parsed:", {
-    type,
-    chemin,
-    ancre: `#${ancre}`,
-    pageKey,
-    annoFile: annoShardFileRel,
-  });
+  console.log("✅ Parsed:", { type, chemin, ancre: `#${ancre}`, pageKey, annoFile: shardRel });

   // load shard doc
-  const doc = await loadAnnoDocYaml(annoShardFileAbs, pageKey);
+  const doc = await loadAnnoDocYaml(shardAbs, pageKey);

-  // merge legacy para into shard as base (non destructive)
-  if (await exists(annoLegacyFileAbs)) {
-    try {
-      const legacy = await loadAnnoDocYaml(annoLegacyFileAbs, pageKey);
-      const legacyEntry = legacy?.paras?.[ancre];
-      if (isPlainObject(legacyEntry)) {
-        if (!isPlainObject(doc.paras[ancre])) doc.paras[ancre] = {};
-        deepMergeEntry(doc.paras[ancre], legacyEntry);
-      }
-    } catch {
-      // ignore legacy parse issues (shard still works)
-    }
-  }
-
   if (!isPlainObject(doc.paras[ancre])) doc.paras[ancre] = {};
   const entry = doc.paras[ancre];

+  // merge legacy entry into shard in-memory (non destructive) to keep compat + enable progressive migration
+  if (await exists(legacyAbs)) {
+    try {
+      const legacy = await loadAnnoDocYaml(legacyAbs, pageKey);
+      const legacyEntry = legacy?.paras?.[ancre];
+      if (isPlainObject(legacyEntry)) {
+        deepMergeEntry(entry, legacyEntry);
+      }
+    } catch {
+      // ignore legacy parse issues; shard still applies new data
+    }
+  }
+
   const touchedFiles = [];
   const notes = [];
   let changed = false;
@@ -696,10 +751,13 @@
       const before = entry.comments_editorial.length;
       entry.comments_editorial = uniqUnion(entry.comments_editorial, [item], keyComment);
-      changed = changed || entry.comments_editorial.length !== before;
+      if (entry.comments_editorial.length !== before) {
+        changed = true;
+        notes.push(`+ comment added (len=${text.length})`);
+      } else {
+        notes.push(`~ comment already present (dedup)`);
+      }
       stableSortByTs(entry.comments_editorial);
-      notes.push(changed ? `+ comment added (len=${text.length})` : `~ comment already present (dedup)`);
     }

     else if (type === "type/reference") {
@@ -722,15 +780,24 @@
       const before = entry.refs.length;
       entry.refs = uniqUnion(entry.refs, [item], keyRef);
-      changed = changed || entry.refs.length !== before;
+      if (entry.refs.length !== before) {
+        changed = true;
+        notes.push(`+ reference added (${item.url ? "url" : "label"})`);
+      } else {
+        notes.push(`~ reference already present (dedup)`);
+      }
       stableSortByTs(entry.refs);
-      notes.push(changed ? `+ reference added (${item.url ? "url" : "label"})` : `~ reference already present (dedup)`);
     }

     else if (type === "type/media") {
       if (!Array.isArray(entry.media)) entry.media = [];

+      const caption = (title || "").trim();
+      if (STRICT && !caption) {
+        throw Object.assign(new Error("Ticket media (strict): caption vide (titre de ticket requis)."), { __exitCode: 2 });
+      }
+      const captionFinal = caption || ".";
+
       const atts = NO_DOWNLOAD ? [] : await fetchIssueAssets({ forgeApiBase, owner, repo, token, issueNum });
       if (!atts.length) notes.push("! no assets found (nothing to download).");
@@ -739,13 +806,7 @@
       const dl = a?.browser_download_url || a?.download_url || "";
       if (!dl) { notes.push(`! asset missing download url: ${name}`); continue; }

-      const caption = (title || "").trim();
-      if (STRICT && !caption) {
-        throw Object.assign(new Error("Ticket media (strict): caption vide (titre de ticket requis)."), { __exitCode: 2 });
-      }
-      const captionFinal = caption || ".";
-
-      const mediaDirAbs = path.join(PUBLIC_DIR, "media", pageKey, ancre);
+      const mediaDirAbs = path.join(PUBLIC_DIR, "media", ...pageKey.split("/"), ancre);
       const destAbs = path.join(mediaDirAbs, name);
       const urlPath = `${MEDIA_URL_ROOT}/${pageKey}/${ancre}/${name}`.replace(/\/{2,}/g, "/");
@@ -790,7 +851,7 @@
   if (DRY_RUN) {
     console.log("\n--- DRY RUN (no write) ---");
-    console.log(`Would update: ${annoShardFileRel}`);
+    console.log(`Would update: ${shardRel}`);
     for (const n of notes) console.log("  ", n);
     console.log("\nExcerpt (resulting entry):");
     console.log(YAML.stringify({ [ancre]: doc.paras[ancre] }).trimEnd());
@@ -798,10 +859,10 @@
     return;
   }

-  await saveAnnoDocYaml(annoShardFileAbs, doc, paraOrder);
-  touchedFiles.unshift(annoShardFileRel);
-  console.log(`✅ Updated: ${annoShardFileRel}`);
+  await saveAnnoDocYaml(shardAbs, doc, paraOrder);
+  touchedFiles.unshift(shardRel);
+  console.log(`✅ Updated: ${shardRel}`);
   for (const n of notes) console.log("  ", n);

   if (DO_COMMIT) {
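To make the shard/legacy layout concrete, here is the path arithmetic the script now performs, runnable in isolation. ANNO_DIR and the sample ticket values are stand-ins; in the script they come from env and ticket parsing:

import path from "node:path";

const ANNO_DIR = "src/annotations";        // stand-in for the script's constant
const pageKey = "archicrat-ia/chapitre-4"; // derived from the ticket's "Chemin:"
const ancre = "p-11-67c14c09";             // derived from the ticket's "Ancre:"

const shardAbs = path.join(ANNO_DIR, ...pageKey.split("/"), `${ancre}.yml`);
const legacyAbs = path.join(ANNO_DIR, `${pageKey}.yml`);

console.log(shardAbs);  // src/annotations/archicrat-ia/chapitre-4/p-11-67c14c09.yml
console.log(legacyAbs); // src/annotations/archicrat-ia/chapitre-4.yml

The legacy entry is deep-merged into the in-memory shard entry before the new ticket data lands, so an old monolith file can coexist with shards until the migration finishes.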

View File

@@ -1,28 +1,106 @@
+#!/usr/bin/env node
 // scripts/build-annotations-index.mjs
+// Construit dist/annotations-index.json à partir de src/annotations/**/*.yml
+// Supporte:
+//   - monolith : src/annotations/<pageKey>.yml
+//   - shard    : src/annotations/<pageKey>/<paraId>.yml  (paraId = p-<n>-...)
+// Invariants:
+//   - doc.schema === 1
+//   - doc.page (si présent) == pageKey déduit du chemin
+//   - shard: doc.paras doit contenir EXACTEMENT la clé paraId (sinon fail)
+//
+// Deep-merge non destructif (media/refs/comments dédupliqués), tri stable.

 import fs from "node:fs/promises";
 import path from "node:path";
 import YAML from "yaml";

-function parseArgs(argv) {
-  const out = {
-    inDir: "src/annotations",
-    outFile: "dist/annotations-index.json",
-  };
-  for (let i = 0; i < argv.length; i++) {
-    const a = argv[i];
-    if (a === "--in" && argv[i + 1]) out.inDir = argv[++i];
-    else if (a.startsWith("--in=")) out.inDir = a.slice("--in=".length);
-    if (a === "--out" && argv[i + 1]) out.outFile = argv[++i];
-    else if (a.startsWith("--out=")) out.outFile = a.slice("--out=".length);
+const ROOT = process.cwd();
+const ANNO_ROOT = path.join(ROOT, "src", "annotations");
+const DIST_DIR = path.join(ROOT, "dist");
+const OUT = path.join(DIST_DIR, "annotations-index.json");
+
+function assert(cond, msg) {
+  if (!cond) throw new Error(msg);
+}
+function isObj(x) {
+  return !!x && typeof x === "object" && !Array.isArray(x);
+}
+function isArr(x) {
+  return Array.isArray(x);
+}
+function normPath(s) {
+  return String(s || "")
+    .replace(/\\/g, "/")
+    .replace(/^\/+|\/+$/g, "");
+}
+function paraNum(pid) {
+  const m = String(pid).match(/^p-(\d+)-/i);
+  return m ? Number(m[1]) : Number.POSITIVE_INFINITY;
+}
+function stableSortByTs(arr) {
+  if (!Array.isArray(arr)) return;
+  arr.sort((a, b) => {
+    const ta = Date.parse(a?.ts || "") || 0;
+    const tb = Date.parse(b?.ts || "") || 0;
+    if (ta !== tb) return ta - tb;
+    return JSON.stringify(a).localeCompare(JSON.stringify(b));
+  });
+}
+function keyMedia(x) { return String(x?.src || ""); }
+function keyRef(x) {
+  return `${x?.url || ""}||${x?.label || ""}||${x?.kind || ""}||${x?.citation || ""}`;
+}
+function keyComment(x) { return String(x?.text || "").trim(); }
+
+function uniqUnion(dst, src, keyFn) {
+  const out = isArr(dst) ? [...dst] : [];
+  const seen = new Set(out.map((x) => keyFn(x)));
+  for (const it of (isArr(src) ? src : [])) {
+    const k = keyFn(it);
+    if (!k) continue;
+    if (!seen.has(k)) {
+      seen.add(k);
+      out.push(it);
+    }
   }
   return out;
 }

-async function exists(p) {
-  try { await fs.access(p); return true; } catch { return false; }
+function deepMergeEntry(dst, src) {
+  if (!isObj(dst) || !isObj(src)) return;
+  for (const [k, v] of Object.entries(src)) {
+    if (k === "media" && isArr(v)) { dst.media = uniqUnion(dst.media, v, keyMedia); continue; }
+    if (k === "refs" && isArr(v)) { dst.refs = uniqUnion(dst.refs, v, keyRef); continue; }
+    if (k === "comments_editorial" && isArr(v)) { dst.comments_editorial = uniqUnion(dst.comments_editorial, v, keyComment); continue; }
+    if (isObj(v)) {
+      if (!isObj(dst[k])) dst[k] = {};
+      deepMergeEntry(dst[k], v);
+      continue;
+    }
+    if (isArr(v)) {
+      const cur = isArr(dst[k]) ? dst[k] : [];
+      const seen = new Set(cur.map((x) => JSON.stringify(x)));
+      const out = [...cur];
+      for (const it of v) {
+        const s = JSON.stringify(it);
+        if (!seen.has(s)) { seen.add(s); out.push(it); }
+      }
+      dst[k] = out;
+      continue;
+    }
+    // scalar: set only if missing/empty
+    if (!(k in dst) || dst[k] == null || dst[k] === "") dst[k] = v;
+  }
 }
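A self-contained replay of the union semantics those helpers implement (bodies copied from the new code, sample refs invented): existing items keep their position, duplicates by key are dropped, new keys append.

const keyRef = (x) => `${x?.url || ""}||${x?.label || ""}||${x?.kind || ""}||${x?.citation || ""}`;

function uniqUnion(dst, src, keyFn) {
  const out = Array.isArray(dst) ? [...dst] : [];
  const seen = new Set(out.map((x) => keyFn(x)));
  for (const it of Array.isArray(src) ? src : []) {
    const k = keyFn(it);
    if (!k || seen.has(k)) continue; // empty key or duplicate: dropped
    seen.add(k);
    out.push(it);
  }
  return out;
}

const refs = uniqUnion(
  [{ url: "https://a.example", label: "A" }],
  [{ url: "https://a.example", label: "A" }, { url: "https://b.example", label: "B" }],
  keyRef,
);
console.log(refs.length); // 2: the duplicate of A is ignored, B is appended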
 async function walk(dir) {
@@ -30,111 +108,116 @@ async function walk(dir) {
   const ents = await fs.readdir(dir, { withFileTypes: true });
   for (const e of ents) {
     const p = path.join(dir, e.name);
-    if (e.isDirectory()) out.push(...(await walk(p)));
-    else out.push(p);
+    if (e.isDirectory()) out.push(...await walk(p));
+    else if (e.isFile() && /\.ya?ml$/i.test(e.name)) out.push(p);
   }
   return out;
 }
-function inferPageKeyFromFile(inDirAbs, fileAbs) {
-  // src/annotations/<page>.yml -> "<page>"
-  const rel = path.relative(inDirAbs, fileAbs).replace(/\\/g, "/");
-  return rel.replace(/\.(ya?ml|json)$/i, "");
-}
-function assert(cond, msg) {
-  if (!cond) throw new Error(msg);
-}
-function isPlainObject(x) {
-  return !!x && typeof x === "object" && !Array.isArray(x);
-}
-function normalizePageKey(s) {
-  // no leading/trailing slash
-  return String(s || "").replace(/^\/+/, "").replace(/\/+$/, "");
-}
-function validateAndNormalizeDoc(doc, pageKey, fileRel) {
-  assert(isPlainObject(doc), `${fileRel}: document must be an object`);
-  assert(doc.schema === 1, `${fileRel}: schema must be 1`);
-  if (doc.page != null) {
-    assert(
-      normalizePageKey(doc.page) === pageKey,
-      `${fileRel}: page mismatch (page="${doc.page}" vs path="${pageKey}")`
-    );
-  }
-  assert(isPlainObject(doc.paras), `${fileRel}: missing object key "paras"`);
-  const parasOut = Object.create(null);
-  for (const [paraId, entry] of Object.entries(doc.paras)) {
-    assert(/^p-\d+-/i.test(paraId), `${fileRel}: invalid para id "${paraId}"`);
-    // entry may be empty, but must be an object when present
-    assert(entry == null || isPlainObject(entry), `${fileRel}: paras.${paraId} must be an object`);
-    const e = entry ? { ...entry } : {};
-    // sanity checks (non-destructive: types are verified, nothing is overwritten)
-    if (e.refs != null) assert(Array.isArray(e.refs), `${fileRel}: paras.${paraId}.refs must be an array`);
-    if (e.authors != null) assert(Array.isArray(e.authors), `${fileRel}: paras.${paraId}.authors must be an array`);
-    if (e.quotes != null) assert(Array.isArray(e.quotes), `${fileRel}: paras.${paraId}.quotes must be an array`);
-    if (e.media != null) assert(Array.isArray(e.media), `${fileRel}: paras.${paraId}.media must be an array`);
-    if (e.comments_editorial != null) assert(Array.isArray(e.comments_editorial), `${fileRel}: paras.${paraId}.comments_editorial must be an array`);
-    parasOut[paraId] = e;
-  }
-  return parasOut;
-}
-async function readDoc(fileAbs) {
-  const raw = await fs.readFile(fileAbs, "utf8");
-  if (/\.json$/i.test(fileAbs)) return JSON.parse(raw);
-  return YAML.parse(raw);
-}
+function inferExpectedFromRel(relNoExt) {
+  const parts = relNoExt.split("/").filter(Boolean);
+  const last = parts.at(-1) || "";
+  const isShard = parts.length > 1 && /^p-\d+-/i.test(last); // ✅ hardening
+  const pageKey = isShard ? parts.slice(0, -1).join("/") : relNoExt;
+  const paraId = isShard ? last : null;
+  return { isShard, pageKey, paraId };
+}
+function validateAndNormalizeDoc(doc, relFile, expectedPageKey, expectedParaId) {
+  assert(isObj(doc), `${relFile}: doc must be an object`);
+  assert(doc.schema === 1, `${relFile}: schema must be 1`);
+  assert(isObj(doc.paras), `${relFile}: missing object key "paras"`);
+  const gotPage = doc.page != null ? normPath(doc.page) : "";
+  const expPage = normPath(expectedPageKey);
+  if (gotPage) {
+    assert(
+      gotPage === expPage,
+      `${relFile}: page mismatch (page="${doc.page}" vs path="${expectedPageKey}")`
+    );
+  } else {
+    doc.page = expPage;
+  }
+  if (expectedParaId) {
+    const keys = Object.keys(doc.paras || {}).map(String);
+    assert(
+      keys.includes(expectedParaId),
+      `${relFile}: shard mismatch: must contain paras["${expectedParaId}"]`
+    );
+    assert(
+      keys.length === 1 && keys[0] === expectedParaId,
+      `${relFile}: shard invariant violated: shard file must contain ONLY paras["${expectedParaId}"] (got: ${keys.join(", ")})`
+    );
+  }
+  return doc;
+}
 async function main() {
-  const { inDir, outFile } = parseArgs(process.argv.slice(2));
-  const CWD = process.cwd();
-  const inDirAbs = path.isAbsolute(inDir) ? inDir : path.join(CWD, inDir);
-  const outAbs = path.isAbsolute(outFile) ? outFile : path.join(CWD, outFile);
-  // antifragile
-  if (!(await exists(inDirAbs))) {
-    console.log(`  annotations-index: skip (input missing): ${inDir}`);
-    process.exit(0);
-  }
-  const files = (await walk(inDirAbs)).filter((p) => /\.(ya?ml|json)$/i.test(p));
-  if (!files.length) {
-    console.log(`  annotations-index: skip (no .yml/.yaml/.json found in): ${inDir}`);
-    process.exit(0);
-  }
-  const pages = Object.create(null);
-  let paraCount = 0;
-  for (const f of files) {
-    const fileRel = path.relative(CWD, f).replace(/\\/g, "/");
-    const pageKey = normalizePageKey(inferPageKeyFromFile(inDirAbs, f));
-    assert(pageKey, `${fileRel}: cannot infer page key`);
-    let doc;
-    try {
-      doc = await readDoc(f);
-    } catch (e) {
-      throw new Error(`${fileRel}: parse failed: ${String(e?.message ?? e)}`);
-    }
-    const paras = validateAndNormalizeDoc(doc, pageKey, fileRel);
-    // one file = one page (canonical)
-    assert(!pages[pageKey], `${fileRel}: duplicate page "${pageKey}" (only one file per page)`);
-    pages[pageKey] = { paras };
-    paraCount += Object.keys(paras).length;
-  }
+  const pages = {};
+  const errors = [];
+  await fs.mkdir(DIST_DIR, { recursive: true });
+  const files = await walk(ANNO_ROOT);
+  for (const fp of files) {
+    const rel = normPath(path.relative(ANNO_ROOT, fp));
+    const relNoExt = rel.replace(/\.ya?ml$/i, "");
+    const { isShard, pageKey, paraId } = inferExpectedFromRel(relNoExt);
+    try {
+      const raw = await fs.readFile(fp, "utf8");
+      const doc = YAML.parse(raw) || {};
+      if (!isObj(doc) || doc.schema !== 1) continue;
+      validateAndNormalizeDoc(
+        doc,
+        `src/annotations/${rel}`,
+        pageKey,
+        isShard ? paraId : null
+      );
+      const pg = (pages[pageKey] ??= { paras: {} });
+      if (isShard) {
+        const entry = doc.paras[paraId];
+        if (!isObj(pg.paras[paraId])) pg.paras[paraId] = {};
+        if (isObj(entry)) deepMergeEntry(pg.paras[paraId], entry);
+        stableSortByTs(pg.paras[paraId].media);
+        stableSortByTs(pg.paras[paraId].refs);
+        stableSortByTs(pg.paras[paraId].comments_editorial);
+      } else {
+        for (const [pid, entry] of Object.entries(doc.paras || {})) {
+          const p = String(pid);
+          if (!isObj(pg.paras[p])) pg.paras[p] = {};
+          if (isObj(entry)) deepMergeEntry(pg.paras[p], entry);
+          stableSortByTs(pg.paras[p].media);
+          stableSortByTs(pg.paras[p].refs);
+          stableSortByTs(pg.paras[p].comments_editorial);
+        }
+      }
+    } catch (e) {
+      errors.push({ file: `src/annotations/${rel}`, error: String(e?.message || e) });
+    }
+  }
+  for (const [pageKey, pg] of Object.entries(pages)) {
+    const keys = Object.keys(pg.paras || {});
+    keys.sort((a, b) => {
+      const ia = paraNum(a);
+      const ib = paraNum(b);
+      if (Number.isFinite(ia) && Number.isFinite(ib) && ia !== ib) return ia - ib;
+      return String(a).localeCompare(String(b));
+    });
+    const next = {};
+    for (const k of keys) next[k] = pg.paras[k];
+    pg.paras = next;
+  }
   const out = {
@@ -143,17 +226,21 @@ async function main() {
     pages,
     stats: {
       pages: Object.keys(pages).length,
-      paras: paraCount,
+      paras: Object.values(pages).reduce((n, p) => n + Object.keys(p.paras || {}).length, 0),
+      errors: errors.length,
     },
+    errors,
   };
-  await fs.mkdir(path.dirname(outAbs), { recursive: true });
-  await fs.writeFile(outAbs, JSON.stringify(out), "utf8");
-  console.log(`✅ annotations-index: pages=${out.stats.pages} paras=${out.stats.paras} -> ${path.relative(CWD, outAbs)}`);
+  if (errors.length) {
+    throw new Error(`${errors[0].file}: ${errors[0].error}`);
+  }
+  await fs.writeFile(OUT, JSON.stringify(out), "utf8");
+  console.log(`✅ annotations-index: pages=${out.stats.pages} paras=${out.stats.paras} -> dist/annotations-index.json`);
 }
 main().catch((e) => {
-  console.error("FAIL: build-annotations-index crashed:", e);
+  console.error(`FAIL: build-annotations-index crashed: ${e?.stack || e?.message || e}`);
   process.exit(1);
 });
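
For orientation, a sketch of the payload this now writes to dist/annotations-index.json, limited to the fields visible in this diff (values illustrative, keyed on the shard files added below):

// Illustrative output shape, not real data:
const exampleOut = {
  pages: {
    "archicrat-ia/chapitre-1": {
      paras: {
        "p-0-8d27a7f5": { refs: [/* deduped by url||label||kind||citation, ts-sorted */] },
        "p-1-8a6c18bf": { comments_editorial: [/* deduped by trimmed text, ts-sorted */] },
      },
    },
  },
  stats: { pages: 1, paras: 2, errors: 0 },
  errors: [], // any entry here now hard-fails the build
};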

View File

@@ -48,6 +48,9 @@ async function main() {
   let missing = 0;
   const notes = [];
+  // Optimization: avoid checking the same media file a hundred times over
+  const seenMedia = new Set(); // src string
   for (const f of files) {
     const rel = path.relative(CWD, f).replace(/\\/g, "/");
     const raw = await fs.readFile(f, "utf8");
@@ -70,6 +73,10 @@
       const src = String(m?.src || "");
       if (!src.startsWith("/media/")) continue; // external sources are fine, as are future conventions
+      // dedupe
+      if (seenMedia.has(src)) continue;
+      seenMedia.add(src);
       checked++;
       const p = toPublicPathFromUrl(src);
       if (!p) continue;
@@ -94,4 +101,4 @@ async function main() {
 main().catch((e) => {
   console.error("FAIL: check-annotations-media crashed:", e);
   process.exit(1);
 });
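
The effect of the new seenMedia guard, in isolation: a media file referenced from many paragraphs is now checked on disk at most once. A minimal sketch of the pattern (inputs illustrative):

const seen = new Set();
for (const src of ["/media/a.png", "/media/a.png", "/media/b.png"]) {
  if (seen.has(src)) continue; // second /media/a.png is skipped
  seen.add(src);
  // ...the expensive existence check runs once per distinct src
}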

View File

@@ -27,11 +27,6 @@ function escRe(s) {
   return String(s).replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
 }
-function inferPageKeyFromFile(fileAbs) {
-  const rel = path.relative(ANNO_DIR, fileAbs).replace(/\\/g, "/");
-  return rel.replace(/\.(ya?ml|json)$/i, "");
-}
 function normalizePageKey(s) {
   return String(s || "").replace(/^\/+/, "").replace(/\/+$/, "");
 }
@@ -40,6 +35,31 @@ function isPlainObject(x) {
   return !!x && typeof x === "object" && !Array.isArray(x);
 }
+function isParaId(s) {
+  return /^p-\d+-/i.test(String(s || ""));
+}
+/**
+ * Supports:
+ * - monolith: src/annotations/<pageKey>.yml -> pageKey = rel without extension
+ * - shard:    src/annotations/<pageKey>/<paraId>.yml -> pageKey = dirname(rel), paraId = basename
+ *
+ * A file counts as a shard only when it sits in a subdirectory (guards the pathological case).
+ */
+function inferFromFile(fileAbs) {
+  const rel = path.relative(ANNO_DIR, fileAbs).replace(/\\/g, "/");
+  const relNoExt = rel.replace(/\.(ya?ml|json)$/i, "");
+  const parts = relNoExt.split("/").filter(Boolean);
+  const base = parts[parts.length - 1] || "";
+  const dirParts = parts.slice(0, -1);
+  const isShard = dirParts.length > 0 && isParaId(base);
+  const pageKey = isShard ? dirParts.join("/") : relNoExt;
+  const paraId = isShard ? base : "";
+  return { pageKey: normalizePageKey(pageKey), paraId };
+}
 async function loadAliases() {
   if (!(await exists(ALIASES_PATH))) return {};
   try {
@@ -83,7 +103,11 @@ async function main() {
   const aliases = await loadAliases();
   const files = (await walk(ANNO_DIR)).filter((p) => /\.(ya?ml|json)$/i.test(p));
-  let pages = 0;
+  // perf: cache HTML per page (shards mean many files for a single page)
+  const htmlCache = new Map(); // pageKey -> html
+  const missingDistPage = new Set(); // pageKey
+  let pagesSeen = new Set();
   let checked = 0;
   let failures = 0;
   const notes = [];
@@ -107,7 +131,7 @@ async function main() {
       continue;
     }
-    const pageKey = normalizePageKey(inferPageKeyFromFile(f));
+    const { pageKey, paraId: shardParaId } = inferFromFile(f);
     if (doc.page != null && normalizePageKey(doc.page) !== pageKey) {
       failures++;
@@ -121,20 +145,44 @@ async function main() {
       continue;
     }
+    // shard invariant (strict): the file must contain paras[paraId]
+    if (shardParaId) {
+      if (!Object.prototype.hasOwnProperty.call(doc.paras, shardParaId)) {
+        failures++;
+        notes.push(`- SHARD MISMATCH: ${rel} (expected paras["${shardParaId}"] present)`);
+        continue;
+      }
+      // extra paras -> warning only (non-destructive)
+      const keys = Object.keys(doc.paras);
+      if (!(keys.length === 1 && keys[0] === shardParaId)) {
+        notes.push(`- WARN shard has extra paras: ${rel} (expected only "${shardParaId}", got ${keys.join(", ")})`);
+      }
+    }
+    pagesSeen.add(pageKey);
     const distFile = path.join(DIST_DIR, pageKey, "index.html");
     if (!(await exists(distFile))) {
-      failures++;
-      notes.push(`- MISSING PAGE: dist/${pageKey}/index.html (from ${rel})`);
+      if (!missingDistPage.has(pageKey)) {
+        missingDistPage.add(pageKey);
+        failures++;
+        notes.push(`- MISSING PAGE: dist/${pageKey}/index.html (from ${rel})`);
+      } else {
+        notes.push(`- WARN missing page already reported: dist/${pageKey}/index.html (from ${rel})`);
+      }
       continue;
     }
-    pages++;
-    const html = await fs.readFile(distFile, "utf8");
+    let html = htmlCache.get(pageKey);
+    if (!html) {
+      html = await fs.readFile(distFile, "utf8");
+      htmlCache.set(pageKey, html);
+    }
     for (const paraId of Object.keys(doc.paras)) {
       checked++;
-      if (!/^p-\d+-/i.test(paraId)) {
+      if (!isParaId(paraId)) {
         failures++;
         notes.push(`- INVALID ID: ${rel} (${paraId})`);
         continue;
@@ -158,6 +206,7 @@ async function main() {
   }
   const warns = notes.filter((x) => x.startsWith("- WARN"));
+  const pages = pagesSeen.size;
   if (failures > 0) {
     console.error(`FAIL: annotations invalid (pages=${pages} checked=${checked} failures=${failures})`);
@@ -172,4 +221,4 @@ async function main() {
 main().catch((e) => {
   console.error("FAIL: annotations check crashed:", e);
   process.exit(1);
 });
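
To see what the monolith/shard inference resolves in practice, a standalone sketch of the same logic as inferFromFile, minus the ANNO_DIR-relative step (paths illustrative):

const isParaId = (s) => /^p-\d+-/i.test(String(s || ""));
function infer(relNoExt) {
  const parts = relNoExt.split("/").filter(Boolean);
  const base = parts[parts.length - 1] || "";
  const isShard = parts.length > 1 && isParaId(base);
  return {
    pageKey: isShard ? parts.slice(0, -1).join("/") : relNoExt,
    paraId: isShard ? base : "",
  };
}
console.log(infer("archicrat-ia/chapitre-1"));
// -> { pageKey: "archicrat-ia/chapitre-1", paraId: "" }            (monolith)
console.log(infer("archicrat-ia/chapitre-1/p-0-8d27a7f5"));
// -> { pageKey: "archicrat-ia/chapitre-1", paraId: "p-0-8d27a7f5" } (shard)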

View File

@@ -0,0 +1,10 @@
schema: 1
page: archicrat-ia/chapitre-1
paras:
p-0-8d27a7f5:
refs:
- url: https://auth.archicratie.trans-hands.synology.me/authenticated
label: Lien web
kind: (livre / article / vidéo / site / autre) Site
ts: 2026-02-27T12:34:31.704Z
fromIssue: 142

View File

@@ -0,0 +1,9 @@
schema: 1
page: archicrat-ia/chapitre-1
paras:
p-1-8a6c18bf:
comments_editorial:
- text: Yeaha
status: new
ts: 2026-02-27T12:40:39.462Z
fromIssue: 143

View File

@@ -0,0 +1,12 @@
schema: 1
page: archicrat-ia/chapitre-3
paras:
p-0-ace27175:
media:
- type: image
src: /media/archicrat-ia/chapitre-3/p-0-ace27175/Capture_d_e_cran_2025-05-05_a_19.20.40.png
caption: "[Media] p-0-ace27175 — Chapitre 3 — Philosophies du pouvoir et
archicration"
credit: ""
ts: 2026-02-27T12:43:14.259Z
fromIssue: 144

View File

@@ -0,0 +1,19 @@
schema: 1
page: archicrat-ia/chapitre-4
paras:
p-11-67c14c09:
media:
- type: image
src: /media/archicrat-ia/chapitre-4/p-11-67c14c09/Capture_d_e_cran_2026-02-16_a_13.07.35.png
caption: "[Media] p-11-67c14c09 — Chapitre 4 — Histoire archicratique des
révolutions industrielles"
credit: ""
ts: 2026-02-26T13:17:41.286Z
fromIssue: 129
- type: image
src: /media/archicrat-ia/chapitre-4/p-11-67c14c09/Capture_d_e_cran_2025-05-05_a_19.20.40.png
caption: "[Media] p-11-67c14c09 — Chapitre 4 — Histoire archicratique des
révolutions industrielles"
credit: ""
ts: 2026-02-27T09:17:04.386Z
fromIssue: 127
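
Taken together with the merge rules above, the two chapitre-1 shards land under a single page key in the generated index; a sketch (values abridged):

// pages["archicrat-ia/chapitre-1"] ≈ {
//   paras: {
//     "p-0-8d27a7f5": { refs: [{ url: "https://auth.archicratie.trans-hands.synology.me/authenticated", ... }] },
//     "p-1-8a6c18bf": { comments_editorial: [{ text: "Yeaha", ... }] },
//   },
// }
// The chapitre-4 shard keeps both media entries (distinct src), ordered by ts.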

View File

@@ -1,23 +1,80 @@
+// src/pages/annotations-index.json.ts
 import type { APIRoute } from "astro";
-import * as fs from "node:fs/promises";
-import * as path from "node:path";
-import { parse as parseYAML } from "yaml";
+import fs from "node:fs/promises";
+import path from "node:path";
+import YAML from "yaml";
 const CWD = process.cwd();
-const ANNO_DIR = path.join(CWD, "src", "annotations");
-// Strict in CI (or explicit override)
-const STRICT =
-  process.env.ANNOTATIONS_STRICT === "1" ||
-  process.env.CI === "1" ||
-  process.env.CI === "true";
-async function exists(p: string): Promise<boolean> {
-  try {
-    await fs.access(p);
-    return true;
-  } catch {
-    return false;
-  }
-}
+const ANNO_ROOT = path.join(CWD, "src", "annotations");
+const isObj = (x: any) => !!x && typeof x === "object" && !Array.isArray(x);
+const isArr = (x: any) => Array.isArray(x);
+function normPath(s: string) {
+  return String(s || "").replace(/\\/g, "/").replace(/^\/+|\/+$/g, "");
+}
+function paraNum(pid: string) {
+  const m = String(pid).match(/^p-(\d+)-/i);
+  return m ? Number(m[1]) : Number.POSITIVE_INFINITY;
+}
+function toIso(v: any) {
+  if (v instanceof Date) return v.toISOString();
+  return typeof v === "string" ? v : "";
+}
+function stableSortByTs(arr: any[]) {
+  if (!Array.isArray(arr)) return;
+  arr.sort((a, b) => {
+    const ta = Date.parse(toIso(a?.ts)) || 0;
+    const tb = Date.parse(toIso(b?.ts)) || 0;
+    if (ta !== tb) return ta - tb;
+    return JSON.stringify(a).localeCompare(JSON.stringify(b));
+  });
+}
+function keyMedia(x: any) { return String(x?.src || ""); }
+function keyRef(x: any) {
+  return `${x?.url || ""}||${x?.label || ""}||${x?.kind || ""}||${x?.citation || ""}`;
+}
+function keyComment(x: any) { return String(x?.text || "").trim(); }
+function uniqUnion(dst: any[], src: any[], keyFn: (x: any) => string) {
+  const out = isArr(dst) ? [...dst] : [];
+  const seen = new Set(out.map((x) => keyFn(x)));
+  for (const it of (isArr(src) ? src : [])) {
+    const k = keyFn(it);
+    if (!k) continue;
+    if (!seen.has(k)) { seen.add(k); out.push(it); }
+  }
+  return out;
+}
+function deepMergeEntry(dst: any, src: any) {
+  if (!isObj(dst) || !isObj(src)) return;
+  for (const [k, v] of Object.entries(src)) {
+    if (k === "media" && isArr(v)) { dst.media = uniqUnion(dst.media, v, keyMedia); continue; }
+    if (k === "refs" && isArr(v)) { dst.refs = uniqUnion(dst.refs, v, keyRef); continue; }
+    if (k === "comments_editorial" && isArr(v)) { dst.comments_editorial = uniqUnion(dst.comments_editorial, v, keyComment); continue; }
+    if (isObj(v)) {
+      if (!isObj((dst as any)[k])) (dst as any)[k] = {};
+      deepMergeEntry((dst as any)[k], v);
+      continue;
+    }
+    if (isArr(v)) {
+      const cur = isArr((dst as any)[k]) ? (dst as any)[k] : [];
+      const seen = new Set(cur.map((x: any) => JSON.stringify(x)));
+      const out = [...cur];
+      for (const it of v) {
+        const s = JSON.stringify(it);
+        if (!seen.has(s)) { seen.add(s); out.push(it); }
+      }
+      (dst as any)[k] = out;
+      continue;
+    }
+    if (!(k in (dst as any)) || (dst as any)[k] == null || (dst as any)[k] === "") (dst as any)[k] = v;
+  }
+}
@@ -26,154 +83,98 @@ async function walk(dir: string): Promise<string[]> {
   const ents = await fs.readdir(dir, { withFileTypes: true });
   for (const e of ents) {
     const p = path.join(dir, e.name);
-    if (e.isDirectory()) out.push(...(await walk(p)));
-    else out.push(p);
+    if (e.isDirectory()) out.push(...await walk(p));
+    else if (e.isFile() && /\.ya?ml$/i.test(e.name)) out.push(p);
   }
   return out;
 }
-function isPlainObject(x: unknown): x is Record<string, unknown> {
-  return !!x && typeof x === "object" && !Array.isArray(x);
-}
-function normalizePageKey(s: unknown): string {
-  return String(s ?? "")
-    .replace(/^\/+/, "")
-    .replace(/\/+$/, "")
-    .trim();
-}
-function inferPageKeyFromFile(inDirAbs: string, fileAbs: string): string {
-  const rel = path.relative(inDirAbs, fileAbs).replace(/\\/g, "/");
-  return rel.replace(/\.(ya?ml|json)$/i, "");
-}
-function parseDoc(raw: string, fileAbs: string): unknown {
-  if (/\.json$/i.test(fileAbs)) return JSON.parse(raw);
-  return parseYAML(raw);
-}
-function hardFailOrCollect(errors: string[], msg: string): void {
-  if (STRICT) throw new Error(msg);
-  errors.push(msg);
-}
-function sanitizeEntry(
-  fileRel: string,
-  paraId: string,
-  entry: unknown,
-  errors: string[]
-): Record<string, unknown> {
-  if (entry == null) return {};
-  if (!isPlainObject(entry)) {
-    hardFailOrCollect(errors, `${fileRel}: paras.${paraId} must be an object`);
-    return {};
-  }
-  const e: Record<string, unknown> = { ...entry };
-  const arrayFields = [
-    "refs",
-    "authors",
-    "quotes",
-    "media",
-    "comments_editorial",
-  ] as const;
-  for (const k of arrayFields) {
-    if (e[k] == null) continue;
-    if (!Array.isArray(e[k])) {
-      errors.push(`${fileRel}: paras.${paraId}.${k} must be an array (coerced to [])`);
-      e[k] = [];
-    }
-  }
-  return e;
-}
+function inferExpected(relNoExt: string) {
+  const parts = relNoExt.split("/").filter(Boolean);
+  const last = parts.at(-1) || "";
+  const isShard = parts.length > 1 && /^p-\d+-/i.test(last); // ✅ hardening
+  const pageKey = isShard ? parts.slice(0, -1).join("/") : relNoExt;
+  const paraId = isShard ? last : null;
+  return { isShard, pageKey, paraId };
+}
 export const GET: APIRoute = async () => {
-  if (!(await exists(ANNO_DIR))) {
-    const out = {
-      schema: 1,
-      generatedAt: new Date().toISOString(),
-      pages: {},
-      stats: { pages: 0, paras: 0, errors: 0 },
-      errors: [] as string[],
-    };
-    return new Response(JSON.stringify(out), {
-      headers: {
-        "Content-Type": "application/json; charset=utf-8",
-        "Cache-Control": "no-store",
-      },
-    });
-  }
-  const files = (await walk(ANNO_DIR)).filter((p) => /\.(ya?ml|json)$/i.test(p));
-  const pages: Record<string, { paras: Record<string, Record<string, unknown>> }> =
-    Object.create(null);
-  const errors: string[] = [];
-  let paraCount = 0;
-  for (const f of files) {
-    const fileRel = path.relative(CWD, f).replace(/\\/g, "/");
-    const pageKey = normalizePageKey(inferPageKeyFromFile(ANNO_DIR, f));
-    if (!pageKey) {
-      hardFailOrCollect(errors, `${fileRel}: cannot infer page key`);
-      continue;
-    }
-    let doc: unknown;
-    try {
-      const raw = await fs.readFile(f, "utf8");
-      doc = parseDoc(raw, f);
-    } catch (e) {
-      hardFailOrCollect(errors, `${fileRel}: parse failed: ${String((e as any)?.message ?? e)}`);
-      continue;
-    }
-    if (!isPlainObject(doc) || (doc as any).schema !== 1) {
-      hardFailOrCollect(errors, `${fileRel}: schema must be 1`);
-      continue;
-    }
-    if ((doc as any).page != null) {
-      const declared = normalizePageKey((doc as any).page);
-      if (declared !== pageKey) {
-        hardFailOrCollect(
-          errors,
-          `${fileRel}: page mismatch (page="${declared}" vs path="${pageKey}")`
-        );
-      }
-    }
-    const parasAny = (doc as any).paras;
-    if (!isPlainObject(parasAny)) {
-      hardFailOrCollect(errors, `${fileRel}: missing object key "paras"`);
-      continue;
-    }
-    if (pages[pageKey]) {
-      hardFailOrCollect(errors, `${fileRel}: duplicate page "${pageKey}" (only one file per page)`);
-      continue;
-    }
-    const parasOut: Record<string, Record<string, unknown>> = Object.create(null);
-    for (const [paraId, entry] of Object.entries(parasAny)) {
-      if (!/^p-\d+-/i.test(paraId)) {
-        hardFailOrCollect(errors, `${fileRel}: invalid para id "${paraId}"`);
-        continue;
-      }
-      parasOut[paraId] = sanitizeEntry(fileRel, paraId, entry, errors);
-    }
-    pages[pageKey] = { paras: parasOut };
-    paraCount += Object.keys(parasOut).length;
-  }
+  const pages: Record<string, { paras: Record<string, any> }> = {};
+  const errors: Array<{ file: string; error: string }> = [];
+  let files: string[] = [];
+  try {
+    files = await walk(ANNO_ROOT);
+  } catch (e: any) {
+    throw new Error(`Missing annotations root: ${ANNO_ROOT} (${e?.message || e})`);
+  }
+  for (const fp of files) {
+    const rel = normPath(path.relative(ANNO_ROOT, fp));
+    const relNoExt = rel.replace(/\.ya?ml$/i, "");
+    const { isShard, pageKey, paraId } = inferExpected(relNoExt);
+    try {
+      const raw = await fs.readFile(fp, "utf8");
+      const doc = YAML.parse(raw) || {};
+      if (!isObj(doc) || doc.schema !== 1) continue;
+      const docPage = normPath(doc.page || "");
+      if (docPage && docPage !== pageKey) {
+        throw new Error(`page mismatch (page="${doc.page}" vs path="${pageKey}")`);
+      }
+      if (!doc.page) doc.page = pageKey;
+      if (!isObj(doc.paras)) throw new Error(`missing object key "paras"`);
+      const pg = pages[pageKey] ??= { paras: {} };
+      if (isShard) {
+        if (!paraId) throw new Error("internal: missing paraId");
+        if (!(paraId in doc.paras)) {
+          throw new Error(`shard mismatch: file must contain paras["${paraId}"]`);
+        }
+        // ✅ invariant aligned with build-annotations-index
+        const keys = Object.keys(doc.paras).map(String);
+        if (!(keys.length === 1 && keys[0] === paraId)) {
+          throw new Error(`shard invariant violated: shard must contain ONLY paras["${paraId}"] (got: ${keys.join(", ")})`);
+        }
+        const entry = doc.paras[paraId];
+        if (!isObj(pg.paras[paraId])) pg.paras[paraId] = {};
+        if (isObj(entry)) deepMergeEntry(pg.paras[paraId], entry);
+        stableSortByTs(pg.paras[paraId].media);
+        stableSortByTs(pg.paras[paraId].refs);
+        stableSortByTs(pg.paras[paraId].comments_editorial);
+      } else {
+        for (const [pid, entry] of Object.entries(doc.paras)) {
+          const p = String(pid);
+          if (!isObj(pg.paras[p])) pg.paras[p] = {};
+          if (isObj(entry)) deepMergeEntry(pg.paras[p], entry);
+          stableSortByTs(pg.paras[p].media);
+          stableSortByTs(pg.paras[p].refs);
+          stableSortByTs(pg.paras[p].comments_editorial);
+        }
+      }
+    } catch (e: any) {
+      errors.push({ file: `src/annotations/${rel}`, error: String(e?.message || e) });
+    }
+  }
+  for (const [pk, pg] of Object.entries(pages)) {
+    const keys = Object.keys(pg.paras || {});
+    keys.sort((a, b) => {
+      const ia = paraNum(a);
+      const ib = paraNum(b);
+      if (Number.isFinite(ia) && Number.isFinite(ib) && ia !== ib) return ia - ib;
+      return String(a).localeCompare(String(b));
+    });
+    const next: Record<string, any> = {};
+    for (const k of keys) next[k] = pg.paras[k];
+    pg.paras = next;
+  }
   const out = {
@@ -182,16 +183,17 @@ export const GET: APIRoute = async () => {
     pages,
     stats: {
       pages: Object.keys(pages).length,
-      paras: paraCount,
+      paras: Object.values(pages).reduce((n, p) => n + Object.keys(p.paras || {}).length, 0),
       errors: errors.length,
     },
     errors,
   };
+  if (errors.length) {
+    throw new Error(`${errors[0].file}: ${errors[0].error}`);
+  }
   return new Response(JSON.stringify(out), {
-    headers: {
-      "Content-Type": "application/json; charset=utf-8",
-      "Cache-Control": "no-store",
-    },
+    headers: { "Content-Type": "application/json; charset=utf-8" },
   });
 };
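
A minimal sketch of consuming this route from a client, assuming it is served at /annotations-index.json (per the filename convention):

const res = await fetch("/annotations-index.json");
if (!res.ok) throw new Error(`annotations index: HTTP ${res.status}`);
const idx = await res.json();
console.log(idx.stats); // { pages, paras, errors }
const page = idx.pages["archicrat-ia/chapitre-1"]; // page key from the shards above
for (const [paraId, entry] of Object.entries(page?.paras ?? {})) {
  console.log(paraId, Object.keys(entry)); // e.g. "p-0-8d27a7f5" ["refs"]
}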