Compare commits: chore/fix-...testB-hotp (61 commits)
| SHA1 |
|---|
| 1b95161de0 |
| ebd976bd46 |
| f8d57d8fe0 |
| 09a4d2c472 |
| 1f6dc874d0 |
| 4dd63945ee |
| ba64b0694b |
| 58e5ceda59 |
| 08f826ee01 |
| 3358d280ec |
| 9cb0d5e416 |
| a46f058917 |
| 604b2199da |
| d153f71be6 |
| 8f64e4b098 |
| 459bf195d8 |
| 0c46b0d19b |
| bfbdc7b688 |
| 8fd53dd4d2 |
| c8bbee4f74 |
| 04cdf54eb7 |
| d6bf645ae9 |
| 1ca6bcbd81 |
| dec5f8eba7 |
| 716c887045 |
| 9b1789a164 |
| 17fa39c7ff |
| 8132e315f4 |
| 8d993915d7 |
| 497bddd05d |
| 7c8e49c1a9 |
| 901d28b89b |
| 43e2862c89 |
| 73fb38c4d1 |
| a81d206aba |
| 9801ea3cea |
| c11189fe11 |
| b47edb24cf |
| be191b09a0 |
| e06587478d |
| 402ffb04cd |
| 1cbfc02670 |
| 28d2fbbd2f |
| 225368a952 |
| 3574695041 |
| ea68025a1d |
| 3a08698003 |
| 3d583608c2 |
| 01ae95ab43 |
| 0d5821c640 |
| 2bcea39558 |
| af85970d4a |
| 210f621487 |
| 8ad960dc69 |
| d45a8b285f |
| b6e04a9138 |
| dcf1fc2d0b |
| 41b0517c6c |
| 6b43eb199d |
| d40f24e92d |
| a5d68d6a7e |
@@ -16,9 +16,13 @@ defaults:
run:
shell: bash

concurrency:
group: anno-apply-${{ github.event.issue.number || github.event.issue.index || inputs.issue || 'manual' }}
cancel-in-progress: true

jobs:
apply-approved:
runs-on: ubuntu-latest
runs-on: mac-ci
container:
image: mcr.microsoft.com/devcontainers/javascript-node:22-bookworm

@@ -29,12 +33,11 @@ jobs:
git --version
node --version
npm --version
npm ping --registry=https://registry.npmjs.org

- name: Derive context (event.json / workflow_dispatch)
env:
INPUT_ISSUE: ${{ inputs.issue }}
FORGE_API: ${{ vars.FORGE_API || vars.FORGE_BASE }}
FORGE_API: ${{ vars.FORGE_API || vars.FORGE_BASE || vars.FORGE_BASE_URL }}
run: |
set -euo pipefail
export EVENT_JSON="/var/run/act/workflow/event.json"

@@ -78,10 +81,12 @@ jobs:
throw new Error("No issue number in event.json or workflow_dispatch input");
}

const labelName =
ev?.label?.name ||
ev?.label ||
"workflow_dispatch";
// label name: best effort (non-blocking)
let labelName = "workflow_dispatch";
const lab = ev?.label;
if (typeof lab === "string") labelName = lab;
else if (lab && typeof lab === "object" && typeof lab.name === "string") labelName = lab.name;
else if (ev?.label?.name) labelName = ev.label.name;

const u = new URL(cloneUrl);
const origin = u.origin;
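Throughout these workflows, steps hand state to each other through a flat env file (`/tmp/anno.env` here; `/tmp/reject.env`, `/tmp/deploy.env`, and `/tmp/proposer.env` in the other workflows): the Node heredoc writes `KEY="json-quoted value"` lines, and each later step re-hydrates them with `source`. A minimal sketch of the convention, with made-up values:

```bash
# Illustrative only: the KEY="..." lines are JSON-quoted so `source` re-reads
# them safely even when values contain spaces or special characters.
echo 'OWNER="demo-owner"'          >  /tmp/anno.env
echo 'ISSUE_NUMBER="42"'           >> /tmp/anno.env
echo 'LABEL_NAME="state/approved"' >> /tmp/anno.env

# A later step re-hydrates the variables:
source /tmp/anno.env
echo "issue=$ISSUE_NUMBER label=$LABEL_NAME"
```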
@@ -106,16 +111,129 @@ jobs:
echo "✅ context:"
sed -n '1,120p' /tmp/anno.env

- name: Gate on label state/approved
- name: Early gate (label event fast-skip, but tolerant)
run: |
set -euo pipefail
source /tmp/anno.env
if [[ "$LABEL_NAME" != "state/approved" && "$LABEL_NAME" != "workflow_dispatch" ]]; then
echo "ℹ️ label=$LABEL_NAME => skip"

echo "ℹ️ event label = $LABEL_NAME"

# Fast skip on obvious non-approved label events (avoid noise),
# BUT do NOT skip if label payload is weird/unknown.
if [[ "$LABEL_NAME" != "state/approved" && "$LABEL_NAME" != "workflow_dispatch" && "$LABEL_NAME" != "" && "$LABEL_NAME" != "[object Object]" ]]; then
echo "ℹ️ label=$LABEL_NAME => skip early"
echo "SKIP=1" >> /tmp/anno.env
echo "SKIP_REASON=\"label_not_approved_event\"" >> /tmp/anno.env
exit 0
fi
echo "✅ proceed (issue=$ISSUE_NUMBER)"

echo "✅ continue to API gating (issue=$ISSUE_NUMBER)"

- name: Fetch issue + hard gate on labels + Type
env:
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
run: |
set -euo pipefail
source /tmp/anno.env
[[ "${SKIP:-0}" != "1" ]] || { echo "ℹ️ skipped"; exit 0; }

test -n "${FORGE_TOKEN:-}" || { echo "❌ Missing secret FORGE_TOKEN"; exit 1; }

curl -fsS \
-H "Authorization: token $FORGE_TOKEN" \
-H "Accept: application/json" \
"$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE_NUMBER" \
-o /tmp/issue.json

node --input-type=module - <<'NODE' >> /tmp/anno.env
import fs from "node:fs";

const issue = JSON.parse(fs.readFileSync("/tmp/issue.json","utf8"));
const title = String(issue.title || "");
const body = String(issue.body || "").replace(/\r\n/g, "\n");

const labels = Array.isArray(issue.labels) ? issue.labels.map(l => String(l.name || "")).filter(Boolean) : [];
const hasApproved = labels.includes("state/approved");

function pickLine(key) {
const re = new RegExp(`^\\s*${key}\\s*:\\s*([^\\n\\r]+)`, "mi");
const m = body.match(re);
return m ? m[1].trim() : "";
}

const typeRaw = pickLine("Type");
const type = String(typeRaw || "").trim().toLowerCase();

const allowed = new Set(["type/media","type/reference","type/comment"]);
const proposer = new Set(["type/correction","type/fact-check"]);

const out = [];
out.push(`ISSUE_TITLE=${JSON.stringify(title)}`);
out.push(`ISSUE_TYPE=${JSON.stringify(type)}`);

// HARD gate: must currently have state/approved (avoids depending on event payload)
if (!hasApproved) {
out.push(`SKIP=1`);
out.push(`SKIP_REASON=${JSON.stringify("not_approved_label_present")}`);
process.stdout.write(out.join("\n") + "\n");
process.exit(0);
}

if (!type) {
out.push(`SKIP=1`);
out.push(`SKIP_REASON=${JSON.stringify("missing_type")}`);
} else if (allowed.has(type)) {
// proceed
} else if (proposer.has(type)) {
out.push(`SKIP=1`);
out.push(`SKIP_REASON=${JSON.stringify("proposer_type:"+type)}`);
} else {
out.push(`SKIP=1`);
out.push(`SKIP_REASON=${JSON.stringify("unsupported_type:"+type)}`);
}

process.stdout.write(out.join("\n") + "\n");
NODE

echo "✅ gating result:"
grep -E '^(ISSUE_TYPE|SKIP|SKIP_REASON)=' /tmp/anno.env || true
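The whole gate hinges on `pickLine("Type")`, a case-insensitive multiline scan of the issue body for a `Type:` line. A quick local check, with a made-up issue body, shows what it extracts (run outside any runner):

```bash
# Local check of the pickLine("Type") regex against a sample body
# (the body text here is invented for illustration).
node --input-type=module - <<'NODE'
const body = "Titre: exemple\nType: type/media\nState: state/approved\n";
const re = new RegExp(`^\\s*Type\\s*:\\s*([^\\n\\r]+)`, "mi");
const m = body.match(re);
console.log(m ? m[1].trim().toLowerCase() : "(missing)");  // -> "type/media"
NODE
```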
- name: Comment issue if skipped (Proposer / unsupported / missing Type)
if: ${{ always() }}
env:
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
run: |
set -euo pipefail
source /tmp/anno.env || true

[[ "${SKIP:-0}" == "1" ]] || exit 0

# IMPORTANT: do NOT comment for "not_approved_label_present" (avoid spam on other label events)
if [[ "${SKIP_REASON:-}" == "not_approved_label_present" || "${SKIP_REASON:-}" == "label_not_approved_event" ]]; then
echo "ℹ️ skip reason=${SKIP_REASON} -> no comment"
exit 0
fi

test -n "${FORGE_TOKEN:-}" || exit 0

REASON="${SKIP_REASON:-}"
TYPE="${ISSUE_TYPE:-}"

if [[ "$REASON" == proposer_type:* ]]; then
MSG="ℹ️ Ticket #${ISSUE_NUMBER} detected as **Proposer** (${TYPE}).\n\n- This type is **handled manually by the editors**.\n✅ No automatic processing."
elif [[ "$REASON" == unsupported_type:* ]]; then
MSG="ℹ️ Ticket #${ISSUE_NUMBER} ignored: Type not supported by the bot (${TYPE}).\n\nSupported types: type/media, type/reference, type/comment."
else
MSG="ℹ️ Ticket #${ISSUE_NUMBER} ignored: 'Type:' field missing or unreadable.\n\nAdd: Type: type/media|type/reference|type/comment"
fi

PAYLOAD="$(node --input-type=module -e 'console.log(JSON.stringify({body: process.argv[1]||""}))' "$MSG")"

curl -fsS -X POST \
-H "Authorization: token $FORGE_TOKEN" \
-H "Content-Type: application/json" \
"$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE_NUMBER/comments" \
--data-binary "$PAYLOAD"

- name: Checkout default branch
run: |

@@ -135,7 +253,7 @@ jobs:
set -euo pipefail
source /tmp/anno.env
[[ "${SKIP:-0}" != "1" ]] || { echo "ℹ️ skipped"; exit 0; }
npm ci
npm ci --no-audit --no-fund

- name: Check apply script exists
run: |

@@ -154,7 +272,7 @@ jobs:
source /tmp/anno.env
[[ "${SKIP:-0}" != "1" ]] || { echo "ℹ️ skipped"; exit 0; }

npm run build:clean
npm run build

test -f dist/para-index.json || {
echo "❌ missing dist/para-index.json after build"

@@ -173,6 +291,7 @@ jobs:
set -euo pipefail
source /tmp/anno.env
[[ "${SKIP:-0}" != "1" ]] || { echo "ℹ️ skipped"; exit 0; }
test -d .git || { echo "❌ not a git repo (checkout failed)"; echo "APPLY_RC=90" >> /tmp/anno.env; exit 0; }

test -n "${FORGE_TOKEN:-}" || { echo "❌ Missing secret FORGE_TOKEN"; exit 1; }
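The apply step itself (not fully shown in this hunk) never hard-fails mid-pipeline: it records a return code in the env file and exits 0, so the follow-up comment/push steps can decide what to do. A sketch of that pattern; the exact script invocation is an assumption inferred from the script header further down and the failure message below:

```bash
# "Record the rc, don't fail the step" pattern (invocation is assumed).
set +e
node scripts/apply-annotation-ticket.mjs "$ISSUE_NUMBER" >/tmp/apply.log 2>&1
RC=$?
set -e
echo "APPLY_RC=$RC" >> /tmp/anno.env
exit 0   # later steps read APPLY_RC and comment/push (or skip) accordingly
```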
@@ -220,7 +339,7 @@ jobs:
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
run: |
set -euo pipefail
source /tmp/anno.env
source /tmp/anno.env || true
[[ "${SKIP:-0}" != "1" ]] || { echo "ℹ️ skipped"; exit 0; }

RC="${APPLY_RC:-0}"

@@ -229,30 +348,15 @@ jobs:
exit 0
fi

BODY="$(tail -n 160 /tmp/apply.log | sed 's/\r$//')"
test -n "${FORGE_TOKEN:-}" || exit 0

if [[ -f /tmp/apply.log ]]; then
BODY="$(tail -n 160 /tmp/apply.log | sed 's/\r$//')"
else
BODY="(no apply log found)"
fi

MSG="❌ apply-annotation-ticket failed (rc=${RC}).\n\n\`\`\`\n${BODY}\n\`\`\`\n"

PAYLOAD="$(node --input-type=module -e 'console.log(JSON.stringify({body: process.argv[1]||""}))' "$MSG")"

curl -fsS -X POST \
-H "Authorization: token $FORGE_TOKEN" \
-H "Content-Type: application/json" \
"$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE_NUMBER/comments" \
--data-binary "$PAYLOAD"

- name: Comment issue if no-op (already applied)
if: ${{ always() }}
env:
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
run: |
set -euo pipefail
source /tmp/anno.env
[[ "${SKIP:-0}" != "1" ]] || exit 0

[[ "${APPLY_RC:-0}" == "0" ]] || exit 0
[[ "${NOOP:-0}" == "1" ]] || exit 0

MSG="ℹ️ Ticket #${ISSUE_NUMBER}: nothing to apply (already present / deduplicated)."
PAYLOAD="$(node --input-type=module -e 'console.log(JSON.stringify({body: process.argv[1]||""}))' "$MSG")"

curl -fsS -X POST \

@@ -267,11 +371,12 @@ jobs:
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
run: |
set -euo pipefail
source /tmp/anno.env
source /tmp/anno.env || true
[[ "${SKIP:-0}" != "1" ]] || exit 0

[[ "${APPLY_RC:-0}" == "0" ]] || { echo "ℹ️ apply failed -> skip push"; exit 0; }
[[ "${NOOP:-0}" == "0" ]] || { echo "ℹ️ no-op -> skip push"; exit 0; }
test -d .git || { echo "ℹ️ no git repo -> skip push"; exit 0; }

AUTH_URL="$(node --input-type=module -e '
const [clone, tok] = process.argv.slice(1);

@@ -290,7 +395,7 @@ jobs:
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
run: |
set -euo pipefail
source /tmp/anno.env
source /tmp/anno.env || true
[[ "${SKIP:-0}" != "1" ]] || exit 0

[[ "${APPLY_RC:-0}" == "0" ]] || { echo "ℹ️ apply failed -> skip PR"; exit 0; }

@@ -333,6 +438,7 @@ jobs:
run: |
set -euo pipefail
source /tmp/anno.env || true

[[ "${SKIP:-0}" != "1" ]] || { echo "ℹ️ skipped"; exit 0; }

RC="${APPLY_RC:-0}"
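Every comment step above builds its JSON body through Node's `JSON.stringify` rather than shell interpolation, so quotes and newlines in the message can never break the payload. The pattern in isolation (OWNER/REPO/ISSUE_NUMBER are placeholders here):

```bash
# JSON-safe comment payload, built the same way the workflow does it.
MSG='line with "quotes" and
a newline'
PAYLOAD="$(node --input-type=module -e 'console.log(JSON.stringify({body: process.argv[1]||""}))' "$MSG")"
curl -fsS -X POST \
  -H "Authorization: token $FORGE_TOKEN" \
  -H "Content-Type: application/json" \
  "$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE_NUMBER/comments" \
  --data-binary "$PAYLOAD"
```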
@@ -1,8 +1,13 @@
name: Anno Reject
name: Anno Reject (close issue)

on:
issues:
types: [labeled]
workflow_dispatch:
inputs:
issue:
description: "Issue number to reject/close"
required: true

env:
NODE_OPTIONS: --dns-result-order=ipv4first

@@ -11,14 +16,26 @@ defaults:
run:
shell: bash

concurrency:
group: anno-reject-${{ github.event.issue.number || github.event.issue.index || inputs.issue || 'manual' }}
cancel-in-progress: true

jobs:
reject:
runs-on: ubuntu-latest
runs-on: mac-ci
container:
image: mcr.microsoft.com/devcontainers/javascript-node:22-bookworm

steps:
- name: Derive context
- name: Tools sanity
run: |
set -euo pipefail
node --version

- name: Derive context (event.json / workflow_dispatch)
env:
INPUT_ISSUE: ${{ inputs.issue }}
FORGE_API: ${{ vars.FORGE_API || vars.FORGE_BASE || vars.FORGE_BASE_URL }}
run: |
set -euo pipefail
export EVENT_JSON="/var/run/act/workflow/event.json"

@@ -29,58 +46,122 @@ jobs:

const ev = JSON.parse(fs.readFileSync(process.env.EVENT_JSON, "utf8"));
const repoObj = ev?.repository || {};

const cloneUrl =
repoObj?.clone_url ||
(repoObj?.html_url ? (repoObj.html_url.replace(/\/$/,"") + ".git") : "");
if (!cloneUrl) throw new Error("No repository url");

let owner =
repoObj?.owner?.login ||
repoObj?.owner?.username ||
(repoObj?.full_name ? repoObj.full_name.split("/")[0] : "");

let repo =
repoObj?.name ||
(repoObj?.full_name ? repoObj.full_name.split("/")[1] : "");

if (!owner || !repo) {
if ((!owner || !repo) && cloneUrl) {
const m = cloneUrl.match(/[:/](?<o>[^/]+)\/(?<r>[^/]+?)(?:\.git)?$/);
if (m?.groups) { owner = owner || m.groups.o; repo = repo || m.groups.r; }
}
if (!owner || !repo) throw new Error("Cannot infer owner/repo");

const issueNumber = ev?.issue?.number || ev?.issue?.index;
if (!issueNumber) throw new Error("No issue number");
const issueNumber =
ev?.issue?.number ||
ev?.issue?.index ||
(process.env.INPUT_ISSUE ? Number(process.env.INPUT_ISSUE) : 0);

const labelName = ev?.label?.name || ev?.label || "";
const u = new URL(cloneUrl);
if (!issueNumber || !Number.isFinite(Number(issueNumber))) {
throw new Error("No issue number in event.json or workflow_dispatch input");
}

// label name: best effort (non-blocking)
let labelName = "workflow_dispatch";
const lab = ev?.label;
if (typeof lab === "string") labelName = lab;
else if (lab && typeof lab === "object" && typeof lab.name === "string") labelName = lab.name;

let apiBase = "";
if (process.env.FORGE_API && String(process.env.FORGE_API).trim()) {
apiBase = String(process.env.FORGE_API).trim().replace(/\/+$/,"");
} else if (cloneUrl) {
apiBase = new URL(cloneUrl).origin;
} else {
apiBase = "";
}

function sh(s){ return JSON.stringify(String(s)); }

process.stdout.write([
`OWNER=${sh(owner)}`,
`REPO=${sh(repo)}`,
`ISSUE_NUMBER=${sh(issueNumber)}`,
`LABEL_NAME=${sh(labelName)}`,
`API_BASE=${sh(u.origin)}`
`API_BASE=${sh(apiBase)}`
].join("\n") + "\n");
NODE

- name: Gate on label state/rejected
echo "✅ context:"
sed -n '1,120p' /tmp/reject.env

- name: Early gate (fast-skip, tolerant)
run: |
set -euo pipefail
source /tmp/reject.env
if [[ "$LABEL_NAME" != "state/rejected" ]]; then
echo "ℹ️ label=$LABEL_NAME => skip"
echo "ℹ️ event label = $LABEL_NAME"

if [[ "$LABEL_NAME" != "state/rejected" && "$LABEL_NAME" != "workflow_dispatch" && "$LABEL_NAME" != "" && "$LABEL_NAME" != "[object Object]" ]]; then
echo "ℹ️ label=$LABEL_NAME => skip early"
echo "SKIP=1" >> /tmp/reject.env
echo "SKIP_REASON=\"label_not_rejected_event\"" >> /tmp/reject.env
exit 0
fi
echo "✅ reject issue=$ISSUE_NUMBER"

- name: Comment + close issue
- name: Comment + close (only if label state/rejected is PRESENT now, and no conflict)
env:
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
run: |
set -euo pipefail
source /tmp/reject.env
[[ "${SKIP:-0}" != "1" ]] || { echo "ℹ️ skipped"; exit 0; }

test -n "${FORGE_TOKEN:-}" || { echo "❌ Missing secret FORGE_TOKEN"; exit 1; }
test -n "${API_BASE:-}" || { echo "❌ Missing API_BASE"; exit 1; }

curl -fsS \
-H "Authorization: token $FORGE_TOKEN" \
-H "Accept: application/json" \
"$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE_NUMBER" \
-o /tmp/reject.issue.json

node --input-type=module - <<'NODE' > /tmp/reject.flags
import fs from "node:fs";
const issue = JSON.parse(fs.readFileSync("/tmp/reject.issue.json","utf8"));
const labels = Array.isArray(issue.labels) ? issue.labels.map(l => String(l.name || "")).filter(Boolean) : [];
const hasApproved = labels.includes("state/approved");
const hasRejected = labels.includes("state/rejected");
process.stdout.write(`HAS_APPROVED=${hasApproved ? "1":"0"}\nHAS_REJECTED=${hasRejected ? "1":"0"}\n`);
NODE

source /tmp/reject.flags

# Do nothing unless state/rejected is truly present now (guards against weird payloads)
if [[ "${HAS_REJECTED:-0}" != "1" ]]; then
echo "ℹ️ state/rejected not present -> skip"
exit 0
fi

if [[ "${HAS_APPROVED:-0}" == "1" && "${HAS_REJECTED:-0}" == "1" ]]; then
MSG="⚠️ State conflict on ticket #${ISSUE_NUMBER}: both **state/approved** and **state/rejected** labels are present.\n\n➡️ Manual action required: remove one of the two labels before re-running."
PAYLOAD="$(node --input-type=module -e 'console.log(JSON.stringify({body: process.argv[1]||""}))' "$MSG")"
curl -fsS -X POST \
-H "Authorization: token $FORGE_TOKEN" \
-H "Content-Type: application/json" \
"$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE_NUMBER/comments" \
--data-binary "$PAYLOAD"
echo "ℹ️ conflict => stop"
exit 0
fi

MSG="❌ Ticket #${ISSUE_NUMBER} rejected (label state/rejected)."
PAYLOAD="$(node --input-type=module -e 'console.log(JSON.stringify({body: process.argv[1]||""}))' "$MSG")"

@@ -95,4 +176,6 @@ jobs:
-H "Authorization: token $FORGE_TOKEN" \
-H "Content-Type: application/json" \
"$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE_NUMBER" \
--data-binary '{"state":"closed"}'
--data-binary '{"state":"closed"}'

echo "✅ rejected+closed"
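Both the apply and reject workflows now tolerate three shapes of the labeled-event payload: a plain string, an object with a `.name`, or junk that stringifies to `"[object Object]"`. The shared normalization, isolated so each branch is visible:

```bash
node --input-type=module - <<'NODE'
// Isolated version of the label normalization used by both workflows;
// the three sample payloads are made up to exercise each branch.
function labelName(ev) {
  let name = "workflow_dispatch";
  const lab = ev?.label;
  if (typeof lab === "string") name = lab;
  else if (lab && typeof lab === "object" && typeof lab.name === "string") name = lab.name;
  return name;
}
console.log(labelName({ label: "state/rejected" }));           // string payload
console.log(labelName({ label: { name: "state/approved" } })); // object payload
console.log(labelName({}));                                    // no label -> workflow_dispatch
NODE
```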
@@ -4,22 +4,37 @@ on:
issues:
types: [opened, edited]

concurrency:
group: auto-label-${{ github.event.issue.number || github.event.issue.index || 'manual' }}
cancel-in-progress: true

jobs:
label:
runs-on: ubuntu-latest
runs-on: mac-ci
container:
image: mcr.microsoft.com/devcontainers/javascript-node:22-bookworm

steps:
- name: Apply labels from Type/State/Category
env:
FORGE_BASE: ${{ vars.FORGE_API || vars.FORGE_BASE }}
# IMPORTANT: prefer FORGE_BASE (LAN) if set, otherwise FORGE_API
FORGE_BASE: ${{ vars.FORGE_BASE || vars.FORGE_API || vars.FORGE_API_BASE }}
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
REPO_FULL: ${{ gitea.repository }}
EVENT_PATH: ${{ github.event_path }}
NODE_OPTIONS: --dns-result-order=ipv4first
run: |
python3 - <<'PY'
import json, os, re, urllib.request, urllib.error
import json, os, re, time, urllib.request, urllib.error, socket

forge = (os.environ.get("FORGE_BASE") or "").rstrip("/")
if not forge:
raise SystemExit("Missing FORGE_BASE/FORGE_API repo variable (e.g. http://192.168.1.20:3000)")

token = os.environ.get("FORGE_TOKEN") or ""
if not token:
raise SystemExit("Missing secret FORGE_TOKEN")

forge = os.environ["FORGE_BASE"].rstrip("/")
token = os.environ["FORGE_TOKEN"]
owner, repo = os.environ["REPO_FULL"].split("/", 1)
event_path = os.environ["EVENT_PATH"]

@@ -46,12 +61,9 @@ jobs:
print("PARSED:", {"Type": t, "State": s, "Category": c})

# 1) explicit from the body
if t:
desired.add(t)
if s:
desired.add(s)
if c:
desired.add(c)
if t: desired.add(t)
if s: desired.add(s)
if c: desired.add(c)

# 2) fallback from the title if Type is absent
if not t:

@@ -76,42 +88,56 @@ jobs:
"Authorization": f"token {token}",
"Accept": "application/json",
"Content-Type": "application/json",
"User-Agent": "archicratie-auto-label/1.0",
"User-Agent": "archicratie-auto-label/1.1",
}

def jreq(method, url, payload=None):
def jreq(method, url, payload=None, timeout=60, retries=4, backoff=2.0):
data = None if payload is None else json.dumps(payload).encode("utf-8")
req = urllib.request.Request(url, data=data, headers=headers, method=method)
try:
with urllib.request.urlopen(req, timeout=20) as r:
b = r.read()
return json.loads(b.decode("utf-8")) if b else None
except urllib.error.HTTPError as e:
b = e.read().decode("utf-8", errors="replace")
raise RuntimeError(f"HTTP {e.code} {method} {url}\n{b}") from e
last_err = None
for i in range(retries):
req = urllib.request.Request(url, data=data, headers=headers, method=method)
try:
with urllib.request.urlopen(req, timeout=timeout) as r:
b = r.read()
return json.loads(b.decode("utf-8")) if b else None
except urllib.error.HTTPError as e:
b = e.read().decode("utf-8", errors="replace")
raise RuntimeError(f"HTTP {e.code} {method} {url}\n{b}") from e
except (TimeoutError, socket.timeout, urllib.error.URLError) as e:
last_err = e
# retry only on network/timeout
time.sleep(backoff * (i + 1))
raise RuntimeError(f"Network/timeout after retries: {method} {url}\n{last_err}")

# repo labels
labels = jreq("GET", f"{api}/repos/{owner}/{repo}/labels?limit=1000") or []
labels = jreq("GET", f"{api}/repos/{owner}/{repo}/labels?limit=1000", timeout=60) or []
name_to_id = {x.get("name"): x.get("id") for x in labels}

missing = [x for x in desired if x not in name_to_id]
if missing:
raise SystemExit("Missing labels in repo: " + ", ".join(sorted(missing)))

wanted_ids = [name_to_id[x] for x in desired]
wanted_ids = sorted({int(name_to_id[x]) for x in desired})

# current labels on the issue
current = jreq("GET", f"{api}/repos/{owner}/{repo}/issues/{number}/labels") or []
current_ids = {x.get("id") for x in current if x.get("id") is not None}
current = jreq("GET", f"{api}/repos/{owner}/{repo}/issues/{number}/labels", timeout=60) or []
current_ids = {int(x.get("id")) for x in current if x.get("id") is not None}

final_ids = sorted(current_ids.union(wanted_ids))

# set labels = union (removes nothing)
# Replace labels = union (removes nothing)
url = f"{api}/repos/{owner}/{repo}/issues/{number}/labels"
try:
jreq("PUT", url, {"labels": final_ids})
except Exception:
jreq("PUT", url, final_ids)

# IMPORTANT: NEVER send a bare list here (that caused the 422)
jreq("PUT", url, {"labels": final_ids}, timeout=90, retries=4)

# post-apply check (guards against "timed out but applied")
post = jreq("GET", f"{api}/repos/{owner}/{repo}/issues/{number}/labels", timeout=60) or []
post_ids = {int(x.get("id")) for x in post if x.get("id") is not None}

missing_ids = [i for i in wanted_ids if i not in post_ids]
if missing_ids:
raise RuntimeError(f"Labels not applied after PUT (missing ids): {missing_ids}")

print(f"OK labels #{number}: {sorted(desired)}")
PY
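The "that caused the 422" comment is worth making concrete: the labels PUT wants an object whose `labels` field is an array of label IDs, and the old fallback of sending a bare array is exactly what triggered HTTP 422. The same request expressed as curl (owner/repo/issue and IDs are placeholders):

```bash
# Shape of the labels PUT that works: an object with a "labels" array of ids.
curl -fsS -X PUT \
  -H "Authorization: token $FORGE_TOKEN" \
  -H "Content-Type: application/json" \
  "$FORGE_BASE/api/v1/repos/OWNER/REPO/issues/1/labels" \
  --data-binary '{"labels": [3, 7, 12]}'

# The bare-list form below is what previously caused the 422; it is never sent any more:
#   --data-binary '[3, 7, 12]'
```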
@@ -3,7 +3,7 @@ name: CI
on:
push:
pull_request:
branches: [master]
branches: [main]
workflow_dispatch:

env:

@@ -15,7 +15,7 @@ defaults:

jobs:
build-and-anchors:
runs-on: ubuntu-latest
runs-on: mac-ci
container:
image: mcr.microsoft.com/devcontainers/javascript-node:22-bookworm
@@ -6,7 +6,7 @@ on:
workflow_dispatch:
inputs:
force:
description: "Force deploy even if gate would skip (1=yes, 0=no)"
description: "Force FULL deploy (rebuild+restart) even if gate would hotpatch-only (1=yes, 0=no)"
required: false
default: "0"

@@ -14,6 +14,7 @@ env:
NODE_OPTIONS: --dns-result-order=ipv4first
DOCKER_API_VERSION: "1.43"
COMPOSE_VERSION: "2.29.7"
ASTRO_TELEMETRY_DISABLED: "1"

defaults:
run:

@@ -25,9 +26,9 @@ concurrency:

jobs:
deploy:
runs-on: ubuntu-latest
runs-on: nas-deploy
container:
image: mcr.microsoft.com/devcontainers/javascript-node:22-bookworm
image: localhost:5000/archicratie/nas-deploy-node22@sha256:fefa8bb307005cebec07796661ab25528dc319c33a8f1e480e1d66f90cd5cff6

steps:
- name: Tools sanity

@@ -92,7 +93,7 @@ jobs:

git log -1 --oneline

- name: Gate — auto deploy only on annotations/media changes
- name: Gate — decide SKIP vs HOTPATCH vs FULL rebuild
env:
INPUT_FORCE: ${{ inputs.force }}
run: |

@@ -100,45 +101,70 @@ jobs:
source /tmp/deploy.env

FORCE="${INPUT_FORCE:-0}"

# list touched files (useful for copying media)
CHANGED="$(git show --name-only --pretty="" "$SHA" | sed '/^$/d' || true)"
printf "%s\n" "$CHANGED" > /tmp/changed.txt

echo "== changed files =="
echo "$CHANGED" | sed -n '1,260p'

# 0) Manual force
if [[ "$FORCE" == "1" ]]; then
echo "✅ force=1 -> bypass gate -> deploy allowed"
echo "GO=1" >> /tmp/deploy.env
echo "MODE='full'" >> /tmp/deploy.env
echo "✅ force=1 -> MODE=full (rebuild+restart)"
exit 0
fi

CHANGED="$(git show --name-only --pretty="" "$SHA" | sed '/^$/d' || true)"
echo "== changed files =="
echo "$CHANGED" | sed -n '1,240p'
# 1) Detect the classes of changes
HAS_FULL=0
HAS_HOTPATCH=0

if echo "$CHANGED" | grep -qE '^(src/annotations/|public/media/)'; then
echo "GO=1" >> /tmp/deploy.env
echo "✅ deploy allowed (annotations/media change detected)"
else
echo "GO=0" >> /tmp/deploy.env
echo "ℹ️ no annotations/media change -> skip deploy"
# FULL if build-impacting (zero surprises)
if echo "$CHANGED" | grep -qE '^(src/content/|src/anchors/|src/pages/|scripts/)'; then
HAS_FULL=1
fi

- name: Install docker client + docker compose plugin (v2) + python yaml
# HOTPATCH if annotations/media
if echo "$CHANGED" | grep -qE '^(src/annotations/|public/media/)'; then
HAS_HOTPATCH=1
fi

echo "Gate flags: HAS_FULL=$HAS_FULL HAS_HOTPATCH=$HAS_HOTPATCH"

# 2) Decision (FULL takes priority)
if [[ "$HAS_FULL" == "1" ]]; then
echo "GO=1" >> /tmp/deploy.env
echo "MODE='full'" >> /tmp/deploy.env
echo "✅ build-impacting change -> MODE=full (rebuild+restart)"
exit 0
fi

if [[ "$HAS_HOTPATCH" == "1" ]]; then
echo "GO=1" >> /tmp/deploy.env
echo "MODE='hotpatch'" >> /tmp/deploy.env
echo "✅ annotations/media change -> MODE=hotpatch"
exit 0
fi

echo "GO=0" >> /tmp/deploy.env
echo "MODE='skip'" >> /tmp/deploy.env
echo "ℹ️ no deploy-relevant change -> skip deploy"
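The gate is pure string matching over `git show --name-only` output, so it can be dry-run locally against any file list. A sketch with a hypothetical change list:

```bash
# Dry-run of the gate classification on a made-up change list.
CHANGED=$'src/annotations/livre/ch1/p42.yml\npublic/media/livre/ch1/p42/fig.png'

HAS_FULL=0; HAS_HOTPATCH=0
echo "$CHANGED" | grep -qE '^(src/content/|src/anchors/|src/pages/|scripts/)' && HAS_FULL=1
echo "$CHANGED" | grep -qE '^(src/annotations/|public/media/)' && HAS_HOTPATCH=1

# FULL wins over HOTPATCH; neither flag set means skip.
echo "HAS_FULL=$HAS_FULL HAS_HOTPATCH=$HAS_HOTPATCH"   # -> 0 1, i.e. MODE=hotpatch
```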
- name: Toolchain sanity + resolve COMPOSE_PROJECT_NAME
run: |
set -euo pipefail
source /tmp/deploy.env
[[ "${GO:-0}" == "1" ]] || { echo "ℹ️ skipped"; exit 0; }

apt-get -o Acquire::Retries=5 -o Acquire::ForceIPv4=true update
apt-get install -y --no-install-recommends ca-certificates curl docker.io python3 python3-yaml
rm -rf /var/lib/apt/lists/*

mkdir -p /usr/local/lib/docker/cli-plugins
curl -fsSL \
"https://github.com/docker/compose/releases/download/v${COMPOSE_VERSION}/docker-compose-linux-x86_64" \
-o /usr/local/lib/docker/cli-plugins/docker-compose
chmod +x /usr/local/lib/docker/cli-plugins/docker-compose

# tools are prebaked in the image
git --version
docker version
docker compose version
python3 --version
python3 -c 'import yaml; print("PyYAML OK")'

# 🔥 KEY FIX: reuse existing compose project name if containers already exist
# Reuse existing compose project name if containers already exist
PROJ="$(docker inspect archicratie-web-blue --format '{{ index .Config.Labels "com.docker.compose.project" }}' 2>/dev/null || true)"
if [[ -z "${PROJ:-}" ]]; then
PROJ="$(docker inspect archicratie-web-green --format '{{ index .Config.Labels "com.docker.compose.project" }}' 2>/dev/null || true)"

@@ -147,7 +173,12 @@ jobs:
echo "COMPOSE_PROJECT_NAME='$PROJ'" >> /tmp/deploy.env
echo "✅ Using COMPOSE_PROJECT_NAME=$PROJ"

- name: Assert required vars (PUBLIC_GITEA_*)
# Assert target containers exist (hotpatch needs them)
for c in archicratie-web-blue archicratie-web-green; do
docker inspect "$c" >/dev/null 2>&1 || { echo "❌ missing container $c"; exit 5; }
done

- name: Assert required vars (PUBLIC_GITEA_*) — only needed for MODE=full
env:
PUBLIC_GITEA_BASE: ${{ vars.PUBLIC_GITEA_BASE }}
PUBLIC_GITEA_OWNER: ${{ vars.PUBLIC_GITEA_OWNER }}

@@ -156,24 +187,26 @@ jobs:
set -euo pipefail
source /tmp/deploy.env
[[ "${GO:-0}" == "1" ]] || { echo "ℹ️ skipped"; exit 0; }
[[ "${MODE:-hotpatch}" == "full" ]] || { echo "ℹ️ hotpatch mode -> vars not required"; exit 0; }

test -n "${PUBLIC_GITEA_BASE:-}" || { echo "❌ missing repo var PUBLIC_GITEA_BASE"; exit 2; }
test -n "${PUBLIC_GITEA_OWNER:-}" || { echo "❌ missing repo var PUBLIC_GITEA_OWNER"; exit 2; }
test -n "${PUBLIC_GITEA_REPO:-}" || { echo "❌ missing repo var PUBLIC_GITEA_REPO"; exit 2; }
echo "✅ vars OK"

- name: Assert deploy files exist
- name: Assert deploy files exist — only needed for MODE=full
run: |
set -euo pipefail
source /tmp/deploy.env
[[ "${GO:-0}" == "1" ]] || { echo "ℹ️ skipped"; exit 0; }
[[ "${MODE:-hotpatch}" == "full" ]] || { echo "ℹ️ hotpatch mode -> files not required"; exit 0; }

test -f docker-compose.yml
test -f Dockerfile
test -f nginx.conf
echo "✅ deploy files OK"

- name: Build + deploy staging (blue) then smoke
- name: FULL — Build + deploy staging (blue) then warmup+smoke
env:
PUBLIC_GITEA_BASE: ${{ vars.PUBLIC_GITEA_BASE }}
PUBLIC_GITEA_OWNER: ${{ vars.PUBLIC_GITEA_OWNER }}

@@ -182,31 +215,51 @@ jobs:
set -euo pipefail
source /tmp/deploy.env
[[ "${GO:-0}" == "1" ]] || { echo "ℹ️ skipped"; exit 0; }
[[ "${MODE:-hotpatch}" == "full" ]] || { echo "ℹ️ MODE=$MODE -> skip full rebuild"; exit 0; }

PROJ="${COMPOSE_PROJECT_NAME:-archicratie-web}"

wait_url() {
local url="$1"
local label="$2"
local tries="${3:-60}"
for i in $(seq 1 "$tries"); do
if curl -fsS --max-time 4 "$url" >/dev/null; then
echo "✅ $label OK ($url)"
return 0
fi
echo "… warmup $label ($i/$tries)"
sleep 1
done
echo "❌ timeout $label ($url)"
return 1
}

TS="$(date -u +%Y%m%d-%H%M%S)"
echo "TS='$TS'" >> /tmp/deploy.env
docker image tag archicratie-web:blue "archicratie-web:blue.BAK.${TS}" || true
docker image tag archicratie-web:green "archicratie-web:green.BAK.${TS}" || true

docker compose -p "$PROJ" -f docker-compose.yml build web_blue

docker rm -f archicratie-web-blue || true
docker compose -p "$PROJ" -f docker-compose.yml up -d --force-recreate --remove-orphans web_blue

curl -fsS "http://127.0.0.1:8081/para-index.json" >/dev/null
curl -fsS "http://127.0.0.1:8081/annotations-index.json" >/dev/null
curl -fsS "http://127.0.0.1:8081/pagefind/pagefind.js" >/dev/null
# warmup endpoints
wait_url "http://127.0.0.1:8081/para-index.json" "blue para-index"
wait_url "http://127.0.0.1:8081/annotations-index.json" "blue annotations-index"
wait_url "http://127.0.0.1:8081/pagefind/pagefind.js" "blue pagefind.js"

CANON="$(curl -fsS "http://127.0.0.1:8081/archicrat-ia/chapitre-1/" | grep -oE 'rel="canonical" href="[^"]+"' | head -n1 || true)"
CANON="$(curl -fsS --max-time 6 "http://127.0.0.1:8081/archicrat-ia/chapitre-1/" | grep -oE 'rel="canonical" href="[^"]+"' | head -n1 || true)"
echo "canonical(blue)=$CANON"
echo "$CANON" | grep -q 'https://staging\.archicratie\.trans-hands\.synology\.me/' || {
echo "❌ staging canonical mismatch"; exit 3;
echo "❌ staging canonical mismatch"
docker logs --tail 120 archicratie-web-blue || true
exit 3
}

echo "✅ staging OK"
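Because both images are tagged `*.BAK.${TS}` before the rebuild, a failed green deploy can also be rolled back by hand, outside the workflow. A sketch mirroring the `rollback()` helper (the timestamp is a placeholder):

```bash
# Manual rollback to the pre-deploy image (TS is a placeholder value).
TS=20240101-000000
PROJ="${COMPOSE_PROJECT_NAME:-archicratie-web}"
docker image tag "archicratie-web:green.BAK.${TS}" archicratie-web:green
docker rm -f archicratie-web-green || true
docker compose -p "$PROJ" -f docker-compose.yml up -d --force-recreate --remove-orphans web_green
```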
- name: Build + deploy live (green) then smoke + rollback if needed
- name: FULL — Build + deploy live (green) then warmup+smoke + rollback if needed
env:
PUBLIC_GITEA_BASE: ${{ vars.PUBLIC_GITEA_BASE }}
PUBLIC_GITEA_OWNER: ${{ vars.PUBLIC_GITEA_OWNER }}

@@ -215,9 +268,27 @@ jobs:
set -euo pipefail
source /tmp/deploy.env
[[ "${GO:-0}" == "1" ]] || { echo "ℹ️ skipped"; exit 0; }
[[ "${MODE:-hotpatch}" == "full" ]] || { echo "ℹ️ MODE=$MODE -> skip full rebuild"; exit 0; }

PROJ="${COMPOSE_PROJECT_NAME:-archicratie-web}"
TS="${TS:-$(date -u +%Y%m%d-%H%M%S)}"

wait_url() {
local url="$1"
local label="$2"
local tries="${3:-60}"
for i in $(seq 1 "$tries"); do
if curl -fsS --max-time 4 "$url" >/dev/null; then
echo "✅ $label OK ($url)"
return 0
fi
echo "… warmup $label ($i/$tries)"
sleep 1
done
echo "❌ timeout $label ($url)"
return 1
}

rollback() {
echo "⚠️ rollback green -> previous image tag (best effort)"
docker image tag "archicratie-web:green.BAK.${TS}" archicratie-web:green || true

@@ -225,33 +296,38 @@ jobs:
docker compose -p "$PROJ" -f docker-compose.yml up -d --force-recreate --remove-orphans web_green || true
}

set +e
docker compose -p "$PROJ" -f docker-compose.yml build web_green
# build/restart green
if ! docker compose -p "$PROJ" -f docker-compose.yml build web_green; then
echo "❌ build green failed"; rollback; exit 4
fi

docker rm -f archicratie-web-green || true
docker compose -p "$PROJ" -f docker-compose.yml up -d --force-recreate --remove-orphans web_green

curl -fsS "http://127.0.0.1:8082/para-index.json" >/dev/null
curl -fsS "http://127.0.0.1:8082/annotations-index.json" >/dev/null
curl -fsS "http://127.0.0.1:8082/pagefind/pagefind.js" >/dev/null
# warmup endpoints
if ! wait_url "http://127.0.0.1:8082/para-index.json" "green para-index"; then rollback; exit 4; fi
if ! wait_url "http://127.0.0.1:8082/annotations-index.json" "green annotations-index"; then rollback; exit 4; fi
if ! wait_url "http://127.0.0.1:8082/pagefind/pagefind.js" "green pagefind.js"; then rollback; exit 4; fi

CANON="$(curl -fsS "http://127.0.0.1:8082/archicrat-ia/chapitre-1/" | grep -oE 'rel="canonical" href="[^"]+"' | head -n1 || true)"
CANON="$(curl -fsS --max-time 6 "http://127.0.0.1:8082/archicrat-ia/chapitre-1/" | grep -oE 'rel="canonical" href="[^"]+"' | head -n1 || true)"
echo "canonical(green)=$CANON"
echo "$CANON" | grep -q 'https://archicratie\.trans-hands\.synology\.me/' || {
echo "❌ live canonical mismatch"; rollback; exit 4;
echo "❌ live canonical mismatch"
docker logs --tail 120 archicratie-web-green || true
rollback
exit 4
}

echo "✅ live OK"
set -e

- name: Hotpatch annotations-index.json (deep merge shards) into blue+green
- name: HOTPATCH — deep merge shards -> annotations-index + copy changed media into blue+green
run: |
set -euo pipefail
source /tmp/deploy.env
[[ "${GO:-0}" == "1" ]] || { echo "ℹ️ skipped"; exit 0; }

python3 - <<'PY'
import os, re, json, glob, datetime
import os, re, json, glob
import yaml
import datetime as dt

@@ -261,7 +337,6 @@ jobs:
def is_obj(x): return isinstance(x, dict)
def is_arr(x): return isinstance(x, list)

# --- KEY FIX: YAML timestamps -> datetime; JSON can't dump them
def iso_dt(x):
if isinstance(x, dt.datetime):
if x.tzinfo is None:

@@ -273,81 +348,58 @@ jobs:

def normalize(x):
s = iso_dt(x)
if s is not None:
return s
if s is not None: return s
if isinstance(x, dict):
return {str(k): normalize(v) for k, v in x.items()}
if isinstance(x, list):
return [normalize(v) for v in x]
return x

def key_media(it):
return str((it or {}).get("src",""))

def key_media(it): return str((it or {}).get("src",""))
def key_ref(it):
it = it or {}
return "||".join([
str(it.get("url","")),
str(it.get("label","")),
str(it.get("kind","")),
str(it.get("citation","")),
])

def key_comment(it):
return str((it or {}).get("text","")).strip()
return "||".join([str(it.get("url","")), str(it.get("label","")), str(it.get("kind","")), str(it.get("citation",""))])
def key_comment(it): return str((it or {}).get("text","")).strip()

def dedup_extend(dst_list, src_list, key_fn):
seen = set()
out = []
seen = set(); out = []
for x in (dst_list or []):
x = normalize(x)
k = key_fn(x)
if k and k not in seen:
seen.add(k); out.append(x)
x = normalize(x); k = key_fn(x)
if k and k not in seen: seen.add(k); out.append(x)
for x in (src_list or []):
x = normalize(x)
k = key_fn(x)
if k and k not in seen:
seen.add(k); out.append(x)
x = normalize(x); k = key_fn(x)
if k and k not in seen: seen.add(k); out.append(x)
return out

def deep_merge(dst, src):
src = normalize(src)
for k, v in (src or {}).items():
if k in ("media", "refs", "comments_editorial") and is_arr(v):
if k == "media":
dst[k] = dedup_extend(dst.get(k, []), v, key_media)
elif k == "refs":
dst[k] = dedup_extend(dst.get(k, []), v, key_ref)
else:
dst[k] = dedup_extend(dst.get(k, []), v, key_comment)
if k in ("media","refs","comments_editorial") and is_arr(v):
if k == "media": dst[k] = dedup_extend(dst.get(k, []), v, key_media)
elif k == "refs": dst[k] = dedup_extend(dst.get(k, []), v, key_ref)
else: dst[k] = dedup_extend(dst.get(k, []), v, key_comment)
continue

if is_obj(v):
if not is_obj(dst.get(k)):
dst[k] = {} if not is_obj(dst.get(k)) else dst.get(k)
if not is_obj(dst.get(k)): dst[k] = {}
deep_merge(dst[k], v)
continue

if is_arr(v):
cur = dst.get(k, [])
if not is_arr(cur): cur = []
seen = set()
out = []
seen = set(); out = []
for x in cur:
x = normalize(x)
s = json.dumps(x, sort_keys=True, ensure_ascii=False)
if s not in seen:
seen.add(s); out.append(x)
if s not in seen: seen.add(s); out.append(x)
for x in v:
x = normalize(x)
s = json.dumps(x, sort_keys=True, ensure_ascii=False)
if s not in seen:
seen.add(s); out.append(x)
if s not in seen: seen.add(s); out.append(x)
dst[k] = out
continue

# scalars: set only if absent / empty
v = normalize(v)
if k not in dst or dst.get(k) in (None, ""):
dst[k] = v

@@ -360,7 +412,6 @@ jobs:
for k in ("media","refs","comments_editorial"):
arr = entry.get(k)
if not is_arr(arr): continue

def ts(x):
x = normalize(x)
try:

@@ -368,24 +419,22 @@ jobs:
return dt.datetime.fromisoformat(s.replace("Z","+00:00")).timestamp() if s else 0
except Exception:
return 0

arr = [normalize(x) for x in arr]
arr.sort(key=lambda x: (ts(x), json.dumps(x, sort_keys=True, ensure_ascii=False)))
entry[k] = arr

pages = {}
errors = []

if not os.path.isdir(ANNO_ROOT):
raise SystemExit(f"Missing annotations root: {ANNO_ROOT}")

pages = {}
errors = []

files = sorted(glob.glob(os.path.join(ANNO_ROOT, "**", "*.yml"), recursive=True))
for fp in files:
try:
with open(fp, "r", encoding="utf-8") as f:
doc = yaml.safe_load(f) or {}
doc = normalize(doc)

if not isinstance(doc, dict) or doc.get("schema") != 1:
continue

@@ -423,22 +472,49 @@ jobs:
"errors": errors,
}

out = normalize(out)

with open("/tmp/annotations-index.json", "w", encoding="utf-8") as f:
json.dump(out, f, ensure_ascii=False)

print("OK: wrote /tmp/annotations-index.json pages=", out["stats"]["pages"], "paras=", out["stats"]["paras"], "errors=", out["stats"]["errors"])
PY

# patch JSON into running containers
for c in archicratie-web-blue archicratie-web-green; do
echo "== patch $c =="
echo "== patch annotations-index.json into $c =="
docker cp /tmp/annotations-index.json "${c}:/usr/share/nginx/html/annotations-index.json"
done

# copy changed media files into containers (so new media appears without rebuild)
if [[ -s /tmp/changed.txt ]]; then
while IFS= read -r f; do
[[ -n "$f" ]] || continue
if [[ "$f" == public/media/* ]]; then
dest="/usr/share/nginx/html/${f#public/}" # => /usr/share/nginx/html/media/...
for c in archicratie-web-blue archicratie-web-green; do
echo "== copy media into $c: $f -> $dest =="
docker exec "$c" sh -lc "mkdir -p \"$(dirname "$dest")\""
docker cp "$f" "$c:$dest"
done
fi
done < /tmp/changed.txt
fi

# smoke after patch
for p in 8081 8082; do
echo "== smoke annotations-index on $p =="
curl -fsS "http://127.0.0.1:${p}/annotations-index.json" | python3 -c 'import sys,json; j=json.load(sys.stdin); print("generatedAt:", j.get("generatedAt")); print("pages:", len(j.get("pages") or {}))'
curl -fsS --max-time 6 "http://127.0.0.1:${p}/annotations-index.json" \
| python3 -c 'import sys,json; j=json.load(sys.stdin); print("generatedAt:", j.get("generatedAt")); print("pages:", len(j.get("pages") or {})); print("paras:", j.get("stats",{}).get("paras"))'
done

echo "✅ hotpatch annotations-index done"
echo "✅ hotpatch done"
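The media copy relies on a bash prefix-strip to map repo paths onto the nginx docroot. The expansion in isolation, with a sample path:

```bash
# The ${f#public/} prefix-strip that maps a repo path to the nginx docroot.
f="public/media/livre/ch1/p42/fig.png"
dest="/usr/share/nginx/html/${f#public/}"
echo "$dest"   # -> /usr/share/nginx/html/media/livre/ch1/p42/fig.png
```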
- name: Debug on failure (containers status/logs)
if: ${{ failure() }}
run: |
set -euo pipefail
echo "== docker ps =="
docker ps --format 'table {{.Names}}\t{{.Status}}\t{{.Image}}' | sed -n '1,80p' || true
for c in archicratie-web-blue archicratie-web-green; do
echo "== logs $c (tail 200) =="
docker logs --tail 200 "$c" || true
done

.gitea/workflows/proposer-apply-pr.yml (new file, 395 lines)

@@ -0,0 +1,395 @@
|
||||
name: Proposer Apply (PR)
|
||||
|
||||
on:
|
||||
issues:
|
||||
types: [labeled]
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
issue:
|
||||
description: "Issue number to apply (Proposer: correction/fact-check)"
|
||||
required: true
|
||||
|
||||
env:
|
||||
NODE_OPTIONS: --dns-result-order=ipv4first
|
||||
|
||||
defaults:
|
||||
run:
|
||||
shell: bash
|
||||
|
||||
concurrency:
|
||||
group: proposer-apply-${{ github.event.issue.number || inputs.issue || 'manual' }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
apply-proposer:
|
||||
runs-on: mac-ci
|
||||
container:
|
||||
image: mcr.microsoft.com/devcontainers/javascript-node:22-bookworm
|
||||
|
||||
steps:
|
||||
- name: Tools sanity
|
||||
run: |
|
||||
set -euo pipefail
|
||||
git --version
|
||||
node --version
|
||||
npm --version
|
||||
|
||||
- name: Derive context (event.json / workflow_dispatch)
|
||||
env:
|
||||
INPUT_ISSUE: ${{ inputs.issue }}
|
||||
FORGE_API: ${{ vars.FORGE_API || vars.FORGE_BASE }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
export EVENT_JSON="/var/run/act/workflow/event.json"
|
||||
test -f "$EVENT_JSON" || { echo "❌ Missing $EVENT_JSON"; exit 1; }
|
||||
|
||||
node --input-type=module - <<'NODE' > /tmp/proposer.env
|
||||
import fs from "node:fs";
|
||||
|
||||
const ev = JSON.parse(fs.readFileSync(process.env.EVENT_JSON, "utf8"));
|
||||
const repoObj = ev?.repository || {};
|
||||
|
||||
const cloneUrl =
|
||||
repoObj?.clone_url ||
|
||||
(repoObj?.html_url ? (repoObj.html_url.replace(/\/$/,"") + ".git") : "");
|
||||
|
||||
if (!cloneUrl) throw new Error("No repository clone_url/html_url in event.json");
|
||||
|
||||
let owner =
|
||||
repoObj?.owner?.login ||
|
||||
repoObj?.owner?.username ||
|
||||
(repoObj?.full_name ? repoObj.full_name.split("/")[0] : "");
|
||||
|
||||
let repo =
|
||||
repoObj?.name ||
|
||||
(repoObj?.full_name ? repoObj.full_name.split("/")[1] : "");
|
||||
|
||||
if (!owner || !repo) {
|
||||
const m = cloneUrl.match(/[:/](?<o>[^/]+)\/(?<r>[^/]+?)(?:\.git)?$/);
|
||||
if (m?.groups) { owner = owner || m.groups.o; repo = repo || m.groups.r; }
|
||||
}
|
||||
if (!owner || !repo) throw new Error("Cannot infer owner/repo");
|
||||
|
||||
const defaultBranch = repoObj?.default_branch || "main";
|
||||
|
||||
const issueNumber =
|
||||
ev?.issue?.number ||
|
||||
ev?.issue?.index ||
|
||||
(process.env.INPUT_ISSUE ? Number(process.env.INPUT_ISSUE) : 0);
|
||||
|
||||
if (!issueNumber || !Number.isFinite(Number(issueNumber))) {
|
||||
throw new Error("No issue number in event.json or workflow_dispatch input");
|
||||
}
|
||||
|
||||
const labelName =
|
||||
ev?.label?.name ||
|
||||
ev?.label ||
|
||||
"workflow_dispatch";
|
||||
|
||||
const u = new URL(cloneUrl);
|
||||
const origin = u.origin;
|
||||
|
||||
const apiBase = (process.env.FORGE_API && String(process.env.FORGE_API).trim())
|
||||
? String(process.env.FORGE_API).trim().replace(/\/+$/,"")
|
||||
: origin;
|
||||
|
||||
function sh(s){ return JSON.stringify(String(s)); }
|
||||
process.stdout.write([
|
||||
`CLONE_URL=${sh(cloneUrl)}`,
|
||||
`OWNER=${sh(owner)}`,
|
||||
`REPO=${sh(repo)}`,
|
||||
`DEFAULT_BRANCH=${sh(defaultBranch)}`,
|
||||
`ISSUE_NUMBER=${sh(issueNumber)}`,
|
||||
`LABEL_NAME=${sh(labelName)}`,
|
||||
`API_BASE=${sh(apiBase)}`
|
||||
].join("\n") + "\n");
|
||||
NODE
|
||||
|
||||
echo "✅ context:"
|
||||
sed -n '1,120p' /tmp/proposer.env
|
||||
|
||||
- name: Gate on label state/approved
|
||||
run: |
|
||||
set -euo pipefail
|
||||
source /tmp/proposer.env
|
||||
|
||||
if [[ "$LABEL_NAME" != "state/approved" && "$LABEL_NAME" != "workflow_dispatch" ]]; then
|
||||
echo "ℹ️ label=$LABEL_NAME => skip"
|
||||
echo "SKIP=1" >> /tmp/proposer.env
|
||||
exit 0
|
||||
fi
|
||||
echo "✅ proceed (issue=$ISSUE_NUMBER)"
|
||||
|
||||
- name: Fetch issue + API-hard gate on (state/approved present + proposer type)
|
||||
env:
|
||||
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
source /tmp/proposer.env
|
||||
[[ "${SKIP:-0}" != "1" ]] || { echo "ℹ️ skipped"; exit 0; }
|
||||
|
||||
test -n "${FORGE_TOKEN:-}" || { echo "❌ Missing secret FORGE_TOKEN"; exit 1; }
|
||||
|
||||
curl -fsS \
|
||||
-H "Authorization: token $FORGE_TOKEN" \
|
||||
-H "Accept: application/json" \
|
||||
"$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE_NUMBER" \
|
||||
-o /tmp/issue.json
|
||||
|
||||
node --input-type=module - <<'NODE' >> /tmp/proposer.env
|
||||
import fs from "node:fs";
|
||||
const issue = JSON.parse(fs.readFileSync("/tmp/issue.json","utf8"));
|
||||
const title = String(issue.title || "");
|
||||
const body = String(issue.body || "").replace(/\r\n/g, "\n");
|
||||
const labels = Array.isArray(issue.labels) ? issue.labels.map(l => String(l.name||"")).filter(Boolean) : [];
|
||||
|
||||
function pickLine(key) {
|
||||
const re = new RegExp(`^\\s*${key}\\s*:\\s*([^\\n\\r]+)`, "mi");
|
||||
const m = body.match(re);
|
||||
return m ? m[1].trim() : "";
|
||||
}
|
||||
|
||||
const typeRaw = pickLine("Type");
|
||||
const type = String(typeRaw || "").trim().toLowerCase();
|
||||
|
||||
const hasApproved = labels.includes("state/approved");
|
||||
const proposer = new Set(["type/correction","type/fact-check"]);
|
||||
|
||||
const out = [];
|
||||
out.push(`ISSUE_TITLE=${JSON.stringify(title)}`);
|
||||
out.push(`ISSUE_TYPE=${JSON.stringify(type)}`);
|
||||
out.push(`HAS_APPROVED=${hasApproved ? "1":"0"}`);
|
||||
|
||||
if (!hasApproved) {
|
||||
out.push(`SKIP=1`);
|
||||
out.push(`SKIP_REASON=${JSON.stringify("approved_not_present")}`);
|
||||
} else if (!type) {
|
||||
out.push(`SKIP=1`);
|
||||
out.push(`SKIP_REASON=${JSON.stringify("missing_type")}`);
|
||||
} else if (!proposer.has(type)) {
|
||||
out.push(`SKIP=1`);
|
||||
out.push(`SKIP_REASON=${JSON.stringify("not_proposer:"+type)}`);
|
||||
}
|
||||
process.stdout.write(out.join("\n") + "\n");
|
||||
NODE
|
||||
|
||||
echo "✅ proposer gating:"
|
||||
grep -E '^(ISSUE_TYPE|HAS_APPROVED|SKIP|SKIP_REASON)=' /tmp/proposer.env || true
|
||||
|
||||
- name: Comment issue if skipped
|
||||
if: ${{ always() }}
|
||||
env:
|
||||
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
source /tmp/proposer.env || true
|
||||
|
||||
[[ "${SKIP:-0}" == "1" ]] || exit 0
|
||||
[[ "$LABEL_NAME" == "state/approved" || "$LABEL_NAME" == "workflow_dispatch" ]] || exit 0
|
||||
|
||||
REASON="${SKIP_REASON:-}"
|
||||
TYPE="${ISSUE_TYPE:-}"
|
||||
|
||||
if [[ "$REASON" == "approved_not_present" ]]; then
|
||||
MSG="ℹ️ Proposer Apply: skip — le label **state/approved** n'est pas présent sur le ticket au moment du run (gate API-hard)."
|
||||
elif [[ "$REASON" == "missing_type" ]]; then
|
||||
MSG="ℹ️ Proposer Apply: skip — champ **Type:** manquant/illisible. Attendu: type/correction ou type/fact-check."
|
||||
else
|
||||
MSG="ℹ️ Proposer Apply: skip — Type non-Proposer (${TYPE}). (Ce workflow ne traite que correction/fact-check.)"
|
||||
fi
|
||||
|
||||
PAYLOAD="$(node --input-type=module -e 'console.log(JSON.stringify({body: process.argv[1]||""}))' "$MSG")"
|
||||
curl -fsS -X POST \
|
||||
-H "Authorization: token $FORGE_TOKEN" \
|
||||
-H "Content-Type: application/json" \
|
||||
"$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE_NUMBER/comments" \
|
||||
--data-binary "$PAYLOAD" || true
|
||||
|
||||
- name: Checkout default branch
|
||||
run: |
|
||||
set -euo pipefail
|
||||
source /tmp/proposer.env
|
||||
[[ "${SKIP:-0}" != "1" ]] || { echo "ℹ️ skipped"; exit 0; }
|
||||
|
||||
rm -rf .git
|
||||
git init -q
|
||||
git remote add origin "$CLONE_URL"
|
||||
git fetch --depth 1 origin "$DEFAULT_BRANCH"
|
||||
git -c advice.detachedHead=false checkout -q FETCH_HEAD
|
||||
git log -1 --oneline
|
||||
echo "✅ workspace:"
|
||||
ls -la | sed -n '1,120p'
|
||||
|
||||
- name: Detect app dir (repo-root vs ./site)
|
||||
run: |
|
||||
set -euo pipefail
|
||||
source /tmp/proposer.env
|
||||
[[ "${SKIP:-0}" != "1" ]] || { echo "ℹ️ skipped"; exit 0; }
|
||||
|
||||
APP_DIR="."
|
||||
if [[ -d "site" && -f "site/package.json" ]]; then
|
||||
APP_DIR="site"
|
||||
fi
|
||||
|
||||
echo "APP_DIR=$APP_DIR" >> /tmp/proposer.env
|
||||
echo "✅ APP_DIR=$APP_DIR"
|
||||
ls -la "$APP_DIR" | sed -n '1,120p'
|
||||
test -f "$APP_DIR/package.json" || { echo "❌ package.json missing in APP_DIR=$APP_DIR"; exit 1; }
|
||||
test -d "$APP_DIR/scripts" || { echo "❌ scripts/ missing in APP_DIR=$APP_DIR"; exit 1; }
|
||||
|
||||
- name: NPM harden (reduce flakiness)
|
||||
run: |
|
||||
set -euo pipefail
|
||||
source /tmp/proposer.env
|
||||
[[ "${SKIP:-0}" != "1" ]] || exit 0
|
||||
cd "$APP_DIR"
|
||||
npm config set fetch-retries 5
|
||||
npm config set fetch-retry-mintimeout 20000
|
||||
npm config set fetch-retry-maxtimeout 120000
|
||||
npm config set registry https://registry.npmjs.org
|
||||
|
||||
- name: Install deps (APP_DIR)
|
||||
run: |
|
||||
set -euo pipefail
|
||||
source /tmp/proposer.env
|
||||
[[ "${SKIP:-0}" != "1" ]] || { echo "ℹ️ skipped"; exit 0; }
|
||||
cd "$APP_DIR"
|
||||
npm ci --no-audit --no-fund
|
||||
|
||||
- name: Build dist baseline (APP_DIR)
|
||||
run: |
|
||||
set -euo pipefail
|
||||
source /tmp/proposer.env
|
||||
[[ "${SKIP:-0}" != "1" ]] || { echo "ℹ️ skipped"; exit 0; }
|
||||
cd "$APP_DIR"
|
||||
npm run build
|
||||
|
||||
- name: Apply ticket (alias + commit) on bot branch
|
||||
continue-on-error: true
|
||||
env:
|
||||
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
|
||||
BOT_GIT_NAME: ${{ secrets.BOT_GIT_NAME }}
|
||||
BOT_GIT_EMAIL: ${{ secrets.BOT_GIT_EMAIL }}
|
||||
FORGE_API: ${{ vars.FORGE_API || vars.FORGE_BASE }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
source /tmp/proposer.env
|
||||
[[ "${SKIP:-0}" != "1" ]] || { echo "ℹ️ skipped"; exit 0; }
|
||||
|
||||
git config user.name "${BOT_GIT_NAME:-archicratie-bot}"
|
||||
git config user.email "${BOT_GIT_EMAIL:-bot@archicratie.local}"
|
||||
|
||||
START_SHA="$(git rev-parse HEAD)"
|
||||
TS="$(date -u +%Y%m%d-%H%M%S)"
|
||||
BR="bot/proposer-${ISSUE_NUMBER}-${TS}"
|
||||
echo "BRANCH=$BR" >> /tmp/proposer.env
|
||||
git checkout -b "$BR"
|
||||
|
||||
export GITEA_OWNER="$OWNER"
|
||||
export GITEA_REPO="$REPO"
|
||||
export FORGE_BASE="$API_BASE"
|
||||
|
||||
LOG="/tmp/proposer-apply.log"
|
||||
set +e
|
||||
(cd "$APP_DIR" && node scripts/apply-ticket.mjs "$ISSUE_NUMBER" --alias --commit) >"$LOG" 2>&1
|
||||
RC=$?
|
||||
set -e
|
||||
|
||||
echo "APPLY_RC=$RC" >> /tmp/proposer.env
|
||||
|
||||
echo "== apply log (tail) =="
|
||||
tail -n 200 "$LOG" || true
|
||||
|
||||
END_SHA="$(git rev-parse HEAD)"
|
||||
if [[ "$RC" -ne 0 ]]; then
|
||||
echo "NOOP=0" >> /tmp/proposer.env
|
||||
exit 0
|
||||
fi
|
||||
|
||||
if [[ "$START_SHA" == "$END_SHA" ]]; then
|
||||
echo "NOOP=1" >> /tmp/proposer.env
|
||||
else
|
||||
echo "NOOP=0" >> /tmp/proposer.env
|
||||
echo "END_SHA=$END_SHA" >> /tmp/proposer.env
|
||||
fi
|
||||
|
||||
      - name: Push bot branch
        if: ${{ always() }}
        env:
          FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
        run: |
          set -euo pipefail
          source /tmp/proposer.env || true
          [[ "${SKIP:-0}" != "1" ]] || exit 0

          [[ "${APPLY_RC:-0}" == "0" ]] || { echo "ℹ️ apply failed -> skip push"; exit 0; }
          [[ "${NOOP:-0}" == "0" ]] || { echo "ℹ️ no-op -> skip push"; exit 0; }
          [[ -n "${BRANCH:-}" ]] || { echo "ℹ️ BRANCH unset -> skip push"; exit 0; }

          AUTH_URL="$(node --input-type=module -e '
            const [clone, tok] = process.argv.slice(1);
            const u = new URL(clone);
            u.username = "oauth2";
            u.password = tok;
            console.log(u.toString());
          ' "$CLONE_URL" "$FORGE_TOKEN")"

          git remote set-url origin "$AUTH_URL"
          git push -u origin "$BRANCH"
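
The push step leans on Node's WHATWG URL class to splice the token into the clone URL; one useful property is that URL percent-encodes reserved characters in the password, so tokens survive the round trip intact. A minimal standalone sketch (host and token are placeholders, not real values):

// sketch: embed basic-auth credentials into a remote URL, as the step above does
const clone = "https://gitea.example.test/owner/repo.git"; // placeholder host
const token = "abc/def:123";                               // hypothetical token with reserved characters

const u = new URL(clone);
u.username = "oauth2";
u.password = token; // "/" and ":" are percent-encoded automatically

console.log(u.toString());
// -> https://oauth2:abc%2Fdef%3A123@gitea.example.test/owner/repo.git
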
      - name: Create PR + comment issue
        if: ${{ always() }}
        env:
          FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
        run: |
          set -euo pipefail
          source /tmp/proposer.env || true
          [[ "${SKIP:-0}" != "1" ]] || exit 0

          [[ "${APPLY_RC:-0}" == "0" ]] || exit 0
          [[ "${NOOP:-0}" == "0" ]] || exit 0
          [[ -n "${BRANCH:-}" ]] || { echo "ℹ️ BRANCH unset -> skip PR"; exit 0; }

          PR_TITLE="proposer: apply ticket #${ISSUE_NUMBER}"
          PR_BODY="PR auto depuis ticket #${ISSUE_NUMBER} (state/approved).\n\n- Branche: ${BRANCH}\n- Commit: ${END_SHA:-unknown}\n\nMerge si CI OK."

          PR_PAYLOAD="$(node --input-type=module -e '
            const [title, body, base, head] = process.argv.slice(1);
            console.log(JSON.stringify({ title, body, base, head, allow_maintainer_edit: true }));
          ' "$PR_TITLE" "$PR_BODY" "$DEFAULT_BRANCH" "${OWNER}:${BRANCH}")"

          PR_JSON="$(curl -fsS -X POST \
            -H "Authorization: token $FORGE_TOKEN" \
            -H "Content-Type: application/json" \
            "$API_BASE/api/v1/repos/$OWNER/$REPO/pulls" \
            --data-binary "$PR_PAYLOAD")"

          PR_URL="$(node --input-type=module -e '
            const pr = JSON.parse(process.argv[1] || "{}");
            console.log(pr.html_url || pr.url || "");
          ' "$PR_JSON")"

          test -n "$PR_URL" || { echo "❌ PR URL missing. Raw: $PR_JSON"; exit 1; }

          MSG="✅ PR Proposer créée pour ticket #${ISSUE_NUMBER} : ${PR_URL}"
          C_PAYLOAD="$(node --input-type=module -e 'console.log(JSON.stringify({body: process.argv[1]||""}))' "$MSG")"

          curl -fsS -X POST \
            -H "Authorization: token $FORGE_TOKEN" \
            -H "Content-Type: application/json" \
            "$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE_NUMBER/comments" \
            --data-binary "$C_PAYLOAD"
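
The step shells out to curl, but the same PR creation can be expressed in Node 18+ with the global fetch; a hedged sketch, where API_BASE, OWNER, REPO and FORGE_TOKEN stand in for the step's env vars:

// sketch: the same Gitea PR creation as the curl call above, via fetch
async function createPull(apiBase, owner, repo, token, payload) {
  const res = await fetch(`${apiBase}/api/v1/repos/${owner}/${repo}/pulls`, {
    method: "POST",
    headers: {
      Authorization: `token ${token}`,
      "Content-Type": "application/json",
    },
    body: JSON.stringify(payload), // { title, body, base, head, allow_maintainer_edit }
  });
  if (!res.ok) throw new Error(`PR creation failed: ${res.status} ${await res.text()}`);
  return res.json(); // carries html_url / url, as read by the PR_URL extraction above
}
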
      - name: Finalize (fail job if apply failed)
        if: ${{ always() }}
        run: |
          set -euo pipefail
          source /tmp/proposer.env || true
          [[ "${SKIP:-0}" != "1" ]] || exit 0

          RC="${APPLY_RC:-0}"
          if [[ "$RC" != "0" ]]; then
            echo "❌ apply failed (rc=$RC)"
            exit "$RC"
          fi
          echo "✅ apply ok"
@@ -3,7 +3,7 @@ on: [push, workflow_dispatch]

jobs:
  smoke:
    runs-on: ubuntu-latest
    runs-on: mac-ci
    steps:
      - run: node -v && npm -v
      - run: echo "runner OK"
Binary file not shown.
After Width: | Height: | Size: 61 KiB
Binary file not shown.
After Width: | Height: | Size: 61 KiB
@@ -1,12 +1,17 @@
#!/usr/bin/env node
// scripts/apply-annotation-ticket.mjs
//
// Applies a Gitea ticket "type/media | type/reference | type/comment" to:
//
// ✅ src/annotations/<oeuvre>/<chapitre>/<paraId>.yml (per-paragraph sharding)
// ✅ public/media/<oeuvre>/<chapitre>/<paraId>/<file>
//
// Robust, idempotent, non-destructive.
// Backward compat: reads (if present) the legacy monolith:
//   src/annotations/<oeuvre>/<chapitre>.yml
// and deep-merges it NON-destructively into the shard on each new application,
// so migration can proceed progressively without data loss.
//
// DRY RUN with --dry-run
// Options: --dry-run --no-download --verify --strict --commit --close
//
@@ -49,8 +54,8 @@ Flags:
  --dry-run     : writes nothing (prints a preview)
  --no-download : does not try to download issue attachments (media)
  --verify      : checks that (page, anchor) exist (dist/para-index.json if available, else baseline)
  --strict      : refuses if a ref URL is invalid (http/https) OR the media caption is empty
  --commit      : git add + git commit (the script commits on the current branch)
  --strict      : refuses if a ref URL is invalid (http/https) OR the media caption is empty OR verification is impossible
  --commit      : git add + git commit (commits on the current branch)
  --close       : closes the ticket (requires --commit)

Required env:
@@ -191,6 +196,7 @@ function normalizeChemin(chemin) {
}

function normalizePageKeyFromChemin(chemin) {
  // e.g. /archicrat-ia/chapitre-4/ => archicrat-ia/chapitre-4
  return normalizeChemin(chemin).replace(/^\/+|\/+$/g, "");
}
@@ -226,90 +232,156 @@ function isHttpUrl(u) {
  }
}

/* ------------------------------ para-index (verify + sort) ------------------------------ */
function stableSortByTs(arr) {
  if (!Array.isArray(arr)) return;
  arr.sort((a, b) => {
    const ta = Date.parse(a?.ts || "") || 0;
    const tb = Date.parse(b?.ts || "") || 0;
    if (ta !== tb) return ta - tb;
    return JSON.stringify(a).localeCompare(JSON.stringify(b));
  });
}

function normPage(s) {
  let x = String(s || "").trim();
  if (!x) return "";
  // strip the origin if given a full URL
  x = x.replace(/^https?:\/\/[^/]+/i, "");
  // drop query/hash
  x = x.split("#")[0].split("?")[0];
  // drop index.html
  x = x.replace(/index\.html$/i, "");
  // trim leading/trailing slashes
  x = x.replace(/^\/+/, "").replace(/\/+$/, "");
  return x;
}

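A few input/output pairs make the normalization contract concrete (values are illustrative):

// illustrative inputs -> normalized page keys
normPage("https://example.test/archicrat-ia/chapitre-4/"); // "archicrat-ia/chapitre-4"
normPage("/archicrat-ia/chapitre-4/index.html#p-2-abc");   // "archicrat-ia/chapitre-4"
normPage("archicrat-ia/chapitre-4?draft=1");               // "archicrat-ia/chapitre-4"
normPage("");                                              // ""
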
/* ------------------------------ para-index (verify + order) ------------------------------ */

async function loadParaOrderFromDist(pageKey) {
  const distIdx = path.join(CWD, "dist", "para-index.json");
  if (!(await exists(distIdx))) return null;

  let j;
  try {
    j = JSON.parse(await fs.readFile(distIdx, "utf8"));
  } catch {
    return null;
  }

  const want = normPage(pageKey);

  // Shape A) { items:[{id,page,...}, ...] } (or variants)
  const items = Array.isArray(j?.items)
    ? j.items
    : Array.isArray(j?.index?.items)
      ? j.index.items
      : null;

  if (items) {
    const ids = [];
    for (const it of items) {
      // the page can live under many different keys
      const pageCand = normPage(
        it?.page ??
        it?.pageKey ??
        it?.path ??
        it?.route ??
        it?.href ??
        it?.url ??
        ""
      );

      // the id can live under many different keys as well
      let id = String(it?.id ?? it?.paraId ?? it?.anchorId ?? it?.anchor ?? "");
      if (id.startsWith("#")) id = id.slice(1);

      if (pageCand === want && id) ids.push(id);
    }
    if (ids.length) return ids;
  }

  // Shape B) { byId: { "p-...": { page:"...", ... }, ... } }
  if (j?.byId && typeof j.byId === "object") {
    const ids = Object.keys(j.byId)
      .filter((id) => {
        const meta = j.byId[id] || {};
        const pageCand = normPage(meta.page ?? meta.pageKey ?? meta.path ?? meta.route ?? meta.url ?? "");
        return pageCand === want;
      });

    if (ids.length) {
      ids.sort((a, b) => {
        const ia = paraIndexFromId(a);
        const ib = paraIndexFromId(b);
        if (Number.isFinite(ia) && Number.isFinite(ib) && ia !== ib) return ia - ib;
        return String(a).localeCompare(String(b));
      });
      return ids;
    }
  }

  // Shape C) { pages: { "archicrat-ia/chapitre-4": { ids:[...] } } } (or variants)
  if (j?.pages && typeof j.pages === "object") {
    // try to find the right key even when it is /.../ or .../index.html
    const keys = Object.keys(j.pages);
    const hit = keys.find((k) => normPage(k) === want);
    if (hit) {
      const pg = j.pages[hit];
      if (Array.isArray(pg?.ids)) return pg.ids.map(String);
      if (Array.isArray(pg?.paras)) return pg.paras.map(String);
    }
  }

  return null;
}

// Support several shapes:
// A) { items:[{id,page,...}, ...] }
if (Array.isArray(j?.items)) {
  const ids = [];
  for (const it of j.items) {
    const p = String(it?.page || it?.pageKey || "");
    const id = String(it?.id || it?.paraId || "");
    if (p === pageKey && id) ids.push(id);
  }
  if (ids.length) return ids;
}

// B) { byId: { "p-...": { page:"archicrat-ia/chapitre-4", ... }, ... } }
if (j?.byId && typeof j.byId === "object") {
  // cannot rebuild full order; but can verify existence
  // return a pseudo-order map from known ids sorted by p-<n>- then alpha
  const ids = Object.keys(j.byId).filter((id) => String(j.byId[id]?.page || "") === pageKey);
  if (ids.length) {
    ids.sort((a, b) => {
      const ia = paraIndexFromId(a);
      const ib = paraIndexFromId(b);
      if (Number.isFinite(ia) && Number.isFinite(ib) && ia !== ib) return ia - ib;
      return String(a).localeCompare(String(b));
    });
    return ids;
  }
}

// C) { pages: { "archicrat-ia/chapitre-4": { ids:[...]} } }
if (j?.pages && typeof j.pages === "object") {
  const pg = j.pages[pageKey];
  if (Array.isArray(pg?.ids)) return pg.ids.map(String);
  if (Array.isArray(pg?.paras)) return pg.paras.map(String);
}

return null;
}

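The three index shapes the loader accepts are easiest to see side by side; these are hypothetical minimal documents, not actual build output:

// Shape A: flat item list
const shapeA = { items: [{ id: "p-0-aaaa1111", page: "archicrat-ia/chapitre-4" }] };

// Shape B: map keyed by paragraph id
const shapeB = { byId: { "p-0-aaaa1111": { page: "archicrat-ia/chapitre-4" } } };

// Shape C: map keyed by page
const shapeC = { pages: { "archicrat-ia/chapitre-4": { ids: ["p-0-aaaa1111"] } } };
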
async function tryVerifyAnchor(pageKey, anchorId) {
  // 1) dist/para-index.json
  const order = await loadParaOrderFromDist(pageKey);
  if (order) return order.includes(anchorId);

  // 2) tests/anchors-baseline.json (fallback)
  const base = path.join(CWD, "tests", "anchors-baseline.json");
  if (await exists(base)) {
    try {
      const j = JSON.parse(await fs.readFile(base, "utf8"));
      const candidates = [];
      if (j?.pages && typeof j.pages === "object") {
        for (const [k, v] of Object.entries(j.pages)) {
          if (!Array.isArray(v)) continue;
          if (String(k).includes(pageKey)) candidates.push(...v);
        }
      }
      if (Array.isArray(j?.entries)) {
        for (const it of j.entries) {
          const p = String(it?.page || "");
          const ids = it?.ids;
          if (Array.isArray(ids) && p.includes(pageKey)) candidates.push(...ids);
        }
      }
      if (candidates.length) return candidates.some((x) => String(x) === anchorId);
    } catch {
      // ignore
    }
  }

  return null; // cannot verify
}

async function tryVerifyAnchor(pageKey, anchorId) {
  // 1) dist/para-index.json: full order when possible
  const order = await loadParaOrderFromDist(pageKey);
  if (order) return order.includes(anchorId);

  // 1bis) dist/para-index.json: best-effort fallback => raw search (IDs are near-unique)
  const distIdx = path.join(CWD, "dist", "para-index.json");
  if (await exists(distIdx)) {
    try {
      const raw = await fs.readFile(distIdx, "utf8");
      if (raw.includes(`"${anchorId}"`) || raw.includes(`"#${anchorId}"`)) {
        return true;
      }
    } catch {
      // ignore
    }
  }

  // 2) tests/anchors-baseline.json (fallback)
  const base = path.join(CWD, "tests", "anchors-baseline.json");
  if (await exists(base)) {
    try {
      const j = JSON.parse(await fs.readFile(base, "utf8"));
      const candidates = [];
      if (j?.pages && typeof j.pages === "object") {
        for (const [k, v] of Object.entries(j.pages)) {
          if (!Array.isArray(v)) continue;
          if (normPage(k).includes(normPage(pageKey))) candidates.push(...v);
        }
      }
      if (Array.isArray(j?.entries)) {
        for (const it of j.entries) {
          const p = String(it?.page || "");
          const ids = it?.ids;
          if (Array.isArray(ids) && normPage(p).includes(normPage(pageKey))) candidates.push(...ids);
        }
      }
      if (candidates.length) return candidates.some((x) => String(x) === anchorId);
    } catch {
      // ignore
    }
  }

  return null; // cannot verify
}

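Because tryVerifyAnchor can return true, false or null, callers must treat "unverifiable" as a distinct outcome from "missing"; a sketch of the calling pattern, mirroring the strict/lenient behaviour shown later in main():

// sketch: consuming the tri-state result (true / false / null)
const ok = await tryVerifyAnchor("archicrat-ia/chapitre-4", "p-11-67c14c09");
if (ok === false) {
  throw new Error("anchor not found");        // hard failure
} else if (ok === null) {
  console.warn("cannot verify, continuing");  // soft failure unless --strict
}
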
/* ----------------------------- deep merge helpers (non destructive) ----------------------------- */

function keyMedia(x) {
@@ -360,7 +432,6 @@ function deepMergeEntry(dst, src) {
    }

    if (Array.isArray(v)) {
      // fallback: union by JSON string
      const cur = Array.isArray(dst[k]) ? dst[k] : [];
      const seen = new Set(cur.map((x) => JSON.stringify(x)));
      const out = [...cur];
@@ -382,16 +453,6 @@ function deepMergeEntry(dst, src) {
  }
}

function stableSortByTs(arr) {
  if (!Array.isArray(arr)) return;
  arr.sort((a, b) => {
    const ta = Date.parse(a?.ts || "") || 0;
    const tb = Date.parse(b?.ts || "") || 0;
    if (ta !== tb) return ta - tb;
    return JSON.stringify(a).localeCompare(JSON.stringify(b));
  });
}

/* ----------------------------- annotations I/O ----------------------------- */

async function loadAnnoDocYaml(fileAbs, pageKey) {
@@ -424,9 +485,7 @@ async function loadAnnoDocYaml(fileAbs, pageKey) {
function sortParasObject(paras, order) {
  const keys = Object.keys(paras || {});
  const idx = new Map();
  if (Array.isArray(order)) {
    order.forEach((id, i) => idx.set(String(id), i));
  }
  if (Array.isArray(order)) order.forEach((id, i) => idx.set(String(id), i));

  keys.sort((a, b) => {
    const ha = idx.has(a);
@@ -448,9 +507,9 @@ function sortParasObject(paras, order) {

async function saveAnnoDocYaml(fileAbs, doc, order = null) {
  await fs.mkdir(path.dirname(fileAbs), { recursive: true });

  doc.paras = sortParasObject(doc.paras, order);

  // also sort known lists inside each para for stable diffs
  for (const e of Object.values(doc.paras || {})) {
    if (!isPlainObject(e)) continue;
    stableSortByTs(e.media);
@@ -632,7 +691,6 @@ async function main() {
  const pageKey = normalizePageKeyFromChemin(chemin);
  assert(pageKey, "Ticket: impossible de dériver pageKey.", 2);

  // para order (used for verify + sorting)
  const paraOrder = DO_VERIFY ? await loadParaOrderFromDist(pageKey) : null;

  if (DO_VERIFY) {
@@ -641,46 +699,43 @@ async function main() {
      throw Object.assign(new Error(`Ticket verify: ancre introuvable pour page "${pageKey}" => ${ancre}`), { __exitCode: 2 });
    }
    if (ok === null) {
      if (STRICT) throw Object.assign(new Error(`Ticket verify (strict): impossible de vérifier (pas de dist/para-index.json ou baseline)`), { __exitCode: 2 });
      if (STRICT) {
        throw Object.assign(
          new Error(`Ticket verify (strict): impossible de vérifier (pas de dist/para-index.json ou baseline)`),
          { __exitCode: 2 }
        );
      }
      console.warn("⚠️ verify: impossible de vérifier (pas de dist/para-index.json ou baseline) — on continue.");
    }
  }

  // ✅ SHARD FILE: src/annotations/<pageKey>/<paraId>.yml
  const annoShardFileAbs = path.join(ANNO_DIR, pageKey, `${ancre}.yml`);
  const annoShardFileRel = path.relative(CWD, annoShardFileAbs).replace(/\\/g, "/");
  // ✅ shard path: src/annotations/<pageKey>/<paraId>.yml
  const shardAbs = path.join(ANNO_DIR, ...pageKey.split("/"), `${ancre}.yml`);
  const shardRel = path.relative(CWD, shardAbs).replace(/\\/g, "/");
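
The new shard path spreads pageKey.split("/") into path.join so each directory level is passed as an explicit segment rather than a pre-joined string; a small sketch of the resulting call (paths and the paragraph id are illustrative):

import path from "node:path";

const ANNO_DIR = "src/annotations";      // stand-in for the script's constant
const pageKey = "archicrat-ia/chapitre-4";
const ancre = "p-0-ace27175";            // hypothetical paragraph id

// split the page key so each directory level is its own join() segment;
// empty components (e.g. from a stray double slash in the key) are dropped by join()
const shardAbs = path.join(ANNO_DIR, ...pageKey.split("/"), `${ancre}.yml`);
console.log(shardAbs); // src/annotations/archicrat-ia/chapitre-4/p-0-ace27175.yml (POSIX)
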
  // legacy (read-only, used as base to avoid losing previously stored data)
  const annoLegacyFileAbs = path.join(ANNO_DIR, `${pageKey}.yml`);
  // legacy monolith: src/annotations/<pageKey>.yml (read-only, for migration)
  const legacyAbs = path.join(ANNO_DIR, `${pageKey}.yml`);

  console.log("✅ Parsed:", {
    type,
    chemin,
    ancre: `#${ancre}`,
    pageKey,
    annoFile: annoShardFileRel,
  });
  console.log("✅ Parsed:", { type, chemin, ancre: `#${ancre}`, pageKey, annoFile: shardRel });

  // load shard doc
  const doc = await loadAnnoDocYaml(annoShardFileAbs, pageKey);

  // merge legacy para into shard as base (non destructive)
  if (await exists(annoLegacyFileAbs)) {
    try {
      const legacy = await loadAnnoDocYaml(annoLegacyFileAbs, pageKey);
      const legacyEntry = legacy?.paras?.[ancre];
      if (isPlainObject(legacyEntry)) {
        if (!isPlainObject(doc.paras[ancre])) doc.paras[ancre] = {};
        deepMergeEntry(doc.paras[ancre], legacyEntry);
      }
    } catch {
      // ignore legacy parse issues (shard still works)
    }
  }

  const doc = await loadAnnoDocYaml(shardAbs, pageKey);
  if (!isPlainObject(doc.paras[ancre])) doc.paras[ancre] = {};
  const entry = doc.paras[ancre];

  // merge legacy entry into shard in-memory (non destructive) to keep compat + enable progressive migration
  if (await exists(legacyAbs)) {
    try {
      const legacy = await loadAnnoDocYaml(legacyAbs, pageKey);
      const legacyEntry = legacy?.paras?.[ancre];
      if (isPlainObject(legacyEntry)) {
        deepMergeEntry(entry, legacyEntry);
      }
    } catch {
      // ignore legacy parse issues; shard still applies new data
    }
  }

  const touchedFiles = [];
  const notes = [];
  let changed = false;
@@ -696,10 +751,13 @@ async function main() {

      const before = entry.comments_editorial.length;
      entry.comments_editorial = uniqUnion(entry.comments_editorial, [item], keyComment);
      changed = changed || entry.comments_editorial.length !== before;

      if (entry.comments_editorial.length !== before) {
        changed = true;
        notes.push(`+ comment added (len=${text.length})`);
      } else {
        notes.push(`~ comment already present (dedup)`);
      }
      stableSortByTs(entry.comments_editorial);
      notes.push(changed ? `+ comment added (len=${text.length})` : `~ comment already present (dedup)`);
    }

else if (type === "type/reference") {
|
||||
@@ -722,15 +780,24 @@ async function main() {
|
||||
|
||||
const before = entry.refs.length;
|
||||
entry.refs = uniqUnion(entry.refs, [item], keyRef);
|
||||
changed = changed || entry.refs.length !== before;
|
||||
|
||||
if (entry.refs.length !== before) {
|
||||
changed = true;
|
||||
notes.push(`+ reference added (${item.url ? "url" : "label"})`);
|
||||
} else {
|
||||
notes.push(`~ reference already present (dedup)`);
|
||||
}
|
||||
stableSortByTs(entry.refs);
|
||||
notes.push(changed ? `+ reference added (${item.url ? "url" : "label"})` : `~ reference already present (dedup)`);
|
||||
}
|
||||
|
||||
else if (type === "type/media") {
|
||||
if (!Array.isArray(entry.media)) entry.media = [];
|
||||
|
||||
const caption = (title || "").trim();
|
||||
if (STRICT && !caption) {
|
||||
throw Object.assign(new Error("Ticket media (strict): caption vide (titre de ticket requis)."), { __exitCode: 2 });
|
||||
}
|
||||
const captionFinal = caption || ".";
|
||||
|
||||
const atts = NO_DOWNLOAD ? [] : await fetchIssueAssets({ forgeApiBase, owner, repo, token, issueNum });
|
||||
if (!atts.length) notes.push("! no assets found (nothing to download).");
|
||||
|
||||
@@ -739,13 +806,7 @@ async function main() {
|
||||
const dl = a?.browser_download_url || a?.download_url || "";
|
||||
if (!dl) { notes.push(`! asset missing download url: ${name}`); continue; }
|
||||
|
||||
const caption = (title || "").trim();
|
||||
if (STRICT && !caption) {
|
||||
throw Object.assign(new Error("Ticket media (strict): caption vide (titre de ticket requis)."), { __exitCode: 2 });
|
||||
}
|
||||
const captionFinal = caption || ".";
|
||||
|
||||
const mediaDirAbs = path.join(PUBLIC_DIR, "media", pageKey, ancre);
|
||||
const mediaDirAbs = path.join(PUBLIC_DIR, "media", ...pageKey.split("/"), ancre);
|
||||
const destAbs = path.join(mediaDirAbs, name);
|
||||
const urlPath = `${MEDIA_URL_ROOT}/${pageKey}/${ancre}/${name}`.replace(/\/{2,}/g, "/");
|
||||
|
||||
@@ -790,7 +851,7 @@ async function main() {

  if (DRY_RUN) {
    console.log("\n--- DRY RUN (no write) ---");
    console.log(`Would update: ${annoShardFileRel}`);
    console.log(`Would update: ${shardRel}`);
    for (const n of notes) console.log(" ", n);
    console.log("\nExcerpt (resulting entry):");
    console.log(YAML.stringify({ [ancre]: doc.paras[ancre] }).trimEnd());
@@ -798,10 +859,10 @@ async function main() {
    return;
  }

  await saveAnnoDocYaml(annoShardFileAbs, doc, paraOrder);
  touchedFiles.unshift(annoShardFileRel);
  await saveAnnoDocYaml(shardAbs, doc, paraOrder);
  touchedFiles.unshift(shardRel);

  console.log(`✅ Updated: ${annoShardFileRel}`);
  console.log(`✅ Updated: ${shardRel}`);
  for (const n of notes) console.log(" ", n);

  if (DO_COMMIT) {

@@ -1,28 +1,106 @@
#!/usr/bin/env node
// scripts/build-annotations-index.mjs
// Builds dist/annotations-index.json from src/annotations/**/*.yml
// Supports:
//   - monolith : src/annotations/<pageKey>.yml
//   - shard    : src/annotations/<pageKey>/<paraId>.yml (paraId = p-<n>-...)
// Invariants:
//   - doc.schema === 1
//   - doc.page (if present) == pageKey derived from the path
//   - shard: doc.paras must contain EXACTLY the paraId key (otherwise fail)
//
// Non-destructive deep merge (media/refs/comments deduplicated), stable sort.

import fs from "node:fs/promises";
import path from "node:path";
import YAML from "yaml";

function parseArgs(argv) {
  const out = {
    inDir: "src/annotations",
    outFile: "dist/annotations-index.json",
  };

  for (let i = 0; i < argv.length; i++) {
    const a = argv[i];

    if (a === "--in" && argv[i + 1]) out.inDir = argv[++i];
    else if (a.startsWith("--in=")) out.inDir = a.slice("--in=".length);

    if (a === "--out" && argv[i + 1]) out.outFile = argv[++i];
    else if (a.startsWith("--out=")) out.outFile = a.slice("--out=".length);

const ROOT = process.cwd();
const ANNO_ROOT = path.join(ROOT, "src", "annotations");
const DIST_DIR = path.join(ROOT, "dist");
const OUT = path.join(DIST_DIR, "annotations-index.json");

function assert(cond, msg) {
  if (!cond) throw new Error(msg);
}

function isObj(x) {
  return !!x && typeof x === "object" && !Array.isArray(x);
}
function isArr(x) {
  return Array.isArray(x);
}

function normPath(s) {
  return String(s || "")
    .replace(/\\/g, "/")
    .replace(/^\/+|\/+$/g, "");
}

function paraNum(pid) {
  const m = String(pid).match(/^p-(\d+)-/i);
  return m ? Number(m[1]) : Number.POSITIVE_INFINITY;
}

function stableSortByTs(arr) {
  if (!Array.isArray(arr)) return;
  arr.sort((a, b) => {
    const ta = Date.parse(a?.ts || "") || 0;
    const tb = Date.parse(b?.ts || "") || 0;
    if (ta !== tb) return ta - tb;
    return JSON.stringify(a).localeCompare(JSON.stringify(b));
  });
}

function keyMedia(x) { return String(x?.src || ""); }
function keyRef(x) {
  return `${x?.url || ""}||${x?.label || ""}||${x?.kind || ""}||${x?.citation || ""}`;
}
function keyComment(x) { return String(x?.text || "").trim(); }

function uniqUnion(dst, src, keyFn) {
  const out = isArr(dst) ? [...dst] : [];
  const seen = new Set(out.map((x) => keyFn(x)));
  for (const it of (isArr(src) ? src : [])) {
    const k = keyFn(it);
    if (!k) continue;
    if (!seen.has(k)) {
      seen.add(k);
      out.push(it);
    }
  }
  return out;
}

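A quick worked example of the keyed union (the data is made up): dedup is driven entirely by the key function, so two refs with the same url, label, kind and citation collapse even when their timestamps differ.

const a = [{ url: "https://example.test", label: "Site", ts: "2026-01-01T00:00:00Z" }];
const b = [
  { url: "https://example.test", label: "Site", ts: "2026-02-01T00:00:00Z" }, // dropped: same key
  { url: "https://example.test", label: "Docs" },                             // kept: new key
];
console.log(uniqUnion(a, b, keyRef).length); // 2
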
async function exists(p) {
  try { await fs.access(p); return true; } catch { return false; }
function deepMergeEntry(dst, src) {
  if (!isObj(dst) || !isObj(src)) return;

  for (const [k, v] of Object.entries(src)) {
    if (k === "media" && isArr(v)) { dst.media = uniqUnion(dst.media, v, keyMedia); continue; }
    if (k === "refs" && isArr(v)) { dst.refs = uniqUnion(dst.refs, v, keyRef); continue; }
    if (k === "comments_editorial" && isArr(v)) { dst.comments_editorial = uniqUnion(dst.comments_editorial, v, keyComment); continue; }

    if (isObj(v)) {
      if (!isObj(dst[k])) dst[k] = {};
      deepMergeEntry(dst[k], v);
      continue;
    }

    if (isArr(v)) {
      const cur = isArr(dst[k]) ? dst[k] : [];
      const seen = new Set(cur.map((x) => JSON.stringify(x)));
      const out = [...cur];
      for (const it of v) {
        const s = JSON.stringify(it);
        if (!seen.has(s)) { seen.add(s); out.push(it); }
      }
      dst[k] = out;
      continue;
    }

    // scalar: set only if missing/empty
    if (!(k in dst) || dst[k] == null || dst[k] === "") dst[k] = v;
  }
}

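The merge semantics are worth pinning down with a tiny example (values invented): known list fields go through the keyed dedup, and scalars only fill holes, never overwrite.

const dst = { note: "kept", media: [{ src: "/media/a.png" }] };
const src = { note: "ignored", status: "new", media: [{ src: "/media/a.png" }, { src: "/media/b.png" }] };

deepMergeEntry(dst, src);
console.log(dst.note);         // "kept"  (existing scalar wins)
console.log(dst.status);       // "new"   (missing scalar is filled)
console.log(dst.media.length); // 2       (deduped by src key)
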
async function walk(dir) {
@@ -30,111 +108,116 @@ async function walk(dir) {
  const ents = await fs.readdir(dir, { withFileTypes: true });
  for (const e of ents) {
    const p = path.join(dir, e.name);
    if (e.isDirectory()) out.push(...(await walk(p)));
    else out.push(p);
    if (e.isDirectory()) out.push(...await walk(p));
    else if (e.isFile() && /\.ya?ml$/i.test(e.name)) out.push(p);
  }
  return out;
}

function inferPageKeyFromFile(inDirAbs, fileAbs) {
  // src/annotations/<page>.yml -> "<page>"
  const rel = path.relative(inDirAbs, fileAbs).replace(/\\/g, "/");
  return rel.replace(/\.(ya?ml|json)$/i, "");
function inferExpectedFromRel(relNoExt) {
  const parts = relNoExt.split("/").filter(Boolean);
  const last = parts.at(-1) || "";
  const isShard = parts.length > 1 && /^p-\d+-/i.test(last); // ✅ hardening
  const pageKey = isShard ? parts.slice(0, -1).join("/") : relNoExt;
  const paraId = isShard ? last : null;
  return { isShard, pageKey, paraId };
}

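Concretely, the shard test requires both a subdirectory and a p-<n>- basename; illustrative mappings:

inferExpectedFromRel("archicrat-ia/chapitre-4/p-11-67c14c09");
// { isShard: true, pageKey: "archicrat-ia/chapitre-4", paraId: "p-11-67c14c09" }

inferExpectedFromRel("archicrat-ia/chapitre-4");
// { isShard: false, pageKey: "archicrat-ia/chapitre-4", paraId: null }

inferExpectedFromRel("p-11-67c14c09");
// { isShard: false, ... } a top-level p-* file is NOT a shard (parts.length === 1)
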
function assert(cond, msg) {
  if (!cond) throw new Error(msg);
}

function isPlainObject(x) {
  return !!x && typeof x === "object" && !Array.isArray(x);
}

function normalizePageKey(s) {
  // no leading/trailing slash
  return String(s || "").replace(/^\/+/, "").replace(/\/+$/, "");
}

function validateAndNormalizeDoc(doc, pageKey, fileRel) {
  assert(isPlainObject(doc), `${fileRel}: document must be an object`);
  assert(doc.schema === 1, `${fileRel}: schema must be 1`);
  if (doc.page != null) {
    assert(
      normalizePageKey(doc.page) === pageKey,
      `${fileRel}: page mismatch (page="${doc.page}" vs path="${pageKey}")`
    );
  }
  assert(isPlainObject(doc.paras), `${fileRel}: missing object key "paras"`);

  const parasOut = Object.create(null);

  for (const [paraId, entry] of Object.entries(doc.paras)) {
    assert(/^p-\d+-/i.test(paraId), `${fileRel}: invalid para id "${paraId}"`);

    // entry may be empty, but must be an object when present
    assert(entry == null || isPlainObject(entry), `${fileRel}: paras.${paraId} must be an object`);

    const e = entry ? { ...entry } : {};

    // Sanity checks (non-destructive: nothing is overwritten, only types are verified)
    if (e.refs != null) assert(Array.isArray(e.refs), `${fileRel}: paras.${paraId}.refs must be an array`);
    if (e.authors != null) assert(Array.isArray(e.authors), `${fileRel}: paras.${paraId}.authors must be an array`);
    if (e.quotes != null) assert(Array.isArray(e.quotes), `${fileRel}: paras.${paraId}.quotes must be an array`);
    if (e.media != null) assert(Array.isArray(e.media), `${fileRel}: paras.${paraId}.media must be an array`);
    if (e.comments_editorial != null) assert(Array.isArray(e.comments_editorial), `${fileRel}: paras.${paraId}.comments_editorial must be an array`);

    parasOut[paraId] = e;
  }

  return parasOut;
}

function validateAndNormalizeDoc(doc, relFile, expectedPageKey, expectedParaId) {
  assert(isObj(doc), `${relFile}: doc must be an object`);
  assert(doc.schema === 1, `${relFile}: schema must be 1`);
  assert(isObj(doc.paras), `${relFile}: missing object key "paras"`);

  const gotPage = doc.page != null ? normPath(doc.page) : "";
  const expPage = normPath(expectedPageKey);

  if (gotPage) {
    assert(
      gotPage === expPage,
      `${relFile}: page mismatch (page="${doc.page}" vs path="${expectedPageKey}")`
    );
  } else {
    doc.page = expPage;
  }

  if (expectedParaId) {
    const keys = Object.keys(doc.paras || {}).map(String);
    assert(
      keys.includes(expectedParaId),
      `${relFile}: shard mismatch: must contain paras["${expectedParaId}"]`
    );
    assert(
      keys.length === 1 && keys[0] === expectedParaId,
      `${relFile}: shard invariant violated: shard file must contain ONLY paras["${expectedParaId}"] (got: ${keys.join(", ")})`
    );
  }

  return doc;
}

async function readDoc(fileAbs) {
  const raw = await fs.readFile(fileAbs, "utf8");
  if (/\.json$/i.test(fileAbs)) return JSON.parse(raw);
  return YAML.parse(raw);
}

async function main() {
  const { inDir, outFile } = parseArgs(process.argv.slice(2));
  const CWD = process.cwd();
  const pages = {};
  const errors = [];

  const inDirAbs = path.isAbsolute(inDir) ? inDir : path.join(CWD, inDir);
  const outAbs = path.isAbsolute(outFile) ? outFile : path.join(CWD, outFile);
  await fs.mkdir(DIST_DIR, { recursive: true });

  // antifragile
  if (!(await exists(inDirAbs))) {
    console.log(`ℹ️ annotations-index: skip (input missing): ${inDir}`);
    process.exit(0);
  }
  const files = await walk(ANNO_ROOT);

  const files = (await walk(inDirAbs)).filter((p) => /\.(ya?ml|json)$/i.test(p));
  if (!files.length) {
    console.log(`ℹ️ annotations-index: skip (no .yml/.yaml/.json found in): ${inDir}`);
    process.exit(0);
  }

  for (const fp of files) {
    const rel = normPath(path.relative(ANNO_ROOT, fp));
    const relNoExt = rel.replace(/\.ya?ml$/i, "");
    const { isShard, pageKey, paraId } = inferExpectedFromRel(relNoExt);

  const pages = Object.create(null);
  let paraCount = 0;

  for (const f of files) {
    const fileRel = path.relative(CWD, f).replace(/\\/g, "/");
    const pageKey = normalizePageKey(inferPageKeyFromFile(inDirAbs, f));
    assert(pageKey, `${fileRel}: cannot infer page key`);

    let doc;
    try {
      doc = await readDoc(f);
      const raw = await fs.readFile(fp, "utf8");
      const doc = YAML.parse(raw) || {};

      if (!isObj(doc) || doc.schema !== 1) continue;

      validateAndNormalizeDoc(
        doc,
        `src/annotations/${rel}`,
        pageKey,
        isShard ? paraId : null
      );

      const pg = (pages[pageKey] ??= { paras: {} });

      if (isShard) {
        const entry = doc.paras[paraId];
        if (!isObj(pg.paras[paraId])) pg.paras[paraId] = {};
        if (isObj(entry)) deepMergeEntry(pg.paras[paraId], entry);

        stableSortByTs(pg.paras[paraId].media);
        stableSortByTs(pg.paras[paraId].refs);
        stableSortByTs(pg.paras[paraId].comments_editorial);
      } else {
        for (const [pid, entry] of Object.entries(doc.paras || {})) {
          const p = String(pid);
          if (!isObj(pg.paras[p])) pg.paras[p] = {};
          if (isObj(entry)) deepMergeEntry(pg.paras[p], entry);

          stableSortByTs(pg.paras[p].media);
          stableSortByTs(pg.paras[p].refs);
          stableSortByTs(pg.paras[p].comments_editorial);
        }
      }
    } catch (e) {
      throw new Error(`${fileRel}: parse failed: ${String(e?.message ?? e)}`);
      errors.push({ file: `src/annotations/${rel}`, error: String(e?.message || e) });
    }
  }

    const paras = validateAndNormalizeDoc(doc, pageKey, fileRel);

    // 1 file = 1 page (canonical)
    assert(!pages[pageKey], `${fileRel}: duplicate page "${pageKey}" (only one file per page)`);
    pages[pageKey] = { paras };
    paraCount += Object.keys(paras).length;
  for (const [pageKey, pg] of Object.entries(pages)) {
    const keys = Object.keys(pg.paras || {});
    keys.sort((a, b) => {
      const ia = paraNum(a);
      const ib = paraNum(b);
      if (Number.isFinite(ia) && Number.isFinite(ib) && ia !== ib) return ia - ib;
      return String(a).localeCompare(String(b));
    });
    const next = {};
    for (const k of keys) next[k] = pg.paras[k];
    pg.paras = next;
  }

  const out = {
@@ -143,17 +226,21 @@ async function main() {
    pages,
    stats: {
      pages: Object.keys(pages).length,
      paras: paraCount,
      paras: Object.values(pages).reduce((n, p) => n + Object.keys(p.paras || {}).length, 0),
      errors: errors.length,
    },
    errors,
  };

  await fs.mkdir(path.dirname(outAbs), { recursive: true });
  await fs.writeFile(outAbs, JSON.stringify(out), "utf8");
  if (errors.length) {
    throw new Error(`${errors[0].file}: ${errors[0].error}`);
  }

  console.log(`✅ annotations-index: pages=${out.stats.pages} paras=${out.stats.paras} -> ${path.relative(CWD, outAbs)}`);
  await fs.writeFile(OUT, JSON.stringify(out), "utf8");
  console.log(`✅ annotations-index: pages=${out.stats.pages} paras=${out.stats.paras} -> dist/annotations-index.json`);
}

main().catch((e) => {
  console.error("FAIL: build-annotations-index crashed:", e);
  console.error(`FAIL: build-annotations-index crashed: ${e?.stack || e?.message || e}`);
  process.exit(1);
});
@@ -48,6 +48,9 @@ async function main() {
  let missing = 0;
  const notes = [];

  // Optimization: avoid checking the same media file a hundred times over
  const seenMedia = new Set(); // src string

  for (const f of files) {
    const rel = path.relative(CWD, f).replace(/\\/g, "/");
    const raw = await fs.readFile(f, "utf8");
@@ -70,6 +73,10 @@ async function main() {
      const src = String(m?.src || "");
      if (!src.startsWith("/media/")) continue; // external refs are fine, as are other future conventions

      // dedupe
      if (seenMedia.has(src)) continue;
      seenMedia.add(src);

      checked++;
      const p = toPublicPathFromUrl(src);
      if (!p) continue;
@@ -94,4 +101,4 @@ async function main() {
main().catch((e) => {
  console.error("FAIL: check-annotations-media crashed:", e);
  process.exit(1);
});
@@ -27,11 +27,6 @@ function escRe(s) {
  return String(s).replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
}

function inferPageKeyFromFile(fileAbs) {
  const rel = path.relative(ANNO_DIR, fileAbs).replace(/\\/g, "/");
  return rel.replace(/\.(ya?ml|json)$/i, "");
}

function normalizePageKey(s) {
  return String(s || "").replace(/^\/+/, "").replace(/\/+$/, "");
}
@@ -40,6 +35,31 @@ function isPlainObject(x) {
  return !!x && typeof x === "object" && !Array.isArray(x);
}

function isParaId(s) {
  return /^p-\d+-/i.test(String(s || ""));
}

/**
 * Supports:
 * - monolith: src/annotations/<pageKey>.yml -> pageKey = rel without ext
 * - shard   : src/annotations/<pageKey>/<paraId>.yml -> pageKey = dirname(rel), paraId = basename
 *
 * shard only if the file sits in a subdirectory (guards against a pathological case).
 */
function inferFromFile(fileAbs) {
  const rel = path.relative(ANNO_DIR, fileAbs).replace(/\\/g, "/");
  const relNoExt = rel.replace(/\.(ya?ml|json)$/i, "");
  const parts = relNoExt.split("/").filter(Boolean);
  const base = parts[parts.length - 1] || "";
  const dirParts = parts.slice(0, -1);

  const isShard = dirParts.length > 0 && isParaId(base);
  const pageKey = isShard ? dirParts.join("/") : relNoExt;
  const paraId = isShard ? base : "";

  return { pageKey: normalizePageKey(pageKey), paraId };
}

async function loadAliases() {
  if (!(await exists(ALIASES_PATH))) return {};
  try {
@@ -83,7 +103,11 @@ async function main() {
  const aliases = await loadAliases();
  const files = (await walk(ANNO_DIR)).filter((p) => /\.(ya?ml|json)$/i.test(p));

  let pages = 0;
  // perf: cache HTML per page (shards = many files for a single page)
  const htmlCache = new Map(); // pageKey -> html
  const missingDistPage = new Set(); // pageKey

  let pagesSeen = new Set();
  let checked = 0;
  let failures = 0;
  const notes = [];
@@ -107,7 +131,7 @@ async function main() {
      continue;
    }

    const pageKey = normalizePageKey(inferPageKeyFromFile(f));
    const { pageKey, paraId: shardParaId } = inferFromFile(f);

    if (doc.page != null && normalizePageKey(doc.page) !== pageKey) {
      failures++;
@@ -121,20 +145,44 @@ async function main() {
      continue;
    }

    // shard invariant (strong): must contain paras[paraId]
    if (shardParaId) {
      if (!Object.prototype.hasOwnProperty.call(doc.paras, shardParaId)) {
        failures++;
        notes.push(`- SHARD MISMATCH: ${rel} (expected paras["${shardParaId}"] present)`);
        continue;
      }
      // extras -> warning (non-destructive)
      const keys = Object.keys(doc.paras);
      if (!(keys.length === 1 && keys[0] === shardParaId)) {
        notes.push(`- WARN shard has extra paras: ${rel} (expected only "${shardParaId}", got ${keys.join(", ")})`);
      }
    }

    pagesSeen.add(pageKey);

    const distFile = path.join(DIST_DIR, pageKey, "index.html");
    if (!(await exists(distFile))) {
      failures++;
      notes.push(`- MISSING PAGE: dist/${pageKey}/index.html (from ${rel})`);
      if (!missingDistPage.has(pageKey)) {
        missingDistPage.add(pageKey);
        failures++;
        notes.push(`- MISSING PAGE: dist/${pageKey}/index.html (from ${rel})`);
      } else {
        notes.push(`- WARN missing page already reported: dist/${pageKey}/index.html (from ${rel})`);
      }
      continue;
    }

    pages++;
    const html = await fs.readFile(distFile, "utf8");
    let html = htmlCache.get(pageKey);
    if (!html) {
      html = await fs.readFile(distFile, "utf8");
      htmlCache.set(pageKey, html);
    }

    for (const paraId of Object.keys(doc.paras)) {
      checked++;

      if (!/^p-\d+-/i.test(paraId)) {
      if (!isParaId(paraId)) {
        failures++;
        notes.push(`- INVALID ID: ${rel} (${paraId})`);
        continue;
@@ -158,6 +206,7 @@ async function main() {
    }

  const warns = notes.filter((x) => x.startsWith("- WARN"));
  const pages = pagesSeen.size;

  if (failures > 0) {
    console.error(`FAIL: annotations invalid (pages=${pages} checked=${checked} failures=${failures})`);
@@ -172,4 +221,4 @@ async function main() {
main().catch((e) => {
  console.error("FAIL: annotations check crashed:", e);
  process.exit(1);
});
@@ -114,7 +114,6 @@ async function runMammoth(docxPath, assetsOutDirWebRoot) {
  );

  let html = result.value || "";

  // Mammoth gives relative src="image-xx.png" ; we will prefix later
  return html;
}
@@ -182,6 +181,25 @@ async function exists(p) {
  try { await fs.access(p); return true; } catch { return false; }
}

/**
 * ✅ compat:
 * - old : collection="archicratie" + slug="archicrat-ia/chapitre-3"
 * - new : collection="archicrat-ia" + slug="chapitre-3"
 *
 * Goal: always write to src/content/archicrat-ia/<slugWithoutPrefix>.mdx
 */
function normalizeDest(collection, slug) {
  let outCollection = String(collection || "").trim();
  let outSlug = String(slug || "").trim().replace(/^\/+|\/+$/g, "");

  if (outCollection === "archicratie" && outSlug.startsWith("archicrat-ia/")) {
    outCollection = "archicrat-ia";
    outSlug = outSlug.replace(/^archicrat-ia\//, "");
  }

  return { outCollection, outSlug };
}

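Mappings for both the legacy and the new manifest shapes (slugs illustrative):

normalizeDest("archicratie", "archicrat-ia/chapitre-3");
// -> { outCollection: "archicrat-ia", outSlug: "chapitre-3" }  (legacy shape, rewritten)

normalizeDest("archicrat-ia", "chapitre-3");
// -> { outCollection: "archicrat-ia", outSlug: "chapitre-3" }  (new shape, unchanged)

normalizeDest("glossaire", "archicratie");
// -> { outCollection: "glossaire", outSlug: "archicratie" }    (other collections untouched)
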
async function main() {
  const args = parseArgs(process.argv);
  const manifestPath = path.resolve(args.manifest);
@@ -203,11 +221,14 @@ async function main() {

  for (const it of selected) {
    const docxPath = path.resolve(it.source);
    const outFile = path.resolve("src/content", it.collection, `${it.slug}.mdx`);

    const { outCollection, outSlug } = normalizeDest(it.collection, it.slug);

    const outFile = path.resolve("src/content", outCollection, `${outSlug}.mdx`);
    const outDir = path.dirname(outFile);

    const assetsPublicDir = path.posix.join("/imported", it.collection, it.slug);
    const assetsDiskDir = path.resolve("public", "imported", it.collection, it.slug);
    const assetsPublicDir = path.posix.join("/imported", outCollection, outSlug);
    const assetsDiskDir = path.resolve("public", "imported", outCollection, outSlug);

    if (!(await exists(docxPath))) {
      throw new Error(`Missing source docx: ${docxPath}`);
@@ -241,18 +262,20 @@ async function main() {
      html = rewriteLocalImageLinks(html, assetsPublicDir);
      body = html.trim() ? html : "<p>(Import vide)</p>";
    }

    const defaultVersion = process.env.PUBLIC_RELEASE || "0.1.0";

    // ✅ IMPORTANT: archicrat-ia shares edition/status with archicratie (no frontmatter migration)
    const schemaDefaultsByCollection = {
      archicratie: { edition: "archicratie", status: "modele_sociopolitique", level: 1 },
      ia: { edition: "ia", status: "cas_pratique", level: 1 },
      traite: { edition: "traite", status: "ontodynamique", level: 1 },
      glossaire: { edition: "glossaire", status: "lexique", level: 1 },
      atlas: { edition: "atlas", status: "atlas", level: 1 },
      archicratie: { edition: "archicratie", status: "modele_sociopolitique", level: 1 },
      "archicrat-ia": { edition: "archicrat-ia", status: "essai_these", level: 1 },
      ia: { edition: "ia", status: "cas_pratique", level: 1 },
      traite: { edition: "traite", status: "ontodynamique", level: 1 },
      glossaire: { edition: "glossaire", status: "lexique", level: 1 },
      atlas: { edition: "atlas", status: "atlas", level: 1 },
    };

    const defaults = schemaDefaultsByCollection[it.collection] || { edition: it.collection, status: "draft", level: 1 };
    const defaults = schemaDefaultsByCollection[outCollection] || { edition: outCollection, status: "draft", level: 1 };

    const fm = [
      "---",
@@ -282,4 +305,4 @@ async function main() {
main().catch((e) => {
  console.error("\nERROR:", e?.message || e);
  process.exit(1);
});
@@ -1,2 +1,5 @@
{}
{
  "/archicrat-ia/chapitre-3/": {
    "p-1-60c7ea48": "p-1-a21087b0"
  }
}
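The alias file maps a stale paragraph id to its current one per page. A minimal lookup sketch, under the assumption that consumers resolve ids this way (the helper name is hypothetical):

// hypothetical helper: resolve a possibly-stale para id through the aliases map
function resolveParaId(aliases, pagePath, paraId) {
  return aliases?.[pagePath]?.[paraId] ?? paraId;
}

const aliases = { "/archicrat-ia/chapitre-3/": { "p-1-60c7ea48": "p-1-a21087b0" } };
resolveParaId(aliases, "/archicrat-ia/chapitre-3/", "p-1-60c7ea48"); // "p-1-a21087b0"
resolveParaId(aliases, "/archicrat-ia/chapitre-3/", "p-0-ace27175"); // unchanged
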
10
src/annotations/archicrat-ia/chapitre-1/p-0-8d27a7f5.yml
Normal file
@@ -0,0 +1,10 @@
schema: 1
page: archicrat-ia/chapitre-1
paras:
  p-0-8d27a7f5:
    refs:
      - url: https://auth.archicratie.trans-hands.synology.me/authenticated
        label: Lien web
        kind: (livre / article / vidéo / site / autre) Site
        ts: 2026-02-27T12:34:31.704Z
        fromIssue: 142
9
src/annotations/archicrat-ia/chapitre-1/p-1-8a6c18bf.yml
Normal file
@@ -0,0 +1,9 @@
schema: 1
page: archicrat-ia/chapitre-1
paras:
  p-1-8a6c18bf:
    comments_editorial:
      - text: Yeaha
        status: new
        ts: 2026-02-27T12:40:39.462Z
        fromIssue: 143
18
src/annotations/archicrat-ia/chapitre-3/p-0-ace27175.yml
Normal file
@@ -0,0 +1,18 @@
schema: 1
page: archicrat-ia/chapitre-3
paras:
  p-0-ace27175:
    media:
      - type: image
        src: /media/archicrat-ia/chapitre-3/p-0-ace27175/Capture_d_e_cran_2025-05-05_a_19.20.40.png
        caption: "[Media] p-0-ace27175 — Chapitre 3 — Philosophies du pouvoir et
          archicration"
        credit: ""
        ts: 2026-02-27T12:43:14.259Z
        fromIssue: 144
    refs:
      - url: https://gitea.archicratie.trans-hands.synology.me
        label: Gitea
        kind: (livre / article / vidéo / site / autre) Site
        ts: 2026-03-02T19:53:21.252Z
        fromIssue: 169
11
src/annotations/archicrat-ia/chapitre-3/p-1-60c7ea48.yml
Normal file
@@ -0,0 +1,11 @@
schema: 1
page: archicrat-ia/chapitre-3
paras:
  p-1-60c7ea48:
    refs:
      - url: https://gitea.archicratie.trans-hands.synology.me
        label: Gitea
        kind: (livre / article / vidéo / site / autre) Site
        ts: 2026-03-02T20:01:55.858Z
        fromIssue: 172
# testB: hotpatch-auto gate proof
19
src/annotations/archicrat-ia/chapitre-4/p-11-67c14c09.yml
Normal file
@@ -0,0 +1,19 @@
schema: 1
page: archicrat-ia/chapitre-4
paras:
  p-11-67c14c09:
    media:
      - type: image
        src: /media/archicrat-ia/chapitre-4/p-11-67c14c09/Capture_d_e_cran_2026-02-16_a_13.07.35.png
        caption: "[Media] p-11-67c14c09 — Chapitre 4 — Histoire archicratique des
          révolutions industrielles"
        credit: ""
        ts: 2026-02-26T13:17:41.286Z
        fromIssue: 129
      - type: image
        src: /media/archicrat-ia/chapitre-4/p-11-67c14c09/Capture_d_e_cran_2025-05-05_a_19.20.40.png
        caption: "[Media] p-11-67c14c09 — Chapitre 4 — Histoire archicratique des
          révolutions industrielles"
        credit: ""
        ts: 2026-02-27T09:17:04.386Z
        fromIssue: 127
@@ -3,14 +3,11 @@ import { getCollection } from "astro:content";

const { currentSlug } = Astro.props;

const entries = (await getCollection("archicratie"))
  .filter((e) => e.slug.startsWith("archicrat-ia/"))
// ✅ After migration: the TOC comes from the "archicrat-ia" collection
const entries = (await getCollection("archicrat-ia"))
  .sort((a, b) => (a.data.order ?? 0) - (b.data.order ?? 0));

// ✅ The Essai-thèse is routed at /archicrat-ia/<slug-without-prefix>/
// (Astro trailingSlash = always → keep the trailing "/")
const strip = (s) => String(s || "").replace(/^archicrat-ia\//, "");
const href = (slug) => `/archicrat-ia/${strip(slug)}/`;
const href = (slug) => `/archicrat-ia/${slug}/`;
---

<nav class="toc-global" aria-label="Table des matières — ArchiCraT-IA">
@@ -163,4 +160,4 @@ const href = (slug) => `/archicrat-ia/${strip(slug)}/`;
  const active = document.querySelector(".toc-global .toc-item.is-active");
  if (active) active.scrollIntoView({ block: "nearest" });
})();
</script>
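The href change follows from the collection switch: slugs in the "archicrat-ia" collection no longer carry the archicrat-ia/ prefix, so stripping it becomes redundant. Before/after for one slug (illustrative):

// old: slug came from "archicratie" as "archicrat-ia/chapitre-3"
const strip = (s) => String(s || "").replace(/^archicrat-ia\//, "");
`/archicrat-ia/${strip("archicrat-ia/chapitre-3")}/`; // "/archicrat-ia/chapitre-3/"

// new: slug comes from "archicrat-ia" as "chapitre-3"
`/archicrat-ia/${"chapitre-3"}/`;                     // "/archicrat-ia/chapitre-3/"
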
@@ -14,7 +14,7 @@ source:
---
This chapter stands at a nodal point of our essay-thesis: it opens a space for the systematic exploration of the conceptual and philosophical forms through which power configures itself as a regime of regulation. The point here is not to revisit once more the foundations of authority, nor to question political legitimacy in the classical sense of the term, nor even to inquire into the genesis of institutions. The ambition is different, structural, transversal, morphological: it will attempt to survey, within the apparatuses, thoughts, theorizations and experiences themselves, the differentiated modalities through which the forms of regulation of living-together are instituted, tested and contested.

Consequently, this chapter posits no foundation, seeks no point of origin, and claims to restore no stable ontology of the political. What it offers to read is a dynamic cartography of regimes of regulation, traversed by irreducible, non-homogeneous forms, often conflicting, sometimes incompatible, but all conceived as singular configurations.
Consequently, this chapter posits no foundation, seeks no point of origin, and claims to restore no stable ontology of the political. What it offers to read is a dynamic cartography of regimes of regulation, traversed by irreducible, non-homogeneous forms, often conflicting, sometimes incompatible, but all conceived as singular configurations, and often complementary ones.

Thus, far from being an illustrative gallery of juxtaposed political theories, the chapter is arranged as a critical topology, a stratigraphic plunge into the scenes where regulation is articulated, understood here not as external stabilization or technical-functional adjustment, but as an instituting apparatus, a structuring tension, a scene traversed by conflictuality and normative demand. For in our eyes, regulation is not what comes after power; it is its very constitutive form: its architecture, its rhythm, its thickness. It is that by which power is not merely exercised, but institutes itself, justifies itself, is contested, and recomposes itself.

@@ -2,7 +2,7 @@ import { defineCollection, z } from "astro:content";

const linkSchema = z.object({
  type: z.enum(["definition", "appui", "transposition"]),
  target: z.string().min(1), // internal URL (e.g. /glossaire/archicratie/) or slug
  target: z.string().min(1),
  note: z.string().optional()
});

@@ -12,7 +12,6 @@ const baseTextSchema = z.object({
  version: z.string().min(1),
  concepts: z.array(z.string().min(1)).default([]),
  links: z.array(linkSchema).default([]),
  // optional but already useful now
  order: z.number().int().nonnegative().optional(),
  summary: z.string().optional()
});
@@ -50,20 +49,31 @@ const atlas = defineCollection({
  })
});

// ✅ NEW collection: archicrat-ia (Essai-thèse)
// NOTE: edition/status "archicratie/modele_sociopolitique" are accepted temporarily
// while the MDX files have not yet been normalized.
// When going "strict", switch to edition="archicrat-ia" status="essai_these"
// and update the frontmatter of the 7 files.
const archicratIa = defineCollection({
  type: "content",
  schema: baseTextSchema.extend({
    edition: z.union([z.literal("archicrat-ia"), z.literal("archicratie")]),
    status: z.union([z.literal("essai_these"), z.literal("modele_sociopolitique")])
  })
});

// Glossaire (terminological reference)
const glossaire = defineCollection({
  type: "content",
  schema: z.object({
    title: z.string().min(1), // Public title (often identical to the term)
    term: z.string().min(1),  // Canonical term
    title: z.string().min(1),
    term: z.string().min(1),
    aliases: z.array(z.string().min(1)).default([]),
    edition: z.literal("glossaire"),
    status: z.literal("referentiel"),
    version: z.string().min(1),
    // Short definition displayable in a popover (concise, stable)
    definitionShort: z.string().min(1),
    concepts: z.array(z.string().min(1)).default([]),
    // Typed links (to works or other terms)
    links: z.array(linkSchema).default([])
  })
});
@@ -73,5 +83,8 @@ export const collections = {
  archicratie,
  ia,
  glossaire,
  atlas
};
  atlas,

  // ⚠️ key contains a dash => must be quoted
  "archicrat-ia": archicratIa
};
@@ -1,23 +1,80 @@
// src/pages/annotations-index.json.ts
import type { APIRoute } from "astro";
import * as fs from "node:fs/promises";
import * as path from "node:path";
import { parse as parseYAML } from "yaml";
import fs from "node:fs/promises";
import path from "node:path";
import YAML from "yaml";

const CWD = process.cwd();
const ANNO_DIR = path.join(CWD, "src", "annotations");
const ANNO_ROOT = path.join(CWD, "src", "annotations");

// Strict in CI (or explicit override)
const STRICT =
  process.env.ANNOTATIONS_STRICT === "1" ||
  process.env.CI === "1" ||
  process.env.CI === "true";
const isObj = (x: any) => !!x && typeof x === "object" && !Array.isArray(x);
const isArr = (x: any) => Array.isArray(x);

async function exists(p: string): Promise<boolean> {
|
||||
try {
|
||||
await fs.access(p);
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
function normPath(s: string) {
|
||||
return String(s || "").replace(/\\/g, "/").replace(/^\/+|\/+$/g, "");
|
||||
}
|
||||
function paraNum(pid: string) {
|
||||
const m = String(pid).match(/^p-(\d+)-/i);
|
||||
return m ? Number(m[1]) : Number.POSITIVE_INFINITY;
|
||||
}
|
||||
function toIso(v: any) {
|
||||
if (v instanceof Date) return v.toISOString();
|
||||
return typeof v === "string" ? v : "";
|
||||
}
|
||||
function stableSortByTs(arr: any[]) {
|
||||
if (!Array.isArray(arr)) return;
|
||||
arr.sort((a, b) => {
|
||||
const ta = Date.parse(toIso(a?.ts)) || 0;
|
||||
const tb = Date.parse(toIso(b?.ts)) || 0;
|
||||
if (ta !== tb) return ta - tb;
|
||||
return JSON.stringify(a).localeCompare(JSON.stringify(b));
|
||||
});
|
||||
}
|
||||
|
||||
function keyMedia(x: any) { return String(x?.src || ""); }
|
||||
function keyRef(x: any) {
|
||||
return `${x?.url || ""}||${x?.label || ""}||${x?.kind || ""}||${x?.citation || ""}`;
|
||||
}
|
||||
function keyComment(x: any) { return String(x?.text || "").trim(); }
|
||||
|
||||
function uniqUnion(dst: any[], src: any[], keyFn: (x:any)=>string) {
|
||||
const out = isArr(dst) ? [...dst] : [];
|
||||
const seen = new Set(out.map((x) => keyFn(x)));
|
||||
for (const it of (isArr(src) ? src : [])) {
|
||||
const k = keyFn(it);
|
||||
if (!k) continue;
|
||||
if (!seen.has(k)) { seen.add(k); out.push(it); }
|
||||
}
|
||||
return out;
|
||||
}
|
||||
|
||||
function deepMergeEntry(dst: any, src: any) {
|
||||
if (!isObj(dst) || !isObj(src)) return;
|
||||
|
||||
for (const [k, v] of Object.entries(src)) {
|
||||
if (k === "media" && isArr(v)) { dst.media = uniqUnion(dst.media, v, keyMedia); continue; }
|
||||
if (k === "refs" && isArr(v)) { dst.refs = uniqUnion(dst.refs, v, keyRef); continue; }
|
||||
if (k === "comments_editorial" && isArr(v)) { dst.comments_editorial = uniqUnion(dst.comments_editorial, v, keyComment); continue; }
|
||||
|
||||
if (isObj(v)) {
|
||||
if (!isObj((dst as any)[k])) (dst as any)[k] = {};
|
||||
deepMergeEntry((dst as any)[k], v);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (isArr(v)) {
|
||||
const cur = isArr((dst as any)[k]) ? (dst as any)[k] : [];
|
||||
const seen = new Set(cur.map((x:any) => JSON.stringify(x)));
|
||||
const out = [...cur];
|
||||
for (const it of v) {
|
||||
const s = JSON.stringify(it);
|
||||
if (!seen.has(s)) { seen.add(s); out.push(it); }
|
||||
}
|
||||
(dst as any)[k] = out;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!(k in (dst as any)) || (dst as any)[k] == null || (dst as any)[k] === "") (dst as any)[k] = v;
|
||||
}
|
||||
}
|
||||
|
||||
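The merge semantics introduced above, illustrated with throwaway values: media/refs/comments_editorial are unioned by key, nested objects merge recursively, and an existing scalar only yields to the incoming value when it is missing, null, or the empty string.

    const dst: any = { note: "", media: [{ src: "a.png" }] };
    deepMergeEntry(dst, { note: "filled", media: [{ src: "a.png" }, { src: "b.png" }] });
    // dst.note  => "filled"         ("" counts as unset)
    // dst.media => [a.png, b.png]   (deduplicated by src via uniqUnion/keyMedia)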
@@ -26,154 +83,98 @@ async function walk(dir: string): Promise<string[]> {
   const ents = await fs.readdir(dir, { withFileTypes: true });
   for (const e of ents) {
     const p = path.join(dir, e.name);
-    if (e.isDirectory()) out.push(...(await walk(p)));
-    else out.push(p);
+    if (e.isDirectory()) out.push(...await walk(p));
+    else if (e.isFile() && /\.ya?ml$/i.test(e.name)) out.push(p);
   }
   return out;
 }
 
-function isPlainObject(x: unknown): x is Record<string, unknown> {
-  return !!x && typeof x === "object" && !Array.isArray(x);
-}
-
-function normalizePageKey(s: unknown): string {
-  return String(s ?? "")
-    .replace(/^\/+/, "")
-    .replace(/\/+$/, "")
-    .trim();
-}
-
-function inferPageKeyFromFile(inDirAbs: string, fileAbs: string): string {
-  const rel = path.relative(inDirAbs, fileAbs).replace(/\\/g, "/");
-  return rel.replace(/\.(ya?ml|json)$/i, "");
-}
-
-function parseDoc(raw: string, fileAbs: string): unknown {
-  if (/\.json$/i.test(fileAbs)) return JSON.parse(raw);
-  return parseYAML(raw);
-}
-
-function hardFailOrCollect(errors: string[], msg: string): void {
-  if (STRICT) throw new Error(msg);
-  errors.push(msg);
-}
-
-function sanitizeEntry(
-  fileRel: string,
-  paraId: string,
-  entry: unknown,
-  errors: string[]
-): Record<string, unknown> {
-  if (entry == null) return {};
-
-  if (!isPlainObject(entry)) {
-    hardFailOrCollect(errors, `${fileRel}: paras.${paraId} must be an object`);
-    return {};
-  }
-
-  const e: Record<string, unknown> = { ...entry };
-
-  const arrayFields = [
-    "refs",
-    "authors",
-    "quotes",
-    "media",
-    "comments_editorial",
-  ] as const;
-
-  for (const k of arrayFields) {
-    if (e[k] == null) continue;
-    if (!Array.isArray(e[k])) {
-      errors.push(`${fileRel}: paras.${paraId}.${k} must be an array (coerced to [])`);
-      e[k] = [];
-    }
-  }
-
-  return e;
-}
+function inferExpected(relNoExt: string) {
+  const parts = relNoExt.split("/").filter(Boolean);
+  const last = parts.at(-1) || "";
+  const isShard = parts.length > 1 && /^p-\d+-/i.test(last); // ✅ hardening
+  const pageKey = isShard ? parts.slice(0, -1).join("/") : relNoExt;
+  const paraId = isShard ? last : null;
+  return { isShard, pageKey, paraId };
+}
 
 export const GET: APIRoute = async () => {
-  if (!(await exists(ANNO_DIR))) {
-    const out = {
-      schema: 1,
-      generatedAt: new Date().toISOString(),
-      pages: {},
-      stats: { pages: 0, paras: 0, errors: 0 },
-      errors: [] as string[],
-    };
-
-    return new Response(JSON.stringify(out), {
-      headers: {
-        "Content-Type": "application/json; charset=utf-8",
-        "Cache-Control": "no-store",
-      },
-    });
-  }
-
-  const files = (await walk(ANNO_DIR)).filter((p) => /\.(ya?ml|json)$/i.test(p));
-
-  const pages: Record<string, { paras: Record<string, Record<string, unknown>> }> =
-    Object.create(null);
-
-  const errors: string[] = [];
-  let paraCount = 0;
-
-  for (const f of files) {
-    const fileRel = path.relative(CWD, f).replace(/\\/g, "/");
-    const pageKey = normalizePageKey(inferPageKeyFromFile(ANNO_DIR, f));
-
-    if (!pageKey) {
-      hardFailOrCollect(errors, `${fileRel}: cannot infer page key`);
-      continue;
-    }
-
-    let doc: unknown;
-    try {
-      const raw = await fs.readFile(f, "utf8");
-      doc = parseDoc(raw, f);
-    } catch (e) {
-      hardFailOrCollect(errors, `${fileRel}: parse failed: ${String((e as any)?.message ?? e)}`);
-      continue;
-    }
-
-    if (!isPlainObject(doc) || (doc as any).schema !== 1) {
-      hardFailOrCollect(errors, `${fileRel}: schema must be 1`);
-      continue;
-    }
-
-    if ((doc as any).page != null) {
-      const declared = normalizePageKey((doc as any).page);
-      if (declared !== pageKey) {
-        hardFailOrCollect(
-          errors,
-          `${fileRel}: page mismatch (page="${declared}" vs path="${pageKey}")`
-        );
-      }
-    }
-
-    const parasAny = (doc as any).paras;
-    if (!isPlainObject(parasAny)) {
-      hardFailOrCollect(errors, `${fileRel}: missing object key "paras"`);
-      continue;
-    }
-
-    if (pages[pageKey]) {
-      hardFailOrCollect(errors, `${fileRel}: duplicate page "${pageKey}" (only one file per page)`);
-      continue;
-    }
-
-    const parasOut: Record<string, Record<string, unknown>> = Object.create(null);
-
-    for (const [paraId, entry] of Object.entries(parasAny)) {
-      if (!/^p-\d+-/i.test(paraId)) {
-        hardFailOrCollect(errors, `${fileRel}: invalid para id "${paraId}"`);
-        continue;
-      }
-      parasOut[paraId] = sanitizeEntry(fileRel, paraId, entry, errors);
-    }
-
-    pages[pageKey] = { paras: parasOut };
-    paraCount += Object.keys(parasOut).length;
-  }
+  const pages: Record<string, { paras: Record<string, any> }> = {};
+  const errors: Array<{ file: string; error: string }> = [];
+
+  let files: string[] = [];
+  try {
+    files = await walk(ANNO_ROOT);
+  } catch (e: any) {
+    throw new Error(`Missing annotations root: ${ANNO_ROOT} (${e?.message || e})`);
+  }
+
+  for (const fp of files) {
+    const rel = normPath(path.relative(ANNO_ROOT, fp));
+    const relNoExt = rel.replace(/\.ya?ml$/i, "");
+    const { isShard, pageKey, paraId } = inferExpected(relNoExt);
+
+    try {
+      const raw = await fs.readFile(fp, "utf8");
+      const doc = YAML.parse(raw) || {};
+
+      if (!isObj(doc) || doc.schema !== 1) continue;
+
+      const docPage = normPath(doc.page || "");
+      if (docPage && docPage !== pageKey) {
+        throw new Error(`page mismatch (page="${doc.page}" vs path="${pageKey}")`);
+      }
+      if (!doc.page) doc.page = pageKey;
+
+      if (!isObj(doc.paras)) throw new Error(`missing object key "paras"`);
+
+      const pg = pages[pageKey] ??= { paras: {} };
+
+      if (isShard) {
+        if (!paraId) throw new Error("internal: missing paraId");
+        if (!(paraId in doc.paras)) {
+          throw new Error(`shard mismatch: file must contain paras["${paraId}"]`);
+        }
+        // ✅ invariant aligned with build-annotations-index
+        const keys = Object.keys(doc.paras).map(String);
+        if (!(keys.length === 1 && keys[0] === paraId)) {
+          throw new Error(`shard invariant violated: shard must contain ONLY paras["${paraId}"] (got: ${keys.join(", ")})`);
+        }
+
+        const entry = doc.paras[paraId];
+        if (!isObj(pg.paras[paraId])) pg.paras[paraId] = {};
+        if (isObj(entry)) deepMergeEntry(pg.paras[paraId], entry);
+
+        stableSortByTs(pg.paras[paraId].media);
+        stableSortByTs(pg.paras[paraId].refs);
+        stableSortByTs(pg.paras[paraId].comments_editorial);
+      } else {
+        for (const [pid, entry] of Object.entries(doc.paras)) {
+          const p = String(pid);
+          if (!isObj(pg.paras[p])) pg.paras[p] = {};
+          if (isObj(entry)) deepMergeEntry(pg.paras[p], entry);
+
+          stableSortByTs(pg.paras[p].media);
+          stableSortByTs(pg.paras[p].refs);
+          stableSortByTs(pg.paras[p].comments_editorial);
+        }
+      }
+    } catch (e: any) {
+      errors.push({ file: `src/annotations/${rel}`, error: String(e?.message || e) });
+    }
+  }
+
+  for (const [pk, pg] of Object.entries(pages)) {
+    const keys = Object.keys(pg.paras || {});
+    keys.sort((a, b) => {
+      const ia = paraNum(a);
+      const ib = paraNum(b);
+      if (Number.isFinite(ia) && Number.isFinite(ib) && ia !== ib) return ia - ib;
+      return String(a).localeCompare(String(b));
+    });
+    const next: Record<string, any> = {};
+    for (const k of keys) next[k] = pg.paras[k];
+    pg.paras = next;
+  }
 
   const out = {
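How inferExpected classifies paths (relative to src/annotations, extension stripped; the example paths are hypothetical):

    inferExpected("archicratie/chapitre-1");
    // => { isShard: false, pageKey: "archicratie/chapitre-1", paraId: null }
    inferExpected("archicratie/chapitre-1/p-12-abc");
    // => { isShard: true, pageKey: "archicratie/chapitre-1", paraId: "p-12-abc" }
    inferExpected("p-12-abc");
    // => { isShard: false, ... } — a top-level file is never a shard (parts.length > 1 fails)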
@@ -182,16 +183,17 @@ export const GET: APIRoute = async () => {
     pages,
     stats: {
       pages: Object.keys(pages).length,
-      paras: paraCount,
+      paras: Object.values(pages).reduce((n, p) => n + Object.keys(p.paras || {}).length, 0),
       errors: errors.length,
     },
     errors,
   };
 
+  if (errors.length) {
+    throw new Error(`${errors[0].file}: ${errors[0].error}`);
+  }
+
   return new Response(JSON.stringify(out), {
-    headers: {
-      "Content-Type": "application/json; charset=utf-8",
-      "Cache-Control": "no-store",
-    },
+    headers: { "Content-Type": "application/json; charset=utf-8" },
   });
 };
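Consumer side, a minimal sketch (the /annotations-index.json path follows from this file's location under src/pages): with the new fail-fast guard, a response either carries the full index or the route throws on the first collected error.

    const res = await fetch("/annotations-index.json");
    const idx = await res.json();
    console.log(idx.stats); // { pages, paras, errors } — errors is 0 whenever a response is returned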
@@ -5,12 +5,11 @@ import EditionToc from "../../components/EditionToc.astro";
 import LocalToc from "../../components/LocalToc.astro";
 
 export async function getStaticPaths() {
-  const entries = (await getCollection("archicratie"))
-    .filter((e) => e.slug.startsWith("archicrat-ia/"));
+  // ✅ After the migration: no more prefix filtering, take the whole collection
+  const entries = await getCollection("archicrat-ia");
 
   return entries.map((entry) => ({
-    // ✅ inline: never an external helper (avoids "stripPrefix is not defined")
-    params: { slug: entry.slug.replace(/^archicrat-ia\//, "") },
+    params: { slug: entry.slug },
     props: { entry },
   }));
 }
@@ -35,4 +34,4 @@ const { Content, headings } = await entry.render();
 
 <h1>{entry.data.title}</h1>
 <Content />
-</EditionLayout>
\ No newline at end of file
+</EditionLayout>
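Resulting route mapping, sketched for a hypothetical entry (assuming this page lives at src/pages/archicrat-ia/[slug].astro):

    // entries = [{ slug: "chapitre-3", ... }] from the "archicrat-ia" collection
    // getStaticPaths() => [{ params: { slug: "chapitre-3" }, props: { entry } }]
    // i.e. the page is served at /archicrat-ia/chapitre-3/ — no prefix left to strip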
@@ -2,13 +2,12 @@
 import SiteLayout from "../../layouts/SiteLayout.astro";
 import { getCollection } from "astro:content";
 
-const entries = (await getCollection("archicratie"))
-  .filter((e) => e.slug.startsWith("archicrat-ia/"));
+// ✅ After the physical migration: collection = "archicrat-ia", slug = "chapitre-3" (no prefix)
+const entries = await getCollection("archicrat-ia");
 
 entries.sort((a, b) => (a.data.order ?? 9999) - (b.data.order ?? 9999));
 
-const strip = (slug) => slug.replace(/^archicrat-ia\//, "");
-const href = (slug) => `/archicrat-ia/${strip(slug)}/`;
+const href = (slug) => `/archicrat-ia/${slug}/`;
 ---
 
 <SiteLayout title="Essai-thèse — ArchiCraT-IA">
@@ -19,4 +18,4 @@ const href = (slug) => `/archicrat-ia/${strip(slug)}/`;
   <li><a href={href(e.slug)}>{e.data.title}</a></li>
 ))}
 </ul>
-</SiteLayout>
\ No newline at end of file
+</SiteLayout>
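The simplified href builder, for a hypothetical slug:

    href("chapitre-3"); // => "/archicrat-ia/chapitre-3/"
    // before the migration, href("archicrat-ia/chapitre-3") produced the same URL via strip()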