Compare commits
73 Commits
chore/fix-
...
chore/fix-
| Author | SHA1 | Date | |
|---|---|---|---|
| 9e1b704aa6 | |||
| 0b4a31a432 | |||
| c617dc3979 | |||
| 1b95161de0 | |||
| ebd976bd46 | |||
| f8d57d8fe0 | |||
| 09a4d2c472 | |||
| 1f6dc874d0 | |||
| 4dd63945ee | |||
| ba64b0694b | |||
| 58e5ceda59 | |||
| 08f826ee01 | |||
| 3358d280ec | |||
| 9cb0d5e416 | |||
| a46f058917 | |||
| 604b2199da | |||
| d153f71be6 | |||
| 8f64e4b098 | |||
| 459bf195d8 | |||
| 0c46b0d19b | |||
| bfbdc7b688 | |||
| 8fd53dd4d2 | |||
|
|
c8bbee4f74 | ||
| 04cdf54eb7 | |||
|
|
d6bf645ae9 | ||
| 1ca6bcbd81 | |||
| dec5f8eba7 | |||
| 716c887045 | |||
| 9b1789a164 | |||
| 17fa39c7ff | |||
| 8132e315f4 | |||
| 8d993915d7 | |||
| 497bddd05d | |||
| 7c8e49c1a9 | |||
| 901d28b89b | |||
| 43e2862c89 | |||
| 73fb38c4d1 | |||
| a81d206aba | |||
| 9801ea3cea | |||
| c11189fe11 | |||
| b47edb24cf | |||
| be191b09a0 | |||
| e06587478d | |||
| 402ffb04cd | |||
| 1cbfc02670 | |||
| 28d2fbbd2f | |||
| 225368a952 | |||
| 3574695041 | |||
| ea68025a1d | |||
| 3a08698003 | |||
| 3d583608c2 | |||
|
|
01ae95ab43 | ||
|
|
0d5821c640 | ||
|
|
2bcea39558 | ||
| af85970d4a | |||
| 210f621487 | |||
| 8ad960dc69 | |||
| d45a8b285f | |||
| b6e04a9138 | |||
| dcf1fc2d0b | |||
| 41b0517c6c | |||
| 6b43eb199d | |||
| d40f24e92d | |||
| 480a61b071 | |||
| a5d68d6a7e | |||
| 390f2c33e5 | |||
| 16485dc4a9 | |||
| a43ce5f188 | |||
| 0519ae2dd0 | |||
| 0d5b790e52 | |||
| 342e21b9ea | |||
| 4dec9e182b | |||
| c7ae883c6a |
@@ -16,9 +16,13 @@ defaults:
|
|||||||
run:
|
run:
|
||||||
shell: bash
|
shell: bash
|
||||||
|
|
||||||
|
concurrency:
|
||||||
|
group: anno-apply-${{ github.event.issue.number || github.event.issue.index || inputs.issue || 'manual' }}
|
||||||
|
cancel-in-progress: true
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
apply-approved:
|
apply-approved:
|
||||||
runs-on: ubuntu-latest
|
runs-on: mac-ci
|
||||||
container:
|
container:
|
||||||
image: mcr.microsoft.com/devcontainers/javascript-node:22-bookworm
|
image: mcr.microsoft.com/devcontainers/javascript-node:22-bookworm
|
||||||
|
|
||||||
@@ -29,12 +33,11 @@ jobs:
|
|||||||
git --version
|
git --version
|
||||||
node --version
|
node --version
|
||||||
npm --version
|
npm --version
|
||||||
npm ping --registry=https://registry.npmjs.org
|
|
||||||
|
|
||||||
- name: Derive context (event.json / workflow_dispatch)
|
- name: Derive context (event.json / workflow_dispatch)
|
||||||
env:
|
env:
|
||||||
INPUT_ISSUE: ${{ inputs.issue }}
|
INPUT_ISSUE: ${{ inputs.issue }}
|
||||||
FORGE_API: ${{ vars.FORGE_API || vars.FORGE_BASE }}
|
FORGE_API: ${{ vars.FORGE_API || vars.FORGE_BASE || vars.FORGE_BASE_URL }}
|
||||||
run: |
|
run: |
|
||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
export EVENT_JSON="/var/run/act/workflow/event.json"
|
export EVENT_JSON="/var/run/act/workflow/event.json"
|
||||||
@@ -78,10 +81,12 @@ jobs:
|
|||||||
throw new Error("No issue number in event.json or workflow_dispatch input");
|
throw new Error("No issue number in event.json or workflow_dispatch input");
|
||||||
}
|
}
|
||||||
|
|
||||||
const labelName =
|
// label name: best-effort (non-bloquant)
|
||||||
ev?.label?.name ||
|
let labelName = "workflow_dispatch";
|
||||||
ev?.label ||
|
const lab = ev?.label;
|
||||||
"workflow_dispatch";
|
if (typeof lab === "string") labelName = lab;
|
||||||
|
else if (lab && typeof lab === "object" && typeof lab.name === "string") labelName = lab.name;
|
||||||
|
else if (ev?.label?.name) labelName = ev.label.name;
|
||||||
|
|
||||||
const u = new URL(cloneUrl);
|
const u = new URL(cloneUrl);
|
||||||
const origin = u.origin;
|
const origin = u.origin;
|
||||||
@@ -106,16 +111,129 @@ jobs:
|
|||||||
echo "✅ context:"
|
echo "✅ context:"
|
||||||
sed -n '1,120p' /tmp/anno.env
|
sed -n '1,120p' /tmp/anno.env
|
||||||
|
|
||||||
- name: Gate on label state/approved
|
- name: Early gate (label event fast-skip, but tolerant)
|
||||||
run: |
|
run: |
|
||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
source /tmp/anno.env
|
source /tmp/anno.env
|
||||||
if [[ "$LABEL_NAME" != "state/approved" && "$LABEL_NAME" != "workflow_dispatch" ]]; then
|
|
||||||
echo "ℹ️ label=$LABEL_NAME => skip"
|
echo "ℹ️ event label = $LABEL_NAME"
|
||||||
|
|
||||||
|
# Fast skip on obvious non-approved label events (avoid noise),
|
||||||
|
# BUT do NOT skip if label payload is weird/unknown.
|
||||||
|
if [[ "$LABEL_NAME" != "state/approved" && "$LABEL_NAME" != "workflow_dispatch" && "$LABEL_NAME" != "" && "$LABEL_NAME" != "[object Object]" ]]; then
|
||||||
|
echo "ℹ️ label=$LABEL_NAME => skip early"
|
||||||
echo "SKIP=1" >> /tmp/anno.env
|
echo "SKIP=1" >> /tmp/anno.env
|
||||||
|
echo "SKIP_REASON=\"label_not_approved_event\"" >> /tmp/anno.env
|
||||||
exit 0
|
exit 0
|
||||||
fi
|
fi
|
||||||
echo "✅ proceed (issue=$ISSUE_NUMBER)"
|
|
||||||
|
echo "✅ continue to API gating (issue=$ISSUE_NUMBER)"
|
||||||
|
|
||||||
|
- name: Fetch issue + hard gate on labels + Type
|
||||||
|
env:
|
||||||
|
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
source /tmp/anno.env
|
||||||
|
[[ "${SKIP:-0}" != "1" ]] || { echo "ℹ️ skipped"; exit 0; }
|
||||||
|
|
||||||
|
test -n "${FORGE_TOKEN:-}" || { echo "❌ Missing secret FORGE_TOKEN"; exit 1; }
|
||||||
|
|
||||||
|
curl -fsS \
|
||||||
|
-H "Authorization: token $FORGE_TOKEN" \
|
||||||
|
-H "Accept: application/json" \
|
||||||
|
"$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE_NUMBER" \
|
||||||
|
-o /tmp/issue.json
|
||||||
|
|
||||||
|
node --input-type=module - <<'NODE' >> /tmp/anno.env
|
||||||
|
import fs from "node:fs";
|
||||||
|
|
||||||
|
const issue = JSON.parse(fs.readFileSync("/tmp/issue.json","utf8"));
|
||||||
|
const title = String(issue.title || "");
|
||||||
|
const body = String(issue.body || "").replace(/\r\n/g, "\n");
|
||||||
|
|
||||||
|
const labels = Array.isArray(issue.labels) ? issue.labels.map(l => String(l.name || "")).filter(Boolean) : [];
|
||||||
|
const hasApproved = labels.includes("state/approved");
|
||||||
|
|
||||||
|
function pickLine(key) {
|
||||||
|
const re = new RegExp(`^\\s*${key}\\s*:\\s*([^\\n\\r]+)`, "mi");
|
||||||
|
const m = body.match(re);
|
||||||
|
return m ? m[1].trim() : "";
|
||||||
|
}
|
||||||
|
|
||||||
|
const typeRaw = pickLine("Type");
|
||||||
|
const type = String(typeRaw || "").trim().toLowerCase();
|
||||||
|
|
||||||
|
const allowed = new Set(["type/media","type/reference","type/comment"]);
|
||||||
|
const proposer = new Set(["type/correction","type/fact-check"]);
|
||||||
|
|
||||||
|
const out = [];
|
||||||
|
out.push(`ISSUE_TITLE=${JSON.stringify(title)}`);
|
||||||
|
out.push(`ISSUE_TYPE=${JSON.stringify(type)}`);
|
||||||
|
|
||||||
|
// HARD gate: must currently have state/approved (avoids depending on event payload)
|
||||||
|
if (!hasApproved) {
|
||||||
|
out.push(`SKIP=1`);
|
||||||
|
out.push(`SKIP_REASON=${JSON.stringify("not_approved_label_present")}`);
|
||||||
|
process.stdout.write(out.join("\n") + "\n");
|
||||||
|
process.exit(0);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!type) {
|
||||||
|
out.push(`SKIP=1`);
|
||||||
|
out.push(`SKIP_REASON=${JSON.stringify("missing_type")}`);
|
||||||
|
} else if (allowed.has(type)) {
|
||||||
|
// proceed
|
||||||
|
} else if (proposer.has(type)) {
|
||||||
|
out.push(`SKIP=1`);
|
||||||
|
out.push(`SKIP_REASON=${JSON.stringify("proposer_type:"+type)}`);
|
||||||
|
} else {
|
||||||
|
out.push(`SKIP=1`);
|
||||||
|
out.push(`SKIP_REASON=${JSON.stringify("unsupported_type:"+type)}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
process.stdout.write(out.join("\n") + "\n");
|
||||||
|
NODE
|
||||||
|
|
||||||
|
echo "✅ gating result:"
|
||||||
|
grep -E '^(ISSUE_TYPE|SKIP|SKIP_REASON)=' /tmp/anno.env || true
|
||||||
|
|
||||||
|
- name: Comment issue if skipped (Proposer / unsupported / missing Type)
|
||||||
|
if: ${{ always() }}
|
||||||
|
env:
|
||||||
|
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
source /tmp/anno.env || true
|
||||||
|
|
||||||
|
[[ "${SKIP:-0}" == "1" ]] || exit 0
|
||||||
|
|
||||||
|
# IMPORTANT: do NOT comment for "not_approved_label_present" (avoid spam on other label events)
|
||||||
|
if [[ "${SKIP_REASON:-}" == "not_approved_label_present" || "${SKIP_REASON:-}" == "label_not_approved_event" ]]; then
|
||||||
|
echo "ℹ️ skip reason=${SKIP_REASON} -> no comment"
|
||||||
|
exit 0
|
||||||
|
fi
|
||||||
|
|
||||||
|
test -n "${FORGE_TOKEN:-}" || exit 0
|
||||||
|
|
||||||
|
REASON="${SKIP_REASON:-}"
|
||||||
|
TYPE="${ISSUE_TYPE:-}"
|
||||||
|
|
||||||
|
if [[ "$REASON" == proposer_type:* ]]; then
|
||||||
|
MSG="ℹ️ Ticket #${ISSUE_NUMBER} détecté comme **Proposer** (${TYPE}).\n\n- Ce type est **traité manuellement par les editors**.\n✅ Aucun traitement automatique."
|
||||||
|
elif [[ "$REASON" == unsupported_type:* ]]; then
|
||||||
|
MSG="ℹ️ Ticket #${ISSUE_NUMBER} ignoré : Type non supporté par le bot (${TYPE}).\n\nTypes supportés : type/media, type/reference, type/comment."
|
||||||
|
else
|
||||||
|
MSG="ℹ️ Ticket #${ISSUE_NUMBER} ignoré : champ 'Type:' manquant ou illisible.\n\nAjoute : Type: type/media|type/reference|type/comment"
|
||||||
|
fi
|
||||||
|
|
||||||
|
PAYLOAD="$(node --input-type=module -e 'console.log(JSON.stringify({body: process.argv[1]||""}))' "$MSG")"
|
||||||
|
|
||||||
|
curl -fsS -X POST \
|
||||||
|
-H "Authorization: token $FORGE_TOKEN" \
|
||||||
|
-H "Content-Type: application/json" \
|
||||||
|
"$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE_NUMBER/comments" \
|
||||||
|
--data-binary "$PAYLOAD"
|
||||||
|
|
||||||
- name: Checkout default branch
|
- name: Checkout default branch
|
||||||
run: |
|
run: |
|
||||||
@@ -135,7 +253,7 @@ jobs:
|
|||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
source /tmp/anno.env
|
source /tmp/anno.env
|
||||||
[[ "${SKIP:-0}" != "1" ]] || { echo "ℹ️ skipped"; exit 0; }
|
[[ "${SKIP:-0}" != "1" ]] || { echo "ℹ️ skipped"; exit 0; }
|
||||||
npm ci
|
npm ci --no-audit --no-fund
|
||||||
|
|
||||||
- name: Check apply script exists
|
- name: Check apply script exists
|
||||||
run: |
|
run: |
|
||||||
@@ -154,7 +272,7 @@ jobs:
|
|||||||
source /tmp/anno.env
|
source /tmp/anno.env
|
||||||
[[ "${SKIP:-0}" != "1" ]] || { echo "ℹ️ skipped"; exit 0; }
|
[[ "${SKIP:-0}" != "1" ]] || { echo "ℹ️ skipped"; exit 0; }
|
||||||
|
|
||||||
npm run build:clean
|
npm run build
|
||||||
|
|
||||||
test -f dist/para-index.json || {
|
test -f dist/para-index.json || {
|
||||||
echo "❌ missing dist/para-index.json after build"
|
echo "❌ missing dist/para-index.json after build"
|
||||||
@@ -173,6 +291,7 @@ jobs:
|
|||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
source /tmp/anno.env
|
source /tmp/anno.env
|
||||||
[[ "${SKIP:-0}" != "1" ]] || { echo "ℹ️ skipped"; exit 0; }
|
[[ "${SKIP:-0}" != "1" ]] || { echo "ℹ️ skipped"; exit 0; }
|
||||||
|
test -d .git || { echo "❌ not a git repo (checkout failed)"; echo "APPLY_RC=90" >> /tmp/anno.env; exit 0; }
|
||||||
|
|
||||||
test -n "${FORGE_TOKEN:-}" || { echo "❌ Missing secret FORGE_TOKEN"; exit 1; }
|
test -n "${FORGE_TOKEN:-}" || { echo "❌ Missing secret FORGE_TOKEN"; exit 1; }
|
||||||
|
|
||||||
@@ -220,7 +339,7 @@ jobs:
|
|||||||
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
|
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
|
||||||
run: |
|
run: |
|
||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
source /tmp/anno.env
|
source /tmp/anno.env || true
|
||||||
[[ "${SKIP:-0}" != "1" ]] || { echo "ℹ️ skipped"; exit 0; }
|
[[ "${SKIP:-0}" != "1" ]] || { echo "ℹ️ skipped"; exit 0; }
|
||||||
|
|
||||||
RC="${APPLY_RC:-0}"
|
RC="${APPLY_RC:-0}"
|
||||||
@@ -229,30 +348,15 @@ jobs:
|
|||||||
exit 0
|
exit 0
|
||||||
fi
|
fi
|
||||||
|
|
||||||
BODY="$(tail -n 160 /tmp/apply.log | sed 's/\r$//')"
|
test -n "${FORGE_TOKEN:-}" || exit 0
|
||||||
|
|
||||||
|
if [[ -f /tmp/apply.log ]]; then
|
||||||
|
BODY="$(tail -n 160 /tmp/apply.log | sed 's/\r$//')"
|
||||||
|
else
|
||||||
|
BODY="(no apply log found)"
|
||||||
|
fi
|
||||||
|
|
||||||
MSG="❌ apply-annotation-ticket a échoué (rc=${RC}).\n\n\`\`\`\n${BODY}\n\`\`\`\n"
|
MSG="❌ apply-annotation-ticket a échoué (rc=${RC}).\n\n\`\`\`\n${BODY}\n\`\`\`\n"
|
||||||
|
|
||||||
PAYLOAD="$(node --input-type=module -e 'console.log(JSON.stringify({body: process.argv[1]||""}))' "$MSG")"
|
|
||||||
|
|
||||||
curl -fsS -X POST \
|
|
||||||
-H "Authorization: token $FORGE_TOKEN" \
|
|
||||||
-H "Content-Type: application/json" \
|
|
||||||
"$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE_NUMBER/comments" \
|
|
||||||
--data-binary "$PAYLOAD"
|
|
||||||
|
|
||||||
- name: Comment issue if no-op (already applied)
|
|
||||||
if: ${{ always() }}
|
|
||||||
env:
|
|
||||||
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
|
|
||||||
run: |
|
|
||||||
set -euo pipefail
|
|
||||||
source /tmp/anno.env
|
|
||||||
[[ "${SKIP:-0}" != "1" ]] || exit 0
|
|
||||||
|
|
||||||
[[ "${APPLY_RC:-0}" == "0" ]] || exit 0
|
|
||||||
[[ "${NOOP:-0}" == "1" ]] || exit 0
|
|
||||||
|
|
||||||
MSG="ℹ️ Ticket #${ISSUE_NUMBER} : rien à appliquer (déjà présent / dédupliqué)."
|
|
||||||
PAYLOAD="$(node --input-type=module -e 'console.log(JSON.stringify({body: process.argv[1]||""}))' "$MSG")"
|
PAYLOAD="$(node --input-type=module -e 'console.log(JSON.stringify({body: process.argv[1]||""}))' "$MSG")"
|
||||||
|
|
||||||
curl -fsS -X POST \
|
curl -fsS -X POST \
|
||||||
@@ -267,11 +371,12 @@ jobs:
|
|||||||
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
|
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
|
||||||
run: |
|
run: |
|
||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
source /tmp/anno.env
|
source /tmp/anno.env || true
|
||||||
[[ "${SKIP:-0}" != "1" ]] || exit 0
|
[[ "${SKIP:-0}" != "1" ]] || exit 0
|
||||||
|
|
||||||
[[ "${APPLY_RC:-0}" == "0" ]] || { echo "ℹ️ apply failed -> skip push"; exit 0; }
|
[[ "${APPLY_RC:-0}" == "0" ]] || { echo "ℹ️ apply failed -> skip push"; exit 0; }
|
||||||
[[ "${NOOP:-0}" == "0" ]] || { echo "ℹ️ no-op -> skip push"; exit 0; }
|
[[ "${NOOP:-0}" == "0" ]] || { echo "ℹ️ no-op -> skip push"; exit 0; }
|
||||||
|
test -d .git || { echo "ℹ️ no git repo -> skip push"; exit 0; }
|
||||||
|
|
||||||
AUTH_URL="$(node --input-type=module -e '
|
AUTH_URL="$(node --input-type=module -e '
|
||||||
const [clone, tok] = process.argv.slice(1);
|
const [clone, tok] = process.argv.slice(1);
|
||||||
@@ -290,7 +395,7 @@ jobs:
|
|||||||
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
|
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
|
||||||
run: |
|
run: |
|
||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
source /tmp/anno.env
|
source /tmp/anno.env || true
|
||||||
[[ "${SKIP:-0}" != "1" ]] || exit 0
|
[[ "${SKIP:-0}" != "1" ]] || exit 0
|
||||||
|
|
||||||
[[ "${APPLY_RC:-0}" == "0" ]] || { echo "ℹ️ apply failed -> skip PR"; exit 0; }
|
[[ "${APPLY_RC:-0}" == "0" ]] || { echo "ℹ️ apply failed -> skip PR"; exit 0; }
|
||||||
@@ -333,6 +438,7 @@ jobs:
|
|||||||
run: |
|
run: |
|
||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
source /tmp/anno.env || true
|
source /tmp/anno.env || true
|
||||||
|
|
||||||
[[ "${SKIP:-0}" != "1" ]] || { echo "ℹ️ skipped"; exit 0; }
|
[[ "${SKIP:-0}" != "1" ]] || { echo "ℹ️ skipped"; exit 0; }
|
||||||
|
|
||||||
RC="${APPLY_RC:-0}"
|
RC="${APPLY_RC:-0}"
|
||||||
|
|||||||
@@ -1,8 +1,13 @@
|
|||||||
name: Anno Reject
|
name: Anno Reject (close issue)
|
||||||
|
|
||||||
on:
|
on:
|
||||||
issues:
|
issues:
|
||||||
types: [labeled]
|
types: [labeled]
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
issue:
|
||||||
|
description: "Issue number to reject/close"
|
||||||
|
required: true
|
||||||
|
|
||||||
env:
|
env:
|
||||||
NODE_OPTIONS: --dns-result-order=ipv4first
|
NODE_OPTIONS: --dns-result-order=ipv4first
|
||||||
@@ -11,14 +16,26 @@ defaults:
|
|||||||
run:
|
run:
|
||||||
shell: bash
|
shell: bash
|
||||||
|
|
||||||
|
concurrency:
|
||||||
|
group: anno-reject-${{ github.event.issue.number || github.event.issue.index || inputs.issue || 'manual' }}
|
||||||
|
cancel-in-progress: true
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
reject:
|
reject:
|
||||||
runs-on: ubuntu-latest
|
runs-on: mac-ci
|
||||||
container:
|
container:
|
||||||
image: mcr.microsoft.com/devcontainers/javascript-node:22-bookworm
|
image: mcr.microsoft.com/devcontainers/javascript-node:22-bookworm
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Derive context
|
- name: Tools sanity
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
node --version
|
||||||
|
|
||||||
|
- name: Derive context (event.json / workflow_dispatch)
|
||||||
|
env:
|
||||||
|
INPUT_ISSUE: ${{ inputs.issue }}
|
||||||
|
FORGE_API: ${{ vars.FORGE_API || vars.FORGE_BASE || vars.FORGE_BASE_URL }}
|
||||||
run: |
|
run: |
|
||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
export EVENT_JSON="/var/run/act/workflow/event.json"
|
export EVENT_JSON="/var/run/act/workflow/event.json"
|
||||||
@@ -29,58 +46,122 @@ jobs:
|
|||||||
|
|
||||||
const ev = JSON.parse(fs.readFileSync(process.env.EVENT_JSON, "utf8"));
|
const ev = JSON.parse(fs.readFileSync(process.env.EVENT_JSON, "utf8"));
|
||||||
const repoObj = ev?.repository || {};
|
const repoObj = ev?.repository || {};
|
||||||
|
|
||||||
const cloneUrl =
|
const cloneUrl =
|
||||||
repoObj?.clone_url ||
|
repoObj?.clone_url ||
|
||||||
(repoObj?.html_url ? (repoObj.html_url.replace(/\/$/,"") + ".git") : "");
|
(repoObj?.html_url ? (repoObj.html_url.replace(/\/$/,"") + ".git") : "");
|
||||||
if (!cloneUrl) throw new Error("No repository url");
|
|
||||||
|
|
||||||
let owner =
|
let owner =
|
||||||
repoObj?.owner?.login ||
|
repoObj?.owner?.login ||
|
||||||
repoObj?.owner?.username ||
|
repoObj?.owner?.username ||
|
||||||
(repoObj?.full_name ? repoObj.full_name.split("/")[0] : "");
|
(repoObj?.full_name ? repoObj.full_name.split("/")[0] : "");
|
||||||
|
|
||||||
let repo =
|
let repo =
|
||||||
repoObj?.name ||
|
repoObj?.name ||
|
||||||
(repoObj?.full_name ? repoObj.full_name.split("/")[1] : "");
|
(repoObj?.full_name ? repoObj.full_name.split("/")[1] : "");
|
||||||
|
|
||||||
if (!owner || !repo) {
|
if ((!owner || !repo) && cloneUrl) {
|
||||||
const m = cloneUrl.match(/[:/](?<o>[^/]+)\/(?<r>[^/]+?)(?:\.git)?$/);
|
const m = cloneUrl.match(/[:/](?<o>[^/]+)\/(?<r>[^/]+?)(?:\.git)?$/);
|
||||||
if (m?.groups) { owner = owner || m.groups.o; repo = repo || m.groups.r; }
|
if (m?.groups) { owner = owner || m.groups.o; repo = repo || m.groups.r; }
|
||||||
}
|
}
|
||||||
if (!owner || !repo) throw new Error("Cannot infer owner/repo");
|
if (!owner || !repo) throw new Error("Cannot infer owner/repo");
|
||||||
|
|
||||||
const issueNumber = ev?.issue?.number || ev?.issue?.index;
|
const issueNumber =
|
||||||
if (!issueNumber) throw new Error("No issue number");
|
ev?.issue?.number ||
|
||||||
|
ev?.issue?.index ||
|
||||||
|
(process.env.INPUT_ISSUE ? Number(process.env.INPUT_ISSUE) : 0);
|
||||||
|
|
||||||
const labelName = ev?.label?.name || ev?.label || "";
|
if (!issueNumber || !Number.isFinite(Number(issueNumber))) {
|
||||||
const u = new URL(cloneUrl);
|
throw new Error("No issue number in event.json or workflow_dispatch input");
|
||||||
|
}
|
||||||
|
|
||||||
|
// label name: best-effort (non-bloquant)
|
||||||
|
let labelName = "workflow_dispatch";
|
||||||
|
const lab = ev?.label;
|
||||||
|
if (typeof lab === "string") labelName = lab;
|
||||||
|
else if (lab && typeof lab === "object" && typeof lab.name === "string") labelName = lab.name;
|
||||||
|
|
||||||
|
let apiBase = "";
|
||||||
|
if (process.env.FORGE_API && String(process.env.FORGE_API).trim()) {
|
||||||
|
apiBase = String(process.env.FORGE_API).trim().replace(/\/+$/,"");
|
||||||
|
} else if (cloneUrl) {
|
||||||
|
apiBase = new URL(cloneUrl).origin;
|
||||||
|
} else {
|
||||||
|
apiBase = "";
|
||||||
|
}
|
||||||
|
|
||||||
function sh(s){ return JSON.stringify(String(s)); }
|
function sh(s){ return JSON.stringify(String(s)); }
|
||||||
|
|
||||||
process.stdout.write([
|
process.stdout.write([
|
||||||
`OWNER=${sh(owner)}`,
|
`OWNER=${sh(owner)}`,
|
||||||
`REPO=${sh(repo)}`,
|
`REPO=${sh(repo)}`,
|
||||||
`ISSUE_NUMBER=${sh(issueNumber)}`,
|
`ISSUE_NUMBER=${sh(issueNumber)}`,
|
||||||
`LABEL_NAME=${sh(labelName)}`,
|
`LABEL_NAME=${sh(labelName)}`,
|
||||||
`API_BASE=${sh(u.origin)}`
|
`API_BASE=${sh(apiBase)}`
|
||||||
].join("\n") + "\n");
|
].join("\n") + "\n");
|
||||||
NODE
|
NODE
|
||||||
|
|
||||||
- name: Gate on label state/rejected
|
echo "✅ context:"
|
||||||
|
sed -n '1,120p' /tmp/reject.env
|
||||||
|
|
||||||
|
- name: Early gate (fast-skip, tolerant)
|
||||||
run: |
|
run: |
|
||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
source /tmp/reject.env
|
source /tmp/reject.env
|
||||||
if [[ "$LABEL_NAME" != "state/rejected" ]]; then
|
echo "ℹ️ event label = $LABEL_NAME"
|
||||||
echo "ℹ️ label=$LABEL_NAME => skip"
|
|
||||||
|
if [[ "$LABEL_NAME" != "state/rejected" && "$LABEL_NAME" != "workflow_dispatch" && "$LABEL_NAME" != "" && "$LABEL_NAME" != "[object Object]" ]]; then
|
||||||
|
echo "ℹ️ label=$LABEL_NAME => skip early"
|
||||||
|
echo "SKIP=1" >> /tmp/reject.env
|
||||||
|
echo "SKIP_REASON=\"label_not_rejected_event\"" >> /tmp/reject.env
|
||||||
exit 0
|
exit 0
|
||||||
fi
|
fi
|
||||||
echo "✅ reject issue=$ISSUE_NUMBER"
|
|
||||||
|
|
||||||
- name: Comment + close issue
|
- name: Comment + close (only if label state/rejected is PRESENT now, and no conflict)
|
||||||
env:
|
env:
|
||||||
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
|
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
|
||||||
run: |
|
run: |
|
||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
source /tmp/reject.env
|
source /tmp/reject.env
|
||||||
|
[[ "${SKIP:-0}" != "1" ]] || { echo "ℹ️ skipped"; exit 0; }
|
||||||
|
|
||||||
test -n "${FORGE_TOKEN:-}" || { echo "❌ Missing secret FORGE_TOKEN"; exit 1; }
|
test -n "${FORGE_TOKEN:-}" || { echo "❌ Missing secret FORGE_TOKEN"; exit 1; }
|
||||||
|
test -n "${API_BASE:-}" || { echo "❌ Missing API_BASE"; exit 1; }
|
||||||
|
|
||||||
|
curl -fsS \
|
||||||
|
-H "Authorization: token $FORGE_TOKEN" \
|
||||||
|
-H "Accept: application/json" \
|
||||||
|
"$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE_NUMBER" \
|
||||||
|
-o /tmp/reject.issue.json
|
||||||
|
|
||||||
|
node --input-type=module - <<'NODE' > /tmp/reject.flags
|
||||||
|
import fs from "node:fs";
|
||||||
|
const issue = JSON.parse(fs.readFileSync("/tmp/reject.issue.json","utf8"));
|
||||||
|
const labels = Array.isArray(issue.labels) ? issue.labels.map(l => String(l.name || "")).filter(Boolean) : [];
|
||||||
|
const hasApproved = labels.includes("state/approved");
|
||||||
|
const hasRejected = labels.includes("state/rejected");
|
||||||
|
process.stdout.write(`HAS_APPROVED=${hasApproved ? "1":"0"}\nHAS_REJECTED=${hasRejected ? "1":"0"}\n`);
|
||||||
|
NODE
|
||||||
|
|
||||||
|
source /tmp/reject.flags
|
||||||
|
|
||||||
|
# Do nothing unless state/rejected is truly present now (anti payload weird)
|
||||||
|
if [[ "${HAS_REJECTED:-0}" != "1" ]]; then
|
||||||
|
echo "ℹ️ state/rejected not present -> skip"
|
||||||
|
exit 0
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [[ "${HAS_APPROVED:-0}" == "1" && "${HAS_REJECTED:-0}" == "1" ]]; then
|
||||||
|
MSG="⚠️ Conflit d'état sur le ticket #${ISSUE_NUMBER} : labels **state/approved** et **state/rejected** présents.\n\n➡️ Action manuelle requise : retirer l'un des deux labels avant relance."
|
||||||
|
PAYLOAD="$(node --input-type=module -e 'console.log(JSON.stringify({body: process.argv[1]||""}))' "$MSG")"
|
||||||
|
curl -fsS -X POST \
|
||||||
|
-H "Authorization: token $FORGE_TOKEN" \
|
||||||
|
-H "Content-Type: application/json" \
|
||||||
|
"$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE_NUMBER/comments" \
|
||||||
|
--data-binary "$PAYLOAD"
|
||||||
|
echo "ℹ️ conflict => stop"
|
||||||
|
exit 0
|
||||||
|
fi
|
||||||
|
|
||||||
MSG="❌ Ticket #${ISSUE_NUMBER} refusé (label state/rejected)."
|
MSG="❌ Ticket #${ISSUE_NUMBER} refusé (label state/rejected)."
|
||||||
PAYLOAD="$(node --input-type=module -e 'console.log(JSON.stringify({body: process.argv[1]||""}))' "$MSG")"
|
PAYLOAD="$(node --input-type=module -e 'console.log(JSON.stringify({body: process.argv[1]||""}))' "$MSG")"
|
||||||
@@ -96,3 +177,5 @@ jobs:
|
|||||||
-H "Content-Type: application/json" \
|
-H "Content-Type: application/json" \
|
||||||
"$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE_NUMBER" \
|
"$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE_NUMBER" \
|
||||||
--data-binary '{"state":"closed"}'
|
--data-binary '{"state":"closed"}'
|
||||||
|
|
||||||
|
echo "✅ rejected+closed"
|
||||||
@@ -4,22 +4,37 @@ on:
|
|||||||
issues:
|
issues:
|
||||||
types: [opened, edited]
|
types: [opened, edited]
|
||||||
|
|
||||||
|
concurrency:
|
||||||
|
group: auto-label-${{ github.event.issue.number || github.event.issue.index || 'manual' }}
|
||||||
|
cancel-in-progress: true
|
||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
label:
|
label:
|
||||||
runs-on: ubuntu-latest
|
runs-on: mac-ci
|
||||||
|
container:
|
||||||
|
image: mcr.microsoft.com/devcontainers/javascript-node:22-bookworm
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Apply labels from Type/State/Category
|
- name: Apply labels from Type/State/Category
|
||||||
env:
|
env:
|
||||||
FORGE_BASE: ${{ vars.FORGE_API || vars.FORGE_BASE }}
|
# IMPORTANT: préfère FORGE_BASE (LAN) si défini, sinon FORGE_API
|
||||||
|
FORGE_BASE: ${{ vars.FORGE_BASE || vars.FORGE_API || vars.FORGE_API_BASE }}
|
||||||
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
|
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
|
||||||
REPO_FULL: ${{ gitea.repository }}
|
REPO_FULL: ${{ gitea.repository }}
|
||||||
EVENT_PATH: ${{ github.event_path }}
|
EVENT_PATH: ${{ github.event_path }}
|
||||||
|
NODE_OPTIONS: --dns-result-order=ipv4first
|
||||||
run: |
|
run: |
|
||||||
python3 - <<'PY'
|
python3 - <<'PY'
|
||||||
import json, os, re, urllib.request, urllib.error
|
import json, os, re, time, urllib.request, urllib.error, socket
|
||||||
|
|
||||||
|
forge = (os.environ.get("FORGE_BASE") or "").rstrip("/")
|
||||||
|
if not forge:
|
||||||
|
raise SystemExit("Missing FORGE_BASE/FORGE_API repo variable (e.g. http://192.168.1.20:3000)")
|
||||||
|
|
||||||
|
token = os.environ.get("FORGE_TOKEN") or ""
|
||||||
|
if not token:
|
||||||
|
raise SystemExit("Missing secret FORGE_TOKEN")
|
||||||
|
|
||||||
forge = os.environ["FORGE_BASE"].rstrip("/")
|
|
||||||
token = os.environ["FORGE_TOKEN"]
|
|
||||||
owner, repo = os.environ["REPO_FULL"].split("/", 1)
|
owner, repo = os.environ["REPO_FULL"].split("/", 1)
|
||||||
event_path = os.environ["EVENT_PATH"]
|
event_path = os.environ["EVENT_PATH"]
|
||||||
|
|
||||||
@@ -46,12 +61,9 @@ jobs:
|
|||||||
print("PARSED:", {"Type": t, "State": s, "Category": c})
|
print("PARSED:", {"Type": t, "State": s, "Category": c})
|
||||||
|
|
||||||
# 1) explicite depuis le body
|
# 1) explicite depuis le body
|
||||||
if t:
|
if t: desired.add(t)
|
||||||
desired.add(t)
|
if s: desired.add(s)
|
||||||
if s:
|
if c: desired.add(c)
|
||||||
desired.add(s)
|
|
||||||
if c:
|
|
||||||
desired.add(c)
|
|
||||||
|
|
||||||
# 2) fallback depuis le titre si Type absent
|
# 2) fallback depuis le titre si Type absent
|
||||||
if not t:
|
if not t:
|
||||||
@@ -76,42 +88,56 @@ jobs:
|
|||||||
"Authorization": f"token {token}",
|
"Authorization": f"token {token}",
|
||||||
"Accept": "application/json",
|
"Accept": "application/json",
|
||||||
"Content-Type": "application/json",
|
"Content-Type": "application/json",
|
||||||
"User-Agent": "archicratie-auto-label/1.0",
|
"User-Agent": "archicratie-auto-label/1.1",
|
||||||
}
|
}
|
||||||
|
|
||||||
def jreq(method, url, payload=None):
|
def jreq(method, url, payload=None, timeout=60, retries=4, backoff=2.0):
|
||||||
data = None if payload is None else json.dumps(payload).encode("utf-8")
|
data = None if payload is None else json.dumps(payload).encode("utf-8")
|
||||||
req = urllib.request.Request(url, data=data, headers=headers, method=method)
|
last_err = None
|
||||||
try:
|
for i in range(retries):
|
||||||
with urllib.request.urlopen(req, timeout=20) as r:
|
req = urllib.request.Request(url, data=data, headers=headers, method=method)
|
||||||
b = r.read()
|
try:
|
||||||
return json.loads(b.decode("utf-8")) if b else None
|
with urllib.request.urlopen(req, timeout=timeout) as r:
|
||||||
except urllib.error.HTTPError as e:
|
b = r.read()
|
||||||
b = e.read().decode("utf-8", errors="replace")
|
return json.loads(b.decode("utf-8")) if b else None
|
||||||
raise RuntimeError(f"HTTP {e.code} {method} {url}\n{b}") from e
|
except urllib.error.HTTPError as e:
|
||||||
|
b = e.read().decode("utf-8", errors="replace")
|
||||||
|
raise RuntimeError(f"HTTP {e.code} {method} {url}\n{b}") from e
|
||||||
|
except (TimeoutError, socket.timeout, urllib.error.URLError) as e:
|
||||||
|
last_err = e
|
||||||
|
# retry only on network/timeout
|
||||||
|
time.sleep(backoff * (i + 1))
|
||||||
|
raise RuntimeError(f"Network/timeout after retries: {method} {url}\n{last_err}")
|
||||||
|
|
||||||
# labels repo
|
# labels repo
|
||||||
labels = jreq("GET", f"{api}/repos/{owner}/{repo}/labels?limit=1000") or []
|
labels = jreq("GET", f"{api}/repos/{owner}/{repo}/labels?limit=1000", timeout=60) or []
|
||||||
name_to_id = {x.get("name"): x.get("id") for x in labels}
|
name_to_id = {x.get("name"): x.get("id") for x in labels}
|
||||||
|
|
||||||
missing = [x for x in desired if x not in name_to_id]
|
missing = [x for x in desired if x not in name_to_id]
|
||||||
if missing:
|
if missing:
|
||||||
raise SystemExit("Missing labels in repo: " + ", ".join(sorted(missing)))
|
raise SystemExit("Missing labels in repo: " + ", ".join(sorted(missing)))
|
||||||
|
|
||||||
wanted_ids = [name_to_id[x] for x in desired]
|
wanted_ids = sorted({int(name_to_id[x]) for x in desired})
|
||||||
|
|
||||||
# labels actuels de l'issue
|
# labels actuels de l'issue
|
||||||
current = jreq("GET", f"{api}/repos/{owner}/{repo}/issues/{number}/labels") or []
|
current = jreq("GET", f"{api}/repos/{owner}/{repo}/issues/{number}/labels", timeout=60) or []
|
||||||
current_ids = {x.get("id") for x in current if x.get("id") is not None}
|
current_ids = {int(x.get("id")) for x in current if x.get("id") is not None}
|
||||||
|
|
||||||
final_ids = sorted(current_ids.union(wanted_ids))
|
final_ids = sorted(current_ids.union(wanted_ids))
|
||||||
|
|
||||||
# set labels = union (n'enlève rien)
|
# Replace labels = union (n'enlève rien)
|
||||||
url = f"{api}/repos/{owner}/{repo}/issues/{number}/labels"
|
url = f"{api}/repos/{owner}/{repo}/issues/{number}/labels"
|
||||||
try:
|
|
||||||
jreq("PUT", url, {"labels": final_ids})
|
# IMPORTANT: on n'envoie JAMAIS une liste brute ici (ça a causé le 422)
|
||||||
except Exception:
|
jreq("PUT", url, {"labels": final_ids}, timeout=90, retries=4)
|
||||||
jreq("PUT", url, final_ids)
|
|
||||||
|
# vérif post-apply (anti "timeout mais appliqué")
|
||||||
|
post = jreq("GET", f"{api}/repos/{owner}/{repo}/issues/{number}/labels", timeout=60) or []
|
||||||
|
post_ids = {int(x.get("id")) for x in post if x.get("id") is not None}
|
||||||
|
|
||||||
|
missing_ids = [i for i in wanted_ids if i not in post_ids]
|
||||||
|
if missing_ids:
|
||||||
|
raise RuntimeError(f"Labels not applied after PUT (missing ids): {missing_ids}")
|
||||||
|
|
||||||
print(f"OK labels #{number}: {sorted(desired)}")
|
print(f"OK labels #{number}: {sorted(desired)}")
|
||||||
PY
|
PY
|
||||||
@@ -3,7 +3,7 @@ name: CI
|
|||||||
on:
|
on:
|
||||||
push:
|
push:
|
||||||
pull_request:
|
pull_request:
|
||||||
branches: [master]
|
branches: [main]
|
||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
|
|
||||||
env:
|
env:
|
||||||
@@ -15,7 +15,7 @@ defaults:
|
|||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
build-and-anchors:
|
build-and-anchors:
|
||||||
runs-on: ubuntu-latest
|
runs-on: mac-ci
|
||||||
container:
|
container:
|
||||||
image: mcr.microsoft.com/devcontainers/javascript-node:22-bookworm
|
image: mcr.microsoft.com/devcontainers/javascript-node:22-bookworm
|
||||||
|
|
||||||
|
|||||||
@@ -6,7 +6,7 @@ on:
|
|||||||
workflow_dispatch:
|
workflow_dispatch:
|
||||||
inputs:
|
inputs:
|
||||||
force:
|
force:
|
||||||
description: "Force deploy even if gate would skip (1=yes, 0=no)"
|
description: "Force FULL deploy (rebuild+restart) even if gate would hotpatch-only (1=yes, 0=no)"
|
||||||
required: false
|
required: false
|
||||||
default: "0"
|
default: "0"
|
||||||
|
|
||||||
@@ -14,6 +14,7 @@ env:
|
|||||||
NODE_OPTIONS: --dns-result-order=ipv4first
|
NODE_OPTIONS: --dns-result-order=ipv4first
|
||||||
DOCKER_API_VERSION: "1.43"
|
DOCKER_API_VERSION: "1.43"
|
||||||
COMPOSE_VERSION: "2.29.7"
|
COMPOSE_VERSION: "2.29.7"
|
||||||
|
ASTRO_TELEMETRY_DISABLED: "1"
|
||||||
|
|
||||||
defaults:
|
defaults:
|
||||||
run:
|
run:
|
||||||
@@ -25,9 +26,9 @@ concurrency:
|
|||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
deploy:
|
deploy:
|
||||||
runs-on: ubuntu-latest
|
runs-on: nas-deploy
|
||||||
container:
|
container:
|
||||||
image: mcr.microsoft.com/devcontainers/javascript-node:22-bookworm
|
image: localhost:5000/archicratie/nas-deploy-node22@sha256:fefa8bb307005cebec07796661ab25528dc319c33a8f1e480e1d66f90cd5cff6
|
||||||
|
|
||||||
steps:
|
steps:
|
||||||
- name: Tools sanity
|
- name: Tools sanity
|
||||||
@@ -37,135 +38,188 @@ jobs:
|
|||||||
node --version
|
node --version
|
||||||
npm --version
|
npm --version
|
||||||
|
|
||||||
- name: Checkout (from event.json, no external actions)
|
- name: Checkout (push or workflow_dispatch, no external actions)
|
||||||
|
env:
|
||||||
|
EVENT_JSON: /var/run/act/workflow/event.json
|
||||||
run: |
|
run: |
|
||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
EVENT_JSON="/var/run/act/workflow/event.json"
|
|
||||||
test -f "$EVENT_JSON" || { echo "❌ Missing $EVENT_JSON"; exit 1; }
|
test -f "$EVENT_JSON" || { echo "❌ Missing $EVENT_JSON"; exit 1; }
|
||||||
|
|
||||||
# Node prints REPO_URL, DEFAULT_BRANCH, REF, SHA_CAND (may be empty in workflow_dispatch)
|
node --input-type=module <<'NODE'
|
||||||
OUT="$(node --input-type=module -e '
|
import fs from "node:fs";
|
||||||
import fs from "node:fs";
|
|
||||||
const ev = JSON.parse(fs.readFileSync(process.env.EVENT_JSON,"utf8"));
|
|
||||||
|
|
||||||
const repoObj = ev?.repository || {};
|
const ev = JSON.parse(fs.readFileSync(process.env.EVENT_JSON, "utf8"));
|
||||||
const repo =
|
const repoObj = ev?.repository || {};
|
||||||
repoObj?.clone_url ||
|
|
||||||
(repoObj?.html_url ? (repoObj.html_url.replace(/\/$/,"") + ".git") : "");
|
|
||||||
|
|
||||||
if (!repo) throw new Error("No repository url in event.json");
|
const cloneUrl =
|
||||||
|
repoObj?.clone_url ||
|
||||||
|
(repoObj?.html_url ? (repoObj.html_url.replace(/\/$/,"") + ".git") : "");
|
||||||
|
if (!cloneUrl) throw new Error("No repository clone_url/html_url in event.json");
|
||||||
|
|
||||||
const defaultBranch = repoObj?.default_branch || "main";
|
const defaultBranch = repoObj?.default_branch || "main";
|
||||||
|
|
||||||
const ref =
|
// Push-range (most reliable for change detection)
|
||||||
ev?.ref || `refs/heads/${defaultBranch}`;
|
const before = String(ev?.before || "").trim();
|
||||||
|
const after =
|
||||||
|
(process.env.GITHUB_SHA && String(process.env.GITHUB_SHA).trim()) ||
|
||||||
|
String(ev?.after || ev?.sha || ev?.head_commit?.id || ev?.pull_request?.head?.sha || "").trim();
|
||||||
|
|
||||||
const sha =
|
const shq = (s) => "'" + String(s).replace(/'/g, "'\\''") + "'";
|
||||||
ev?.after ||
|
|
||||||
ev?.pull_request?.head?.sha ||
|
|
||||||
ev?.head_commit?.id ||
|
|
||||||
ev?.sha ||
|
|
||||||
"";
|
|
||||||
|
|
||||||
process.stdout.write(
|
fs.writeFileSync("/tmp/deploy.env", [
|
||||||
`REPO_URL=${JSON.stringify(repo)}\n` +
|
`REPO_URL=${shq(cloneUrl)}`,
|
||||||
`DEFAULT_BRANCH=${JSON.stringify(defaultBranch)}\n` +
|
`DEFAULT_BRANCH=${shq(defaultBranch)}`,
|
||||||
`REF=${JSON.stringify(ref)}\n` +
|
`BEFORE=${shq(before)}`,
|
||||||
`SHA_CAND=${JSON.stringify(sha)}\n`
|
`AFTER=${shq(after)}`
|
||||||
);
|
].join("\n") + "\n");
|
||||||
' EVENT_JSON="$EVENT_JSON")" || { echo "❌ Cannot parse event.json"; exit 1; }"
|
NODE
|
||||||
|
|
||||||
eval "$OUT"
|
source /tmp/deploy.env
|
||||||
|
echo "Repo URL: $REPO_URL"
|
||||||
echo "Repo URL: $REPO_URL"
|
echo "Default branch: $DEFAULT_BRANCH"
|
||||||
echo "Default branch: $DEFAULT_BRANCH"
|
echo "BEFORE: ${BEFORE:-<empty>}"
|
||||||
echo "Ref: $REF"
|
echo "AFTER: ${AFTER:-<empty>}"
|
||||||
echo "SHA candidate: ${SHA_CAND:-<empty>}"
|
|
||||||
|
|
||||||
rm -rf .git
|
rm -rf .git
|
||||||
git init -q
|
git init -q
|
||||||
git remote add origin "$REPO_URL"
|
git remote add origin "$REPO_URL"
|
||||||
|
|
||||||
if [[ -n "${SHA_CAND:-}" ]]; then
|
# Checkout AFTER (or default branch if missing)
|
||||||
echo "Checkout by SHA: $SHA_CAND"
|
if [[ -n "${AFTER:-}" ]]; then
|
||||||
git fetch --depth 1 origin "$SHA_CAND"
|
git fetch --depth 50 origin "$AFTER"
|
||||||
git -c advice.detachedHead=false checkout -q FETCH_HEAD
|
git -c advice.detachedHead=false checkout -q FETCH_HEAD
|
||||||
else
|
else
|
||||||
# workflow_dispatch often has no SHA; fetch by ref/branch
|
git fetch --depth 50 origin "$DEFAULT_BRANCH"
|
||||||
REF_TO_FETCH="$REF"
|
git -c advice.detachedHead=false checkout -q "origin/$DEFAULT_BRANCH"
|
||||||
if [[ "$REF_TO_FETCH" == refs/heads/* ]]; then
|
AFTER="$(git rev-parse HEAD)"
|
||||||
REF_TO_FETCH="${REF_TO_FETCH#refs/heads/}"
|
echo "AFTER='$AFTER'" >> /tmp/deploy.env
|
||||||
fi
|
echo "Resolved AFTER: $AFTER"
|
||||||
echo "Checkout by ref: $REF_TO_FETCH"
|
|
||||||
git fetch --depth 1 origin "$REF_TO_FETCH"
|
|
||||||
git -c advice.detachedHead=false checkout -q FETCH_HEAD
|
|
||||||
fi
|
fi
|
||||||
|
|
||||||
SHA="$(git rev-parse HEAD)"
|
|
||||||
git log -1 --oneline
|
git log -1 --oneline
|
||||||
echo "SHA=$SHA" >> /tmp/deploy.env
|
|
||||||
echo "REPO_URL=$REPO_URL" >> /tmp/deploy.env
|
|
||||||
echo "DEFAULT_BRANCH=$DEFAULT_BRANCH" >> /tmp/deploy.env
|
|
||||||
|
|
||||||
- name: Gate — auto deploy only on annotations/media changes
|
- name: Gate — decide SKIP vs HOTPATCH vs FULL rebuild
|
||||||
env:
|
env:
|
||||||
INPUT_FORCE: ${{ inputs.force }}
|
INPUT_FORCE: ${{ inputs.force }}
|
||||||
|
EVENT_JSON: /var/run/act/workflow/event.json
|
||||||
run: |
|
run: |
|
||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
source /tmp/deploy.env
|
source /tmp/deploy.env
|
||||||
|
|
||||||
FORCE="${INPUT_FORCE:-0}"
|
FORCE="${INPUT_FORCE:-0}"
|
||||||
if [[ "$FORCE" == "1" ]]; then
|
|
||||||
echo "✅ force=1 -> bypass gate -> deploy allowed"
|
# Lire before/after du push depuis event.json (merge-proof)
|
||||||
echo "GO=1" >> /tmp/deploy.env
|
node --input-type=module <<'NODE'
|
||||||
exit 0
|
import fs from "node:fs";
|
||||||
|
const ev = JSON.parse(fs.readFileSync(process.env.EVENT_JSON, "utf8"));
|
||||||
|
const before = ev?.before || "";
|
||||||
|
const after = ev?.after || ev?.sha || "";
|
||||||
|
const shq = (s) => "'" + String(s).replace(/'/g, "'\\''") + "'";
|
||||||
|
fs.writeFileSync("/tmp/gate.env", [
|
||||||
|
`EV_BEFORE=${shq(before)}`,
|
||||||
|
`EV_AFTER=${shq(after)}`
|
||||||
|
].join("\n") + "\n");
|
||||||
|
NODE
|
||||||
|
|
||||||
|
source /tmp/gate.env
|
||||||
|
|
||||||
|
BEFORE="${EV_BEFORE:-}"
|
||||||
|
AFTER="${EV_AFTER:-}"
|
||||||
|
if [[ -z "${AFTER:-}" ]]; then
|
||||||
|
AFTER="${SHA:-}"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
# Robust changed-files list (merge commits included)
|
echo "Gate ctx: BEFORE=${BEFORE:-<empty>} AFTER=${AFTER:-<empty>} FORCE=${FORCE}"
|
||||||
# Prefer diff vs first parent; fallback to git show.
|
|
||||||
if git rev-parse "${SHA}^" >/dev/null 2>&1; then
|
# Produire une liste CHANGED fiable :
|
||||||
CHANGED="$(git diff --name-only "${SHA}^" "$SHA" || true)"
|
# - si BEFORE/AFTER valides -> git diff before..after
|
||||||
|
# - sinon fallback -> diff parent1..after ou show after
|
||||||
|
CHANGED=""
|
||||||
|
Z40="0000000000000000000000000000000000000000"
|
||||||
|
|
||||||
|
if [[ -n "${BEFORE:-}" && "${BEFORE}" != "${Z40}" ]] \
|
||||||
|
&& git cat-file -e "${BEFORE}^{commit}" 2>/dev/null \
|
||||||
|
&& git cat-file -e "${AFTER}^{commit}" 2>/dev/null; then
|
||||||
|
CHANGED="$(git diff --name-only "${BEFORE}" "${AFTER}" || true)"
|
||||||
else
|
else
|
||||||
CHANGED=""
|
P1="$(git rev-parse "${AFTER}^" 2>/dev/null || true)"
|
||||||
fi
|
if [[ -n "${P1:-}" ]] && git cat-file -e "${P1}^{commit}" 2>/dev/null; then
|
||||||
if [[ -z "$CHANGED" ]]; then
|
CHANGED="$(git diff --name-only "${P1}" "${AFTER}" || true)"
|
||||||
CHANGED="$(git show --name-only --pretty="" -m "$SHA" | sed '/^$/d' || true)"
|
else
|
||||||
|
CHANGED="$(git show --name-only --pretty="" "${AFTER}" | sed '/^$/d' || true)"
|
||||||
|
fi
|
||||||
fi
|
fi
|
||||||
|
|
||||||
echo "== changed files =="
|
printf "%s\n" "${CHANGED}" > /tmp/changed.txt
|
||||||
echo "$CHANGED" | sed -n '1,200p'
|
|
||||||
|
|
||||||
if echo "$CHANGED" | grep -qE '^(src/annotations/|public/media/)'; then
|
echo "== changed files (first 200) =="
|
||||||
echo "GO=1" >> /tmp/deploy.env
|
sed -n '1,200p' /tmp/changed.txt || true
|
||||||
echo "✅ deploy allowed (annotations/media change detected)"
|
|
||||||
|
# Flags
|
||||||
|
HAS_FULL=0
|
||||||
|
HAS_HOTPATCH=0
|
||||||
|
|
||||||
|
# FULL si build-impacting (ce que tu veux : content/anchors/pages/scripts)
|
||||||
|
if grep -qE '^(src/content/|src/anchors/|src/pages/|scripts/)' /tmp/changed.txt; then
|
||||||
|
HAS_FULL=1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# HOTPATCH si annotations/media touchés
|
||||||
|
if grep -qE '^(src/annotations/|public/media/)' /tmp/changed.txt; then
|
||||||
|
HAS_HOTPATCH=1
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "Gate flags: HAS_FULL=${HAS_FULL} HAS_HOTPATCH=${HAS_HOTPATCH}"
|
||||||
|
|
||||||
|
# Décision
|
||||||
|
if [[ "${FORCE}" == "1" ]]; then
|
||||||
|
GO=1
|
||||||
|
MODE="full"
|
||||||
|
echo "✅ force=1 -> MODE=full (rebuild+restart)"
|
||||||
|
elif [[ "${HAS_FULL}" == "1" ]]; then
|
||||||
|
GO=1
|
||||||
|
MODE="full"
|
||||||
|
echo "✅ build-impacting change -> MODE=full (rebuild+restart)"
|
||||||
|
elif [[ "${HAS_HOTPATCH}" == "1" ]]; then
|
||||||
|
GO=1
|
||||||
|
MODE="hotpatch"
|
||||||
|
echo "✅ annotations/media change -> MODE=hotpatch"
|
||||||
else
|
else
|
||||||
echo "GO=0" >> /tmp/deploy.env
|
GO=0
|
||||||
echo "ℹ️ no annotations/media change -> skip deploy"
|
MODE="skip"
|
||||||
|
echo "ℹ️ no relevant change -> skip deploy"
|
||||||
fi
|
fi
|
||||||
|
|
||||||
- name: Install docker client + docker compose plugin (v2)
|
echo "GO=${GO}" >> /tmp/deploy.env
|
||||||
|
echo "MODE='${MODE}'" >> /tmp/deploy.env
|
||||||
|
|
||||||
|
- name: Toolchain sanity + resolve COMPOSE_PROJECT_NAME
|
||||||
run: |
|
run: |
|
||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
source /tmp/deploy.env
|
source /tmp/deploy.env
|
||||||
[[ "${GO:-0}" == "1" ]] || { echo "ℹ️ skipped"; exit 0; }
|
[[ "${GO:-0}" == "1" ]] || { echo "ℹ️ skipped"; exit 0; }
|
||||||
|
|
||||||
# Must have docker socket mounted by runner
|
# tools are prebaked in the image
|
||||||
test -S /var/run/docker.sock || { echo "❌ /var/run/docker.sock missing in job container"; exit 10; }
|
git --version
|
||||||
|
|
||||||
apt-get -o Acquire::Retries=5 -o Acquire::ForceIPv4=true update
|
|
||||||
apt-get install -y --no-install-recommends ca-certificates curl docker.io
|
|
||||||
rm -rf /var/lib/apt/lists/*
|
|
||||||
|
|
||||||
mkdir -p /usr/local/lib/docker/cli-plugins
|
|
||||||
curl -fsSL \
|
|
||||||
"https://github.com/docker/compose/releases/download/v${COMPOSE_VERSION}/docker-compose-linux-x86_64" \
|
|
||||||
-o /usr/local/lib/docker/cli-plugins/docker-compose
|
|
||||||
chmod +x /usr/local/lib/docker/cli-plugins/docker-compose
|
|
||||||
|
|
||||||
docker version
|
docker version
|
||||||
docker compose version
|
docker compose version
|
||||||
|
python3 -c 'import yaml; print("PyYAML OK")'
|
||||||
|
|
||||||
- name: Assert required vars (PUBLIC_GITEA_*)
|
# Reuse existing compose project name if containers already exist
|
||||||
|
PROJ="$(docker inspect archicratie-web-blue --format '{{ index .Config.Labels "com.docker.compose.project" }}' 2>/dev/null || true)"
|
||||||
|
if [[ -z "${PROJ:-}" ]]; then
|
||||||
|
PROJ="$(docker inspect archicratie-web-green --format '{{ index .Config.Labels "com.docker.compose.project" }}' 2>/dev/null || true)"
|
||||||
|
fi
|
||||||
|
if [[ -z "${PROJ:-}" ]]; then PROJ="archicratie-web"; fi
|
||||||
|
echo "COMPOSE_PROJECT_NAME='$PROJ'" >> /tmp/deploy.env
|
||||||
|
echo "✅ Using COMPOSE_PROJECT_NAME=$PROJ"
|
||||||
|
|
||||||
|
# Assert target containers exist (hotpatch needs them)
|
||||||
|
for c in archicratie-web-blue archicratie-web-green; do
|
||||||
|
docker inspect "$c" >/dev/null 2>&1 || { echo "❌ missing container $c"; exit 5; }
|
||||||
|
done
|
||||||
|
|
||||||
|
- name: Assert required vars (PUBLIC_GITEA_*) — only needed for MODE=full
|
||||||
env:
|
env:
|
||||||
PUBLIC_GITEA_BASE: ${{ vars.PUBLIC_GITEA_BASE }}
|
PUBLIC_GITEA_BASE: ${{ vars.PUBLIC_GITEA_BASE }}
|
||||||
PUBLIC_GITEA_OWNER: ${{ vars.PUBLIC_GITEA_OWNER }}
|
PUBLIC_GITEA_OWNER: ${{ vars.PUBLIC_GITEA_OWNER }}
|
||||||
@@ -174,24 +228,26 @@ jobs:
|
|||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
source /tmp/deploy.env
|
source /tmp/deploy.env
|
||||||
[[ "${GO:-0}" == "1" ]] || { echo "ℹ️ skipped"; exit 0; }
|
[[ "${GO:-0}" == "1" ]] || { echo "ℹ️ skipped"; exit 0; }
|
||||||
|
[[ "${MODE:-hotpatch}" == "full" ]] || { echo "ℹ️ hotpatch mode -> vars not required"; exit 0; }
|
||||||
|
|
||||||
test -n "${PUBLIC_GITEA_BASE:-}" || { echo "❌ missing repo var PUBLIC_GITEA_BASE"; exit 2; }
|
test -n "${PUBLIC_GITEA_BASE:-}" || { echo "❌ missing repo var PUBLIC_GITEA_BASE"; exit 2; }
|
||||||
test -n "${PUBLIC_GITEA_OWNER:-}" || { echo "❌ missing repo var PUBLIC_GITEA_OWNER"; exit 2; }
|
test -n "${PUBLIC_GITEA_OWNER:-}" || { echo "❌ missing repo var PUBLIC_GITEA_OWNER"; exit 2; }
|
||||||
test -n "${PUBLIC_GITEA_REPO:-}" || { echo "❌ missing repo var PUBLIC_GITEA_REPO"; exit 2; }
|
test -n "${PUBLIC_GITEA_REPO:-}" || { echo "❌ missing repo var PUBLIC_GITEA_REPO"; exit 2; }
|
||||||
echo "✅ vars OK"
|
echo "✅ vars OK"
|
||||||
|
|
||||||
- name: Assert deploy files exist
|
- name: Assert deploy files exist — only needed for MODE=full
|
||||||
run: |
|
run: |
|
||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
source /tmp/deploy.env
|
source /tmp/deploy.env
|
||||||
[[ "${GO:-0}" == "1" ]] || { echo "ℹ️ skipped"; exit 0; }
|
[[ "${GO:-0}" == "1" ]] || { echo "ℹ️ skipped"; exit 0; }
|
||||||
|
[[ "${MODE:-hotpatch}" == "full" ]] || { echo "ℹ️ hotpatch mode -> files not required"; exit 0; }
|
||||||
|
|
||||||
test -f docker-compose.yml
|
test -f docker-compose.yml
|
||||||
test -f Dockerfile
|
test -f Dockerfile
|
||||||
test -f nginx.conf
|
test -f nginx.conf
|
||||||
echo "✅ deploy files OK"
|
echo "✅ deploy files OK"
|
||||||
|
|
||||||
- name: Build + deploy staging (blue) then smoke
|
- name: FULL — Build + deploy staging (blue) then warmup+smoke
|
||||||
env:
|
env:
|
||||||
PUBLIC_GITEA_BASE: ${{ vars.PUBLIC_GITEA_BASE }}
|
PUBLIC_GITEA_BASE: ${{ vars.PUBLIC_GITEA_BASE }}
|
||||||
PUBLIC_GITEA_OWNER: ${{ vars.PUBLIC_GITEA_OWNER }}
|
PUBLIC_GITEA_OWNER: ${{ vars.PUBLIC_GITEA_OWNER }}
|
||||||
@@ -200,27 +256,51 @@ jobs:
|
|||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
source /tmp/deploy.env
|
source /tmp/deploy.env
|
||||||
[[ "${GO:-0}" == "1" ]] || { echo "ℹ️ skipped"; exit 0; }
|
[[ "${GO:-0}" == "1" ]] || { echo "ℹ️ skipped"; exit 0; }
|
||||||
|
[[ "${MODE:-hotpatch}" == "full" ]] || { echo "ℹ️ MODE=$MODE -> skip full rebuild"; exit 0; }
|
||||||
|
|
||||||
|
PROJ="${COMPOSE_PROJECT_NAME:-archicratie-web}"
|
||||||
|
|
||||||
|
wait_url() {
|
||||||
|
local url="$1"
|
||||||
|
local label="$2"
|
||||||
|
local tries="${3:-60}"
|
||||||
|
for i in $(seq 1 "$tries"); do
|
||||||
|
if curl -fsS --max-time 4 "$url" >/dev/null; then
|
||||||
|
echo "✅ $label OK ($url)"
|
||||||
|
return 0
|
||||||
|
fi
|
||||||
|
echo "… warmup $label ($i/$tries)"
|
||||||
|
sleep 1
|
||||||
|
done
|
||||||
|
echo "❌ timeout $label ($url)"
|
||||||
|
return 1
|
||||||
|
}
|
||||||
|
|
||||||
TS="$(date -u +%Y%m%d-%H%M%S)"
|
TS="$(date -u +%Y%m%d-%H%M%S)"
|
||||||
echo "TS=$TS" >> /tmp/deploy.env
|
echo "TS='$TS'" >> /tmp/deploy.env
|
||||||
|
|
||||||
docker image tag archicratie-web:blue "archicratie-web:blue.BAK.${TS}" || true
|
docker image tag archicratie-web:blue "archicratie-web:blue.BAK.${TS}" || true
|
||||||
docker image tag archicratie-web:green "archicratie-web:green.BAK.${TS}" || true
|
docker image tag archicratie-web:green "archicratie-web:green.BAK.${TS}" || true
|
||||||
|
|
||||||
docker compose build --no-cache web_blue
|
docker compose -p "$PROJ" -f docker-compose.yml build web_blue
|
||||||
docker compose up -d --force-recreate web_blue
|
docker rm -f archicratie-web-blue || true
|
||||||
|
docker compose -p "$PROJ" -f docker-compose.yml up -d --force-recreate --remove-orphans web_blue
|
||||||
|
|
||||||
curl -fsS "http://127.0.0.1:8081/para-index.json" >/dev/null
|
# warmup endpoints
|
||||||
curl -fsS "http://127.0.0.1:8081/annotations-index.json" >/dev/null
|
wait_url "http://127.0.0.1:8081/para-index.json" "blue para-index"
|
||||||
curl -fsS "http://127.0.0.1:8081/pagefind/pagefind.js" >/dev/null
|
wait_url "http://127.0.0.1:8081/annotations-index.json" "blue annotations-index"
|
||||||
|
wait_url "http://127.0.0.1:8081/pagefind/pagefind.js" "blue pagefind.js"
|
||||||
|
|
||||||
CANON="$(curl -fsS "http://127.0.0.1:8081/archicrat-ia/chapitre-1/" | grep -oE 'rel="canonical" href="[^"]+"' | head -n1 || true)"
|
CANON="$(curl -fsS --max-time 6 "http://127.0.0.1:8081/archicrat-ia/chapitre-1/" | grep -oE 'rel="canonical" href="[^"]+"' | head -n1 || true)"
|
||||||
echo "canonical(blue)=$CANON"
|
echo "canonical(blue)=$CANON"
|
||||||
echo "$CANON" | grep -q 'https://staging\.archicratie\.trans-hands\.synology\.me/' || { echo "❌ staging canonical mismatch"; exit 3; }
|
echo "$CANON" | grep -q 'https://staging\.archicratie\.trans-hands\.synology\.me/' || {
|
||||||
|
echo "❌ staging canonical mismatch"
|
||||||
|
docker logs --tail 120 archicratie-web-blue || true
|
||||||
|
exit 3
|
||||||
|
}
|
||||||
|
|
||||||
echo "✅ staging OK"
|
echo "✅ staging OK"
|
||||||
|
|
||||||
- name: Build + deploy live (green) then smoke + rollback if needed
|
- name: FULL — Build + deploy live (green) then warmup+smoke + rollback if needed
|
||||||
env:
|
env:
|
||||||
PUBLIC_GITEA_BASE: ${{ vars.PUBLIC_GITEA_BASE }}
|
PUBLIC_GITEA_BASE: ${{ vars.PUBLIC_GITEA_BASE }}
|
||||||
PUBLIC_GITEA_OWNER: ${{ vars.PUBLIC_GITEA_OWNER }}
|
PUBLIC_GITEA_OWNER: ${{ vars.PUBLIC_GITEA_OWNER }}
|
||||||
@@ -229,31 +309,253 @@ jobs:
|
|||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
source /tmp/deploy.env
|
source /tmp/deploy.env
|
||||||
[[ "${GO:-0}" == "1" ]] || { echo "ℹ️ skipped"; exit 0; }
|
[[ "${GO:-0}" == "1" ]] || { echo "ℹ️ skipped"; exit 0; }
|
||||||
|
[[ "${MODE:-hotpatch}" == "full" ]] || { echo "ℹ️ MODE=$MODE -> skip full rebuild"; exit 0; }
|
||||||
|
|
||||||
|
PROJ="${COMPOSE_PROJECT_NAME:-archicratie-web}"
|
||||||
TS="${TS:-$(date -u +%Y%m%d-%H%M%S)}"
|
TS="${TS:-$(date -u +%Y%m%d-%H%M%S)}"
|
||||||
|
|
||||||
|
wait_url() {
|
||||||
|
local url="$1"
|
||||||
|
local label="$2"
|
||||||
|
local tries="${3:-60}"
|
||||||
|
for i in $(seq 1 "$tries"); do
|
||||||
|
if curl -fsS --max-time 4 "$url" >/dev/null; then
|
||||||
|
echo "✅ $label OK ($url)"
|
||||||
|
return 0
|
||||||
|
fi
|
||||||
|
echo "… warmup $label ($i/$tries)"
|
||||||
|
sleep 1
|
||||||
|
done
|
||||||
|
echo "❌ timeout $label ($url)"
|
||||||
|
return 1
|
||||||
|
}
|
||||||
|
|
||||||
rollback() {
|
rollback() {
|
||||||
echo "⚠️ rollback green -> previous image tag (best effort)"
|
echo "⚠️ rollback green -> previous image tag (best effort)"
|
||||||
docker image tag "archicratie-web:green.BAK.${TS}" archicratie-web:green || true
|
docker image tag "archicratie-web:green.BAK.${TS}" archicratie-web:green || true
|
||||||
docker compose up -d --force-recreate web_green || true
|
docker rm -f archicratie-web-green || true
|
||||||
|
docker compose -p "$PROJ" -f docker-compose.yml up -d --force-recreate --remove-orphans web_green || true
|
||||||
}
|
}
|
||||||
|
|
||||||
set +e
|
# build/restart green
|
||||||
docker compose build --no-cache web_green
|
if ! docker compose -p "$PROJ" -f docker-compose.yml build web_green; then
|
||||||
BRC=$?
|
echo "❌ build green failed"; rollback; exit 4
|
||||||
[[ "$BRC" -eq 0 ]] || { echo "❌ build green failed"; rollback; exit 4; }
|
fi
|
||||||
|
|
||||||
docker compose up -d --force-recreate web_green
|
docker rm -f archicratie-web-green || true
|
||||||
URC=$?
|
docker compose -p "$PROJ" -f docker-compose.yml up -d --force-recreate --remove-orphans web_green
|
||||||
[[ "$URC" -eq 0 ]] || { echo "❌ up green failed"; rollback; exit 4; }
|
|
||||||
|
|
||||||
curl -fsS "http://127.0.0.1:8082/para-index.json" >/dev/null || { rollback; exit 4; }
|
# warmup endpoints
|
||||||
curl -fsS "http://127.0.0.1:8082/annotations-index.json" >/dev/null || { rollback; exit 4; }
|
if ! wait_url "http://127.0.0.1:8082/para-index.json" "green para-index"; then rollback; exit 4; fi
|
||||||
curl -fsS "http://127.0.0.1:8082/pagefind/pagefind.js" >/dev/null || { rollback; exit 4; }
|
if ! wait_url "http://127.0.0.1:8082/annotations-index.json" "green annotations-index"; then rollback; exit 4; fi
|
||||||
|
if ! wait_url "http://127.0.0.1:8082/pagefind/pagefind.js" "green pagefind.js"; then rollback; exit 4; fi
|
||||||
|
|
||||||
CANON="$(curl -fsS "http://127.0.0.1:8082/archicrat-ia/chapitre-1/" | grep -oE 'rel="canonical" href="[^"]+"' | head -n1 || true)"
|
CANON="$(curl -fsS --max-time 6 "http://127.0.0.1:8082/archicrat-ia/chapitre-1/" | grep -oE 'rel="canonical" href="[^"]+"' | head -n1 || true)"
|
||||||
echo "canonical(green)=$CANON"
|
echo "canonical(green)=$CANON"
|
||||||
echo "$CANON" | grep -q 'https://archicratie\.trans-hands\.synology\.me/' || { echo "❌ live canonical mismatch"; rollback; exit 4; }
|
echo "$CANON" | grep -q 'https://archicratie\.trans-hands\.synology\.me/' || {
|
||||||
|
echo "❌ live canonical mismatch"
|
||||||
|
docker logs --tail 120 archicratie-web-green || true
|
||||||
|
rollback
|
||||||
|
exit 4
|
||||||
|
}
|
||||||
|
|
||||||
echo "✅ live OK"
|
echo "✅ live OK"
|
||||||
set -e
|
|
||||||
|
- name: HOTPATCH — deep merge shards -> annotations-index + copy changed media into blue+green
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
source /tmp/deploy.env
|
||||||
|
[[ "${GO:-0}" == "1" ]] || { echo "ℹ️ skipped"; exit 0; }
|
||||||
|
|
||||||
|
python3 - <<'PY'
|
||||||
|
import os, re, json, glob
|
||||||
|
import yaml
|
||||||
|
import datetime as dt
|
||||||
|
|
||||||
|
ROOT = os.getcwd()
|
||||||
|
ANNO_ROOT = os.path.join(ROOT, "src", "annotations")
|
||||||
|
|
||||||
|
def is_obj(x): return isinstance(x, dict)
|
||||||
|
def is_arr(x): return isinstance(x, list)
|
||||||
|
|
||||||
|
def iso_dt(x):
|
||||||
|
if isinstance(x, dt.datetime):
|
||||||
|
if x.tzinfo is None:
|
||||||
|
return x.isoformat()
|
||||||
|
return x.astimezone(dt.timezone.utc).isoformat().replace("+00:00","Z")
|
||||||
|
if isinstance(x, dt.date):
|
||||||
|
return x.isoformat()
|
||||||
|
return None
|
||||||
|
|
||||||
|
def normalize(x):
|
||||||
|
s = iso_dt(x)
|
||||||
|
if s is not None: return s
|
||||||
|
if isinstance(x, dict):
|
||||||
|
return {str(k): normalize(v) for k, v in x.items()}
|
||||||
|
if isinstance(x, list):
|
||||||
|
return [normalize(v) for v in x]
|
||||||
|
return x
|
||||||
|
|
||||||
|
def key_media(it): return str((it or {}).get("src",""))
|
||||||
|
def key_ref(it):
|
||||||
|
it = it or {}
|
||||||
|
return "||".join([str(it.get("url","")), str(it.get("label","")), str(it.get("kind","")), str(it.get("citation",""))])
|
||||||
|
def key_comment(it): return str((it or {}).get("text","")).strip()
|
||||||
|
|
||||||
|
def dedup_extend(dst_list, src_list, key_fn):
|
||||||
|
seen = set(); out = []
|
||||||
|
for x in (dst_list or []):
|
||||||
|
x = normalize(x); k = key_fn(x)
|
||||||
|
if k and k not in seen: seen.add(k); out.append(x)
|
||||||
|
for x in (src_list or []):
|
||||||
|
x = normalize(x); k = key_fn(x)
|
||||||
|
if k and k not in seen: seen.add(k); out.append(x)
|
||||||
|
return out
|
||||||
|
|
||||||
|
def deep_merge(dst, src):
|
||||||
|
src = normalize(src)
|
||||||
|
for k, v in (src or {}).items():
|
||||||
|
if k in ("media","refs","comments_editorial") and is_arr(v):
|
||||||
|
if k == "media": dst[k] = dedup_extend(dst.get(k, []), v, key_media)
|
||||||
|
elif k == "refs": dst[k] = dedup_extend(dst.get(k, []), v, key_ref)
|
||||||
|
else: dst[k] = dedup_extend(dst.get(k, []), v, key_comment)
|
||||||
|
continue
|
||||||
|
|
||||||
|
if is_obj(v):
|
||||||
|
if not is_obj(dst.get(k)): dst[k] = {}
|
||||||
|
deep_merge(dst[k], v)
|
||||||
|
continue
|
||||||
|
|
||||||
|
if is_arr(v):
|
||||||
|
cur = dst.get(k, [])
|
||||||
|
if not is_arr(cur): cur = []
|
||||||
|
seen = set(); out = []
|
||||||
|
for x in cur:
|
||||||
|
x = normalize(x)
|
||||||
|
s = json.dumps(x, sort_keys=True, ensure_ascii=False)
|
||||||
|
if s not in seen: seen.add(s); out.append(x)
|
||||||
|
for x in v:
|
||||||
|
x = normalize(x)
|
||||||
|
s = json.dumps(x, sort_keys=True, ensure_ascii=False)
|
||||||
|
if s not in seen: seen.add(s); out.append(x)
|
||||||
|
dst[k] = out
|
||||||
|
continue
|
||||||
|
|
||||||
|
v = normalize(v)
|
||||||
|
if k not in dst or dst.get(k) in (None, ""):
|
||||||
|
dst[k] = v
|
||||||
|
|
||||||
|
def para_num(pid):
|
||||||
|
m = re.match(r"^p-(\d+)-", str(pid))
|
||||||
|
return int(m.group(1)) if m else 10**9
|
||||||
|
|
||||||
|
def sort_lists(entry):
|
||||||
|
for k in ("media","refs","comments_editorial"):
|
||||||
|
arr = entry.get(k)
|
||||||
|
if not is_arr(arr): continue
|
||||||
|
def ts(x):
|
||||||
|
x = normalize(x)
|
||||||
|
try:
|
||||||
|
s = str((x or {}).get("ts",""))
|
||||||
|
return dt.datetime.fromisoformat(s.replace("Z","+00:00")).timestamp() if s else 0
|
||||||
|
except Exception:
|
||||||
|
return 0
|
||||||
|
arr = [normalize(x) for x in arr]
|
||||||
|
arr.sort(key=lambda x: (ts(x), json.dumps(x, sort_keys=True, ensure_ascii=False)))
|
||||||
|
entry[k] = arr
|
||||||
|
|
||||||
|
if not os.path.isdir(ANNO_ROOT):
|
||||||
|
raise SystemExit(f"Missing annotations root: {ANNO_ROOT}")
|
||||||
|
|
||||||
|
pages = {}
|
||||||
|
errors = []
|
||||||
|
|
||||||
|
files = sorted(glob.glob(os.path.join(ANNO_ROOT, "**", "*.yml"), recursive=True))
|
||||||
|
for fp in files:
|
||||||
|
try:
|
||||||
|
with open(fp, "r", encoding="utf-8") as f:
|
||||||
|
doc = yaml.safe_load(f) or {}
|
||||||
|
doc = normalize(doc)
|
||||||
|
if not isinstance(doc, dict) or doc.get("schema") != 1:
|
||||||
|
continue
|
||||||
|
|
||||||
|
page = str(doc.get("page","")).strip().strip("/")
|
||||||
|
paras = doc.get("paras") or {}
|
||||||
|
if not page or not isinstance(paras, dict):
|
||||||
|
continue
|
||||||
|
|
||||||
|
pg = pages.setdefault(page, {"paras": {}})
|
||||||
|
for pid, entry in paras.items():
|
||||||
|
pid = str(pid)
|
||||||
|
if pid not in pg["paras"] or not isinstance(pg["paras"].get(pid), dict):
|
||||||
|
pg["paras"][pid] = {}
|
||||||
|
if isinstance(entry, dict):
|
||||||
|
deep_merge(pg["paras"][pid], entry)
|
||||||
|
sort_lists(pg["paras"][pid])
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
errors.append({"file": os.path.relpath(fp, ROOT), "error": str(e)})
|
||||||
|
|
||||||
|
for page, obj in pages.items():
|
||||||
|
keys = list((obj.get("paras") or {}).keys())
|
||||||
|
keys.sort(key=lambda k: (para_num(k), k))
|
||||||
|
obj["paras"] = {k: obj["paras"][k] for k in keys}
|
||||||
|
|
||||||
|
out = {
|
||||||
|
"schema": 1,
|
||||||
|
"generatedAt": dt.datetime.utcnow().replace(tzinfo=dt.timezone.utc).isoformat().replace("+00:00","Z"),
|
||||||
|
"pages": pages,
|
||||||
|
"stats": {
|
||||||
|
"pages": len(pages),
|
||||||
|
"paras": sum(len(v.get("paras") or {}) for v in pages.values()),
|
||||||
|
"errors": len(errors),
|
||||||
|
},
|
||||||
|
"errors": errors,
|
||||||
|
}
|
||||||
|
|
||||||
|
with open("/tmp/annotations-index.json", "w", encoding="utf-8") as f:
|
||||||
|
json.dump(out, f, ensure_ascii=False)
|
||||||
|
|
||||||
|
print("OK: wrote /tmp/annotations-index.json pages=", out["stats"]["pages"], "paras=", out["stats"]["paras"], "errors=", out["stats"]["errors"])
|
||||||
|
PY
|
||||||
|
|
||||||
|
# patch JSON into running containers
|
||||||
|
for c in archicratie-web-blue archicratie-web-green; do
|
||||||
|
echo "== patch annotations-index.json into $c =="
|
||||||
|
docker cp /tmp/annotations-index.json "${c}:/usr/share/nginx/html/annotations-index.json"
|
||||||
|
done
|
||||||
|
|
||||||
|
# copy changed media files into containers (so new media appears without rebuild)
|
||||||
|
if [[ -s /tmp/changed.txt ]]; then
|
||||||
|
while IFS= read -r f; do
|
||||||
|
[[ -n "$f" ]] || continue
|
||||||
|
if [[ "$f" == public/media/* ]]; then
|
||||||
|
dest="/usr/share/nginx/html/${f#public/}" # => /usr/share/nginx/html/media/...
|
||||||
|
for c in archicratie-web-blue archicratie-web-green; do
|
||||||
|
echo "== copy media into $c: $f -> $dest =="
|
||||||
|
docker exec "$c" sh -lc "mkdir -p \"$(dirname "$dest")\""
|
||||||
|
docker cp "$f" "$c:$dest"
|
||||||
|
done
|
||||||
|
fi
|
||||||
|
done < /tmp/changed.txt
|
||||||
|
fi
|
||||||
|
|
||||||
|
# smoke after patch
|
||||||
|
for p in 8081 8082; do
|
||||||
|
echo "== smoke annotations-index on $p =="
|
||||||
|
curl -fsS --max-time 6 "http://127.0.0.1:${p}/annotations-index.json" \
|
||||||
|
| python3 -c 'import sys,json; j=json.load(sys.stdin); print("generatedAt:", j.get("generatedAt")); print("pages:", len(j.get("pages") or {})); print("paras:", j.get("stats",{}).get("paras"))'
|
||||||
|
done
|
||||||
|
|
||||||
|
echo "✅ hotpatch done"
|
||||||
|
|
||||||
|
- name: Debug on failure (containers status/logs)
|
||||||
|
if: ${{ failure() }}
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
echo "== docker ps =="
|
||||||
|
docker ps --format 'table {{.Names}}\t{{.Status}}\t{{.Image}}' | sed -n '1,80p' || true
|
||||||
|
for c in archicratie-web-blue archicratie-web-green; do
|
||||||
|
echo "== logs $c (tail 200) =="
|
||||||
|
docker logs --tail 200 "$c" || true
|
||||||
|
done
|
||||||
395
.gitea/workflows/proposer-apply-pr.yml
Normal file
395
.gitea/workflows/proposer-apply-pr.yml
Normal file
@@ -0,0 +1,395 @@
|
|||||||
|
name: Proposer Apply (PR)
|
||||||
|
|
||||||
|
on:
|
||||||
|
issues:
|
||||||
|
types: [labeled]
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
issue:
|
||||||
|
description: "Issue number to apply (Proposer: correction/fact-check)"
|
||||||
|
required: true
|
||||||
|
|
||||||
|
env:
|
||||||
|
NODE_OPTIONS: --dns-result-order=ipv4first
|
||||||
|
|
||||||
|
defaults:
|
||||||
|
run:
|
||||||
|
shell: bash
|
||||||
|
|
||||||
|
concurrency:
|
||||||
|
group: proposer-apply-${{ github.event.issue.number || inputs.issue || 'manual' }}
|
||||||
|
cancel-in-progress: true
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
apply-proposer:
|
||||||
|
runs-on: mac-ci
|
||||||
|
container:
|
||||||
|
image: mcr.microsoft.com/devcontainers/javascript-node:22-bookworm
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Tools sanity
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
git --version
|
||||||
|
node --version
|
||||||
|
npm --version
|
||||||
|
|
||||||
|
- name: Derive context (event.json / workflow_dispatch)
|
||||||
|
env:
|
||||||
|
INPUT_ISSUE: ${{ inputs.issue }}
|
||||||
|
FORGE_API: ${{ vars.FORGE_API || vars.FORGE_BASE }}
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
export EVENT_JSON="/var/run/act/workflow/event.json"
|
||||||
|
test -f "$EVENT_JSON" || { echo "❌ Missing $EVENT_JSON"; exit 1; }
|
||||||
|
|
||||||
|
node --input-type=module - <<'NODE' > /tmp/proposer.env
|
||||||
|
import fs from "node:fs";
|
||||||
|
|
||||||
|
const ev = JSON.parse(fs.readFileSync(process.env.EVENT_JSON, "utf8"));
|
||||||
|
const repoObj = ev?.repository || {};
|
||||||
|
|
||||||
|
const cloneUrl =
|
||||||
|
repoObj?.clone_url ||
|
||||||
|
(repoObj?.html_url ? (repoObj.html_url.replace(/\/$/,"") + ".git") : "");
|
||||||
|
|
||||||
|
if (!cloneUrl) throw new Error("No repository clone_url/html_url in event.json");
|
||||||
|
|
||||||
|
let owner =
|
||||||
|
repoObj?.owner?.login ||
|
||||||
|
repoObj?.owner?.username ||
|
||||||
|
(repoObj?.full_name ? repoObj.full_name.split("/")[0] : "");
|
||||||
|
|
||||||
|
let repo =
|
||||||
|
repoObj?.name ||
|
||||||
|
(repoObj?.full_name ? repoObj.full_name.split("/")[1] : "");
|
||||||
|
|
||||||
|
if (!owner || !repo) {
|
||||||
|
const m = cloneUrl.match(/[:/](?<o>[^/]+)\/(?<r>[^/]+?)(?:\.git)?$/);
|
||||||
|
if (m?.groups) { owner = owner || m.groups.o; repo = repo || m.groups.r; }
|
||||||
|
}
|
||||||
|
if (!owner || !repo) throw new Error("Cannot infer owner/repo");
|
||||||
|
|
||||||
|
const defaultBranch = repoObj?.default_branch || "main";
|
||||||
|
|
||||||
|
const issueNumber =
|
||||||
|
ev?.issue?.number ||
|
||||||
|
ev?.issue?.index ||
|
||||||
|
(process.env.INPUT_ISSUE ? Number(process.env.INPUT_ISSUE) : 0);
|
||||||
|
|
||||||
|
if (!issueNumber || !Number.isFinite(Number(issueNumber))) {
|
||||||
|
throw new Error("No issue number in event.json or workflow_dispatch input");
|
||||||
|
}
|
||||||
|
|
||||||
|
const labelName =
|
||||||
|
ev?.label?.name ||
|
||||||
|
ev?.label ||
|
||||||
|
"workflow_dispatch";
|
||||||
|
|
||||||
|
const u = new URL(cloneUrl);
|
||||||
|
const origin = u.origin;
|
||||||
|
|
||||||
|
const apiBase = (process.env.FORGE_API && String(process.env.FORGE_API).trim())
|
||||||
|
? String(process.env.FORGE_API).trim().replace(/\/+$/,"")
|
||||||
|
: origin;
|
||||||
|
|
||||||
|
function sh(s){ return JSON.stringify(String(s)); }
|
||||||
|
process.stdout.write([
|
||||||
|
`CLONE_URL=${sh(cloneUrl)}`,
|
||||||
|
`OWNER=${sh(owner)}`,
|
||||||
|
`REPO=${sh(repo)}`,
|
||||||
|
`DEFAULT_BRANCH=${sh(defaultBranch)}`,
|
||||||
|
`ISSUE_NUMBER=${sh(issueNumber)}`,
|
||||||
|
`LABEL_NAME=${sh(labelName)}`,
|
||||||
|
`API_BASE=${sh(apiBase)}`
|
||||||
|
].join("\n") + "\n");
|
||||||
|
NODE
|
||||||
|
|
||||||
|
echo "✅ context:"
|
||||||
|
sed -n '1,120p' /tmp/proposer.env
|
||||||
|
|
||||||
|
- name: Gate on label state/approved
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
source /tmp/proposer.env
|
||||||
|
|
||||||
|
if [[ "$LABEL_NAME" != "state/approved" && "$LABEL_NAME" != "workflow_dispatch" ]]; then
|
||||||
|
echo "ℹ️ label=$LABEL_NAME => skip"
|
||||||
|
echo "SKIP=1" >> /tmp/proposer.env
|
||||||
|
exit 0
|
||||||
|
fi
|
||||||
|
echo "✅ proceed (issue=$ISSUE_NUMBER)"
|
||||||
|
|
||||||
|
- name: Fetch issue + API-hard gate on (state/approved present + proposer type)
|
||||||
|
env:
|
||||||
|
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
source /tmp/proposer.env
|
||||||
|
[[ "${SKIP:-0}" != "1" ]] || { echo "ℹ️ skipped"; exit 0; }
|
||||||
|
|
||||||
|
test -n "${FORGE_TOKEN:-}" || { echo "❌ Missing secret FORGE_TOKEN"; exit 1; }
|
||||||
|
|
||||||
|
curl -fsS \
|
||||||
|
-H "Authorization: token $FORGE_TOKEN" \
|
||||||
|
-H "Accept: application/json" \
|
||||||
|
"$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE_NUMBER" \
|
||||||
|
-o /tmp/issue.json
|
||||||
|
|
||||||
|
node --input-type=module - <<'NODE' >> /tmp/proposer.env
|
||||||
|
import fs from "node:fs";
|
||||||
|
const issue = JSON.parse(fs.readFileSync("/tmp/issue.json","utf8"));
|
||||||
|
const title = String(issue.title || "");
|
||||||
|
const body = String(issue.body || "").replace(/\r\n/g, "\n");
|
||||||
|
const labels = Array.isArray(issue.labels) ? issue.labels.map(l => String(l.name||"")).filter(Boolean) : [];
|
||||||
|
|
||||||
|
function pickLine(key) {
|
||||||
|
const re = new RegExp(`^\\s*${key}\\s*:\\s*([^\\n\\r]+)`, "mi");
|
||||||
|
const m = body.match(re);
|
||||||
|
return m ? m[1].trim() : "";
|
||||||
|
}
|
||||||
|
|
||||||
|
const typeRaw = pickLine("Type");
|
||||||
|
const type = String(typeRaw || "").trim().toLowerCase();
|
||||||
|
|
||||||
|
const hasApproved = labels.includes("state/approved");
|
||||||
|
const proposer = new Set(["type/correction","type/fact-check"]);
|
||||||
|
|
||||||
|
const out = [];
|
||||||
|
out.push(`ISSUE_TITLE=${JSON.stringify(title)}`);
|
||||||
|
out.push(`ISSUE_TYPE=${JSON.stringify(type)}`);
|
||||||
|
out.push(`HAS_APPROVED=${hasApproved ? "1":"0"}`);
|
||||||
|
|
||||||
|
if (!hasApproved) {
|
||||||
|
out.push(`SKIP=1`);
|
||||||
|
out.push(`SKIP_REASON=${JSON.stringify("approved_not_present")}`);
|
||||||
|
} else if (!type) {
|
||||||
|
out.push(`SKIP=1`);
|
||||||
|
out.push(`SKIP_REASON=${JSON.stringify("missing_type")}`);
|
||||||
|
} else if (!proposer.has(type)) {
|
||||||
|
out.push(`SKIP=1`);
|
||||||
|
out.push(`SKIP_REASON=${JSON.stringify("not_proposer:"+type)}`);
|
||||||
|
}
|
||||||
|
process.stdout.write(out.join("\n") + "\n");
|
||||||
|
NODE
|
||||||
|
|
||||||
|
echo "✅ proposer gating:"
|
||||||
|
grep -E '^(ISSUE_TYPE|HAS_APPROVED|SKIP|SKIP_REASON)=' /tmp/proposer.env || true
|
||||||
|
|
||||||
|
- name: Comment issue if skipped
|
||||||
|
if: ${{ always() }}
|
||||||
|
env:
|
||||||
|
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
source /tmp/proposer.env || true
|
||||||
|
|
||||||
|
[[ "${SKIP:-0}" == "1" ]] || exit 0
|
||||||
|
[[ "$LABEL_NAME" == "state/approved" || "$LABEL_NAME" == "workflow_dispatch" ]] || exit 0
|
||||||
|
|
||||||
|
REASON="${SKIP_REASON:-}"
|
||||||
|
TYPE="${ISSUE_TYPE:-}"
|
||||||
|
|
||||||
|
if [[ "$REASON" == "approved_not_present" ]]; then
|
||||||
|
MSG="ℹ️ Proposer Apply: skip — le label **state/approved** n'est pas présent sur le ticket au moment du run (gate API-hard)."
|
||||||
|
elif [[ "$REASON" == "missing_type" ]]; then
|
||||||
|
MSG="ℹ️ Proposer Apply: skip — champ **Type:** manquant/illisible. Attendu: type/correction ou type/fact-check."
|
||||||
|
else
|
||||||
|
MSG="ℹ️ Proposer Apply: skip — Type non-Proposer (${TYPE}). (Ce workflow ne traite que correction/fact-check.)"
|
||||||
|
fi
|
||||||
|
|
||||||
|
PAYLOAD="$(node --input-type=module -e 'console.log(JSON.stringify({body: process.argv[1]||""}))' "$MSG")"
|
||||||
|
curl -fsS -X POST \
|
||||||
|
-H "Authorization: token $FORGE_TOKEN" \
|
||||||
|
-H "Content-Type: application/json" \
|
||||||
|
"$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE_NUMBER/comments" \
|
||||||
|
--data-binary "$PAYLOAD" || true
|
||||||
|
|
||||||
|
- name: Checkout default branch
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
source /tmp/proposer.env
|
||||||
|
[[ "${SKIP:-0}" != "1" ]] || { echo "ℹ️ skipped"; exit 0; }
|
||||||
|
|
||||||
|
rm -rf .git
|
||||||
|
git init -q
|
||||||
|
git remote add origin "$CLONE_URL"
|
||||||
|
git fetch --depth 1 origin "$DEFAULT_BRANCH"
|
||||||
|
git -c advice.detachedHead=false checkout -q FETCH_HEAD
|
||||||
|
git log -1 --oneline
|
||||||
|
echo "✅ workspace:"
|
||||||
|
ls -la | sed -n '1,120p'
|
||||||
|
|
||||||
|
- name: Detect app dir (repo-root vs ./site)
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
source /tmp/proposer.env
|
||||||
|
[[ "${SKIP:-0}" != "1" ]] || { echo "ℹ️ skipped"; exit 0; }
|
||||||
|
|
||||||
|
APP_DIR="."
|
||||||
|
if [[ -d "site" && -f "site/package.json" ]]; then
|
||||||
|
APP_DIR="site"
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "APP_DIR=$APP_DIR" >> /tmp/proposer.env
|
||||||
|
echo "✅ APP_DIR=$APP_DIR"
|
||||||
|
ls -la "$APP_DIR" | sed -n '1,120p'
|
||||||
|
test -f "$APP_DIR/package.json" || { echo "❌ package.json missing in APP_DIR=$APP_DIR"; exit 1; }
|
||||||
|
test -d "$APP_DIR/scripts" || { echo "❌ scripts/ missing in APP_DIR=$APP_DIR"; exit 1; }
|
||||||
|
|
||||||
|
- name: NPM harden (reduce flakiness)
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
source /tmp/proposer.env
|
||||||
|
[[ "${SKIP:-0}" != "1" ]] || exit 0
|
||||||
|
cd "$APP_DIR"
|
||||||
|
npm config set fetch-retries 5
|
||||||
|
npm config set fetch-retry-mintimeout 20000
|
||||||
|
npm config set fetch-retry-maxtimeout 120000
|
||||||
|
npm config set registry https://registry.npmjs.org
|
||||||
|
|
||||||
|
- name: Install deps (APP_DIR)
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
source /tmp/proposer.env
|
||||||
|
[[ "${SKIP:-0}" != "1" ]] || { echo "ℹ️ skipped"; exit 0; }
|
||||||
|
cd "$APP_DIR"
|
||||||
|
npm ci --no-audit --no-fund
|
||||||
|
|
||||||
|
- name: Build dist baseline (APP_DIR)
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
source /tmp/proposer.env
|
||||||
|
[[ "${SKIP:-0}" != "1" ]] || { echo "ℹ️ skipped"; exit 0; }
|
||||||
|
cd "$APP_DIR"
|
||||||
|
npm run build
|
||||||
|
|
||||||
|
- name: Apply ticket (alias + commit) on bot branch
|
||||||
|
continue-on-error: true
|
||||||
|
env:
|
||||||
|
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
|
||||||
|
BOT_GIT_NAME: ${{ secrets.BOT_GIT_NAME }}
|
||||||
|
BOT_GIT_EMAIL: ${{ secrets.BOT_GIT_EMAIL }}
|
||||||
|
FORGE_API: ${{ vars.FORGE_API || vars.FORGE_BASE }}
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
source /tmp/proposer.env
|
||||||
|
[[ "${SKIP:-0}" != "1" ]] || { echo "ℹ️ skipped"; exit 0; }
|
||||||
|
|
||||||
|
git config user.name "${BOT_GIT_NAME:-archicratie-bot}"
|
||||||
|
git config user.email "${BOT_GIT_EMAIL:-bot@archicratie.local}"
|
||||||
|
|
||||||
|
START_SHA="$(git rev-parse HEAD)"
|
||||||
|
TS="$(date -u +%Y%m%d-%H%M%S)"
|
||||||
|
BR="bot/proposer-${ISSUE_NUMBER}-${TS}"
|
||||||
|
echo "BRANCH=$BR" >> /tmp/proposer.env
|
||||||
|
git checkout -b "$BR"
|
||||||
|
|
||||||
|
export GITEA_OWNER="$OWNER"
|
||||||
|
export GITEA_REPO="$REPO"
|
||||||
|
export FORGE_BASE="$API_BASE"
|
||||||
|
|
||||||
|
LOG="/tmp/proposer-apply.log"
|
||||||
|
set +e
|
||||||
|
(cd "$APP_DIR" && node scripts/apply-ticket.mjs "$ISSUE_NUMBER" --alias --commit) >"$LOG" 2>&1
|
||||||
|
RC=$?
|
||||||
|
set -e
|
||||||
|
|
||||||
|
echo "APPLY_RC=$RC" >> /tmp/proposer.env
|
||||||
|
|
||||||
|
echo "== apply log (tail) =="
|
||||||
|
tail -n 200 "$LOG" || true
|
||||||
|
|
||||||
|
END_SHA="$(git rev-parse HEAD)"
|
||||||
|
if [[ "$RC" -ne 0 ]]; then
|
||||||
|
echo "NOOP=0" >> /tmp/proposer.env
|
||||||
|
exit 0
|
||||||
|
fi
|
||||||
|
|
||||||
|
if [[ "$START_SHA" == "$END_SHA" ]]; then
|
||||||
|
echo "NOOP=1" >> /tmp/proposer.env
|
||||||
|
else
|
||||||
|
echo "NOOP=0" >> /tmp/proposer.env
|
||||||
|
echo "END_SHA=$END_SHA" >> /tmp/proposer.env
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Push bot branch
|
||||||
|
if: ${{ always() }}
|
||||||
|
env:
|
||||||
|
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
source /tmp/proposer.env || true
|
||||||
|
[[ "${SKIP:-0}" != "1" ]] || exit 0
|
||||||
|
|
||||||
|
[[ "${APPLY_RC:-0}" == "0" ]] || { echo "ℹ️ apply failed -> skip push"; exit 0; }
|
||||||
|
[[ "${NOOP:-0}" == "0" ]] || { echo "ℹ️ no-op -> skip push"; exit 0; }
|
||||||
|
[[ -n "${BRANCH:-}" ]] || { echo "ℹ️ BRANCH unset -> skip push"; exit 0; }
|
||||||
|
|
||||||
|
AUTH_URL="$(node --input-type=module -e '
|
||||||
|
const [clone, tok] = process.argv.slice(1);
|
||||||
|
const u = new URL(clone);
|
||||||
|
u.username = "oauth2";
|
||||||
|
u.password = tok;
|
||||||
|
console.log(u.toString());
|
||||||
|
' "$CLONE_URL" "$FORGE_TOKEN")"
|
||||||
|
|
||||||
|
git remote set-url origin "$AUTH_URL"
|
||||||
|
git push -u origin "$BRANCH"
|
||||||
|
|
||||||
|
- name: Create PR + comment issue
|
||||||
|
if: ${{ always() }}
|
||||||
|
env:
|
||||||
|
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
source /tmp/proposer.env || true
|
||||||
|
[[ "${SKIP:-0}" != "1" ]] || exit 0
|
||||||
|
|
||||||
|
[[ "${APPLY_RC:-0}" == "0" ]] || exit 0
|
||||||
|
[[ "${NOOP:-0}" == "0" ]] || exit 0
|
||||||
|
[[ -n "${BRANCH:-}" ]] || { echo "ℹ️ BRANCH unset -> skip PR"; exit 0; }
|
||||||
|
|
||||||
|
PR_TITLE="proposer: apply ticket #${ISSUE_NUMBER}"
|
||||||
|
PR_BODY="PR auto depuis ticket #${ISSUE_NUMBER} (state/approved).\n\n- Branche: ${BRANCH}\n- Commit: ${END_SHA:-unknown}\n\nMerge si CI OK."
|
||||||
|
|
||||||
|
PR_PAYLOAD="$(node --input-type=module -e '
|
||||||
|
const [title, body, base, head] = process.argv.slice(1);
|
||||||
|
console.log(JSON.stringify({ title, body, base, head, allow_maintainer_edit: true }));
|
||||||
|
' "$PR_TITLE" "$PR_BODY" "$DEFAULT_BRANCH" "${OWNER}:${BRANCH}")"
|
||||||
|
|
||||||
|
PR_JSON="$(curl -fsS -X POST \
|
||||||
|
-H "Authorization: token $FORGE_TOKEN" \
|
||||||
|
-H "Content-Type: application/json" \
|
||||||
|
"$API_BASE/api/v1/repos/$OWNER/$REPO/pulls" \
|
||||||
|
--data-binary "$PR_PAYLOAD")"
|
||||||
|
|
||||||
|
PR_URL="$(node --input-type=module -e '
|
||||||
|
const pr = JSON.parse(process.argv[1] || "{}");
|
||||||
|
console.log(pr.html_url || pr.url || "");
|
||||||
|
' "$PR_JSON")"
|
||||||
|
|
||||||
|
test -n "$PR_URL" || { echo "❌ PR URL missing. Raw: $PR_JSON"; exit 1; }
|
||||||
|
|
||||||
|
MSG="✅ PR Proposer créée pour ticket #${ISSUE_NUMBER} : ${PR_URL}"
|
||||||
|
C_PAYLOAD="$(node --input-type=module -e 'console.log(JSON.stringify({body: process.argv[1]||""}))' "$MSG")"
|
||||||
|
|
||||||
|
curl -fsS -X POST \
|
||||||
|
-H "Authorization: token $FORGE_TOKEN" \
|
||||||
|
-H "Content-Type: application/json" \
|
||||||
|
"$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE_NUMBER/comments" \
|
||||||
|
--data-binary "$C_PAYLOAD"
|
||||||
|
|
||||||
|
- name: Finalize (fail job if apply failed)
|
||||||
|
if: ${{ always() }}
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
source /tmp/proposer.env || true
|
||||||
|
[[ "${SKIP:-0}" != "1" ]] || exit 0
|
||||||
|
|
||||||
|
RC="${APPLY_RC:-0}"
|
||||||
|
if [[ "$RC" != "0" ]]; then
|
||||||
|
echo "❌ apply failed (rc=$RC)"
|
||||||
|
exit "$RC"
|
||||||
|
fi
|
||||||
|
echo "✅ apply ok"
|
||||||
@@ -3,7 +3,7 @@ on: [push, workflow_dispatch]
|
|||||||
|
|
||||||
jobs:
|
jobs:
|
||||||
smoke:
|
smoke:
|
||||||
runs-on: ubuntu-latest
|
runs-on: mac-ci
|
||||||
steps:
|
steps:
|
||||||
- run: node -v && npm -v
|
- run: node -v && npm -v
|
||||||
- run: echo "runner OK"
|
- run: echo "runner OK"
|
||||||
|
|||||||
Binary file not shown.
|
After Width: | Height: | Size: 61 KiB |
Binary file not shown.
|
After Width: | Height: | Size: 61 KiB |
@@ -1,9 +1,18 @@
|
|||||||
#!/usr/bin/env node
|
#!/usr/bin/env node
|
||||||
// scripts/apply-annotation-ticket.mjs
|
// scripts/apply-annotation-ticket.mjs
|
||||||
// Applique un ticket Gitea "type/media | type/reference | type/comment" vers src/annotations + public/media
|
|
||||||
// Robuste, idempotent, non destructif
|
|
||||||
//
|
//
|
||||||
// DRY RUN par défaut si --dry-run
|
// Applique un ticket Gitea "type/media | type/reference | type/comment" vers:
|
||||||
|
//
|
||||||
|
// ✅ src/annotations/<oeuvre>/<chapitre>/<paraId>.yml (sharding par paragraphe)
|
||||||
|
// ✅ public/media/<oeuvre>/<chapitre>/<paraId>/<file>
|
||||||
|
//
|
||||||
|
// Compat rétro : lit (si présent) l'ancien monolithe:
|
||||||
|
// src/annotations/<oeuvre>/<chapitre>.yml
|
||||||
|
// et deep-merge NON destructif dans le shard lors d'une nouvelle application,
|
||||||
|
// pour permettre une migration progressive sans perte.
|
||||||
|
//
|
||||||
|
// Robuste, idempotent, non destructif.
|
||||||
|
// DRY RUN si --dry-run
|
||||||
// Options: --dry-run --no-download --verify --strict --commit --close
|
// Options: --dry-run --no-download --verify --strict --commit --close
|
||||||
//
|
//
|
||||||
// Env requis:
|
// Env requis:
|
||||||
@@ -36,7 +45,7 @@ import YAML from "yaml";
|
|||||||
|
|
||||||
function usage(exitCode = 0) {
|
function usage(exitCode = 0) {
|
||||||
console.log(`
|
console.log(`
|
||||||
apply-annotation-ticket — applique un ticket SidePanel (media/ref/comment) vers src/annotations/
|
apply-annotation-ticket — applique un ticket SidePanel (media/ref/comment) vers src/annotations/ (shard par paragraphe)
|
||||||
|
|
||||||
Usage:
|
Usage:
|
||||||
node scripts/apply-annotation-ticket.mjs <issue_number> [--dry-run] [--no-download] [--verify] [--strict] [--commit] [--close]
|
node scripts/apply-annotation-ticket.mjs <issue_number> [--dry-run] [--no-download] [--verify] [--strict] [--commit] [--close]
|
||||||
@@ -44,9 +53,9 @@ Usage:
|
|||||||
Flags:
|
Flags:
|
||||||
--dry-run : n'écrit rien (affiche un aperçu)
|
--dry-run : n'écrit rien (affiche un aperçu)
|
||||||
--no-download : n'essaie pas de télécharger les pièces jointes (media)
|
--no-download : n'essaie pas de télécharger les pièces jointes (media)
|
||||||
--verify : tente de vérifier que (page, ancre) existent (baseline/dist si dispo)
|
--verify : vérifie que (page, ancre) existent (dist/para-index.json si dispo, sinon baseline)
|
||||||
--strict : refuse si URL ref invalide (http/https) OU caption media vide
|
--strict : refuse si URL ref invalide (http/https) OU caption media vide OU verify impossible
|
||||||
--commit : git add + git commit (le script commit dans la branche courante)
|
--commit : git add + git commit (commit dans la branche courante)
|
||||||
--close : ferme le ticket (nécessite --commit)
|
--close : ferme le ticket (nécessite --commit)
|
||||||
|
|
||||||
Env requis:
|
Env requis:
|
||||||
@@ -57,7 +66,7 @@ Env optionnel:
|
|||||||
GITEA_OWNER / GITEA_REPO (sinon auto-détecté via git remote)
|
GITEA_OWNER / GITEA_REPO (sinon auto-détecté via git remote)
|
||||||
ANNO_DIR (défaut: src/annotations)
|
ANNO_DIR (défaut: src/annotations)
|
||||||
PUBLIC_DIR (défaut: public)
|
PUBLIC_DIR (défaut: public)
|
||||||
MEDIA_ROOT (défaut URL: /media) -> écrit dans public/media/...
|
MEDIA_ROOT (défaut URL: /media)
|
||||||
|
|
||||||
Exit codes:
|
Exit codes:
|
||||||
0 ok
|
0 ok
|
||||||
@@ -102,6 +111,8 @@ const ANNO_DIR = path.join(CWD, process.env.ANNO_DIR || "src", "annotations");
|
|||||||
const PUBLIC_DIR = path.join(CWD, process.env.PUBLIC_DIR || "public");
|
const PUBLIC_DIR = path.join(CWD, process.env.PUBLIC_DIR || "public");
|
||||||
const MEDIA_URL_ROOT = String(process.env.MEDIA_ROOT || "/media").replace(/\/+$/, "");
|
const MEDIA_URL_ROOT = String(process.env.MEDIA_ROOT || "/media").replace(/\/+$/, "");
|
||||||
|
|
||||||
|
/* --------------------------------- helpers -------------------------------- */
|
||||||
|
|
||||||
function getEnv(name, fallback = "") {
|
function getEnv(name, fallback = "") {
|
||||||
return (process.env[name] ?? fallback).trim();
|
return (process.env[name] ?? fallback).trim();
|
||||||
}
|
}
|
||||||
@@ -123,7 +134,12 @@ function runQuiet(cmd, args, opts = {}) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
async function exists(p) {
|
async function exists(p) {
|
||||||
try { await fs.access(p); return true; } catch { return false; }
|
try {
|
||||||
|
await fs.access(p);
|
||||||
|
return true;
|
||||||
|
} catch {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
function inferOwnerRepoFromGit() {
|
function inferOwnerRepoFromGit() {
|
||||||
@@ -140,6 +156,371 @@ function gitHasStagedChanges() {
|
|||||||
return r.status === 1;
|
return r.status === 1;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
function escapeRegExp(s) {
|
||||||
|
return String(s).replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
|
||||||
|
}
|
||||||
|
|
||||||
|
function pickLine(body, key) {
|
||||||
|
const re = new RegExp(`^\\s*${escapeRegExp(key)}\\s*:\\s*([^\\n\\r]+)`, "mi");
|
||||||
|
const m = String(body || "").match(re);
|
||||||
|
return m ? m[1].trim() : "";
|
||||||
|
}
|
||||||
|
|
||||||
|
function pickSection(body, markers) {
|
||||||
|
const text = String(body || "").replace(/\r\n/g, "\n");
|
||||||
|
const idx = markers
|
||||||
|
.map((m) => ({ m, i: text.toLowerCase().indexOf(m.toLowerCase()) }))
|
||||||
|
.filter((x) => x.i >= 0)
|
||||||
|
.sort((a, b) => a.i - b.i)[0];
|
||||||
|
if (!idx) return "";
|
||||||
|
|
||||||
|
const start = idx.i + idx.m.length;
|
||||||
|
const tail = text.slice(start);
|
||||||
|
|
||||||
|
const stops = ["\n## ", "\n---", "\nJustification", "\nProposition", "\nSources"];
|
||||||
|
let end = tail.length;
|
||||||
|
for (const s of stops) {
|
||||||
|
const j = tail.toLowerCase().indexOf(s.toLowerCase());
|
||||||
|
if (j >= 0 && j < end) end = j;
|
||||||
|
}
|
||||||
|
return tail.slice(0, end).trim();
|
||||||
|
}
|
||||||
|
|
||||||
|
function normalizeChemin(chemin) {
|
||||||
|
let c = String(chemin || "").trim();
|
||||||
|
if (!c) return "";
|
||||||
|
if (!c.startsWith("/")) c = "/" + c;
|
||||||
|
if (!c.endsWith("/")) c = c + "/";
|
||||||
|
c = c.replace(/\/{2,}/g, "/");
|
||||||
|
return c;
|
||||||
|
}
|
||||||
|
|
||||||
|
function normalizePageKeyFromChemin(chemin) {
|
||||||
|
// ex: /archicrat-ia/chapitre-4/ => archicrat-ia/chapitre-4
|
||||||
|
return normalizeChemin(chemin).replace(/^\/+|\/+$/g, "");
|
||||||
|
}
|
||||||
|
|
||||||
|
function normalizeAnchorId(s) {
|
||||||
|
let a = String(s || "").trim();
|
||||||
|
if (a.startsWith("#")) a = a.slice(1);
|
||||||
|
return a;
|
||||||
|
}
|
||||||
|
|
||||||
|
function assert(cond, msg, code = 1) {
|
||||||
|
if (!cond) {
|
||||||
|
const e = new Error(msg);
|
||||||
|
e.__exitCode = code;
|
||||||
|
throw e;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function isPlainObject(x) {
|
||||||
|
return !!x && typeof x === "object" && !Array.isArray(x);
|
||||||
|
}
|
||||||
|
|
||||||
|
function paraIndexFromId(id) {
|
||||||
|
const m = String(id).match(/^p-(\d+)-/i);
|
||||||
|
return m ? Number(m[1]) : Number.NaN;
|
||||||
|
}
|
||||||
|
|
||||||
|
function isHttpUrl(u) {
|
||||||
|
try {
|
||||||
|
const x = new URL(String(u));
|
||||||
|
return x.protocol === "http:" || x.protocol === "https:";
|
||||||
|
} catch {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function stableSortByTs(arr) {
|
||||||
|
if (!Array.isArray(arr)) return;
|
||||||
|
arr.sort((a, b) => {
|
||||||
|
const ta = Date.parse(a?.ts || "") || 0;
|
||||||
|
const tb = Date.parse(b?.ts || "") || 0;
|
||||||
|
if (ta !== tb) return ta - tb;
|
||||||
|
return JSON.stringify(a).localeCompare(JSON.stringify(b));
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
function normPage(s) {
|
||||||
|
let x = String(s || "").trim();
|
||||||
|
if (!x) return "";
|
||||||
|
// retire origin si on a une URL complète
|
||||||
|
x = x.replace(/^https?:\/\/[^/]+/i, "");
|
||||||
|
// enlève query/hash
|
||||||
|
x = x.split("#")[0].split("?")[0];
|
||||||
|
// enlève index.html
|
||||||
|
x = x.replace(/index\.html$/i, "");
|
||||||
|
// enlève slashs de bord
|
||||||
|
x = x.replace(/^\/+/, "").replace(/\/+$/, "");
|
||||||
|
return x;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* ------------------------------ para-index (verify + order) ------------------------------ */
|
||||||
|
|
||||||
|
async function loadParaOrderFromDist(pageKey) {
|
||||||
|
const distIdx = path.join(CWD, "dist", "para-index.json");
|
||||||
|
if (!(await exists(distIdx))) return null;
|
||||||
|
|
||||||
|
let j;
|
||||||
|
try {
|
||||||
|
j = JSON.parse(await fs.readFile(distIdx, "utf8"));
|
||||||
|
} catch {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
const want = normPage(pageKey);
|
||||||
|
|
||||||
|
// Support A) { items:[{id,page,...}, ...] } (ou variantes)
|
||||||
|
const items = Array.isArray(j?.items)
|
||||||
|
? j.items
|
||||||
|
: Array.isArray(j?.index?.items)
|
||||||
|
? j.index.items
|
||||||
|
: null;
|
||||||
|
|
||||||
|
if (items) {
|
||||||
|
const ids = [];
|
||||||
|
for (const it of items) {
|
||||||
|
// page peut être dans plein de clés différentes
|
||||||
|
const pageCand = normPage(
|
||||||
|
it?.page ??
|
||||||
|
it?.pageKey ??
|
||||||
|
it?.path ??
|
||||||
|
it?.route ??
|
||||||
|
it?.href ??
|
||||||
|
it?.url ??
|
||||||
|
""
|
||||||
|
);
|
||||||
|
|
||||||
|
// id peut être dans plein de clés différentes
|
||||||
|
let id = String(it?.id ?? it?.paraId ?? it?.anchorId ?? it?.anchor ?? "");
|
||||||
|
if (id.startsWith("#")) id = id.slice(1);
|
||||||
|
|
||||||
|
if (pageCand === want && id) ids.push(id);
|
||||||
|
}
|
||||||
|
if (ids.length) return ids;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Support B) { byId: { "p-...": { page:"...", ... }, ... } }
|
||||||
|
if (j?.byId && typeof j.byId === "object") {
|
||||||
|
const ids = Object.keys(j.byId)
|
||||||
|
.filter((id) => {
|
||||||
|
const meta = j.byId[id] || {};
|
||||||
|
const pageCand = normPage(meta.page ?? meta.pageKey ?? meta.path ?? meta.route ?? meta.url ?? "");
|
||||||
|
return pageCand === want;
|
||||||
|
});
|
||||||
|
|
||||||
|
if (ids.length) {
|
||||||
|
ids.sort((a, b) => {
|
||||||
|
const ia = paraIndexFromId(a);
|
||||||
|
const ib = paraIndexFromId(b);
|
||||||
|
if (Number.isFinite(ia) && Number.isFinite(ib) && ia !== ib) return ia - ib;
|
||||||
|
return String(a).localeCompare(String(b));
|
||||||
|
});
|
||||||
|
return ids;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Support C) { pages: { "archicrat-ia/chapitre-4": { ids:[...] } } } (ou variantes)
|
||||||
|
if (j?.pages && typeof j.pages === "object") {
|
||||||
|
// essaie de trouver la bonne clé même si elle est /.../ ou .../index.html
|
||||||
|
const keys = Object.keys(j.pages);
|
||||||
|
const hit = keys.find((k) => normPage(k) === want);
|
||||||
|
if (hit) {
|
||||||
|
const pg = j.pages[hit];
|
||||||
|
if (Array.isArray(pg?.ids)) return pg.ids.map(String);
|
||||||
|
if (Array.isArray(pg?.paras)) return pg.paras.map(String);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
async function tryVerifyAnchor(pageKey, anchorId) {
|
||||||
|
// 1) dist/para-index.json : order complet si possible
|
||||||
|
const order = await loadParaOrderFromDist(pageKey);
|
||||||
|
if (order) return order.includes(anchorId);
|
||||||
|
|
||||||
|
// 1bis) dist/para-index.json : fallback “best effort” => recherche brute (IDs quasi uniques)
|
||||||
|
const distIdx = path.join(CWD, "dist", "para-index.json");
|
||||||
|
if (await exists(distIdx)) {
|
||||||
|
try {
|
||||||
|
const raw = await fs.readFile(distIdx, "utf8");
|
||||||
|
if (raw.includes(`"${anchorId}"`) || raw.includes(`"#${anchorId}"`)) {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
} catch {
|
||||||
|
// ignore
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// 2) tests/anchors-baseline.json (fallback)
|
||||||
|
const base = path.join(CWD, "tests", "anchors-baseline.json");
|
||||||
|
if (await exists(base)) {
|
||||||
|
try {
|
||||||
|
const j = JSON.parse(await fs.readFile(base, "utf8"));
|
||||||
|
const candidates = [];
|
||||||
|
if (j?.pages && typeof j.pages === "object") {
|
||||||
|
for (const [k, v] of Object.entries(j.pages)) {
|
||||||
|
if (!Array.isArray(v)) continue;
|
||||||
|
if (normPage(k).includes(normPage(pageKey))) candidates.push(...v);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (Array.isArray(j?.entries)) {
|
||||||
|
for (const it of j.entries) {
|
||||||
|
const p = String(it?.page || "");
|
||||||
|
const ids = it?.ids;
|
||||||
|
if (Array.isArray(ids) && normPage(p).includes(normPage(pageKey))) candidates.push(...ids);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if (candidates.length) return candidates.some((x) => String(x) === anchorId);
|
||||||
|
} catch {
|
||||||
|
// ignore
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return null; // cannot verify
|
||||||
|
}
|
||||||
|
|
||||||
|
/* ----------------------------- deep merge helpers (non destructive) ----------------------------- */
|
||||||
|
|
||||||
|
function keyMedia(x) {
|
||||||
|
return String(x?.src || "");
|
||||||
|
}
|
||||||
|
function keyRef(x) {
|
||||||
|
return `${x?.url || ""}||${x?.label || ""}||${x?.kind || ""}||${x?.citation || ""}`;
|
||||||
|
}
|
||||||
|
function keyComment(x) {
|
||||||
|
return String(x?.text || "").trim();
|
||||||
|
}
|
||||||
|
|
||||||
|
function uniqUnion(dstArr, srcArr, keyFn) {
|
||||||
|
const out = Array.isArray(dstArr) ? [...dstArr] : [];
|
||||||
|
const seen = new Set(out.map((x) => keyFn(x)));
|
||||||
|
for (const it of (Array.isArray(srcArr) ? srcArr : [])) {
|
||||||
|
const k = keyFn(it);
|
||||||
|
if (!k) continue;
|
||||||
|
if (!seen.has(k)) {
|
||||||
|
seen.add(k);
|
||||||
|
out.push(it);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return out;
|
||||||
|
}
|
||||||
|
|
||||||
|
function deepMergeEntry(dst, src) {
|
||||||
|
if (!isPlainObject(dst) || !isPlainObject(src)) return;
|
||||||
|
|
||||||
|
for (const [k, v] of Object.entries(src)) {
|
||||||
|
if (k === "media" && Array.isArray(v)) {
|
||||||
|
dst.media = uniqUnion(dst.media, v, keyMedia);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
if (k === "refs" && Array.isArray(v)) {
|
||||||
|
dst.refs = uniqUnion(dst.refs, v, keyRef);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
if (k === "comments_editorial" && Array.isArray(v)) {
|
||||||
|
dst.comments_editorial = uniqUnion(dst.comments_editorial, v, keyComment);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (isPlainObject(v)) {
|
||||||
|
if (!isPlainObject(dst[k])) dst[k] = {};
|
||||||
|
deepMergeEntry(dst[k], v);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (Array.isArray(v)) {
|
||||||
|
const cur = Array.isArray(dst[k]) ? dst[k] : [];
|
||||||
|
const seen = new Set(cur.map((x) => JSON.stringify(x)));
|
||||||
|
const out = [...cur];
|
||||||
|
for (const it of v) {
|
||||||
|
const s = JSON.stringify(it);
|
||||||
|
if (!seen.has(s)) {
|
||||||
|
seen.add(s);
|
||||||
|
out.push(it);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
dst[k] = out;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
// scalar: set only if missing/empty
|
||||||
|
if (!(k in dst) || dst[k] == null || dst[k] === "") {
|
||||||
|
dst[k] = v;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/* ----------------------------- annotations I/O ----------------------------- */
|
||||||
|
|
||||||
|
async function loadAnnoDocYaml(fileAbs, pageKey) {
|
||||||
|
if (!(await exists(fileAbs))) {
|
||||||
|
return { schema: 1, page: pageKey, paras: {} };
|
||||||
|
}
|
||||||
|
|
||||||
|
const raw = await fs.readFile(fileAbs, "utf8");
|
||||||
|
let doc;
|
||||||
|
try {
|
||||||
|
doc = YAML.parse(raw);
|
||||||
|
} catch (e) {
|
||||||
|
throw new Error(`${path.relative(CWD, fileAbs)}: parse failed: ${String(e?.message ?? e)}`);
|
||||||
|
}
|
||||||
|
|
||||||
|
assert(isPlainObject(doc), `${path.relative(CWD, fileAbs)}: doc must be an object`, 2);
|
||||||
|
assert(doc.schema === 1, `${path.relative(CWD, fileAbs)}: schema must be 1`, 2);
|
||||||
|
assert(isPlainObject(doc.paras), `${path.relative(CWD, fileAbs)}: missing object key "paras"`, 2);
|
||||||
|
|
||||||
|
if (doc.page != null) {
|
||||||
|
const got = String(doc.page).replace(/^\/+/, "").replace(/\/+$/, "");
|
||||||
|
assert(got === pageKey, `${path.relative(CWD, fileAbs)}: page mismatch (page="${doc.page}" vs path="${pageKey}")`, 2);
|
||||||
|
} else {
|
||||||
|
doc.page = pageKey;
|
||||||
|
}
|
||||||
|
|
||||||
|
return doc;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Return a copy of `paras` with its keys in deterministic order.
 *
 * Ordering rules: keys listed in `order` come first, in that order; the rest
 * fall back to their numeric paragraph index (p-<n>-...), then to a
 * locale-aware string comparison.
 *
 * @param {object|null|undefined} paras - Map of paraId -> entry.
 * @param {string[]|null} order - Optional authoritative key ordering.
 * @returns {object} New object with the same entries, reordered.
 */
function sortParasObject(paras, order) {
  // Build a rank lookup from the explicit ordering, if any.
  const rank = new Map();
  if (Array.isArray(order)) {
    for (let i = 0; i < order.length; i++) rank.set(String(order[i]), i);
  }

  const compare = (a, b) => {
    const rankedA = rank.has(a);
    const rankedB = rank.has(b);
    if (rankedA && rankedB) return rank.get(a) - rank.get(b);
    if (rankedA) return -1; // explicitly ordered keys sort first
    if (rankedB) return 1;

    // Fallback 1: numeric paragraph index embedded in the id.
    const numA = paraIndexFromId(a);
    const numB = paraIndexFromId(b);
    if (Number.isFinite(numA) && Number.isFinite(numB) && numA !== numB) return numA - numB;

    // Fallback 2: plain string comparison.
    return String(a).localeCompare(String(b));
  };

  return Object.fromEntries(
    Object.keys(paras || {})
      .sort(compare)
      .map((k) => [k, paras[k]])
  );
}
|
||||||
|
|
||||||
|
/**
 * Persist an annotation document as YAML, normalizing it first.
 *
 * Mutates `doc` in place: reorders `doc.paras` (optionally per `order`) and
 * sorts each entry's media/refs/editorial-comment arrays by timestamp, so the
 * on-disk file is deterministic across runs.
 *
 * @param {string} fileAbs - Absolute destination path (parent dirs are created).
 * @param {object} doc - Annotation document (schema 1).
 * @param {string[]|null} [order] - Optional authoritative para ordering.
 * @returns {Promise<void>}
 */
async function saveAnnoDocYaml(fileAbs, doc, order = null) {
  // Make sure the target directory exists before writing.
  await fs.mkdir(path.dirname(fileAbs), { recursive: true });

  // Deterministic key order for the paras map.
  doc.paras = sortParasObject(doc.paras, order);

  // Deterministic element order inside each entry's timestamped arrays.
  for (const entry of Object.values(doc.paras || {})) {
    if (!isPlainObject(entry)) continue;
    stableSortByTs(entry.media);
    stableSortByTs(entry.refs);
    stableSortByTs(entry.comments_editorial);
  }

  const serialized = YAML.stringify(doc);
  await fs.writeFile(fileAbs, serialized, "utf8");
}
|
||||||
|
|
||||||
/* ------------------------------ gitea helpers ------------------------------ */
|
/* ------------------------------ gitea helpers ------------------------------ */
|
||||||
|
|
||||||
function apiBaseNorm(forgeApiBase) {
|
function apiBaseNorm(forgeApiBase) {
|
||||||
@@ -167,7 +548,7 @@ async function fetchIssue({ forgeApiBase, owner, repo, token, issueNum }) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
async function fetchIssueAssets({ forgeApiBase, owner, repo, token, issueNum }) {
|
async function fetchIssueAssets({ forgeApiBase, owner, repo, token, issueNum }) {
|
||||||
// ✅ Gitea: /issues/{index}/assets
|
// Gitea: /issues/{index}/assets
|
||||||
const url = `${apiBaseNorm(forgeApiBase)}/api/v1/repos/${owner}/${repo}/issues/${issueNum}/assets`;
|
const url = `${apiBaseNorm(forgeApiBase)}/api/v1/repos/${owner}/${repo}/issues/${issueNum}/assets`;
|
||||||
try {
|
try {
|
||||||
const json = await giteaGET(url, token);
|
const json = await giteaGET(url, token);
|
||||||
@@ -215,200 +596,43 @@ async function closeIssue({ forgeApiBase, owner, repo, token, issueNum, comment
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/* ------------------------------ parsing helpers ---------------------------- */
|
/* ------------------------------ media helpers ------------------------------ */
|
||||||
|
|
||||||
function escapeRegExp(s) {
|
function inferMediaTypeFromFilename(name) {
|
||||||
return String(s).replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
|
const n = String(name || "").toLowerCase();
|
||||||
|
if (/\.(png|jpe?g|webp|gif|svg)$/.test(n)) return "image";
|
||||||
|
if (/\.(mp4|webm|mov|m4v)$/.test(n)) return "video";
|
||||||
|
if (/\.(mp3|wav|ogg|m4a)$/.test(n)) return "audio";
|
||||||
|
return "link";
|
||||||
}
|
}
|
||||||
|
|
||||||
function pickLine(body, key) {
|
function sanitizeFilename(name) {
|
||||||
const re = new RegExp(`^\\s*${escapeRegExp(key)}\\s*:\\s*([^\\n\\r]+)`, "mi");
|
return String(name || "file")
|
||||||
const m = String(body || "").match(re);
|
.replace(/[\/\\]/g, "_")
|
||||||
return m ? m[1].trim() : "";
|
.replace(/[^\w.\-]+/g, "_")
|
||||||
|
.replace(/_+/g, "_")
|
||||||
|
.slice(0, 180);
|
||||||
}
|
}
|
||||||
|
|
||||||
function pickSection(body, markers) {
|
async function downloadToFile(url, token, destAbs) {
|
||||||
const text = String(body || "").replace(/\r\n/g, "\n");
|
const res = await fetch(url, {
|
||||||
const idx = markers
|
headers: {
|
||||||
.map((m) => ({ m, i: text.toLowerCase().indexOf(m.toLowerCase()) }))
|
Authorization: `token ${token}`,
|
||||||
.filter((x) => x.i >= 0)
|
"User-Agent": "archicratie-apply-annotation/1.0",
|
||||||
.sort((a, b) => a.i - b.i)[0];
|
},
|
||||||
if (!idx) return "";
|
redirect: "follow",
|
||||||
|
|
||||||
const start = idx.i + idx.m.length;
|
|
||||||
const tail = text.slice(start);
|
|
||||||
|
|
||||||
const stops = [
|
|
||||||
"\n## ",
|
|
||||||
"\n---",
|
|
||||||
"\nJustification",
|
|
||||||
"\nProposition",
|
|
||||||
"\nSources",
|
|
||||||
];
|
|
||||||
let end = tail.length;
|
|
||||||
for (const s of stops) {
|
|
||||||
const j = tail.toLowerCase().indexOf(s.toLowerCase());
|
|
||||||
if (j >= 0 && j < end) end = j;
|
|
||||||
}
|
|
||||||
return tail.slice(0, end).trim();
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
 * Normalize a page path ("chemin") to the canonical `/a/b/` form:
 * trimmed, with exactly one leading and one trailing slash, and no
 * duplicate slashes. Empty/blank input yields "".
 *
 * @param {string|null|undefined} chemin - Raw path from the ticket body.
 * @returns {string} Canonical path, or "" when input is blank.
 */
function normalizeChemin(chemin) {
  const trimmed = String(chemin || "").trim();
  if (trimmed === "") return "";
  const withLead = trimmed.startsWith("/") ? trimmed : `/${trimmed}`;
  const withTail = withLead.endsWith("/") ? withLead : `${withLead}/`;
  // Collapse any run of slashes into a single one.
  return withTail.replace(/\/{2,}/g, "/");
}
|
|
||||||
|
|
||||||
/**
 * Derive the page key from a raw "chemin": the canonical path with its
 * leading and trailing slashes stripped (e.g. "/a/b/" -> "a/b").
 *
 * @param {string|null|undefined} chemin - Raw path from the ticket body.
 * @returns {string} Page key, or "" when input is blank.
 */
function normalizePageKeyFromChemin(chemin) {
  const canonical = normalizeChemin(chemin);
  return canonical.replace(/^\/+/, "").replace(/\/+$/, "");
}
|
|
||||||
|
|
||||||
/**
 * Normalize an anchor reference: trim whitespace and drop a single
 * leading "#" if present ("#p-1-x" -> "p-1-x").
 *
 * @param {string|null|undefined} s - Raw anchor text.
 * @returns {string} Bare anchor id (may be "" for blank input).
 */
function normalizeAnchorId(s) {
  const trimmed = String(s || "").trim();
  return trimmed.startsWith("#") ? trimmed.slice(1) : trimmed;
}
|
|
||||||
|
|
||||||
/**
 * Assert a condition; on failure throw an Error tagged with the desired
 * process exit code (read by the top-level handler via `__exitCode`).
 *
 * @param {any} cond - Condition expected to be truthy.
 * @param {string} msg - Error message when the assertion fails.
 * @param {number} [code=1] - Exit code to attach to the thrown error.
 * @throws {Error} When `cond` is falsy.
 */
function assert(cond, msg, code = 1) {
  if (cond) return;
  const err = new Error(msg);
  err.__exitCode = code;
  throw err;
}
|
|
||||||
|
|
||||||
/**
 * True when `x` is a non-null, non-array object (a "plain" record-like value).
 *
 * @param {any} x - Value to test.
 * @returns {boolean}
 */
function isPlainObject(x) {
  if (x == null) return false;
  return typeof x === "object" && !Array.isArray(x);
}
|
|
||||||
|
|
||||||
/* ----------------------------- verify helpers ------------------------------ */
|
|
||||||
|
|
||||||
/**
 * Extract the numeric paragraph index from a para id of the form "p-<n>-...".
 *
 * @param {any} id - Candidate paragraph id.
 * @returns {number} The index, or NaN when the id does not match the pattern.
 */
function paraIndexFromId(id) {
  const match = /^p-(\d+)-/i.exec(String(id));
  return match === null ? Number.NaN : Number(match[1]);
}
|
|
||||||
|
|
||||||
/**
 * Best-effort check that `anchorId` is a known paragraph anchor of `pageKey`.
 *
 * Consults, in order:
 *   1. dist/para-index.json  (present only after a build) — a positive hit is
 *      conclusive; a miss falls through to the baseline instead of failing.
 *   2. tests/anchors-baseline.json (optional) — tolerant matching: any page
 *      path containing `pageKey` contributes candidate ids.
 *
 * @param {string} pageKey - Page key (no leading/trailing slashes).
 * @param {string} anchorId - Bare anchor id (no leading "#").
 * @returns {Promise<boolean|null>} true/false when verifiable, null when no
 *   source of truth is available.
 */
async function tryVerifyAnchor(pageKey, anchorId) {
  // Source of truth #1: the build's paragraph index.
  const distIdxPath = path.join(CWD, "dist", "para-index.json");
  if (await exists(distIdxPath)) {
    const idx = JSON.parse(await fs.readFile(distIdxPath, "utf8"));
    const byId = idx?.byId;
    if (byId && typeof byId === "object" && byId[anchorId] != null) return true;
  }

  // Source of truth #2: the anchors baseline used by the test suite.
  const baselinePath = path.join(CWD, "tests", "anchors-baseline.json");
  if (await exists(baselinePath)) {
    const baseline = JSON.parse(await fs.readFile(baselinePath, "utf8"));

    // Collect every id array associated with this page, tolerating both
    // known shapes of the baseline file.
    const candidates = [];

    // Shape 1: { pages: { "<path>": [ids...] } } — loose match on the path.
    if (baseline?.pages && typeof baseline.pages === "object") {
      for (const [pagePath, ids] of Object.entries(baseline.pages)) {
        if (!Array.isArray(ids)) continue;
        if (String(pagePath).includes(pageKey)) candidates.push(...ids);
      }
    }

    // Shape 2: { entries: [{ page, ids }] }.
    if (Array.isArray(baseline?.entries)) {
      for (const item of baseline.entries) {
        const page = String(item?.page || "");
        const ids = item?.ids;
        if (Array.isArray(ids) && page.includes(pageKey)) candidates.push(...ids);
      }
    }

    if (candidates.length) {
      return candidates.some((candidate) => String(candidate) === anchorId);
    }
  }

  // No verification source available.
  return null;
}
|
|
||||||
|
|
||||||
/* ----------------------------- annotations I/O ----------------------------- */
|
|
||||||
|
|
||||||
/**
 * Load an annotation YAML document for a page and validate its invariants.
 * Returns a fresh empty skeleton when the file does not exist yet.
 *
 * @param {string} fileAbs - Absolute path of the YAML file.
 * @param {string} pageKey - Expected page key (path without surrounding slashes).
 * @returns {Promise<object>} The validated document.
 * @throws {Error} On YAML parse failure, or (with `__exitCode` 2) on
 *   schema/page validation failures.
 */
async function loadAnnoDoc(fileAbs, pageKey) {
  if (!(await exists(fileAbs))) {
    // Missing file: a well-formed empty document for this page.
    return { schema: 1, page: pageKey, paras: {} };
  }
  const raw = await fs.readFile(fileAbs, "utf8");
  let doc;
  try {
    doc = YAML.parse(raw);
  } catch (e) {
    throw new Error(`${path.relative(CWD, fileAbs)}: parse failed: ${String(e?.message ?? e)}`);
  }
  // Fix: pass exit code 2 explicitly — validation failures here previously
  // exited with the default code 1, while the sibling loader and the rest of
  // the ticket-validation path use 2 for this class of error.
  assert(isPlainObject(doc), `${path.relative(CWD, fileAbs)}: doc must be an object`, 2);
  assert(doc.schema === 1, `${path.relative(CWD, fileAbs)}: schema must be 1`, 2);
  assert(isPlainObject(doc.paras), `${path.relative(CWD, fileAbs)}: missing object key "paras"`, 2);

  if (doc.page != null) {
    // Declared page must match the path-derived key (slashes stripped).
    const got = String(doc.page).replace(/^\/+/, "").replace(/\/+$/, "");
    assert(got === pageKey, `${path.relative(CWD, fileAbs)}: page mismatch (page="${doc.page}" vs path="${pageKey}")`, 2);
  } else {
    doc.page = pageKey;
  }

  return doc;
}
|
|
||||||
|
|
||||||
function sortParasObject(paras) {
|
|
||||||
const keys = Object.keys(paras || {});
|
|
||||||
keys.sort((a, b) => {
|
|
||||||
const ia = paraIndexFromId(a);
|
|
||||||
const ib = paraIndexFromId(b);
|
|
||||||
if (Number.isFinite(ia) && Number.isFinite(ib) && ia !== ib) return ia - ib;
|
|
||||||
return String(a).localeCompare(String(b));
|
|
||||||
});
|
});
|
||||||
const out = {};
|
if (!res.ok) {
|
||||||
for (const k of keys) out[k] = paras[k];
|
const t = await res.text().catch(() => "");
|
||||||
return out;
|
throw new Error(`download failed HTTP ${res.status}: ${url}\n${t}`);
|
||||||
|
}
|
||||||
|
const buf = Buffer.from(await res.arrayBuffer());
|
||||||
|
await fs.mkdir(path.dirname(destAbs), { recursive: true });
|
||||||
|
await fs.writeFile(destAbs, buf);
|
||||||
|
return buf.length;
|
||||||
}
|
}
|
||||||
|
|
||||||
async function saveAnnoDocYaml(fileAbs, doc) {
|
/* ------------------------------ type parsers ------------------------------ */
|
||||||
await fs.mkdir(path.dirname(fileAbs), { recursive: true });
|
|
||||||
doc.paras = sortParasObject(doc.paras);
|
|
||||||
const out = YAML.stringify(doc);
|
|
||||||
await fs.writeFile(fileAbs, out, "utf8");
|
|
||||||
}
|
|
||||||
|
|
||||||
/* ------------------------------ apply per type ----------------------------- */
|
|
||||||
|
|
||||||
/**
 * Get the annotation entry for `paraId`, creating an empty object in
 * `doc.paras` when it is missing or not a plain object.
 *
 * @param {object} doc - Annotation document with a `paras` map.
 * @param {string} paraId - Paragraph anchor id.
 * @returns {object} The (possibly freshly created) entry — always an object.
 */
function ensureEntry(doc, paraId) {
  const current = doc.paras[paraId];
  // Replace anything that is not a usable record (missing, null, array, scalar).
  if (!isPlainObject(current)) doc.paras[paraId] = {};
  return doc.paras[paraId];
}
|
|
||||||
|
|
||||||
/**
 * Append `item` to `arr` unless an element with the same key already exists.
 *
 * @param {Array} arr - Target array (mutated on insert).
 * @param {any} item - Candidate element.
 * @param {(x: any) => any} keyFn - Dedup key extractor.
 * @returns {boolean} true when the item was appended, false when a duplicate
 *   was already present.
 */
function uniqPush(arr, item, keyFn) {
  const key = keyFn(item);
  const isDuplicate = arr.some((candidate) => keyFn(candidate) === key);
  if (isDuplicate) return false;
  arr.push(item);
  return true;
}
|
|
||||||
|
|
||||||
/**
 * Sort an array of timestamped items in place, ascending by `ts`.
 * Unparseable or missing timestamps sort as epoch 0 (first); ties are broken
 * deterministically by comparing JSON serializations. Non-arrays are ignored.
 *
 * @param {Array|any} arr - Items with an optional ISO `ts` field.
 * @returns {void}
 */
function stableSortByTs(arr) {
  if (!Array.isArray(arr)) return;
  // Missing/invalid ts -> 0 so such items sort to the front, consistently.
  const tsOf = (item) => Date.parse(item?.ts || "") || 0;
  arr.sort((a, b) => {
    const delta = tsOf(a) - tsOf(b);
    if (delta !== 0) return delta;
    return JSON.stringify(a).localeCompare(JSON.stringify(b));
  });
}
|
|
||||||
|
|
||||||
function parseReferenceBlock(body) {
|
function parseReferenceBlock(body) {
|
||||||
const block =
|
const block =
|
||||||
@@ -431,50 +655,6 @@ function parseReferenceBlock(body) {
|
|||||||
};
|
};
|
||||||
}
|
}
|
||||||
|
|
||||||
/**
 * Classify a media attachment by its file extension.
 *
 * @param {string|null|undefined} name - File name (case-insensitive).
 * @returns {"image"|"video"|"audio"|"link"} Media type; "link" when unknown.
 */
function inferMediaTypeFromFilename(name) {
  const lower = String(name || "").toLowerCase();
  const rules = [
    ["image", /\.(png|jpe?g|webp|gif|svg)$/],
    ["video", /\.(mp4|webm|mov|m4v)$/],
    ["audio", /\.(mp3|wav|ogg|m4a)$/],
  ];
  for (const [mediaType, pattern] of rules) {
    if (pattern.test(lower)) return mediaType;
  }
  return "link";
}
|
|
||||||
|
|
||||||
/**
 * Make an attachment name safe for the filesystem and for URLs.
 * Path separators and any non [word / dot / dash] runs become "_",
 * runs of "_" collapse to one, and the result is capped at 180 chars.
 *
 * @param {string|null|undefined} name - Raw file name ("file" when blank).
 * @returns {string} Sanitized name.
 */
function sanitizeFilename(name) {
  let out = String(name || "file");
  out = out.replace(/[\/\\]/g, "_");    // path separators
  out = out.replace(/[^\w.\-]+/g, "_"); // anything not word char / dot / dash
  out = out.replace(/_+/g, "_");        // collapse underscore runs
  return out.slice(0, 180);             // cap length
}
|
|
||||||
|
|
||||||
/**
 * True when `u` parses as an absolute URL with an http(s) scheme.
 *
 * @param {any} u - Candidate URL (coerced to string).
 * @returns {boolean}
 */
function isHttpUrl(u) {
  let parsed;
  try {
    parsed = new URL(String(u));
  } catch {
    // Not a parseable absolute URL.
    return false;
  }
  return ["http:", "https:"].includes(parsed.protocol);
}
|
|
||||||
|
|
||||||
/**
 * Download a URL to a local file, creating parent directories as needed.
 *
 * @param {string} url - Source URL (redirects followed).
 * @param {string} token - Gitea API token; most /attachments endpoints are
 *   public, but the token is sent just in case.
 * @param {string} destAbs - Absolute destination file path.
 * @returns {Promise<number>} Number of bytes written.
 * @throws {Error} When the HTTP response is not OK (body text included).
 */
async function downloadToFile(url, token, destAbs) {
  const response = await fetch(url, {
    headers: {
      Authorization: `token ${token}`,
      "User-Agent": "archicratie-apply-annotation/1.0",
    },
    redirect: "follow",
  });

  if (!response.ok) {
    // Best-effort body capture for a useful error message.
    const errBody = await response.text().catch(() => "");
    throw new Error(`download failed HTTP ${response.status}: ${url}\n${errBody}`);
  }

  const payload = Buffer.from(await response.arrayBuffer());
  await fs.mkdir(path.dirname(destAbs), { recursive: true });
  await fs.writeFile(destAbs, payload);
  return payload.length;
}
|
|
||||||
|
|
||||||
/* ----------------------------------- main ---------------------------------- */
|
/* ----------------------------------- main ---------------------------------- */
|
||||||
|
|
||||||
async function main() {
|
async function main() {
|
||||||
@@ -511,29 +691,53 @@ async function main() {
|
|||||||
const pageKey = normalizePageKeyFromChemin(chemin);
|
const pageKey = normalizePageKeyFromChemin(chemin);
|
||||||
assert(pageKey, "Ticket: impossible de dériver pageKey.", 2);
|
assert(pageKey, "Ticket: impossible de dériver pageKey.", 2);
|
||||||
|
|
||||||
|
const paraOrder = DO_VERIFY ? await loadParaOrderFromDist(pageKey) : null;
|
||||||
|
|
||||||
if (DO_VERIFY) {
|
if (DO_VERIFY) {
|
||||||
const ok = await tryVerifyAnchor(pageKey, ancre);
|
const ok = await tryVerifyAnchor(pageKey, ancre);
|
||||||
if (ok === false) {
|
if (ok === false) {
|
||||||
throw Object.assign(new Error(`Ticket verify: ancre introuvable pour page "${pageKey}" => ${ancre}`), { __exitCode: 2 });
|
throw Object.assign(new Error(`Ticket verify: ancre introuvable pour page "${pageKey}" => ${ancre}`), { __exitCode: 2 });
|
||||||
}
|
}
|
||||||
if (ok === null) {
|
if (ok === null) {
|
||||||
// pas de source de vérité dispo
|
if (STRICT) {
|
||||||
if (STRICT) throw Object.assign(new Error(`Ticket verify (strict): impossible de vérifier (pas de baseline/dist)`), { __exitCode: 2 });
|
throw Object.assign(
|
||||||
console.warn("⚠️ verify: impossible de vérifier (pas de baseline/dist) — on continue.");
|
new Error(`Ticket verify (strict): impossible de vérifier (pas de dist/para-index.json ou baseline)`),
|
||||||
|
{ __exitCode: 2 }
|
||||||
|
);
|
||||||
|
}
|
||||||
|
console.warn("⚠️ verify: impossible de vérifier (pas de dist/para-index.json ou baseline) — on continue.");
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
const annoFileAbs = path.join(ANNO_DIR, `${pageKey}.yml`);
|
// ✅ shard path: src/annotations/<pageKey>/<paraId>.yml
|
||||||
const annoFileRel = path.relative(CWD, annoFileAbs).replace(/\\/g, "/");
|
const shardAbs = path.join(ANNO_DIR, ...pageKey.split("/"), `${ancre}.yml`);
|
||||||
|
const shardRel = path.relative(CWD, shardAbs).replace(/\\/g, "/");
|
||||||
|
|
||||||
console.log("✅ Parsed:", { type, chemin, ancre: `#${ancre}`, pageKey, annoFile: annoFileRel });
|
// legacy monolith: src/annotations/<pageKey>.yml (read-only, for migration)
|
||||||
|
const legacyAbs = path.join(ANNO_DIR, `${pageKey}.yml`);
|
||||||
|
|
||||||
const doc = await loadAnnoDoc(annoFileAbs, pageKey);
|
console.log("✅ Parsed:", { type, chemin, ancre: `#${ancre}`, pageKey, annoFile: shardRel });
|
||||||
const entry = ensureEntry(doc, ancre);
|
|
||||||
|
// load shard doc
|
||||||
|
const doc = await loadAnnoDocYaml(shardAbs, pageKey);
|
||||||
|
if (!isPlainObject(doc.paras[ancre])) doc.paras[ancre] = {};
|
||||||
|
const entry = doc.paras[ancre];
|
||||||
|
|
||||||
|
// merge legacy entry into shard in-memory (non destructive) to keep compat + enable progressive migration
|
||||||
|
if (await exists(legacyAbs)) {
|
||||||
|
try {
|
||||||
|
const legacy = await loadAnnoDocYaml(legacyAbs, pageKey);
|
||||||
|
const legacyEntry = legacy?.paras?.[ancre];
|
||||||
|
if (isPlainObject(legacyEntry)) {
|
||||||
|
deepMergeEntry(entry, legacyEntry);
|
||||||
|
}
|
||||||
|
} catch {
|
||||||
|
// ignore legacy parse issues; shard still applies new data
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
const touchedFiles = [];
|
const touchedFiles = [];
|
||||||
const notes = [];
|
const notes = [];
|
||||||
|
|
||||||
let changed = false;
|
let changed = false;
|
||||||
const nowIso = new Date().toISOString();
|
const nowIso = new Date().toISOString();
|
||||||
|
|
||||||
@@ -545,16 +749,19 @@ async function main() {
|
|||||||
if (!Array.isArray(entry.comments_editorial)) entry.comments_editorial = [];
|
if (!Array.isArray(entry.comments_editorial)) entry.comments_editorial = [];
|
||||||
const item = { text, status: "new", ts: nowIso, fromIssue: issueNum };
|
const item = { text, status: "new", ts: nowIso, fromIssue: issueNum };
|
||||||
|
|
||||||
const added = uniqPush(entry.comments_editorial, item, (x) => `${(x?.text || "").trim()}`);
|
const before = entry.comments_editorial.length;
|
||||||
if (added) { changed = true; notes.push(`+ comment added (len=${text.length})`); }
|
entry.comments_editorial = uniqUnion(entry.comments_editorial, [item], keyComment);
|
||||||
else notes.push(`~ comment already present (dedup)`);
|
if (entry.comments_editorial.length !== before) {
|
||||||
|
changed = true;
|
||||||
|
notes.push(`+ comment added (len=${text.length})`);
|
||||||
|
} else {
|
||||||
|
notes.push(`~ comment already present (dedup)`);
|
||||||
|
}
|
||||||
stableSortByTs(entry.comments_editorial);
|
stableSortByTs(entry.comments_editorial);
|
||||||
}
|
}
|
||||||
|
|
||||||
else if (type === "type/reference") {
|
else if (type === "type/reference") {
|
||||||
const ref = parseReferenceBlock(body);
|
const ref = parseReferenceBlock(body);
|
||||||
|
|
||||||
assert(ref.url || ref.label, "Ticket reference: renseigne au moins - URL: ou - Label: dans le ticket.", 2);
|
assert(ref.url || ref.label, "Ticket reference: renseigne au moins - URL: ou - Label: dans le ticket.", 2);
|
||||||
|
|
||||||
if (STRICT && ref.url && !isHttpUrl(ref.url)) {
|
if (STRICT && ref.url && !isHttpUrl(ref.url)) {
|
||||||
@@ -571,34 +778,35 @@ async function main() {
|
|||||||
};
|
};
|
||||||
if (ref.citation) item.citation = ref.citation;
|
if (ref.citation) item.citation = ref.citation;
|
||||||
|
|
||||||
const added = uniqPush(entry.refs, item, (x) => `${x?.url || ""}||${x?.label || ""}||${x?.kind || ""}||${x?.citation || ""}`);
|
const before = entry.refs.length;
|
||||||
if (added) { changed = true; notes.push(`+ reference added (${item.url ? "url" : "label"})`); }
|
entry.refs = uniqUnion(entry.refs, [item], keyRef);
|
||||||
else notes.push(`~ reference already present (dedup)`);
|
if (entry.refs.length !== before) {
|
||||||
|
changed = true;
|
||||||
|
notes.push(`+ reference added (${item.url ? "url" : "label"})`);
|
||||||
|
} else {
|
||||||
|
notes.push(`~ reference already present (dedup)`);
|
||||||
|
}
|
||||||
stableSortByTs(entry.refs);
|
stableSortByTs(entry.refs);
|
||||||
}
|
}
|
||||||
|
|
||||||
else if (type === "type/media") {
|
else if (type === "type/media") {
|
||||||
if (!Array.isArray(entry.media)) entry.media = [];
|
if (!Array.isArray(entry.media)) entry.media = [];
|
||||||
|
|
||||||
const atts = NO_DOWNLOAD ? [] : await fetchIssueAssets({ forgeApiBase, owner, repo, token, issueNum });
|
const caption = (title || "").trim();
|
||||||
|
if (STRICT && !caption) {
|
||||||
if (!atts.length) {
|
throw Object.assign(new Error("Ticket media (strict): caption vide (titre de ticket requis)."), { __exitCode: 2 });
|
||||||
notes.push("! no assets found (nothing to download).");
|
|
||||||
}
|
}
|
||||||
|
const captionFinal = caption || ".";
|
||||||
|
|
||||||
|
const atts = NO_DOWNLOAD ? [] : await fetchIssueAssets({ forgeApiBase, owner, repo, token, issueNum });
|
||||||
|
if (!atts.length) notes.push("! no assets found (nothing to download).");
|
||||||
|
|
||||||
for (const a of atts) {
|
for (const a of atts) {
|
||||||
const name = sanitizeFilename(a?.name || `asset-${a?.id || "x"}`);
|
const name = sanitizeFilename(a?.name || `asset-${a?.id || "x"}`);
|
||||||
const dl = a?.browser_download_url || a?.download_url || "";
|
const dl = a?.browser_download_url || a?.download_url || "";
|
||||||
if (!dl) { notes.push(`! asset missing download url: ${name}`); continue; }
|
if (!dl) { notes.push(`! asset missing download url: ${name}`); continue; }
|
||||||
|
|
||||||
// caption = title du ticket (fallback ".")
|
const mediaDirAbs = path.join(PUBLIC_DIR, "media", ...pageKey.split("/"), ancre);
|
||||||
const caption = (title || "").trim() || ".";
|
|
||||||
if (STRICT && !caption.trim()) {
|
|
||||||
throw Object.assign(new Error("Ticket media (strict): caption vide."), { __exitCode: 2 });
|
|
||||||
}
|
|
||||||
|
|
||||||
const mediaDirAbs = path.join(PUBLIC_DIR, "media", pageKey, ancre);
|
|
||||||
const destAbs = path.join(mediaDirAbs, name);
|
const destAbs = path.join(mediaDirAbs, name);
|
||||||
const urlPath = `${MEDIA_URL_ROOT}/${pageKey}/${ancre}/${name}`.replace(/\/{2,}/g, "/");
|
const urlPath = `${MEDIA_URL_ROOT}/${pageKey}/${ancre}/${name}`.replace(/\/{2,}/g, "/");
|
||||||
|
|
||||||
@@ -608,21 +816,24 @@ async function main() {
|
|||||||
const bytes = await downloadToFile(dl, token, destAbs);
|
const bytes = await downloadToFile(dl, token, destAbs);
|
||||||
notes.push(`+ downloaded ${name} (${bytes} bytes) -> ${urlPath}`);
|
notes.push(`+ downloaded ${name} (${bytes} bytes) -> ${urlPath}`);
|
||||||
touchedFiles.push(path.relative(CWD, destAbs).replace(/\\/g, "/"));
|
touchedFiles.push(path.relative(CWD, destAbs).replace(/\\/g, "/"));
|
||||||
|
changed = true;
|
||||||
} else {
|
} else {
|
||||||
notes.push(`(dry) would download ${name} -> ${urlPath}`);
|
notes.push(`(dry) would download ${name} -> ${urlPath}`);
|
||||||
|
changed = true;
|
||||||
}
|
}
|
||||||
|
|
||||||
const item = {
|
const item = {
|
||||||
type: inferMediaTypeFromFilename(name),
|
type: inferMediaTypeFromFilename(name),
|
||||||
src: urlPath,
|
src: urlPath,
|
||||||
caption,
|
caption: captionFinal,
|
||||||
credit: "",
|
credit: "",
|
||||||
ts: nowIso,
|
ts: nowIso,
|
||||||
fromIssue: issueNum,
|
fromIssue: issueNum,
|
||||||
};
|
};
|
||||||
|
|
||||||
const added = uniqPush(entry.media, item, (x) => String(x?.src || ""));
|
const before = entry.media.length;
|
||||||
if (added) changed = true;
|
entry.media = uniqUnion(entry.media, [item], keyMedia);
|
||||||
|
if (entry.media.length !== before) changed = true;
|
||||||
}
|
}
|
||||||
|
|
||||||
stableSortByTs(entry.media);
|
stableSortByTs(entry.media);
|
||||||
@@ -640,7 +851,7 @@ async function main() {
|
|||||||
|
|
||||||
if (DRY_RUN) {
|
if (DRY_RUN) {
|
||||||
console.log("\n--- DRY RUN (no write) ---");
|
console.log("\n--- DRY RUN (no write) ---");
|
||||||
console.log(`Would update: ${annoFileRel}`);
|
console.log(`Would update: ${shardRel}`);
|
||||||
for (const n of notes) console.log(" ", n);
|
for (const n of notes) console.log(" ", n);
|
||||||
console.log("\nExcerpt (resulting entry):");
|
console.log("\nExcerpt (resulting entry):");
|
||||||
console.log(YAML.stringify({ [ancre]: doc.paras[ancre] }).trimEnd());
|
console.log(YAML.stringify({ [ancre]: doc.paras[ancre] }).trimEnd());
|
||||||
@@ -648,10 +859,10 @@ async function main() {
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
await saveAnnoDocYaml(annoFileAbs, doc);
|
await saveAnnoDocYaml(shardAbs, doc, paraOrder);
|
||||||
touchedFiles.unshift(annoFileRel);
|
touchedFiles.unshift(shardRel);
|
||||||
|
|
||||||
console.log(`✅ Updated: ${annoFileRel}`);
|
console.log(`✅ Updated: ${shardRel}`);
|
||||||
for (const n of notes) console.log(" ", n);
|
for (const n of notes) console.log(" ", n);
|
||||||
|
|
||||||
if (DO_COMMIT) {
|
if (DO_COMMIT) {
|
||||||
|
|||||||
@@ -1,28 +1,106 @@
|
|||||||
|
#!/usr/bin/env node
|
||||||
// scripts/build-annotations-index.mjs
|
// scripts/build-annotations-index.mjs
|
||||||
|
// Construit dist/annotations-index.json à partir de src/annotations/**/*.yml
|
||||||
|
// Supporte:
|
||||||
|
// - monolith : src/annotations/<pageKey>.yml
|
||||||
|
// - shard : src/annotations/<pageKey>/<paraId>.yml (paraId = p-<n>-...)
|
||||||
|
// Invariants:
|
||||||
|
// - doc.schema === 1
|
||||||
|
// - doc.page (si présent) == pageKey déduit du chemin
|
||||||
|
// - shard: doc.paras doit contenir EXACTEMENT la clé paraId (sinon fail)
|
||||||
|
//
|
||||||
|
// Deep-merge non destructif (media/refs/comments dédupliqués), tri stable.
|
||||||
|
|
||||||
import fs from "node:fs/promises";
|
import fs from "node:fs/promises";
|
||||||
import path from "node:path";
|
import path from "node:path";
|
||||||
import YAML from "yaml";
|
import YAML from "yaml";
|
||||||
|
|
||||||
function parseArgs(argv) {
|
const ROOT = process.cwd();
|
||||||
const out = {
|
const ANNO_ROOT = path.join(ROOT, "src", "annotations");
|
||||||
inDir: "src/annotations",
|
const DIST_DIR = path.join(ROOT, "dist");
|
||||||
outFile: "dist/annotations-index.json",
|
const OUT = path.join(DIST_DIR, "annotations-index.json");
|
||||||
};
|
|
||||||
|
|
||||||
for (let i = 0; i < argv.length; i++) {
|
function assert(cond, msg) {
|
||||||
const a = argv[i];
|
if (!cond) throw new Error(msg);
|
||||||
|
}
|
||||||
|
|
||||||
if (a === "--in" && argv[i + 1]) out.inDir = argv[++i];
|
function isObj(x) {
|
||||||
else if (a.startsWith("--in=")) out.inDir = a.slice("--in=".length);
|
return !!x && typeof x === "object" && !Array.isArray(x);
|
||||||
|
}
|
||||||
|
function isArr(x) {
|
||||||
|
return Array.isArray(x);
|
||||||
|
}
|
||||||
|
|
||||||
if (a === "--out" && argv[i + 1]) out.outFile = argv[++i];
|
function normPath(s) {
|
||||||
else if (a.startsWith("--out=")) out.outFile = a.slice("--out=".length);
|
return String(s || "")
|
||||||
|
.replace(/\\/g, "/")
|
||||||
|
.replace(/^\/+|\/+$/g, "");
|
||||||
|
}
|
||||||
|
|
||||||
|
function paraNum(pid) {
|
||||||
|
const m = String(pid).match(/^p-(\d+)-/i);
|
||||||
|
return m ? Number(m[1]) : Number.POSITIVE_INFINITY;
|
||||||
|
}
|
||||||
|
|
||||||
|
function stableSortByTs(arr) {
|
||||||
|
if (!Array.isArray(arr)) return;
|
||||||
|
arr.sort((a, b) => {
|
||||||
|
const ta = Date.parse(a?.ts || "") || 0;
|
||||||
|
const tb = Date.parse(b?.ts || "") || 0;
|
||||||
|
if (ta !== tb) return ta - tb;
|
||||||
|
return JSON.stringify(a).localeCompare(JSON.stringify(b));
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
function keyMedia(x) { return String(x?.src || ""); }
|
||||||
|
function keyRef(x) {
|
||||||
|
return `${x?.url || ""}||${x?.label || ""}||${x?.kind || ""}||${x?.citation || ""}`;
|
||||||
|
}
|
||||||
|
function keyComment(x) { return String(x?.text || "").trim(); }
|
||||||
|
|
||||||
|
function uniqUnion(dst, src, keyFn) {
|
||||||
|
const out = isArr(dst) ? [...dst] : [];
|
||||||
|
const seen = new Set(out.map((x) => keyFn(x)));
|
||||||
|
for (const it of (isArr(src) ? src : [])) {
|
||||||
|
const k = keyFn(it);
|
||||||
|
if (!k) continue;
|
||||||
|
if (!seen.has(k)) {
|
||||||
|
seen.add(k);
|
||||||
|
out.push(it);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
return out;
|
return out;
|
||||||
}
|
}
|
||||||
|
|
||||||
async function exists(p) {
|
function deepMergeEntry(dst, src) {
|
||||||
try { await fs.access(p); return true; } catch { return false; }
|
if (!isObj(dst) || !isObj(src)) return;
|
||||||
|
|
||||||
|
for (const [k, v] of Object.entries(src)) {
|
||||||
|
if (k === "media" && isArr(v)) { dst.media = uniqUnion(dst.media, v, keyMedia); continue; }
|
||||||
|
if (k === "refs" && isArr(v)) { dst.refs = uniqUnion(dst.refs, v, keyRef); continue; }
|
||||||
|
if (k === "comments_editorial" && isArr(v)) { dst.comments_editorial = uniqUnion(dst.comments_editorial, v, keyComment); continue; }
|
||||||
|
|
||||||
|
if (isObj(v)) {
|
||||||
|
if (!isObj(dst[k])) dst[k] = {};
|
||||||
|
deepMergeEntry(dst[k], v);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (isArr(v)) {
|
||||||
|
const cur = isArr(dst[k]) ? dst[k] : [];
|
||||||
|
const seen = new Set(cur.map((x) => JSON.stringify(x)));
|
||||||
|
const out = [...cur];
|
||||||
|
for (const it of v) {
|
||||||
|
const s = JSON.stringify(it);
|
||||||
|
if (!seen.has(s)) { seen.add(s); out.push(it); }
|
||||||
|
}
|
||||||
|
dst[k] = out;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
// scalar: set only if missing/empty
|
||||||
|
if (!(k in dst) || dst[k] == null || dst[k] === "") dst[k] = v;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
async function walk(dir) {
|
async function walk(dir) {
|
||||||
@@ -30,111 +108,116 @@ async function walk(dir) {
|
|||||||
const ents = await fs.readdir(dir, { withFileTypes: true });
|
const ents = await fs.readdir(dir, { withFileTypes: true });
|
||||||
for (const e of ents) {
|
for (const e of ents) {
|
||||||
const p = path.join(dir, e.name);
|
const p = path.join(dir, e.name);
|
||||||
if (e.isDirectory()) out.push(...(await walk(p)));
|
if (e.isDirectory()) out.push(...await walk(p));
|
||||||
else out.push(p);
|
else if (e.isFile() && /\.ya?ml$/i.test(e.name)) out.push(p);
|
||||||
}
|
}
|
||||||
return out;
|
return out;
|
||||||
}
|
}
|
||||||
|
|
||||||
function inferPageKeyFromFile(inDirAbs, fileAbs) {
|
function inferExpectedFromRel(relNoExt) {
|
||||||
// src/annotations/<page>.yml -> "<page>"
|
const parts = relNoExt.split("/").filter(Boolean);
|
||||||
const rel = path.relative(inDirAbs, fileAbs).replace(/\\/g, "/");
|
const last = parts.at(-1) || "";
|
||||||
return rel.replace(/\.(ya?ml|json)$/i, "");
|
const isShard = parts.length > 1 && /^p-\d+-/i.test(last); // ✅ durcissement
|
||||||
|
const pageKey = isShard ? parts.slice(0, -1).join("/") : relNoExt;
|
||||||
|
const paraId = isShard ? last : null;
|
||||||
|
return { isShard, pageKey, paraId };
|
||||||
}
|
}
|
||||||
|
|
||||||
function assert(cond, msg) {
|
function validateAndNormalizeDoc(doc, relFile, expectedPageKey, expectedParaId) {
|
||||||
if (!cond) throw new Error(msg);
|
assert(isObj(doc), `${relFile}: doc must be an object`);
|
||||||
}
|
assert(doc.schema === 1, `${relFile}: schema must be 1`);
|
||||||
|
assert(isObj(doc.paras), `${relFile}: missing object key "paras"`);
|
||||||
|
|
||||||
function isPlainObject(x) {
|
const gotPage = doc.page != null ? normPath(doc.page) : "";
|
||||||
return !!x && typeof x === "object" && !Array.isArray(x);
|
const expPage = normPath(expectedPageKey);
|
||||||
}
|
|
||||||
|
|
||||||
function normalizePageKey(s) {
|
if (gotPage) {
|
||||||
// pas de / en tête/fin
|
|
||||||
return String(s || "").replace(/^\/+/, "").replace(/\/+$/, "");
|
|
||||||
}
|
|
||||||
|
|
||||||
function validateAndNormalizeDoc(doc, pageKey, fileRel) {
|
|
||||||
assert(isPlainObject(doc), `${fileRel}: document must be an object`);
|
|
||||||
assert(doc.schema === 1, `${fileRel}: schema must be 1`);
|
|
||||||
if (doc.page != null) {
|
|
||||||
assert(
|
assert(
|
||||||
normalizePageKey(doc.page) === pageKey,
|
gotPage === expPage,
|
||||||
`${fileRel}: page mismatch (page="${doc.page}" vs path="${pageKey}")`
|
`${relFile}: page mismatch (page="${doc.page}" vs path="${expectedPageKey}")`
|
||||||
|
);
|
||||||
|
} else {
|
||||||
|
doc.page = expPage;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (expectedParaId) {
|
||||||
|
const keys = Object.keys(doc.paras || {}).map(String);
|
||||||
|
assert(
|
||||||
|
keys.includes(expectedParaId),
|
||||||
|
`${relFile}: shard mismatch: must contain paras["${expectedParaId}"]`
|
||||||
|
);
|
||||||
|
assert(
|
||||||
|
keys.length === 1 && keys[0] === expectedParaId,
|
||||||
|
`${relFile}: shard invariant violated: shard file must contain ONLY paras["${expectedParaId}"] (got: ${keys.join(", ")})`
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
assert(isPlainObject(doc.paras), `${fileRel}: missing object key "paras"`);
|
|
||||||
|
|
||||||
const parasOut = Object.create(null);
|
return doc;
|
||||||
|
|
||||||
for (const [paraId, entry] of Object.entries(doc.paras)) {
|
|
||||||
assert(/^p-\d+-/i.test(paraId), `${fileRel}: invalid para id "${paraId}"`);
|
|
||||||
|
|
||||||
// entry peut être vide, mais doit être un objet si présent
|
|
||||||
assert(entry == null || isPlainObject(entry), `${fileRel}: paras.${paraId} must be an object`);
|
|
||||||
|
|
||||||
const e = entry ? { ...entry } : {};
|
|
||||||
|
|
||||||
// Sanity checks (non destructifs : on n’écrase pas, on vérifie juste les types)
|
|
||||||
if (e.refs != null) assert(Array.isArray(e.refs), `${fileRel}: paras.${paraId}.refs must be an array`);
|
|
||||||
if (e.authors != null) assert(Array.isArray(e.authors), `${fileRel}: paras.${paraId}.authors must be an array`);
|
|
||||||
if (e.quotes != null) assert(Array.isArray(e.quotes), `${fileRel}: paras.${paraId}.quotes must be an array`);
|
|
||||||
if (e.media != null) assert(Array.isArray(e.media), `${fileRel}: paras.${paraId}.media must be an array`);
|
|
||||||
if (e.comments_editorial != null) assert(Array.isArray(e.comments_editorial), `${fileRel}: paras.${paraId}.comments_editorial must be an array`);
|
|
||||||
|
|
||||||
parasOut[paraId] = e;
|
|
||||||
}
|
|
||||||
|
|
||||||
return parasOut;
|
|
||||||
}
|
|
||||||
|
|
||||||
async function readDoc(fileAbs) {
|
|
||||||
const raw = await fs.readFile(fileAbs, "utf8");
|
|
||||||
if (/\.json$/i.test(fileAbs)) return JSON.parse(raw);
|
|
||||||
return YAML.parse(raw);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
async function main() {
|
async function main() {
|
||||||
const { inDir, outFile } = parseArgs(process.argv.slice(2));
|
const pages = {};
|
||||||
const CWD = process.cwd();
|
const errors = [];
|
||||||
|
|
||||||
const inDirAbs = path.isAbsolute(inDir) ? inDir : path.join(CWD, inDir);
|
await fs.mkdir(DIST_DIR, { recursive: true });
|
||||||
const outAbs = path.isAbsolute(outFile) ? outFile : path.join(CWD, outFile);
|
|
||||||
|
|
||||||
// antifragile
|
const files = await walk(ANNO_ROOT);
|
||||||
if (!(await exists(inDirAbs))) {
|
|
||||||
console.log(`ℹ️ annotations-index: skip (input missing): ${inDir}`);
|
|
||||||
process.exit(0);
|
|
||||||
}
|
|
||||||
|
|
||||||
const files = (await walk(inDirAbs)).filter((p) => /\.(ya?ml|json)$/i.test(p));
|
for (const fp of files) {
|
||||||
if (!files.length) {
|
const rel = normPath(path.relative(ANNO_ROOT, fp));
|
||||||
console.log(`ℹ️ annotations-index: skip (no .yml/.yaml/.json found in): ${inDir}`);
|
const relNoExt = rel.replace(/\.ya?ml$/i, "");
|
||||||
process.exit(0);
|
const { isShard, pageKey, paraId } = inferExpectedFromRel(relNoExt);
|
||||||
}
|
|
||||||
|
|
||||||
const pages = Object.create(null);
|
|
||||||
let paraCount = 0;
|
|
||||||
|
|
||||||
for (const f of files) {
|
|
||||||
const fileRel = path.relative(CWD, f).replace(/\\/g, "/");
|
|
||||||
const pageKey = normalizePageKey(inferPageKeyFromFile(inDirAbs, f));
|
|
||||||
assert(pageKey, `${fileRel}: cannot infer page key`);
|
|
||||||
|
|
||||||
let doc;
|
|
||||||
try {
|
try {
|
||||||
doc = await readDoc(f);
|
const raw = await fs.readFile(fp, "utf8");
|
||||||
|
const doc = YAML.parse(raw) || {};
|
||||||
|
|
||||||
|
if (!isObj(doc) || doc.schema !== 1) continue;
|
||||||
|
|
||||||
|
validateAndNormalizeDoc(
|
||||||
|
doc,
|
||||||
|
`src/annotations/${rel}`,
|
||||||
|
pageKey,
|
||||||
|
isShard ? paraId : null
|
||||||
|
);
|
||||||
|
|
||||||
|
const pg = (pages[pageKey] ??= { paras: {} });
|
||||||
|
|
||||||
|
if (isShard) {
|
||||||
|
const entry = doc.paras[paraId];
|
||||||
|
if (!isObj(pg.paras[paraId])) pg.paras[paraId] = {};
|
||||||
|
if (isObj(entry)) deepMergeEntry(pg.paras[paraId], entry);
|
||||||
|
|
||||||
|
stableSortByTs(pg.paras[paraId].media);
|
||||||
|
stableSortByTs(pg.paras[paraId].refs);
|
||||||
|
stableSortByTs(pg.paras[paraId].comments_editorial);
|
||||||
|
} else {
|
||||||
|
for (const [pid, entry] of Object.entries(doc.paras || {})) {
|
||||||
|
const p = String(pid);
|
||||||
|
if (!isObj(pg.paras[p])) pg.paras[p] = {};
|
||||||
|
if (isObj(entry)) deepMergeEntry(pg.paras[p], entry);
|
||||||
|
|
||||||
|
stableSortByTs(pg.paras[p].media);
|
||||||
|
stableSortByTs(pg.paras[p].refs);
|
||||||
|
stableSortByTs(pg.paras[p].comments_editorial);
|
||||||
|
}
|
||||||
|
}
|
||||||
} catch (e) {
|
} catch (e) {
|
||||||
throw new Error(`${fileRel}: parse failed: ${String(e?.message ?? e)}`);
|
errors.push({ file: `src/annotations/${rel}`, error: String(e?.message || e) });
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
const paras = validateAndNormalizeDoc(doc, pageKey, fileRel);
|
for (const [pageKey, pg] of Object.entries(pages)) {
|
||||||
|
const keys = Object.keys(pg.paras || {});
|
||||||
// 1 fichier = 1 page (canon)
|
keys.sort((a, b) => {
|
||||||
assert(!pages[pageKey], `${fileRel}: duplicate page "${pageKey}" (only one file per page)`);
|
const ia = paraNum(a);
|
||||||
pages[pageKey] = { paras };
|
const ib = paraNum(b);
|
||||||
paraCount += Object.keys(paras).length;
|
if (Number.isFinite(ia) && Number.isFinite(ib) && ia !== ib) return ia - ib;
|
||||||
|
return String(a).localeCompare(String(b));
|
||||||
|
});
|
||||||
|
const next = {};
|
||||||
|
for (const k of keys) next[k] = pg.paras[k];
|
||||||
|
pg.paras = next;
|
||||||
}
|
}
|
||||||
|
|
||||||
const out = {
|
const out = {
|
||||||
@@ -143,17 +226,21 @@ async function main() {
|
|||||||
pages,
|
pages,
|
||||||
stats: {
|
stats: {
|
||||||
pages: Object.keys(pages).length,
|
pages: Object.keys(pages).length,
|
||||||
paras: paraCount,
|
paras: Object.values(pages).reduce((n, p) => n + Object.keys(p.paras || {}).length, 0),
|
||||||
|
errors: errors.length,
|
||||||
},
|
},
|
||||||
|
errors,
|
||||||
};
|
};
|
||||||
|
|
||||||
await fs.mkdir(path.dirname(outAbs), { recursive: true });
|
if (errors.length) {
|
||||||
await fs.writeFile(outAbs, JSON.stringify(out), "utf8");
|
throw new Error(`${errors[0].file}: ${errors[0].error}`);
|
||||||
|
}
|
||||||
|
|
||||||
console.log(`✅ annotations-index: pages=${out.stats.pages} paras=${out.stats.paras} -> ${path.relative(CWD, outAbs)}`);
|
await fs.writeFile(OUT, JSON.stringify(out), "utf8");
|
||||||
|
console.log(`✅ annotations-index: pages=${out.stats.pages} paras=${out.stats.paras} -> dist/annotations-index.json`);
|
||||||
}
|
}
|
||||||
|
|
||||||
main().catch((e) => {
|
main().catch((e) => {
|
||||||
console.error("FAIL: build-annotations-index crashed:", e);
|
console.error(`FAIL: build-annotations-index crashed: ${e?.stack || e?.message || e}`);
|
||||||
process.exit(1);
|
process.exit(1);
|
||||||
});
|
});
|
||||||
@@ -48,6 +48,9 @@ async function main() {
|
|||||||
let missing = 0;
|
let missing = 0;
|
||||||
const notes = [];
|
const notes = [];
|
||||||
|
|
||||||
|
// Optim: éviter de vérifier 100 fois le même fichier media
|
||||||
|
const seenMedia = new Set(); // src string
|
||||||
|
|
||||||
for (const f of files) {
|
for (const f of files) {
|
||||||
const rel = path.relative(CWD, f).replace(/\\/g, "/");
|
const rel = path.relative(CWD, f).replace(/\\/g, "/");
|
||||||
const raw = await fs.readFile(f, "utf8");
|
const raw = await fs.readFile(f, "utf8");
|
||||||
@@ -70,6 +73,10 @@ async function main() {
|
|||||||
const src = String(m?.src || "");
|
const src = String(m?.src || "");
|
||||||
if (!src.startsWith("/media/")) continue; // externes ok, ou autres conventions futures
|
if (!src.startsWith("/media/")) continue; // externes ok, ou autres conventions futures
|
||||||
|
|
||||||
|
// dédupe
|
||||||
|
if (seenMedia.has(src)) continue;
|
||||||
|
seenMedia.add(src);
|
||||||
|
|
||||||
checked++;
|
checked++;
|
||||||
const p = toPublicPathFromUrl(src);
|
const p = toPublicPathFromUrl(src);
|
||||||
if (!p) continue;
|
if (!p) continue;
|
||||||
|
|||||||
@@ -27,11 +27,6 @@ function escRe(s) {
|
|||||||
return String(s).replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
|
return String(s).replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
|
||||||
}
|
}
|
||||||
|
|
||||||
function inferPageKeyFromFile(fileAbs) {
|
|
||||||
const rel = path.relative(ANNO_DIR, fileAbs).replace(/\\/g, "/");
|
|
||||||
return rel.replace(/\.(ya?ml|json)$/i, "");
|
|
||||||
}
|
|
||||||
|
|
||||||
function normalizePageKey(s) {
|
function normalizePageKey(s) {
|
||||||
return String(s || "").replace(/^\/+/, "").replace(/\/+$/, "");
|
return String(s || "").replace(/^\/+/, "").replace(/\/+$/, "");
|
||||||
}
|
}
|
||||||
@@ -40,6 +35,31 @@ function isPlainObject(x) {
|
|||||||
return !!x && typeof x === "object" && !Array.isArray(x);
|
return !!x && typeof x === "object" && !Array.isArray(x);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
function isParaId(s) {
|
||||||
|
return /^p-\d+-/i.test(String(s || ""));
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Supporte:
|
||||||
|
* - monolith: src/annotations/<pageKey>.yml -> pageKey = rel sans ext
|
||||||
|
* - shard : src/annotations/<pageKey>/<paraId>.yml -> pageKey = dirname(rel), paraId = basename
|
||||||
|
*
|
||||||
|
* shard seulement si le fichier est dans un sous-dossier (anti cas pathologique).
|
||||||
|
*/
|
||||||
|
function inferFromFile(fileAbs) {
|
||||||
|
const rel = path.relative(ANNO_DIR, fileAbs).replace(/\\/g, "/");
|
||||||
|
const relNoExt = rel.replace(/\.(ya?ml|json)$/i, "");
|
||||||
|
const parts = relNoExt.split("/").filter(Boolean);
|
||||||
|
const base = parts[parts.length - 1] || "";
|
||||||
|
const dirParts = parts.slice(0, -1);
|
||||||
|
|
||||||
|
const isShard = dirParts.length > 0 && isParaId(base);
|
||||||
|
const pageKey = isShard ? dirParts.join("/") : relNoExt;
|
||||||
|
const paraId = isShard ? base : "";
|
||||||
|
|
||||||
|
return { pageKey: normalizePageKey(pageKey), paraId };
|
||||||
|
}
|
||||||
|
|
||||||
async function loadAliases() {
|
async function loadAliases() {
|
||||||
if (!(await exists(ALIASES_PATH))) return {};
|
if (!(await exists(ALIASES_PATH))) return {};
|
||||||
try {
|
try {
|
||||||
@@ -83,7 +103,11 @@ async function main() {
|
|||||||
const aliases = await loadAliases();
|
const aliases = await loadAliases();
|
||||||
const files = (await walk(ANNO_DIR)).filter((p) => /\.(ya?ml|json)$/i.test(p));
|
const files = (await walk(ANNO_DIR)).filter((p) => /\.(ya?ml|json)$/i.test(p));
|
||||||
|
|
||||||
let pages = 0;
|
// perf: cache HTML par page (shards = beaucoup de fichiers pour 1 page)
|
||||||
|
const htmlCache = new Map(); // pageKey -> html
|
||||||
|
const missingDistPage = new Set(); // pageKey
|
||||||
|
|
||||||
|
let pagesSeen = new Set();
|
||||||
let checked = 0;
|
let checked = 0;
|
||||||
let failures = 0;
|
let failures = 0;
|
||||||
const notes = [];
|
const notes = [];
|
||||||
@@ -107,7 +131,7 @@ async function main() {
|
|||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
|
|
||||||
const pageKey = normalizePageKey(inferPageKeyFromFile(f));
|
const { pageKey, paraId: shardParaId } = inferFromFile(f);
|
||||||
|
|
||||||
if (doc.page != null && normalizePageKey(doc.page) !== pageKey) {
|
if (doc.page != null && normalizePageKey(doc.page) !== pageKey) {
|
||||||
failures++;
|
failures++;
|
||||||
@@ -121,20 +145,44 @@ async function main() {
|
|||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// shard invariant (fort) : doit contenir paras[paraId]
|
||||||
|
if (shardParaId) {
|
||||||
|
if (!Object.prototype.hasOwnProperty.call(doc.paras, shardParaId)) {
|
||||||
|
failures++;
|
||||||
|
notes.push(`- SHARD MISMATCH: ${rel} (expected paras["${shardParaId}"] present)`);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
// si extras -> warning (non destructif)
|
||||||
|
const keys = Object.keys(doc.paras);
|
||||||
|
if (!(keys.length === 1 && keys[0] === shardParaId)) {
|
||||||
|
notes.push(`- WARN shard has extra paras: ${rel} (expected only "${shardParaId}", got ${keys.join(", ")})`);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pagesSeen.add(pageKey);
|
||||||
|
|
||||||
const distFile = path.join(DIST_DIR, pageKey, "index.html");
|
const distFile = path.join(DIST_DIR, pageKey, "index.html");
|
||||||
if (!(await exists(distFile))) {
|
if (!(await exists(distFile))) {
|
||||||
failures++;
|
if (!missingDistPage.has(pageKey)) {
|
||||||
notes.push(`- MISSING PAGE: dist/${pageKey}/index.html (from ${rel})`);
|
missingDistPage.add(pageKey);
|
||||||
|
failures++;
|
||||||
|
notes.push(`- MISSING PAGE: dist/${pageKey}/index.html (from ${rel})`);
|
||||||
|
} else {
|
||||||
|
notes.push(`- WARN missing page already reported: dist/${pageKey}/index.html (from ${rel})`);
|
||||||
|
}
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
|
|
||||||
pages++;
|
let html = htmlCache.get(pageKey);
|
||||||
const html = await fs.readFile(distFile, "utf8");
|
if (!html) {
|
||||||
|
html = await fs.readFile(distFile, "utf8");
|
||||||
|
htmlCache.set(pageKey, html);
|
||||||
|
}
|
||||||
|
|
||||||
for (const paraId of Object.keys(doc.paras)) {
|
for (const paraId of Object.keys(doc.paras)) {
|
||||||
checked++;
|
checked++;
|
||||||
|
|
||||||
if (!/^p-\d+-/i.test(paraId)) {
|
if (!isParaId(paraId)) {
|
||||||
failures++;
|
failures++;
|
||||||
notes.push(`- INVALID ID: ${rel} (${paraId})`);
|
notes.push(`- INVALID ID: ${rel} (${paraId})`);
|
||||||
continue;
|
continue;
|
||||||
@@ -158,6 +206,7 @@ async function main() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
const warns = notes.filter((x) => x.startsWith("- WARN"));
|
const warns = notes.filter((x) => x.startsWith("- WARN"));
|
||||||
|
const pages = pagesSeen.size;
|
||||||
|
|
||||||
if (failures > 0) {
|
if (failures > 0) {
|
||||||
console.error(`FAIL: annotations invalid (pages=${pages} checked=${checked} failures=${failures})`);
|
console.error(`FAIL: annotations invalid (pages=${pages} checked=${checked} failures=${failures})`);
|
||||||
|
|||||||
@@ -114,7 +114,6 @@ async function runMammoth(docxPath, assetsOutDirWebRoot) {
|
|||||||
);
|
);
|
||||||
|
|
||||||
let html = result.value || "";
|
let html = result.value || "";
|
||||||
|
|
||||||
// Mammoth gives relative src="image-xx.png" ; we will prefix later
|
// Mammoth gives relative src="image-xx.png" ; we will prefix later
|
||||||
return html;
|
return html;
|
||||||
}
|
}
|
||||||
@@ -182,6 +181,25 @@ async function exists(p) {
|
|||||||
try { await fs.access(p); return true; } catch { return false; }
|
try { await fs.access(p); return true; } catch { return false; }
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* ✅ compat:
|
||||||
|
* - ancien : collection="archicratie" + slug="archicrat-ia/chapitre-3"
|
||||||
|
* - nouveau : collection="archicrat-ia" + slug="chapitre-3"
|
||||||
|
*
|
||||||
|
* But : toujours écrire dans src/content/archicrat-ia/<slugSansPrefix>.mdx
|
||||||
|
*/
|
||||||
|
function normalizeDest(collection, slug) {
|
||||||
|
let outCollection = String(collection || "").trim();
|
||||||
|
let outSlug = String(slug || "").trim().replace(/^\/+|\/+$/g, "");
|
||||||
|
|
||||||
|
if (outCollection === "archicratie" && outSlug.startsWith("archicrat-ia/")) {
|
||||||
|
outCollection = "archicrat-ia";
|
||||||
|
outSlug = outSlug.replace(/^archicrat-ia\//, "");
|
||||||
|
}
|
||||||
|
|
||||||
|
return { outCollection, outSlug };
|
||||||
|
}
|
||||||
|
|
||||||
async function main() {
|
async function main() {
|
||||||
const args = parseArgs(process.argv);
|
const args = parseArgs(process.argv);
|
||||||
const manifestPath = path.resolve(args.manifest);
|
const manifestPath = path.resolve(args.manifest);
|
||||||
@@ -203,11 +221,14 @@ async function main() {
|
|||||||
|
|
||||||
for (const it of selected) {
|
for (const it of selected) {
|
||||||
const docxPath = path.resolve(it.source);
|
const docxPath = path.resolve(it.source);
|
||||||
const outFile = path.resolve("src/content", it.collection, `${it.slug}.mdx`);
|
|
||||||
|
const { outCollection, outSlug } = normalizeDest(it.collection, it.slug);
|
||||||
|
|
||||||
|
const outFile = path.resolve("src/content", outCollection, `${outSlug}.mdx`);
|
||||||
const outDir = path.dirname(outFile);
|
const outDir = path.dirname(outFile);
|
||||||
|
|
||||||
const assetsPublicDir = path.posix.join("/imported", it.collection, it.slug);
|
const assetsPublicDir = path.posix.join("/imported", outCollection, outSlug);
|
||||||
const assetsDiskDir = path.resolve("public", "imported", it.collection, it.slug);
|
const assetsDiskDir = path.resolve("public", "imported", outCollection, outSlug);
|
||||||
|
|
||||||
if (!(await exists(docxPath))) {
|
if (!(await exists(docxPath))) {
|
||||||
throw new Error(`Missing source docx: ${docxPath}`);
|
throw new Error(`Missing source docx: ${docxPath}`);
|
||||||
@@ -244,15 +265,17 @@ async function main() {
|
|||||||
|
|
||||||
const defaultVersion = process.env.PUBLIC_RELEASE || "0.1.0";
|
const defaultVersion = process.env.PUBLIC_RELEASE || "0.1.0";
|
||||||
|
|
||||||
|
// ✅ IMPORTANT: archicrat-ia partage edition/status avec archicratie (pas de migration frontmatter)
|
||||||
const schemaDefaultsByCollection = {
|
const schemaDefaultsByCollection = {
|
||||||
archicratie: { edition: "archicratie", status: "modele_sociopolitique", level: 1 },
|
archicratie: { edition: "archicratie", status: "modele_sociopolitique", level: 1 },
|
||||||
ia: { edition: "ia", status: "cas_pratique", level: 1 },
|
"archicrat-ia": { edition: "archicrat-ia", status: "essai_these", level: 1 },
|
||||||
traite: { edition: "traite", status: "ontodynamique", level: 1 },
|
ia: { edition: "ia", status: "cas_pratique", level: 1 },
|
||||||
glossaire: { edition: "glossaire", status: "lexique", level: 1 },
|
traite: { edition: "traite", status: "ontodynamique", level: 1 },
|
||||||
atlas: { edition: "atlas", status: "atlas", level: 1 },
|
glossaire: { edition: "glossaire", status: "lexique", level: 1 },
|
||||||
|
atlas: { edition: "atlas", status: "atlas", level: 1 },
|
||||||
};
|
};
|
||||||
|
|
||||||
const defaults = schemaDefaultsByCollection[it.collection] || { edition: it.collection, status: "draft", level: 1 };
|
const defaults = schemaDefaultsByCollection[outCollection] || { edition: outCollection, status: "draft", level: 1 };
|
||||||
|
|
||||||
const fm = [
|
const fm = [
|
||||||
"---",
|
"---",
|
||||||
|
|||||||
@@ -1,2 +1,5 @@
|
|||||||
{}
|
{
|
||||||
|
"/archicrat-ia/chapitre-3/": {
|
||||||
|
"p-1-60c7ea48": "p-1-a21087b0"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|||||||
10
src/annotations/archicrat-ia/chapitre-1/p-0-8d27a7f5.yml
Normal file
10
src/annotations/archicrat-ia/chapitre-1/p-0-8d27a7f5.yml
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
schema: 1
|
||||||
|
page: archicrat-ia/chapitre-1
|
||||||
|
paras:
|
||||||
|
p-0-8d27a7f5:
|
||||||
|
refs:
|
||||||
|
- url: https://auth.archicratie.trans-hands.synology.me/authenticated
|
||||||
|
label: Lien web
|
||||||
|
kind: (livre / article / vidéo / site / autre) Site
|
||||||
|
ts: 2026-02-27T12:34:31.704Z
|
||||||
|
fromIssue: 142
|
||||||
9
src/annotations/archicrat-ia/chapitre-1/p-1-8a6c18bf.yml
Normal file
9
src/annotations/archicrat-ia/chapitre-1/p-1-8a6c18bf.yml
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
schema: 1
|
||||||
|
page: archicrat-ia/chapitre-1
|
||||||
|
paras:
|
||||||
|
p-1-8a6c18bf:
|
||||||
|
comments_editorial:
|
||||||
|
- text: Yeaha
|
||||||
|
status: new
|
||||||
|
ts: 2026-02-27T12:40:39.462Z
|
||||||
|
fromIssue: 143
|
||||||
18
src/annotations/archicrat-ia/chapitre-3/p-0-ace27175.yml
Normal file
18
src/annotations/archicrat-ia/chapitre-3/p-0-ace27175.yml
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
schema: 1
|
||||||
|
page: archicrat-ia/chapitre-3
|
||||||
|
paras:
|
||||||
|
p-0-ace27175:
|
||||||
|
media:
|
||||||
|
- type: image
|
||||||
|
src: /media/archicrat-ia/chapitre-3/p-0-ace27175/Capture_d_e_cran_2025-05-05_a_19.20.40.png
|
||||||
|
caption: "[Media] p-0-ace27175 — Chapitre 3 — Philosophies du pouvoir et
|
||||||
|
archicration"
|
||||||
|
credit: ""
|
||||||
|
ts: 2026-02-27T12:43:14.259Z
|
||||||
|
fromIssue: 144
|
||||||
|
refs:
|
||||||
|
- url: https://gitea.archicratie.trans-hands.synology.me
|
||||||
|
label: Gitea
|
||||||
|
kind: (livre / article / vidéo / site / autre) Site
|
||||||
|
ts: 2026-03-02T19:53:21.252Z
|
||||||
|
fromIssue: 169
|
||||||
11
src/annotations/archicrat-ia/chapitre-3/p-1-60c7ea48.yml
Normal file
11
src/annotations/archicrat-ia/chapitre-3/p-1-60c7ea48.yml
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
schema: 1
|
||||||
|
page: archicrat-ia/chapitre-3
|
||||||
|
paras:
|
||||||
|
p-1-60c7ea48:
|
||||||
|
refs:
|
||||||
|
- url: https://gitea.archicratie.trans-hands.synology.me
|
||||||
|
label: Gitea
|
||||||
|
kind: (livre / article / vidéo / site / autre) Site
|
||||||
|
ts: 2026-03-02T20:01:55.858Z
|
||||||
|
fromIssue: 172
|
||||||
|
# testB: hotpatch-auto gate proof
|
||||||
19
src/annotations/archicrat-ia/chapitre-4/p-11-67c14c09.yml
Normal file
19
src/annotations/archicrat-ia/chapitre-4/p-11-67c14c09.yml
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
schema: 1
|
||||||
|
page: archicrat-ia/chapitre-4
|
||||||
|
paras:
|
||||||
|
p-11-67c14c09:
|
||||||
|
media:
|
||||||
|
- type: image
|
||||||
|
src: /media/archicrat-ia/chapitre-4/p-11-67c14c09/Capture_d_e_cran_2026-02-16_a_13.07.35.png
|
||||||
|
caption: "[Media] p-11-67c14c09 — Chapitre 4 — Histoire archicratique des
|
||||||
|
révolutions industrielles"
|
||||||
|
credit: ""
|
||||||
|
ts: 2026-02-26T13:17:41.286Z
|
||||||
|
fromIssue: 129
|
||||||
|
- type: image
|
||||||
|
src: /media/archicrat-ia/chapitre-4/p-11-67c14c09/Capture_d_e_cran_2025-05-05_a_19.20.40.png
|
||||||
|
caption: "[Media] p-11-67c14c09 — Chapitre 4 — Histoire archicratique des
|
||||||
|
révolutions industrielles"
|
||||||
|
credit: ""
|
||||||
|
ts: 2026-02-27T09:17:04.386Z
|
||||||
|
fromIssue: 127
|
||||||
@@ -3,14 +3,11 @@ import { getCollection } from "astro:content";
|
|||||||
|
|
||||||
const { currentSlug } = Astro.props;
|
const { currentSlug } = Astro.props;
|
||||||
|
|
||||||
const entries = (await getCollection("archicratie"))
|
// ✅ Après migration : TOC = collection "archicrat-ia"
|
||||||
.filter((e) => e.slug.startsWith("archicrat-ia/"))
|
const entries = (await getCollection("archicrat-ia"))
|
||||||
.sort((a, b) => (a.data.order ?? 0) - (b.data.order ?? 0));
|
.sort((a, b) => (a.data.order ?? 0) - (b.data.order ?? 0));
|
||||||
|
|
||||||
// ✅ On route l’Essai-thèse sur /archicrat-ia/<slug-sans-prefix>/
|
const href = (slug) => `/archicrat-ia/${slug}/`;
|
||||||
// (Astro trailingSlash = always → on garde le "/" final)
|
|
||||||
const strip = (s) => String(s || "").replace(/^archicrat-ia\//, "");
|
|
||||||
const href = (slug) => `/archicrat-ia/${strip(slug)}/`;
|
|
||||||
---
|
---
|
||||||
|
|
||||||
<nav class="toc-global" aria-label="Table des matières — ArchiCraT-IA">
|
<nav class="toc-global" aria-label="Table des matières — ArchiCraT-IA">
|
||||||
|
|||||||
@@ -14,7 +14,7 @@ source:
|
|||||||
---
|
---
|
||||||
Ce chapitre se tient à un point nodal de notre essai-thèse : il ouvre un espace d’exploration systématique des formes conceptuelles et philosophiques à travers lesquelles le pouvoir se configure comme régime de régulation. Il ne s’agit pas ici de revenir une nouvelle fois sur les fondements de l’autorité, ni d’interroger la légitimité politique au sens classique du terme, ni même d’enquêter sur la genèse des institutions. L’ambition est autre, structurelle, transversale, morphologique, elle tentera d’arpenter, à même les dispositifs, les pensées, les théorisations et les expériences, les modalités différentiées par lesquelles s’instaurent, s’éprouvent et se disputent les formes de régulation du vivre-ensemble.
|
Ce chapitre se tient à un point nodal de notre essai-thèse : il ouvre un espace d’exploration systématique des formes conceptuelles et philosophiques à travers lesquelles le pouvoir se configure comme régime de régulation. Il ne s’agit pas ici de revenir une nouvelle fois sur les fondements de l’autorité, ni d’interroger la légitimité politique au sens classique du terme, ni même d’enquêter sur la genèse des institutions. L’ambition est autre, structurelle, transversale, morphologique, elle tentera d’arpenter, à même les dispositifs, les pensées, les théorisations et les expériences, les modalités différentiées par lesquelles s’instaurent, s’éprouvent et se disputent les formes de régulation du vivre-ensemble.
|
||||||
|
|
||||||
Dès lors, ce chapitre ne postule aucun fondement, ne cherche aucun point d’origine, ne prétend restituer aucune ontologie stable du politique. Ce qu’il donne à lire, c’est une cartographie dynamique des régimes de régulation, traversée par des formes irréductibles, non homogènes, souvent conflictuelles, parfois incompatibles, mais toutes pensées comme des configurations singulières.
|
Dès lors, ce chapitre ne postule aucun fondement, ne cherche aucun point d’origine, ne prétend restituer aucune ontologie stable du politique. Ce qu’il donne à lire, c’est une cartographie dynamique des régimes de régulation, traversée par des formes irréductibles, non homogènes, souvent conflictuelles, parfois incompatibles, mais toutes pensées comme des configurations singulières, et souvent complémentaires.
|
||||||
|
|
||||||
Ainsi, loin d’être une galerie illustrative de théories politiques juxtaposées, le chapitre s’agence comme une topologie critique, une plongée stratigraphique dans les scènes où s’articule la régulation — entendue ici non comme stabilisation externe ou ajustement technico-fonctionnel, mais comme dispositif instituant, tension structurante, scène traversée de conflictualité et d’exigence normative. Car à nos yeux, la régulation n’est pas ce qui vient après le pouvoir, elle en est la forme même constitutive — son architecture, son rythme, son épaisseur. Elle est ce par quoi le pouvoir ne se contente pas d’être exercé, mais s’institue, se justifie, se dispute, se recompose.
|
Ainsi, loin d’être une galerie illustrative de théories politiques juxtaposées, le chapitre s’agence comme une topologie critique, une plongée stratigraphique dans les scènes où s’articule la régulation — entendue ici non comme stabilisation externe ou ajustement technico-fonctionnel, mais comme dispositif instituant, tension structurante, scène traversée de conflictualité et d’exigence normative. Car à nos yeux, la régulation n’est pas ce qui vient après le pouvoir, elle en est la forme même constitutive — son architecture, son rythme, son épaisseur. Elle est ce par quoi le pouvoir ne se contente pas d’être exercé, mais s’institue, se justifie, se dispute, se recompose.
|
||||||
|
|
||||||
@@ -2,7 +2,7 @@ import { defineCollection, z } from "astro:content";
|
|||||||
|
|
||||||
const linkSchema = z.object({
|
const linkSchema = z.object({
|
||||||
type: z.enum(["definition", "appui", "transposition"]),
|
type: z.enum(["definition", "appui", "transposition"]),
|
||||||
target: z.string().min(1), // URL interne (ex: /glossaire/archicratie/) ou slug
|
target: z.string().min(1),
|
||||||
note: z.string().optional()
|
note: z.string().optional()
|
||||||
});
|
});
|
||||||
|
|
||||||
@@ -12,7 +12,6 @@ const baseTextSchema = z.object({
|
|||||||
version: z.string().min(1),
|
version: z.string().min(1),
|
||||||
concepts: z.array(z.string().min(1)).default([]),
|
concepts: z.array(z.string().min(1)).default([]),
|
||||||
links: z.array(linkSchema).default([]),
|
links: z.array(linkSchema).default([]),
|
||||||
// optionnels mais utiles dès maintenant
|
|
||||||
order: z.number().int().nonnegative().optional(),
|
order: z.number().int().nonnegative().optional(),
|
||||||
summary: z.string().optional()
|
summary: z.string().optional()
|
||||||
});
|
});
|
||||||
@@ -50,20 +49,31 @@ const atlas = defineCollection({
|
|||||||
})
|
})
|
||||||
});
|
});
|
||||||
|
|
||||||
|
// ✅ NOUVELLE collection : archicrat-ia (Essai-thèse)
|
||||||
|
// NOTE : on accepte temporairement edition/status "archicratie/modele_sociopolitique"
|
||||||
|
// si tes MDX n’ont pas encore été normalisés.
|
||||||
|
// Quand tu voudras "strict", on passera à edition="archicrat-ia" status="essai_these"
|
||||||
|
// + update frontmatter des 7 fichiers.
|
||||||
|
const archicratIa = defineCollection({
|
||||||
|
type: "content",
|
||||||
|
schema: baseTextSchema.extend({
|
||||||
|
edition: z.union([z.literal("archicrat-ia"), z.literal("archicratie")]),
|
||||||
|
status: z.union([z.literal("essai_these"), z.literal("modele_sociopolitique")])
|
||||||
|
})
|
||||||
|
});
|
||||||
|
|
||||||
// Glossaire (référentiel terminologique)
|
// Glossaire (référentiel terminologique)
|
||||||
const glossaire = defineCollection({
|
const glossaire = defineCollection({
|
||||||
type: "content",
|
type: "content",
|
||||||
schema: z.object({
|
schema: z.object({
|
||||||
title: z.string().min(1), // Titre public (souvent identique au terme)
|
title: z.string().min(1),
|
||||||
term: z.string().min(1), // Terme canonique
|
term: z.string().min(1),
|
||||||
aliases: z.array(z.string().min(1)).default([]),
|
aliases: z.array(z.string().min(1)).default([]),
|
||||||
edition: z.literal("glossaire"),
|
edition: z.literal("glossaire"),
|
||||||
status: z.literal("referentiel"),
|
status: z.literal("referentiel"),
|
||||||
version: z.string().min(1),
|
version: z.string().min(1),
|
||||||
// Micro-définition affichable en popover (courte, stable)
|
|
||||||
definitionShort: z.string().min(1),
|
definitionShort: z.string().min(1),
|
||||||
concepts: z.array(z.string().min(1)).default([]),
|
concepts: z.array(z.string().min(1)).default([]),
|
||||||
// Liens typés (vers ouvrages ou autres termes)
|
|
||||||
links: z.array(linkSchema).default([])
|
links: z.array(linkSchema).default([])
|
||||||
})
|
})
|
||||||
});
|
});
|
||||||
@@ -73,5 +83,8 @@ export const collections = {
|
|||||||
archicratie,
|
archicratie,
|
||||||
ia,
|
ia,
|
||||||
glossaire,
|
glossaire,
|
||||||
atlas
|
atlas,
|
||||||
|
|
||||||
|
// ⚠️ clé avec tiret => doit être quotée
|
||||||
|
"archicrat-ia": archicratIa
|
||||||
};
|
};
|
||||||
@@ -1,23 +1,80 @@
|
|||||||
|
// src/pages/annotations-index.json.ts
|
||||||
import type { APIRoute } from "astro";
|
import type { APIRoute } from "astro";
|
||||||
import * as fs from "node:fs/promises";
|
import fs from "node:fs/promises";
|
||||||
import * as path from "node:path";
|
import path from "node:path";
|
||||||
import { parse as parseYAML } from "yaml";
|
import YAML from "yaml";
|
||||||
|
|
||||||
const CWD = process.cwd();
|
const CWD = process.cwd();
|
||||||
const ANNO_DIR = path.join(CWD, "src", "annotations");
|
const ANNO_ROOT = path.join(CWD, "src", "annotations");
|
||||||
|
|
||||||
// Strict en CI (ou override explicite)
|
const isObj = (x: any) => !!x && typeof x === "object" && !Array.isArray(x);
|
||||||
const STRICT =
|
const isArr = (x: any) => Array.isArray(x);
|
||||||
process.env.ANNOTATIONS_STRICT === "1" ||
|
|
||||||
process.env.CI === "1" ||
|
|
||||||
process.env.CI === "true";
|
|
||||||
|
|
||||||
async function exists(p: string): Promise<boolean> {
|
function normPath(s: string) {
|
||||||
try {
|
return String(s || "").replace(/\\/g, "/").replace(/^\/+|\/+$/g, "");
|
||||||
await fs.access(p);
|
}
|
||||||
return true;
|
function paraNum(pid: string) {
|
||||||
} catch {
|
const m = String(pid).match(/^p-(\d+)-/i);
|
||||||
return false;
|
return m ? Number(m[1]) : Number.POSITIVE_INFINITY;
|
||||||
|
}
|
||||||
|
function toIso(v: any) {
|
||||||
|
if (v instanceof Date) return v.toISOString();
|
||||||
|
return typeof v === "string" ? v : "";
|
||||||
|
}
|
||||||
|
function stableSortByTs(arr: any[]) {
|
||||||
|
if (!Array.isArray(arr)) return;
|
||||||
|
arr.sort((a, b) => {
|
||||||
|
const ta = Date.parse(toIso(a?.ts)) || 0;
|
||||||
|
const tb = Date.parse(toIso(b?.ts)) || 0;
|
||||||
|
if (ta !== tb) return ta - tb;
|
||||||
|
return JSON.stringify(a).localeCompare(JSON.stringify(b));
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
function keyMedia(x: any) { return String(x?.src || ""); }
|
||||||
|
function keyRef(x: any) {
|
||||||
|
return `${x?.url || ""}||${x?.label || ""}||${x?.kind || ""}||${x?.citation || ""}`;
|
||||||
|
}
|
||||||
|
function keyComment(x: any) { return String(x?.text || "").trim(); }
|
||||||
|
|
||||||
|
function uniqUnion(dst: any[], src: any[], keyFn: (x:any)=>string) {
|
||||||
|
const out = isArr(dst) ? [...dst] : [];
|
||||||
|
const seen = new Set(out.map((x) => keyFn(x)));
|
||||||
|
for (const it of (isArr(src) ? src : [])) {
|
||||||
|
const k = keyFn(it);
|
||||||
|
if (!k) continue;
|
||||||
|
if (!seen.has(k)) { seen.add(k); out.push(it); }
|
||||||
|
}
|
||||||
|
return out;
|
||||||
|
}
|
||||||
|
|
||||||
|
function deepMergeEntry(dst: any, src: any) {
|
||||||
|
if (!isObj(dst) || !isObj(src)) return;
|
||||||
|
|
||||||
|
for (const [k, v] of Object.entries(src)) {
|
||||||
|
if (k === "media" && isArr(v)) { dst.media = uniqUnion(dst.media, v, keyMedia); continue; }
|
||||||
|
if (k === "refs" && isArr(v)) { dst.refs = uniqUnion(dst.refs, v, keyRef); continue; }
|
||||||
|
if (k === "comments_editorial" && isArr(v)) { dst.comments_editorial = uniqUnion(dst.comments_editorial, v, keyComment); continue; }
|
||||||
|
|
||||||
|
if (isObj(v)) {
|
||||||
|
if (!isObj((dst as any)[k])) (dst as any)[k] = {};
|
||||||
|
deepMergeEntry((dst as any)[k], v);
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (isArr(v)) {
|
||||||
|
const cur = isArr((dst as any)[k]) ? (dst as any)[k] : [];
|
||||||
|
const seen = new Set(cur.map((x:any) => JSON.stringify(x)));
|
||||||
|
const out = [...cur];
|
||||||
|
for (const it of v) {
|
||||||
|
const s = JSON.stringify(it);
|
||||||
|
if (!seen.has(s)) { seen.add(s); out.push(it); }
|
||||||
|
}
|
||||||
|
(dst as any)[k] = out;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!(k in (dst as any)) || (dst as any)[k] == null || (dst as any)[k] === "") (dst as any)[k] = v;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -26,154 +83,98 @@ async function walk(dir: string): Promise<string[]> {
|
|||||||
const ents = await fs.readdir(dir, { withFileTypes: true });
|
const ents = await fs.readdir(dir, { withFileTypes: true });
|
||||||
for (const e of ents) {
|
for (const e of ents) {
|
||||||
const p = path.join(dir, e.name);
|
const p = path.join(dir, e.name);
|
||||||
if (e.isDirectory()) out.push(...(await walk(p)));
|
if (e.isDirectory()) out.push(...await walk(p));
|
||||||
else out.push(p);
|
else if (e.isFile() && /\.ya?ml$/i.test(e.name)) out.push(p);
|
||||||
}
|
}
|
||||||
return out;
|
return out;
|
||||||
}
|
}
|
||||||
|
|
||||||
function isPlainObject(x: unknown): x is Record<string, unknown> {
|
function inferExpected(relNoExt: string) {
|
||||||
return !!x && typeof x === "object" && !Array.isArray(x);
|
const parts = relNoExt.split("/").filter(Boolean);
|
||||||
}
|
const last = parts.at(-1) || "";
|
||||||
|
const isShard = parts.length > 1 && /^p-\d+-/i.test(last); // ✅ durcissement
|
||||||
function normalizePageKey(s: unknown): string {
|
const pageKey = isShard ? parts.slice(0, -1).join("/") : relNoExt;
|
||||||
return String(s ?? "")
|
const paraId = isShard ? last : null;
|
||||||
.replace(/^\/+/, "")
|
return { isShard, pageKey, paraId };
|
||||||
.replace(/\/+$/, "")
|
|
||||||
.trim();
|
|
||||||
}
|
|
||||||
|
|
||||||
function inferPageKeyFromFile(inDirAbs: string, fileAbs: string): string {
|
|
||||||
const rel = path.relative(inDirAbs, fileAbs).replace(/\\/g, "/");
|
|
||||||
return rel.replace(/\.(ya?ml|json)$/i, "");
|
|
||||||
}
|
|
||||||
|
|
||||||
function parseDoc(raw: string, fileAbs: string): unknown {
|
|
||||||
if (/\.json$/i.test(fileAbs)) return JSON.parse(raw);
|
|
||||||
return parseYAML(raw);
|
|
||||||
}
|
|
||||||
|
|
||||||
function hardFailOrCollect(errors: string[], msg: string): void {
|
|
||||||
if (STRICT) throw new Error(msg);
|
|
||||||
errors.push(msg);
|
|
||||||
}
|
|
||||||
|
|
||||||
function sanitizeEntry(
|
|
||||||
fileRel: string,
|
|
||||||
paraId: string,
|
|
||||||
entry: unknown,
|
|
||||||
errors: string[]
|
|
||||||
): Record<string, unknown> {
|
|
||||||
if (entry == null) return {};
|
|
||||||
|
|
||||||
if (!isPlainObject(entry)) {
|
|
||||||
hardFailOrCollect(errors, `${fileRel}: paras.${paraId} must be an object`);
|
|
||||||
return {};
|
|
||||||
}
|
|
||||||
|
|
||||||
const e: Record<string, unknown> = { ...entry };
|
|
||||||
|
|
||||||
const arrayFields = [
|
|
||||||
"refs",
|
|
||||||
"authors",
|
|
||||||
"quotes",
|
|
||||||
"media",
|
|
||||||
"comments_editorial",
|
|
||||||
] as const;
|
|
||||||
|
|
||||||
for (const k of arrayFields) {
|
|
||||||
if (e[k] == null) continue;
|
|
||||||
if (!Array.isArray(e[k])) {
|
|
||||||
errors.push(`${fileRel}: paras.${paraId}.${k} must be an array (coerced to [])`);
|
|
||||||
e[k] = [];
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return e;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
export const GET: APIRoute = async () => {
|
export const GET: APIRoute = async () => {
|
||||||
if (!(await exists(ANNO_DIR))) {
|
const pages: Record<string, { paras: Record<string, any> }> = {};
|
||||||
const out = {
|
const errors: Array<{ file: string; error: string }> = [];
|
||||||
schema: 1,
|
|
||||||
generatedAt: new Date().toISOString(),
|
|
||||||
pages: {},
|
|
||||||
stats: { pages: 0, paras: 0, errors: 0 },
|
|
||||||
errors: [] as string[],
|
|
||||||
};
|
|
||||||
|
|
||||||
return new Response(JSON.stringify(out), {
|
let files: string[] = [];
|
||||||
headers: {
|
try {
|
||||||
"Content-Type": "application/json; charset=utf-8",
|
files = await walk(ANNO_ROOT);
|
||||||
"Cache-Control": "no-store",
|
} catch (e: any) {
|
||||||
},
|
throw new Error(`Missing annotations root: ${ANNO_ROOT} (${e?.message || e})`);
|
||||||
});
|
|
||||||
}
|
}
|
||||||
|
|
||||||
const files = (await walk(ANNO_DIR)).filter((p) => /\.(ya?ml|json)$/i.test(p));
|
for (const fp of files) {
|
||||||
|
const rel = normPath(path.relative(ANNO_ROOT, fp));
|
||||||
|
const relNoExt = rel.replace(/\.ya?ml$/i, "");
|
||||||
|
const { isShard, pageKey, paraId } = inferExpected(relNoExt);
|
||||||
|
|
||||||
const pages: Record<string, { paras: Record<string, Record<string, unknown>> }> =
|
|
||||||
Object.create(null);
|
|
||||||
|
|
||||||
const errors: string[] = [];
|
|
||||||
let paraCount = 0;
|
|
||||||
|
|
||||||
for (const f of files) {
|
|
||||||
const fileRel = path.relative(CWD, f).replace(/\\/g, "/");
|
|
||||||
const pageKey = normalizePageKey(inferPageKeyFromFile(ANNO_DIR, f));
|
|
||||||
|
|
||||||
if (!pageKey) {
|
|
||||||
hardFailOrCollect(errors, `${fileRel}: cannot infer page key`);
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
let doc: unknown;
|
|
||||||
try {
|
try {
|
||||||
const raw = await fs.readFile(f, "utf8");
|
const raw = await fs.readFile(fp, "utf8");
|
||||||
doc = parseDoc(raw, f);
|
const doc = YAML.parse(raw) || {};
|
||||||
} catch (e) {
|
|
||||||
hardFailOrCollect(errors, `${fileRel}: parse failed: ${String((e as any)?.message ?? e)}`);
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!isPlainObject(doc) || (doc as any).schema !== 1) {
|
if (!isObj(doc) || doc.schema !== 1) continue;
|
||||||
hardFailOrCollect(errors, `${fileRel}: schema must be 1`);
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
if ((doc as any).page != null) {
|
const docPage = normPath(doc.page || "");
|
||||||
const declared = normalizePageKey((doc as any).page);
|
if (docPage && docPage !== pageKey) {
|
||||||
if (declared !== pageKey) {
|
throw new Error(`page mismatch (page="${doc.page}" vs path="${pageKey}")`);
|
||||||
hardFailOrCollect(
|
|
||||||
errors,
|
|
||||||
`${fileRel}: page mismatch (page="${declared}" vs path="${pageKey}")`
|
|
||||||
);
|
|
||||||
}
|
}
|
||||||
}
|
if (!doc.page) doc.page = pageKey;
|
||||||
|
|
||||||
const parasAny = (doc as any).paras;
|
if (!isObj(doc.paras)) throw new Error(`missing object key "paras"`);
|
||||||
if (!isPlainObject(parasAny)) {
|
|
||||||
hardFailOrCollect(errors, `${fileRel}: missing object key "paras"`);
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
if (pages[pageKey]) {
|
const pg = pages[pageKey] ??= { paras: {} };
|
||||||
hardFailOrCollect(errors, `${fileRel}: duplicate page "${pageKey}" (only one file per page)`);
|
|
||||||
continue;
|
|
||||||
}
|
|
||||||
|
|
||||||
const parasOut: Record<string, Record<string, unknown>> = Object.create(null);
|
if (isShard) {
|
||||||
|
if (!paraId) throw new Error("internal: missing paraId");
|
||||||
|
if (!(paraId in doc.paras)) {
|
||||||
|
throw new Error(`shard mismatch: file must contain paras["${paraId}"]`);
|
||||||
|
}
|
||||||
|
// ✅ invariant aligné avec build-annotations-index
|
||||||
|
const keys = Object.keys(doc.paras).map(String);
|
||||||
|
if (!(keys.length === 1 && keys[0] === paraId)) {
|
||||||
|
throw new Error(`shard invariant violated: shard must contain ONLY paras["${paraId}"] (got: ${keys.join(", ")})`);
|
||||||
|
}
|
||||||
|
|
||||||
for (const [paraId, entry] of Object.entries(parasAny)) {
|
const entry = doc.paras[paraId];
|
||||||
if (!/^p-\d+-/i.test(paraId)) {
|
if (!isObj(pg.paras[paraId])) pg.paras[paraId] = {};
|
||||||
hardFailOrCollect(errors, `${fileRel}: invalid para id "${paraId}"`);
|
if (isObj(entry)) deepMergeEntry(pg.paras[paraId], entry);
|
||||||
continue;
|
|
||||||
|
stableSortByTs(pg.paras[paraId].media);
|
||||||
|
stableSortByTs(pg.paras[paraId].refs);
|
||||||
|
stableSortByTs(pg.paras[paraId].comments_editorial);
|
||||||
|
} else {
|
||||||
|
for (const [pid, entry] of Object.entries(doc.paras)) {
|
||||||
|
const p = String(pid);
|
||||||
|
if (!isObj(pg.paras[p])) pg.paras[p] = {};
|
||||||
|
if (isObj(entry)) deepMergeEntry(pg.paras[p], entry);
|
||||||
|
|
||||||
|
stableSortByTs(pg.paras[p].media);
|
||||||
|
stableSortByTs(pg.paras[p].refs);
|
||||||
|
stableSortByTs(pg.paras[p].comments_editorial);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
parasOut[paraId] = sanitizeEntry(fileRel, paraId, entry, errors);
|
} catch (e: any) {
|
||||||
|
errors.push({ file: `src/annotations/${rel}`, error: String(e?.message || e) });
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
pages[pageKey] = { paras: parasOut };
|
for (const [pk, pg] of Object.entries(pages)) {
|
||||||
paraCount += Object.keys(parasOut).length;
|
const keys = Object.keys(pg.paras || {});
|
||||||
|
keys.sort((a, b) => {
|
||||||
|
const ia = paraNum(a);
|
||||||
|
const ib = paraNum(b);
|
||||||
|
if (Number.isFinite(ia) && Number.isFinite(ib) && ia !== ib) return ia - ib;
|
||||||
|
return String(a).localeCompare(String(b));
|
||||||
|
});
|
||||||
|
const next: Record<string, any> = {};
|
||||||
|
for (const k of keys) next[k] = pg.paras[k];
|
||||||
|
pg.paras = next;
|
||||||
}
|
}
|
||||||
|
|
||||||
const out = {
|
const out = {
|
||||||
@@ -182,16 +183,17 @@ export const GET: APIRoute = async () => {
|
|||||||
pages,
|
pages,
|
||||||
stats: {
|
stats: {
|
||||||
pages: Object.keys(pages).length,
|
pages: Object.keys(pages).length,
|
||||||
paras: paraCount,
|
paras: Object.values(pages).reduce((n, p) => n + Object.keys(p.paras || {}).length, 0),
|
||||||
errors: errors.length,
|
errors: errors.length,
|
||||||
},
|
},
|
||||||
errors,
|
errors,
|
||||||
};
|
};
|
||||||
|
|
||||||
|
if (errors.length) {
|
||||||
|
throw new Error(`${errors[0].file}: ${errors[0].error}`);
|
||||||
|
}
|
||||||
|
|
||||||
return new Response(JSON.stringify(out), {
|
return new Response(JSON.stringify(out), {
|
||||||
headers: {
|
headers: { "Content-Type": "application/json; charset=utf-8" },
|
||||||
"Content-Type": "application/json; charset=utf-8",
|
|
||||||
"Cache-Control": "no-store",
|
|
||||||
},
|
|
||||||
});
|
});
|
||||||
};
|
};
|
||||||
@@ -5,12 +5,11 @@ import EditionToc from "../../components/EditionToc.astro";
|
|||||||
import LocalToc from "../../components/LocalToc.astro";
|
import LocalToc from "../../components/LocalToc.astro";
|
||||||
|
|
||||||
export async function getStaticPaths() {
|
export async function getStaticPaths() {
|
||||||
const entries = (await getCollection("archicratie"))
|
// ✅ Après migration : plus de filtre par prefix, on prend toute la collection
|
||||||
.filter((e) => e.slug.startsWith("archicrat-ia/"));
|
const entries = await getCollection("archicrat-ia");
|
||||||
|
|
||||||
return entries.map((entry) => ({
|
return entries.map((entry) => ({
|
||||||
// ✅ inline : jamais de helper externe (évite "stripPrefix is not defined")
|
params: { slug: entry.slug },
|
||||||
params: { slug: entry.slug.replace(/^archicrat-ia\//, "") },
|
|
||||||
props: { entry },
|
props: { entry },
|
||||||
}));
|
}));
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -2,13 +2,12 @@
|
|||||||
import SiteLayout from "../../layouts/SiteLayout.astro";
|
import SiteLayout from "../../layouts/SiteLayout.astro";
|
||||||
import { getCollection } from "astro:content";
|
import { getCollection } from "astro:content";
|
||||||
|
|
||||||
const entries = (await getCollection("archicratie"))
|
// ✅ Après migration physique : collection = "archicrat-ia", slug = "chapitre-3" (sans prefix)
|
||||||
.filter((e) => e.slug.startsWith("archicrat-ia/"));
|
const entries = await getCollection("archicrat-ia");
|
||||||
|
|
||||||
entries.sort((a, b) => (a.data.order ?? 9999) - (b.data.order ?? 9999));
|
entries.sort((a, b) => (a.data.order ?? 9999) - (b.data.order ?? 9999));
|
||||||
|
|
||||||
const strip = (slug) => slug.replace(/^archicrat-ia\//, "");
|
const href = (slug) => `/archicrat-ia/${slug}/`;
|
||||||
const href = (slug) => `/archicrat-ia/${strip(slug)}/`;
|
|
||||||
---
|
---
|
||||||
|
|
||||||
<SiteLayout title="Essai-thèse — ArchiCraT-IA">
|
<SiteLayout title="Essai-thèse — ArchiCraT-IA">
|
||||||
|
|||||||
Reference in New Issue
Block a user