Compare commits
23 commits: chore/fix-...chore/fix-
| SHA1 |
|---|
| be191b09a0 |
| e06587478d |
| 402ffb04cd |
| 1cbfc02670 |
| 28d2fbbd2f |
| 225368a952 |
| 3574695041 |
| ea68025a1d |
| 3a08698003 |
| 3d583608c2 |
| 01ae95ab43 |
| 0d5821c640 |
| 2bcea39558 |
| af85970d4a |
| 210f621487 |
| 8ad960dc69 |
| d45a8b285f |
| b6e04a9138 |
| dcf1fc2d0b |
| 41b0517c6c |
| 6b43eb199d |
| d40f24e92d |
| a5d68d6a7e |
@@ -16,9 +16,13 @@ defaults:
   run:
     shell: bash
 
+concurrency:
+  group: anno-apply-${{ github.event.issue.number || inputs.issue || 'manual' }}
+  cancel-in-progress: true
+
 jobs:
   apply-approved:
-    runs-on: ubuntu-latest
+    runs-on: mac-ci
     container:
       image: mcr.microsoft.com/devcontainers/javascript-node:22-bookworm
 
@@ -29,7 +33,6 @@ jobs:
           git --version
           node --version
           npm --version
-          npm ping --registry=https://registry.npmjs.org
 
       - name: Derive context (event.json / workflow_dispatch)
        env:
@@ -110,6 +113,7 @@ jobs:
        run: |
          set -euo pipefail
          source /tmp/anno.env
 
          if [[ "$LABEL_NAME" != "state/approved" && "$LABEL_NAME" != "workflow_dispatch" ]]; then
            echo "ℹ️ label=$LABEL_NAME => skip"
            echo "SKIP=1" >> /tmp/anno.env
@@ -117,6 +121,93 @@ jobs:
          fi
          echo "✅ proceed (issue=$ISSUE_NUMBER)"
 
+      - name: Fetch issue + gate on Type (skip Proposer)
+        env:
+          FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
+        run: |
+          set -euo pipefail
+          source /tmp/anno.env
+          [[ "${SKIP:-0}" != "1" ]] || { echo "ℹ️ skipped"; exit 0; }
+
+          test -n "${FORGE_TOKEN:-}" || { echo "❌ Missing secret FORGE_TOKEN"; exit 1; }
+
+          ISSUE_JSON="$(curl -fsS \
+            -H "Authorization: token $FORGE_TOKEN" \
+            -H "Accept: application/json" \
+            "$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE_NUMBER")"
+
+          node --input-type=module - <<'NODE' "$ISSUE_JSON" >> /tmp/anno.env
+          const issue = JSON.parse(process.argv[1] || "{}");
+          const title = String(issue.title || "");
+          const body = String(issue.body || "").replace(/\r\n/g, "\n");
+
+          function pickLine(key) {
+            const re = new RegExp(`^\\s*${key}\\s*:\\s*([^\\n\\r]+)`, "mi");
+            const m = body.match(re);
+            return m ? m[1].trim() : "";
+          }
+
+          const typeRaw = pickLine("Type");
+          const type = String(typeRaw || "").trim().toLowerCase();
+
+          const allowed = new Set(["type/media","type/reference","type/comment"]);
+          const proposer = new Set(["type/correction","type/fact-check"]);
+
+          const out = [];
+          out.push(`ISSUE_TITLE=${JSON.stringify(title)}`);
+          out.push(`ISSUE_TYPE=${JSON.stringify(type)}`);
+
+          if (!type) {
+            out.push(`SKIP=1`);
+            out.push(`SKIP_REASON=${JSON.stringify("missing_type")}`);
+          } else if (allowed.has(type)) {
+            // proceed
+          } else if (proposer.has(type)) {
+            out.push(`SKIP=1`);
+            out.push(`SKIP_REASON=${JSON.stringify("proposer_type:"+type)}`);
+          } else {
+            out.push(`SKIP=1`);
+            out.push(`SKIP_REASON=${JSON.stringify("unsupported_type:"+type)}`);
+          }
+
+          process.stdout.write(out.join("\n") + "\n");
+          NODE
+
+          echo "✅ issue type gating:"
+          grep -E '^(ISSUE_TYPE|SKIP|SKIP_REASON)=' /tmp/anno.env || true
+
+      - name: Comment issue if skipped (Proposer / unsupported / missing Type)
+        if: ${{ always() }}
+        env:
+          FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
+        run: |
+          set -euo pipefail
+          source /tmp/anno.env || true
+
+          [[ "${SKIP:-0}" == "1" ]] || exit 0
+          [[ "$LABEL_NAME" == "state/approved" || "$LABEL_NAME" == "workflow_dispatch" ]] || exit 0
+
+          # different message for Proposer tickets
+          REASON="${SKIP_REASON:-}"
+          TYPE="${ISSUE_TYPE:-}"
+          TITLE="${ISSUE_TITLE:-}"
+
+          if [[ "$REASON" == proposer_type:* ]]; then
+            MSG="ℹ️ Ticket #${ISSUE_NUMBER} detected as **Proposer** (${TYPE}).\n\n- This type is **handled manually by the editors** (correction/fact-check + cat/*).\n- The bot **never** applies Proposer tickets and **never** adds state/approved automatically.\n\n✅ Action: manual editorial handling."
+          elif [[ "$REASON" == unsupported_type:* ]]; then
+            MSG="ℹ️ Ticket #${ISSUE_NUMBER} ignored: Type not supported by the bot (${TYPE}).\n\nSupported types: type/media, type/reference, type/comment.\n✅ Action: handle manually if needed."
+          else
+            MSG="ℹ️ Ticket #${ISSUE_NUMBER} ignored: 'Type:' field missing or unreadable.\n\n✅ Action: fix the ticket (add 'Type: type/media|type/reference|type/comment') or handle manually."
+          fi
+
+          PAYLOAD="$(node --input-type=module -e 'console.log(JSON.stringify({body: process.argv[1]||""}))' "$MSG")"
+
+          curl -fsS -X POST \
+            -H "Authorization: token $FORGE_TOKEN" \
+            -H "Content-Type: application/json" \
+            "$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE_NUMBER/comments" \
+            --data-binary "$PAYLOAD"
+
       - name: Checkout default branch
        run: |
          set -euo pipefail
@@ -135,7 +226,7 @@ jobs:
          set -euo pipefail
          source /tmp/anno.env
          [[ "${SKIP:-0}" != "1" ]] || { echo "ℹ️ skipped"; exit 0; }
-          npm ci
+          npm ci --no-audit --no-fund
 
       - name: Check apply script exists
        run: |
@@ -154,7 +245,7 @@ jobs:
          source /tmp/anno.env
          [[ "${SKIP:-0}" != "1" ]] || { echo "ℹ️ skipped"; exit 0; }
 
-          npm run build:clean
+          npm run build
 
          test -f dist/para-index.json || {
            echo "❌ missing dist/para-index.json after build"
@@ -220,7 +311,7 @@ jobs:
          FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
        run: |
          set -euo pipefail
-          source /tmp/anno.env
+          source /tmp/anno.env || true
          [[ "${SKIP:-0}" != "1" ]] || { echo "ℹ️ skipped"; exit 0; }
 
          RC="${APPLY_RC:-0}"
@@ -229,9 +320,13 @@ jobs:
            exit 0
          fi
 
-          BODY="$(tail -n 160 /tmp/apply.log | sed 's/\r$//')"
-          MSG="❌ apply-annotation-ticket failed (rc=${RC}).\n\n\`\`\`\n${BODY}\n\`\`\`\n"
+          if [[ -f /tmp/apply.log ]]; then
+            BODY="$(tail -n 160 /tmp/apply.log | sed 's/\r$//')"
+          else
+            BODY="(no apply log found)"
+          fi
+
+          MSG="❌ apply-annotation-ticket failed (rc=${RC}).\n\n\`\`\`\n${BODY}\n\`\`\`\n"
          PAYLOAD="$(node --input-type=module -e 'console.log(JSON.stringify({body: process.argv[1]||""}))' "$MSG")"
 
          curl -fsS -X POST \
@@ -246,7 +341,7 @@ jobs:
          FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
        run: |
          set -euo pipefail
-          source /tmp/anno.env
+          source /tmp/anno.env || true
          [[ "${SKIP:-0}" != "1" ]] || exit 0
 
          [[ "${APPLY_RC:-0}" == "0" ]] || exit 0
@@ -267,7 +362,7 @@ jobs:
          FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
        run: |
          set -euo pipefail
-          source /tmp/anno.env
+          source /tmp/anno.env || true
          [[ "${SKIP:-0}" != "1" ]] || exit 0
 
          [[ "${APPLY_RC:-0}" == "0" ]] || { echo "ℹ️ apply failed -> skip push"; exit 0; }
@@ -290,7 +385,7 @@ jobs:
          FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
        run: |
          set -euo pipefail
-          source /tmp/anno.env
+          source /tmp/anno.env || true
          [[ "${SKIP:-0}" != "1" ]] || exit 0
 
          [[ "${APPLY_RC:-0}" == "0" ]] || { echo "ℹ️ apply failed -> skip PR"; exit 0; }
@@ -333,6 +428,7 @@ jobs:
        run: |
          set -euo pipefail
          source /tmp/anno.env || true
 
          [[ "${SKIP:-0}" != "1" ]] || { echo "ℹ️ skipped"; exit 0; }
 
          RC="${APPLY_RC:-0}"
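The core of the change above is the Type gate: the bot only applies type/media, type/reference, and type/comment tickets, and explicitly refuses Proposer types. A minimal standalone sketch of that rule (file name and CLI invocation are illustrative; the workflow instead pipes the issue JSON in via a heredoc and appends the output to /tmp/anno.env):

// type-gate.mjs -- standalone sketch of the Type gating step.
// Assumption: run as `node type-gate.mjs '<issue json>'`.
const issue = JSON.parse(process.argv[2] || "{}");
const body = String(issue.body || "").replace(/\r\n/g, "\n");

// Extract a "Key: value" line from the ticket body, as the workflow does.
function pickLine(key) {
  const re = new RegExp(`^\\s*${key}\\s*:\\s*([^\\n\\r]+)`, "mi");
  const m = body.match(re);
  return m ? m[1].trim() : "";
}

const type = pickLine("Type").toLowerCase();
const allowed = new Set(["type/media", "type/reference", "type/comment"]);
const proposer = new Set(["type/correction", "type/fact-check"]);

let skipReason = null;
if (!type) skipReason = "missing_type";
else if (proposer.has(type)) skipReason = "proposer_type:" + type;
else if (!allowed.has(type)) skipReason = "unsupported_type:" + type;

console.log(skipReason ? `SKIP=1\nSKIP_REASON=${JSON.stringify(skipReason)}` : "SKIP=0");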
@@ -1,8 +1,13 @@
-name: Anno Reject
+name: Anno Reject (close issue)
 
 on:
   issues:
     types: [labeled]
+  workflow_dispatch:
+    inputs:
+      issue:
+        description: "Issue number to reject/close"
+        required: true
 
 env:
   NODE_OPTIONS: --dns-result-order=ipv4first
@@ -11,14 +16,26 @@ defaults:
   run:
     shell: bash
 
+concurrency:
+  group: anno-reject-${{ github.event.issue.number || inputs.issue || 'manual' }}
+  cancel-in-progress: true
+
 jobs:
   reject:
-    runs-on: ubuntu-latest
+    runs-on: mac-ci
     container:
       image: mcr.microsoft.com/devcontainers/javascript-node:22-bookworm
 
     steps:
-      - name: Derive context
+      - name: Tools sanity
+        run: |
+          set -euo pipefail
+          node --version
+
+      - name: Derive context (event.json / workflow_dispatch)
+        env:
+          INPUT_ISSUE: ${{ inputs.issue }}
+          FORGE_API: ${{ vars.FORGE_API || vars.FORGE_BASE }}
        run: |
          set -euo pipefail
          export EVENT_JSON="/var/run/act/workflow/event.json"
@@ -29,59 +46,115 @@ jobs:
 
          const ev = JSON.parse(fs.readFileSync(process.env.EVENT_JSON, "utf8"));
          const repoObj = ev?.repository || {};
 
          const cloneUrl =
            repoObj?.clone_url ||
            (repoObj?.html_url ? (repoObj.html_url.replace(/\/$/,"") + ".git") : "");
-          if (!cloneUrl) throw new Error("No repository url");
 
          let owner =
            repoObj?.owner?.login ||
            repoObj?.owner?.username ||
            (repoObj?.full_name ? repoObj.full_name.split("/")[0] : "");
 
          let repo =
            repoObj?.name ||
            (repoObj?.full_name ? repoObj.full_name.split("/")[1] : "");
 
-          if (!owner || !repo) {
+          if ((!owner || !repo) && cloneUrl) {
            const m = cloneUrl.match(/[:/](?<o>[^/]+)\/(?<r>[^/]+?)(?:\.git)?$/);
            if (m?.groups) { owner = owner || m.groups.o; repo = repo || m.groups.r; }
          }
          if (!owner || !repo) throw new Error("Cannot infer owner/repo");
 
-          const issueNumber = ev?.issue?.number || ev?.issue?.index;
-          if (!issueNumber) throw new Error("No issue number");
+          const issueNumber =
+            ev?.issue?.number ||
+            ev?.issue?.index ||
+            (process.env.INPUT_ISSUE ? Number(process.env.INPUT_ISSUE) : 0);
 
-          const labelName = ev?.label?.name || ev?.label || "";
-          const u = new URL(cloneUrl);
+          if (!issueNumber || !Number.isFinite(Number(issueNumber))) {
+            throw new Error("No issue number in event.json or workflow_dispatch input");
+          }
+
+          const labelName =
+            ev?.label?.name ||
+            ev?.label ||
+            "workflow_dispatch";
+
+          let apiBase = "";
+          if (process.env.FORGE_API && String(process.env.FORGE_API).trim()) {
+            apiBase = String(process.env.FORGE_API).trim().replace(/\/+$/,"");
+          } else if (cloneUrl) {
+            apiBase = new URL(cloneUrl).origin;
+          } else {
+            apiBase = "";
+          }
 
          function sh(s){ return JSON.stringify(String(s)); }
 
          process.stdout.write([
            `OWNER=${sh(owner)}`,
            `REPO=${sh(repo)}`,
            `ISSUE_NUMBER=${sh(issueNumber)}`,
            `LABEL_NAME=${sh(labelName)}`,
-            `API_BASE=${sh(u.origin)}`
+            `API_BASE=${sh(apiBase)}`
          ].join("\n") + "\n");
          NODE
 
-      - name: Gate on label state/rejected
+          echo "✅ context:"
+          sed -n '1,120p' /tmp/reject.env
+
+      - name: Gate on label state/rejected only
        run: |
          set -euo pipefail
          source /tmp/reject.env
-          if [[ "$LABEL_NAME" != "state/rejected" ]]; then
+
+          if [[ "$LABEL_NAME" != "state/rejected" && "$LABEL_NAME" != "workflow_dispatch" ]]; then
            echo "ℹ️ label=$LABEL_NAME => skip"
+            echo "SKIP=1" >> /tmp/reject.env
            exit 0
          fi
-          echo "✅ reject issue=$ISSUE_NUMBER"
+          echo "✅ proceed (issue=$ISSUE_NUMBER)"
 
-      - name: Comment + close issue
+      - name: Comment + close (only if not conflicting with state/approved)
        env:
          FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
        run: |
          set -euo pipefail
          source /tmp/reject.env
-          test -n "${FORGE_TOKEN:-}" || { echo "❌ Missing secret FORGE_TOKEN"; exit 1; }
+          [[ "${SKIP:-0}" != "1" ]] || { echo "ℹ️ skipped"; exit 0; }
+
+          test -n "${FORGE_TOKEN:-}" || { echo "❌ Missing secret FORGE_TOKEN"; exit 1; }
+          test -n "${API_BASE:-}" || { echo "❌ Missing API_BASE"; exit 1; }
+
+          ISSUE_JSON="$(curl -fsS \
+            -H "Authorization: token $FORGE_TOKEN" \
+            -H "Accept: application/json" \
+            "$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE_NUMBER")"
+
+          # conflict guard: approved + rejected => do nothing, comment warning
+          node --input-type=module - <<'NODE' "$ISSUE_JSON" > /tmp/reject.flags
+          const issue = JSON.parse(process.argv[1] || "{}");
+          const labels = Array.isArray(issue.labels) ? issue.labels.map(l => String(l.name || "")).filter(Boolean) : [];
+          const hasApproved = labels.includes("state/approved");
+          const hasRejected = labels.includes("state/rejected");
+          process.stdout.write(`HAS_APPROVED=${hasApproved ? "1":"0"}\nHAS_REJECTED=${hasRejected ? "1":"0"}\n`);
+          NODE
 
+          source /tmp/reject.flags
+
+          if [[ "${HAS_APPROVED:-0}" == "1" && "${HAS_REJECTED:-0}" == "1" ]]; then
+            MSG="⚠️ State conflict on ticket #${ISSUE_NUMBER}: labels **state/approved** and **state/rejected** are both present.\n\n➡️ Manual action required: remove one of the two labels before re-running."
+            PAYLOAD="$(node --input-type=module -e 'console.log(JSON.stringify({body: process.argv[1]||""}))' "$MSG")"
+            curl -fsS -X POST \
+              -H "Authorization: token $FORGE_TOKEN" \
+              -H "Content-Type: application/json" \
+              "$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE_NUMBER/comments" \
+              --data-binary "$PAYLOAD"
+            echo "ℹ️ conflict => stop"
+            exit 0
+          fi
+
+          # comment reject
          MSG="❌ Ticket #${ISSUE_NUMBER} rejected (label state/rejected)."
          PAYLOAD="$(node --input-type=module -e 'console.log(JSON.stringify({body: process.argv[1]||""}))' "$MSG")"
 
@@ -91,8 +164,11 @@ jobs:
            "$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE_NUMBER/comments" \
            --data-binary "$PAYLOAD"
 
+          # close issue
          curl -fsS -X PATCH \
            -H "Authorization: token $FORGE_TOKEN" \
            -H "Content-Type: application/json" \
            "$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE_NUMBER" \
            --data-binary '{"state":"closed"}'
 
+          echo "✅ rejected+closed"
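The reject workflow's most interesting addition is the conflict guard: before closing, it re-fetches the issue and refuses to act when both state labels are present. A compact sketch of the same check (the file name is hypothetical; the workflow inlines this via node --input-type=module and sources the flags from /tmp/reject.flags):

// label-conflict.mjs -- sketch of the conflict guard in "Comment + close".
// Assumption: the Gitea issue JSON is passed as the first CLI argument.
const issue = JSON.parse(process.argv[2] || "{}");
const labels = Array.isArray(issue.labels)
  ? issue.labels.map((l) => String(l.name || "")).filter(Boolean)
  : [];
const hasApproved = labels.includes("state/approved");
const hasRejected = labels.includes("state/rejected");

// Both labels at once means a human disagreement: the workflow posts a
// warning comment and stops instead of closing the ticket.
if (hasApproved && hasRejected) {
  console.log("conflict: manual action required");
} else {
  console.log(hasRejected ? "ok to close" : "nothing to do");
}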
@@ -6,7 +6,7 @@ on:
 
 jobs:
   label:
-    runs-on: ubuntu-latest
+    runs-on: mac-ci
     steps:
       - name: Apply labels from Type/State/Category
        env:
@@ -3,7 +3,7 @@ name: CI
 on:
   push:
   pull_request:
-    branches: [master]
+    branches: [main]
   workflow_dispatch:
 
 env:
@@ -15,7 +15,7 @@ defaults:
 
 jobs:
   build-and-anchors:
-    runs-on: ubuntu-latest
+    runs-on: mac-ci
     container:
       image: mcr.microsoft.com/devcontainers/javascript-node:22-bookworm
 
@@ -6,7 +6,7 @@ on:
   workflow_dispatch:
     inputs:
       force:
-        description: "Force deploy even if gate would skip (1=yes, 0=no)"
+        description: "Force FULL deploy (rebuild+restart) even if gate would hotpatch-only (1=yes, 0=no)"
        required: false
        default: "0"
 
@@ -14,6 +14,7 @@ env:
   NODE_OPTIONS: --dns-result-order=ipv4first
   DOCKER_API_VERSION: "1.43"
   COMPOSE_VERSION: "2.29.7"
+  ASTRO_TELEMETRY_DISABLED: "1"
 
 defaults:
   run:
@@ -92,7 +93,7 @@ jobs:
 
          git log -1 --oneline
 
-      - name: Gate — auto deploy only on annotations/media changes
+      - name: Gate — decide HOTPATCH vs FULL rebuild
        env:
          INPUT_FORCE: ${{ inputs.force }}
        run: |
@@ -100,21 +101,29 @@ jobs:
          source /tmp/deploy.env
 
          FORCE="${INPUT_FORCE:-0}"
 
+          # list the touched files (useful for copying media)
+          CHANGED="$(git show --name-only --pretty="" "$SHA" | sed '/^$/d' || true)"
+          printf "%s\n" "$CHANGED" > /tmp/changed.txt
+
+          echo "== changed files =="
+          echo "$CHANGED" | sed -n '1,260p'
+
          if [[ "$FORCE" == "1" ]]; then
-            echo "✅ force=1 -> bypass gate -> deploy allowed"
            echo "GO=1" >> /tmp/deploy.env
+            echo "MODE='full'" >> /tmp/deploy.env
+            echo "✅ force=1 -> MODE=full (rebuild+restart)"
            exit 0
          fi
 
-          CHANGED="$(git show --name-only --pretty="" "$SHA" | sed '/^$/d' || true)"
-          echo "== changed files =="
-          echo "$CHANGED" | sed -n '1,240p'
+          # Auto mode: only annotations/media changes => hotpatch only
 
          if echo "$CHANGED" | grep -qE '^(src/annotations/|public/media/)'; then
            echo "GO=1" >> /tmp/deploy.env
-            echo "✅ deploy allowed (annotations/media change detected)"
+            echo "MODE='hotpatch'" >> /tmp/deploy.env
+            echo "✅ annotations/media change -> MODE=hotpatch"
          else
            echo "GO=0" >> /tmp/deploy.env
+            echo "MODE='skip'" >> /tmp/deploy.env
            echo "ℹ️ no annotations/media change -> skip deploy"
          fi
 
@@ -138,7 +147,7 @@ jobs:
          docker compose version
          python3 --version
 
-          # 🔥 KEY FIX: reuse existing compose project name if containers already exist
+          # Reuse existing compose project name if containers already exist
          PROJ="$(docker inspect archicratie-web-blue --format '{{ index .Config.Labels "com.docker.compose.project" }}' 2>/dev/null || true)"
          if [[ -z "${PROJ:-}" ]]; then
            PROJ="$(docker inspect archicratie-web-green --format '{{ index .Config.Labels "com.docker.compose.project" }}' 2>/dev/null || true)"
@@ -147,7 +156,12 @@ jobs:
          echo "COMPOSE_PROJECT_NAME='$PROJ'" >> /tmp/deploy.env
          echo "✅ Using COMPOSE_PROJECT_NAME=$PROJ"
 
-      - name: Assert required vars (PUBLIC_GITEA_*)
+          # Assert target containers exist (hotpatch needs them)
+          for c in archicratie-web-blue archicratie-web-green; do
+            docker inspect "$c" >/dev/null 2>&1 || { echo "❌ missing container $c"; exit 5; }
+          done
+
+      - name: Assert required vars (PUBLIC_GITEA_*) — only needed for MODE=full
        env:
          PUBLIC_GITEA_BASE: ${{ vars.PUBLIC_GITEA_BASE }}
          PUBLIC_GITEA_OWNER: ${{ vars.PUBLIC_GITEA_OWNER }}
@@ -156,24 +170,26 @@ jobs:
          set -euo pipefail
          source /tmp/deploy.env
          [[ "${GO:-0}" == "1" ]] || { echo "ℹ️ skipped"; exit 0; }
+          [[ "${MODE:-hotpatch}" == "full" ]] || { echo "ℹ️ hotpatch mode -> vars not required"; exit 0; }
 
          test -n "${PUBLIC_GITEA_BASE:-}" || { echo "❌ missing repo var PUBLIC_GITEA_BASE"; exit 2; }
          test -n "${PUBLIC_GITEA_OWNER:-}" || { echo "❌ missing repo var PUBLIC_GITEA_OWNER"; exit 2; }
          test -n "${PUBLIC_GITEA_REPO:-}" || { echo "❌ missing repo var PUBLIC_GITEA_REPO"; exit 2; }
          echo "✅ vars OK"
 
-      - name: Assert deploy files exist
+      - name: Assert deploy files exist — only needed for MODE=full
        run: |
          set -euo pipefail
          source /tmp/deploy.env
          [[ "${GO:-0}" == "1" ]] || { echo "ℹ️ skipped"; exit 0; }
+          [[ "${MODE:-hotpatch}" == "full" ]] || { echo "ℹ️ hotpatch mode -> files not required"; exit 0; }
 
          test -f docker-compose.yml
          test -f Dockerfile
          test -f nginx.conf
          echo "✅ deploy files OK"
 
-      - name: Build + deploy staging (blue) then smoke
+      - name: FULL — Build + deploy staging (blue) then warmup+smoke
        env:
          PUBLIC_GITEA_BASE: ${{ vars.PUBLIC_GITEA_BASE }}
          PUBLIC_GITEA_OWNER: ${{ vars.PUBLIC_GITEA_OWNER }}
@@ -182,31 +198,51 @@ jobs:
          set -euo pipefail
          source /tmp/deploy.env
          [[ "${GO:-0}" == "1" ]] || { echo "ℹ️ skipped"; exit 0; }
+          [[ "${MODE:-hotpatch}" == "full" ]] || { echo "ℹ️ MODE=$MODE -> skip full rebuild"; exit 0; }
+
          PROJ="${COMPOSE_PROJECT_NAME:-archicratie-web}"
 
+          wait_url() {
+            local url="$1"
+            local label="$2"
+            local tries="${3:-60}"
+            for i in $(seq 1 "$tries"); do
+              if curl -fsS --max-time 4 "$url" >/dev/null; then
+                echo "✅ $label OK ($url)"
+                return 0
+              fi
+              echo "… warmup $label ($i/$tries)"
+              sleep 1
+            done
+            echo "❌ timeout $label ($url)"
+            return 1
+          }
+
          TS="$(date -u +%Y%m%d-%H%M%S)"
          echo "TS='$TS'" >> /tmp/deploy.env
          docker image tag archicratie-web:blue "archicratie-web:blue.BAK.${TS}" || true
          docker image tag archicratie-web:green "archicratie-web:green.BAK.${TS}" || true
 
          docker compose -p "$PROJ" -f docker-compose.yml build web_blue
 
          docker rm -f archicratie-web-blue || true
          docker compose -p "$PROJ" -f docker-compose.yml up -d --force-recreate --remove-orphans web_blue
 
-          curl -fsS "http://127.0.0.1:8081/para-index.json" >/dev/null
-          curl -fsS "http://127.0.0.1:8081/annotations-index.json" >/dev/null
-          curl -fsS "http://127.0.0.1:8081/pagefind/pagefind.js" >/dev/null
+          # warmup endpoints
+          wait_url "http://127.0.0.1:8081/para-index.json" "blue para-index"
+          wait_url "http://127.0.0.1:8081/annotations-index.json" "blue annotations-index"
+          wait_url "http://127.0.0.1:8081/pagefind/pagefind.js" "blue pagefind.js"
 
-          CANON="$(curl -fsS "http://127.0.0.1:8081/archicrat-ia/chapitre-1/" | grep -oE 'rel="canonical" href="[^"]+"' | head -n1 || true)"
+          CANON="$(curl -fsS --max-time 6 "http://127.0.0.1:8081/archicrat-ia/chapitre-1/" | grep -oE 'rel="canonical" href="[^"]+"' | head -n1 || true)"
          echo "canonical(blue)=$CANON"
          echo "$CANON" | grep -q 'https://staging\.archicratie\.trans-hands\.synology\.me/' || {
-            echo "❌ staging canonical mismatch"; exit 3;
+            echo "❌ staging canonical mismatch"
+            docker logs --tail 120 archicratie-web-blue || true
+            exit 3
          }
 
          echo "✅ staging OK"
 
-      - name: Build + deploy live (green) then smoke + rollback if needed
+      - name: FULL — Build + deploy live (green) then warmup+smoke + rollback if needed
        env:
          PUBLIC_GITEA_BASE: ${{ vars.PUBLIC_GITEA_BASE }}
          PUBLIC_GITEA_OWNER: ${{ vars.PUBLIC_GITEA_OWNER }}
@@ -215,9 +251,27 @@ jobs:
          set -euo pipefail
          source /tmp/deploy.env
          [[ "${GO:-0}" == "1" ]] || { echo "ℹ️ skipped"; exit 0; }
+          [[ "${MODE:-hotpatch}" == "full" ]] || { echo "ℹ️ MODE=$MODE -> skip full rebuild"; exit 0; }
+
          PROJ="${COMPOSE_PROJECT_NAME:-archicratie-web}"
          TS="${TS:-$(date -u +%Y%m%d-%H%M%S)}"
 
+          wait_url() {
+            local url="$1"
+            local label="$2"
+            local tries="${3:-60}"
+            for i in $(seq 1 "$tries"); do
+              if curl -fsS --max-time 4 "$url" >/dev/null; then
+                echo "✅ $label OK ($url)"
+                return 0
+              fi
+              echo "… warmup $label ($i/$tries)"
+              sleep 1
+            done
+            echo "❌ timeout $label ($url)"
+            return 1
+          }
+
          rollback() {
            echo "⚠️ rollback green -> previous image tag (best effort)"
            docker image tag "archicratie-web:green.BAK.${TS}" archicratie-web:green || true
@@ -225,33 +279,38 @@ jobs:
            docker compose -p "$PROJ" -f docker-compose.yml up -d --force-recreate --remove-orphans web_green || true
          }
 
-          set +e
-          docker compose -p "$PROJ" -f docker-compose.yml build web_green
+          # build/restart green
+          if ! docker compose -p "$PROJ" -f docker-compose.yml build web_green; then
+            echo "❌ build green failed"; rollback; exit 4
+          fi
+
          docker rm -f archicratie-web-green || true
          docker compose -p "$PROJ" -f docker-compose.yml up -d --force-recreate --remove-orphans web_green
 
-          curl -fsS "http://127.0.0.1:8082/para-index.json" >/dev/null
-          curl -fsS "http://127.0.0.1:8082/annotations-index.json" >/dev/null
-          curl -fsS "http://127.0.0.1:8082/pagefind/pagefind.js" >/dev/null
+          # warmup endpoints
+          if ! wait_url "http://127.0.0.1:8082/para-index.json" "green para-index"; then rollback; exit 4; fi
+          if ! wait_url "http://127.0.0.1:8082/annotations-index.json" "green annotations-index"; then rollback; exit 4; fi
+          if ! wait_url "http://127.0.0.1:8082/pagefind/pagefind.js" "green pagefind.js"; then rollback; exit 4; fi
 
-          CANON="$(curl -fsS "http://127.0.0.1:8082/archicrat-ia/chapitre-1/" | grep -oE 'rel="canonical" href="[^"]+"' | head -n1 || true)"
+          CANON="$(curl -fsS --max-time 6 "http://127.0.0.1:8082/archicrat-ia/chapitre-1/" | grep -oE 'rel="canonical" href="[^"]+"' | head -n1 || true)"
          echo "canonical(green)=$CANON"
          echo "$CANON" | grep -q 'https://archicratie\.trans-hands\.synology\.me/' || {
-            echo "❌ live canonical mismatch"; rollback; exit 4;
+            echo "❌ live canonical mismatch"
+            docker logs --tail 120 archicratie-web-green || true
+            rollback
+            exit 4
          }
 
          echo "✅ live OK"
-          set -e
 
-      - name: Hotpatch annotations-index.json (deep merge shards) into blue+green
+      - name: HOTPATCH — deep merge shards -> annotations-index + copy changed media into blue+green
        run: |
          set -euo pipefail
          source /tmp/deploy.env
          [[ "${GO:-0}" == "1" ]] || { echo "ℹ️ skipped"; exit 0; }
 
          python3 - <<'PY'
-          import os, re, json, glob, datetime
+          import os, re, json, glob
          import yaml
          import datetime as dt
 
@@ -261,7 +320,6 @@ jobs:
          def is_obj(x): return isinstance(x, dict)
          def is_arr(x): return isinstance(x, list)
 
-          # --- KEY FIX: YAML timestamps -> datetime; JSON can't dump them
          def iso_dt(x):
              if isinstance(x, dt.datetime):
                  if x.tzinfo is None:
@@ -273,81 +331,58 @@ jobs:
 
          def normalize(x):
              s = iso_dt(x)
-              if s is not None:
-                  return s
+              if s is not None: return s
              if isinstance(x, dict):
                  return {str(k): normalize(v) for k, v in x.items()}
              if isinstance(x, list):
                  return [normalize(v) for v in x]
              return x
 
-          def key_media(it):
-              return str((it or {}).get("src",""))
+          def key_media(it): return str((it or {}).get("src",""))
 
          def key_ref(it):
              it = it or {}
-              return "||".join([
-                  str(it.get("url","")),
-                  str(it.get("label","")),
-                  str(it.get("kind","")),
-                  str(it.get("citation","")),
-              ])
-
-          def key_comment(it):
-              return str((it or {}).get("text","")).strip()
+              return "||".join([str(it.get("url","")), str(it.get("label","")), str(it.get("kind","")), str(it.get("citation",""))])
+          def key_comment(it): return str((it or {}).get("text","")).strip()
 
          def dedup_extend(dst_list, src_list, key_fn):
-              seen = set()
-              out = []
+              seen = set(); out = []
              for x in (dst_list or []):
-                  x = normalize(x)
-                  k = key_fn(x)
-                  if k and k not in seen:
-                      seen.add(k); out.append(x)
+                  x = normalize(x); k = key_fn(x)
+                  if k and k not in seen: seen.add(k); out.append(x)
              for x in (src_list or []):
-                  x = normalize(x)
-                  k = key_fn(x)
-                  if k and k not in seen:
-                      seen.add(k); out.append(x)
+                  x = normalize(x); k = key_fn(x)
+                  if k and k not in seen: seen.add(k); out.append(x)
              return out
 
          def deep_merge(dst, src):
              src = normalize(src)
              for k, v in (src or {}).items():
-                  if k in ("media", "refs", "comments_editorial") and is_arr(v):
-                      if k == "media":
-                          dst[k] = dedup_extend(dst.get(k, []), v, key_media)
-                      elif k == "refs":
-                          dst[k] = dedup_extend(dst.get(k, []), v, key_ref)
-                      else:
-                          dst[k] = dedup_extend(dst.get(k, []), v, key_comment)
+                  if k in ("media","refs","comments_editorial") and is_arr(v):
+                      if k == "media": dst[k] = dedup_extend(dst.get(k, []), v, key_media)
+                      elif k == "refs": dst[k] = dedup_extend(dst.get(k, []), v, key_ref)
+                      else: dst[k] = dedup_extend(dst.get(k, []), v, key_comment)
                      continue
 
                  if is_obj(v):
-                      if not is_obj(dst.get(k)):
-                          dst[k] = {} if not is_obj(dst.get(k)) else dst.get(k)
+                      if not is_obj(dst.get(k)): dst[k] = {}
                      deep_merge(dst[k], v)
                      continue
 
                  if is_arr(v):
                      cur = dst.get(k, [])
                      if not is_arr(cur): cur = []
-                      seen = set()
-                      out = []
+                      seen = set(); out = []
                      for x in cur:
                          x = normalize(x)
                          s = json.dumps(x, sort_keys=True, ensure_ascii=False)
-                          if s not in seen:
-                              seen.add(s); out.append(x)
+                          if s not in seen: seen.add(s); out.append(x)
                      for x in v:
                          x = normalize(x)
                          s = json.dumps(x, sort_keys=True, ensure_ascii=False)
-                          if s not in seen:
-                              seen.add(s); out.append(x)
+                          if s not in seen: seen.add(s); out.append(x)
                      dst[k] = out
                      continue
 
-                  # scalars: only set if absent / empty
                  v = normalize(v)
                  if k not in dst or dst.get(k) in (None, ""):
                      dst[k] = v
@@ -360,7 +395,6 @@ jobs:
          for k in ("media","refs","comments_editorial"):
              arr = entry.get(k)
              if not is_arr(arr): continue
-
              def ts(x):
                  x = normalize(x)
                  try:
@@ -368,24 +402,22 @@ jobs:
                      return dt.datetime.fromisoformat(s.replace("Z","+00:00")).timestamp() if s else 0
                  except Exception:
                      return 0
 
              arr = [normalize(x) for x in arr]
              arr.sort(key=lambda x: (ts(x), json.dumps(x, sort_keys=True, ensure_ascii=False)))
              entry[k] = arr
 
-          pages = {}
-          errors = []
-
          if not os.path.isdir(ANNO_ROOT):
              raise SystemExit(f"Missing annotations root: {ANNO_ROOT}")
 
+          pages = {}
+          errors = []
+
          files = sorted(glob.glob(os.path.join(ANNO_ROOT, "**", "*.yml"), recursive=True))
          for fp in files:
              try:
                  with open(fp, "r", encoding="utf-8") as f:
                      doc = yaml.safe_load(f) or {}
                  doc = normalize(doc)
 
                  if not isinstance(doc, dict) or doc.get("schema") != 1:
                      continue
 
@@ -423,22 +455,49 @@ jobs:
              "errors": errors,
          }
 
-          out = normalize(out)
-
          with open("/tmp/annotations-index.json", "w", encoding="utf-8") as f:
              json.dump(out, f, ensure_ascii=False)
 
          print("OK: wrote /tmp/annotations-index.json pages=", out["stats"]["pages"], "paras=", out["stats"]["paras"], "errors=", out["stats"]["errors"])
          PY
 
+          # patch the JSON into the running containers
          for c in archicratie-web-blue archicratie-web-green; do
-            echo "== patch $c =="
+            echo "== patch annotations-index.json into $c =="
            docker cp /tmp/annotations-index.json "${c}:/usr/share/nginx/html/annotations-index.json"
          done
 
+          # copy changed media files into containers (so new media appears without rebuild)
+          if [[ -s /tmp/changed.txt ]]; then
+            while IFS= read -r f; do
+              [[ -n "$f" ]] || continue
+              if [[ "$f" == public/media/* ]]; then
+                dest="/usr/share/nginx/html/${f#public/}" # => /usr/share/nginx/html/media/...
+                for c in archicratie-web-blue archicratie-web-green; do
+                  echo "== copy media into $c: $f -> $dest =="
+                  docker exec "$c" sh -lc "mkdir -p \"$(dirname "$dest")\""
+                  docker cp "$f" "$c:$dest"
+                done
+              fi
+            done < /tmp/changed.txt
+          fi
+
+          # smoke after patch
          for p in 8081 8082; do
            echo "== smoke annotations-index on $p =="
-            curl -fsS "http://127.0.0.1:${p}/annotations-index.json" | python3 -c 'import sys,json; j=json.load(sys.stdin); print("generatedAt:", j.get("generatedAt")); print("pages:", len(j.get("pages") or {}))'
+            curl -fsS --max-time 6 "http://127.0.0.1:${p}/annotations-index.json" \
+              | python3 -c 'import sys,json; j=json.load(sys.stdin); print("generatedAt:", j.get("generatedAt")); print("pages:", len(j.get("pages") or {})); print("paras:", j.get("stats",{}).get("paras"))'
          done
 
-          echo "✅ hotpatch annotations-index done"
+          echo "✅ hotpatch done"
+
+      - name: Debug on failure (containers status/logs)
+        if: ${{ failure() }}
+        run: |
+          set -euo pipefail
+          echo "== docker ps =="
+          docker ps --format 'table {{.Names}}\t{{.Status}}\t{{.Image}}' | sed -n '1,80p' || true
+          for c in archicratie-web-blue archicratie-web-green; do
+            echo "== logs $c (tail 200) =="
+            docker logs --tail 200 "$c" || true
+          done
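The deploy diff replaces one-shot curl smoke checks with a polling wait_url helper, so a container that is still warming up no longer fails the job. For reference, a Node sketch of the same idea (assumes Node 18+ for global fetch and AbortSignal.timeout; the workflow itself uses curl in a bash loop):

// wait-url.mjs -- Node equivalent of the bash wait_url() helper.
// Poll an endpoint until it answers (or give up after `tries` seconds).
async function waitUrl(url, label, tries = 60) {
  for (let i = 1; i <= tries; i++) {
    try {
      const res = await fetch(url, { signal: AbortSignal.timeout(4000) });
      if (res.ok) {
        console.log(`✅ ${label} OK (${url})`);
        return true;
      }
    } catch {
      // not up yet; keep polling
    }
    console.log(`… warmup ${label} (${i}/${tries})`);
    await new Promise((r) => setTimeout(r, 1000));
  }
  console.log(`❌ timeout ${label} (${url})`);
  return false;
}

// Example: one of the smoke targets the workflow warms up on the blue container.
await waitUrl("http://127.0.0.1:8081/para-index.json", "blue para-index");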
@@ -3,7 +3,7 @@ on: [push, workflow_dispatch]
 
 jobs:
   smoke:
-    runs-on: ubuntu-latest
+    runs-on: mac-ci
     steps:
       - run: node -v && npm -v
       - run: echo "runner OK"
Binary file not shown (added image, 61 KiB).
Binary file not shown (added image, 61 KiB).
@@ -1,12 +1,17 @@
|
|||||||
#!/usr/bin/env node
|
#!/usr/bin/env node
|
||||||
// scripts/apply-annotation-ticket.mjs
|
// scripts/apply-annotation-ticket.mjs
|
||||||
|
//
|
||||||
// Applique un ticket Gitea "type/media | type/reference | type/comment" vers:
|
// Applique un ticket Gitea "type/media | type/reference | type/comment" vers:
|
||||||
//
|
//
|
||||||
// ✅ src/annotations/<oeuvre>/<chapitre>/<paraId>.yml (sharding par paragraphe)
|
// ✅ src/annotations/<oeuvre>/<chapitre>/<paraId>.yml (sharding par paragraphe)
|
||||||
// ✅ public/media/<oeuvre>/<chapitre>/<paraId>/<file>
|
// ✅ public/media/<oeuvre>/<chapitre>/<paraId>/<file>
|
||||||
//
|
//
|
||||||
// Robuste, idempotent, non destructif.
|
// Compat rétro : lit (si présent) l'ancien monolithe:
|
||||||
|
// src/annotations/<oeuvre>/<chapitre>.yml
|
||||||
|
// et deep-merge NON destructif dans le shard lors d'une nouvelle application,
|
||||||
|
// pour permettre une migration progressive sans perte.
|
||||||
//
|
//
|
||||||
|
// Robuste, idempotent, non destructif.
|
||||||
// DRY RUN si --dry-run
|
// DRY RUN si --dry-run
|
||||||
// Options: --dry-run --no-download --verify --strict --commit --close
|
// Options: --dry-run --no-download --verify --strict --commit --close
|
||||||
//
|
//
|
||||||
@@ -49,8 +54,8 @@ Flags:
|
|||||||
--dry-run : n'écrit rien (affiche un aperçu)
|
--dry-run : n'écrit rien (affiche un aperçu)
|
||||||
--no-download : n'essaie pas de télécharger les pièces jointes (media)
|
--no-download : n'essaie pas de télécharger les pièces jointes (media)
|
||||||
--verify : vérifie que (page, ancre) existent (dist/para-index.json si dispo, sinon baseline)
|
--verify : vérifie que (page, ancre) existent (dist/para-index.json si dispo, sinon baseline)
|
||||||
--strict : refuse si URL ref invalide (http/https) OU caption media vide
|
--strict : refuse si URL ref invalide (http/https) OU caption media vide OU verify impossible
|
||||||
--commit : git add + git commit (le script commit dans la branche courante)
|
--commit : git add + git commit (commit dans la branche courante)
|
||||||
--close : ferme le ticket (nécessite --commit)
|
--close : ferme le ticket (nécessite --commit)
|
||||||
|
|
||||||
Env requis:
|
Env requis:
|
||||||
@@ -191,6 +196,7 @@ function normalizeChemin(chemin) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
function normalizePageKeyFromChemin(chemin) {
|
function normalizePageKeyFromChemin(chemin) {
|
||||||
|
// ex: /archicrat-ia/chapitre-4/ => archicrat-ia/chapitre-4
|
||||||
return normalizeChemin(chemin).replace(/^\/+|\/+$/g, "");
|
return normalizeChemin(chemin).replace(/^\/+|\/+$/g, "");
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -226,90 +232,156 @@ function isHttpUrl(u) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
/* ------------------------------ para-index (verify + sort) ------------------------------ */
|
function stableSortByTs(arr) {
|
||||||
|
if (!Array.isArray(arr)) return;
|
||||||
|
arr.sort((a, b) => {
|
||||||
|
const ta = Date.parse(a?.ts || "") || 0;
|
||||||
|
const tb = Date.parse(b?.ts || "") || 0;
|
||||||
|
if (ta !== tb) return ta - tb;
|
||||||
|
return JSON.stringify(a).localeCompare(JSON.stringify(b));
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
function normPage(s) {
|
||||||
|
let x = String(s || "").trim();
|
||||||
|
if (!x) return "";
|
||||||
|
// retire origin si on a une URL complète
|
||||||
|
x = x.replace(/^https?:\/\/[^/]+/i, "");
|
||||||
|
// enlève query/hash
|
||||||
|
x = x.split("#")[0].split("?")[0];
|
||||||
|
// enlève index.html
|
||||||
|
x = x.replace(/index\.html$/i, "");
|
||||||
|
// enlève slashs de bord
|
||||||
|
x = x.replace(/^\/+/, "").replace(/\/+$/, "");
|
||||||
|
return x;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* ------------------------------ para-index (verify + order) ------------------------------ */
|
||||||
|
|
||||||
async function loadParaOrderFromDist(pageKey) {
|
async function loadParaOrderFromDist(pageKey) {
|
||||||
const distIdx = path.join(CWD, "dist", "para-index.json");
|
const distIdx = path.join(CWD, "dist", "para-index.json");
|
||||||
if (!(await exists(distIdx))) return null;
|
if (!(await exists(distIdx))) return null;
|
||||||
|
|
||||||
let j;
|
let j;
|
||||||
try {
|
try {
|
||||||
j = JSON.parse(await fs.readFile(distIdx, "utf8"));
|
j = JSON.parse(await fs.readFile(distIdx, "utf8"));
|
||||||
} catch {
|
} catch {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
const want = normPage(pageKey);
|
||||||
|
|
||||||
|
// Support A) { items:[{id,page,...}, ...] } (ou variantes)
|
||||||
|
const items = Array.isArray(j?.items)
|
||||||
|
? j.items
|
||||||
|
: Array.isArray(j?.index?.items)
|
||||||
|
? j.index.items
|
||||||
|
: null;
|
||||||
|
|
||||||
|
if (items) {
|
||||||
|
const ids = [];
|
||||||
|
for (const it of items) {
|
||||||
|
// page peut être dans plein de clés différentes
|
||||||
|
const pageCand = normPage(
|
||||||
|
it?.page ??
|
||||||
|
it?.pageKey ??
|
||||||
|
it?.path ??
|
||||||
|
it?.route ??
|
||||||
|
it?.href ??
|
||||||
|
it?.url ??
|
||||||
|
""
|
||||||
|
);
|
||||||
|
|
||||||
|
// id peut être dans plein de clés différentes
|
||||||
|
let id = String(it?.id ?? it?.paraId ?? it?.anchorId ?? it?.anchor ?? "");
|
||||||
|
if (id.startsWith("#")) id = id.slice(1);
|
||||||
|
|
||||||
|
if (pageCand === want && id) ids.push(id);
|
||||||
|
}
|
||||||
|
if (ids.length) return ids;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Support B) { byId: { "p-...": { page:"...", ... }, ... } }
|
||||||
|
if (j?.byId && typeof j.byId === "object") {
|
||||||
|
const ids = Object.keys(j.byId)
|
||||||
|
.filter((id) => {
|
||||||
|
const meta = j.byId[id] || {};
|
||||||
|
const pageCand = normPage(meta.page ?? meta.pageKey ?? meta.path ?? meta.route ?? meta.url ?? "");
|
||||||
|
return pageCand === want;
|
||||||
|
});
|
||||||
|
|
||||||
|
if (ids.length) {
|
||||||
|
ids.sort((a, b) => {
|
||||||
|
const ia = paraIndexFromId(a);
|
||||||
|
const ib = paraIndexFromId(b);
|
||||||
|
if (Number.isFinite(ia) && Number.isFinite(ib) && ia !== ib) return ia - ib;
|
||||||
|
return String(a).localeCompare(String(b));
|
||||||
|
});
|
||||||
|
return ids;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Support C) { pages: { "archicrat-ia/chapitre-4": { ids:[...] } } } (ou variantes)
|
||||||
|
if (j?.pages && typeof j.pages === "object") {
|
||||||
|
// essaie de trouver la bonne clé même si elle est /.../ ou .../index.html
|
||||||
|
const keys = Object.keys(j.pages);
|
||||||
|
const hit = keys.find((k) => normPage(k) === want);
|
||||||
|
if (hit) {
|
||||||
|
const pg = j.pages[hit];
|
||||||
|
if (Array.isArray(pg?.ids)) return pg.ids.map(String);
|
||||||
|
if (Array.isArray(pg?.paras)) return pg.paras.map(String);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
return null;
|
return null;
|
||||||
}
|
}
|
||||||
|
|
||||||
// Support several shapes:
|
async function tryVerifyAnchor(pageKey, anchorId) {
|
||||||
// A) { items:[{id,page,...}, ...] }
|
// 1) dist/para-index.json : order complet si possible
|
||||||
if (Array.isArray(j?.items)) {
|
const order = await loadParaOrderFromDist(pageKey);
|
||||||
const ids = [];
|
if (order) return order.includes(anchorId);
|
||||||
for (const it of j.items) {
|
|
||||||
const p = String(it?.page || it?.pageKey || "");
|
// 1bis) dist/para-index.json : fallback “best effort” => recherche brute (IDs quasi uniques)
|
||||||
const id = String(it?.id || it?.paraId || "");
|
const distIdx = path.join(CWD, "dist", "para-index.json");
|
||||||
if (p === pageKey && id) ids.push(id);
|
if (await exists(distIdx)) {
|
||||||
}
|
try {
|
||||||
if (ids.length) return ids;
|
const raw = await fs.readFile(distIdx, "utf8");
|
||||||
}
|
if (raw.includes(`"${anchorId}"`) || raw.includes(`"#${anchorId}"`)) {
|
||||||
|
return true;
|
||||||
// B) { byId: { "p-...": { page:"archicrat-ia/chapitre-4", ... }, ... } }
|
|
||||||
if (j?.byId && typeof j.byId === "object") {
|
|
||||||
// cannot rebuild full order; but can verify existence
|
|
||||||
// return a pseudo-order map from known ids sorted by p-<n>- then alpha
|
|
||||||
const ids = Object.keys(j.byId).filter((id) => String(j.byId[id]?.page || "") === pageKey);
|
|
||||||
if (ids.length) {
|
|
||||||
ids.sort((a, b) => {
|
|
||||||
const ia = paraIndexFromId(a);
|
|
||||||
const ib = paraIndexFromId(b);
|
|
||||||
if (Number.isFinite(ia) && Number.isFinite(ib) && ia !== ib) return ia - ib;
|
|
||||||
return String(a).localeCompare(String(b));
|
|
||||||
});
|
|
||||||
return ids;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// C) { pages: { "archicrat-ia/chapitre-4": { ids:[...]} } }
|
|
||||||
if (j?.pages && typeof j.pages === "object") {
|
|
||||||
const pg = j.pages[pageKey];
|
|
||||||
if (Array.isArray(pg?.ids)) return pg.ids.map(String);
|
|
||||||
if (Array.isArray(pg?.paras)) return pg.paras.map(String);
|
|
||||||
}
|
|
||||||
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
|
|
||||||
async function tryVerifyAnchor(pageKey, anchorId) {
  // 1) dist/para-index.json
  const order = await loadParaOrderFromDist(pageKey);
  if (order) return order.includes(anchorId);

  // 2) tests/anchors-baseline.json (fallback)
  const base = path.join(CWD, "tests", "anchors-baseline.json");
  if (await exists(base)) {
    try {
      const j = JSON.parse(await fs.readFile(base, "utf8"));
      const candidates = [];
      if (j?.pages && typeof j.pages === "object") {
        for (const [k, v] of Object.entries(j.pages)) {
          if (!Array.isArray(v)) continue;
-         if (String(k).includes(pageKey)) candidates.push(...v);
+         if (normPage(k).includes(normPage(pageKey))) candidates.push(...v);
        }
      }
      if (Array.isArray(j?.entries)) {
        for (const it of j.entries) {
          const p = String(it?.page || "");
          const ids = it?.ids;
-         if (Array.isArray(ids) && p.includes(pageKey)) candidates.push(...ids);
+         if (Array.isArray(ids) && normPage(p).includes(normPage(pageKey))) candidates.push(...ids);
        }
      }
      if (candidates.length) return candidates.some((x) => String(x) === anchorId);
    } catch {
      // ignore
    }
  }

  return null; // cannot verify
}
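Note that tryVerifyAnchor is deliberately tri-state: true/false when a source of truth exists, null when none does, leaving strict mode (below) to decide whether "unverifiable" is fatal. A minimal sketch of a tests/anchors-baseline.json that the fallback can scan, in either of the two layouts it probes (values illustrative):

    { "pages":   { "archicrat-ia/chapitre-1": ["p-0-8d27a7f5"] },
      "entries": [ { "page": "archicrat-ia/chapitre-4", "ids": ["p-11-67c14c09"] } ] }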
/* ----------------------------- deep merge helpers (non destructive) ----------------------------- */

function keyMedia(x) {
@@ -360,7 +432,6 @@ function deepMergeEntry(dst, src) {
   }

   if (Array.isArray(v)) {
-    // fallback: union by JSON string
     const cur = Array.isArray(dst[k]) ? dst[k] : [];
     const seen = new Set(cur.map((x) => JSON.stringify(x)));
     const out = [...cur];
@@ -382,16 +453,6 @@ function deepMergeEntry(dst, src) {
   }
 }

-function stableSortByTs(arr) {
-  if (!Array.isArray(arr)) return;
-  arr.sort((a, b) => {
-    const ta = Date.parse(a?.ts || "") || 0;
-    const tb = Date.parse(b?.ts || "") || 0;
-    if (ta !== tb) return ta - tb;
-    return JSON.stringify(a).localeCompare(JSON.stringify(b));
-  });
-}
-
 /* ----------------------------- annotations I/O ----------------------------- */

 async function loadAnnoDocYaml(fileAbs, pageKey) {
@@ -424,9 +485,7 @@ async function loadAnnoDocYaml(fileAbs, pageKey) {
 function sortParasObject(paras, order) {
   const keys = Object.keys(paras || {});
   const idx = new Map();
-  if (Array.isArray(order)) {
-    order.forEach((id, i) => idx.set(String(id), i));
-  }
+  if (Array.isArray(order)) order.forEach((id, i) => idx.set(String(id), i));

   keys.sort((a, b) => {
     const ha = idx.has(a);
@@ -448,9 +507,9 @@ function sortParasObject(paras, order) {

 async function saveAnnoDocYaml(fileAbs, doc, order = null) {
   await fs.mkdir(path.dirname(fileAbs), { recursive: true });

   doc.paras = sortParasObject(doc.paras, order);

   // also sort known lists inside each para for stable diffs
   for (const e of Object.values(doc.paras || {})) {
     if (!isPlainObject(e)) continue;
     stableSortByTs(e.media);
@@ -632,7 +691,6 @@ async function main() {
   const pageKey = normalizePageKeyFromChemin(chemin);
   assert(pageKey, "Ticket: impossible de dériver pageKey.", 2);

-  // para order (used for verify + sorting)
   const paraOrder = DO_VERIFY ? await loadParaOrderFromDist(pageKey) : null;

   if (DO_VERIFY) {
@@ -641,46 +699,43 @@ async function main() {
       throw Object.assign(new Error(`Ticket verify: ancre introuvable pour page "${pageKey}" => ${ancre}`), { __exitCode: 2 });
     }
     if (ok === null) {
-      if (STRICT) throw Object.assign(new Error(`Ticket verify (strict): impossible de vérifier (pas de dist/para-index.json ou baseline)`), { __exitCode: 2 });
+      if (STRICT) {
+        throw Object.assign(
+          new Error(`Ticket verify (strict): impossible de vérifier (pas de dist/para-index.json ou baseline)`),
+          { __exitCode: 2 }
+        );
+      }
       console.warn("⚠️ verify: impossible de vérifier (pas de dist/para-index.json ou baseline) — on continue.");
     }
   }

-  // ✅ SHARD FILE: src/annotations/<pageKey>/<paraId>.yml
-  const annoShardFileAbs = path.join(ANNO_DIR, pageKey, `${ancre}.yml`);
-  const annoShardFileRel = path.relative(CWD, annoShardFileAbs).replace(/\\/g, "/");
+  // ✅ shard path: src/annotations/<pageKey>/<paraId>.yml
+  const shardAbs = path.join(ANNO_DIR, ...pageKey.split("/"), `${ancre}.yml`);
+  const shardRel = path.relative(CWD, shardAbs).replace(/\\/g, "/");

-  // legacy (read-only, used as base to avoid losing previously stored data)
-  const annoLegacyFileAbs = path.join(ANNO_DIR, `${pageKey}.yml`);
+  // legacy monolith: src/annotations/<pageKey>.yml (read-only, for migration)
+  const legacyAbs = path.join(ANNO_DIR, `${pageKey}.yml`);

-  console.log("✅ Parsed:", {
-    type,
-    chemin,
-    ancre: `#${ancre}`,
-    pageKey,
-    annoFile: annoShardFileRel,
-  });
+  console.log("✅ Parsed:", { type, chemin, ancre: `#${ancre}`, pageKey, annoFile: shardRel });

   // load shard doc
-  const doc = await loadAnnoDocYaml(annoShardFileAbs, pageKey);
+  const doc = await loadAnnoDocYaml(shardAbs, pageKey);

-  // merge legacy para into shard as base (non destructive)
-  if (await exists(annoLegacyFileAbs)) {
-    try {
-      const legacy = await loadAnnoDocYaml(annoLegacyFileAbs, pageKey);
-      const legacyEntry = legacy?.paras?.[ancre];
-      if (isPlainObject(legacyEntry)) {
-        if (!isPlainObject(doc.paras[ancre])) doc.paras[ancre] = {};
-        deepMergeEntry(doc.paras[ancre], legacyEntry);
-      }
-    } catch {
-      // ignore legacy parse issues (shard still works)
-    }
-  }

   if (!isPlainObject(doc.paras[ancre])) doc.paras[ancre] = {};
   const entry = doc.paras[ancre];

+  // merge legacy entry into shard in-memory (non destructive) to keep compat + enable progressive migration
+  if (await exists(legacyAbs)) {
+    try {
+      const legacy = await loadAnnoDocYaml(legacyAbs, pageKey);
+      const legacyEntry = legacy?.paras?.[ancre];
+      if (isPlainObject(legacyEntry)) {
+        deepMergeEntry(entry, legacyEntry);
+      }
+    } catch {
+      // ignore legacy parse issues; shard still applies new data
+    }
+  }

   const touchedFiles = [];
   const notes = [];
   let changed = false;
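Spreading pageKey.split("/") builds the nested shard path one explicit segment at a time instead of handing path.join() a segment with an embedded slash. A worked example with values from the shard files added later in this diff:

    // pageKey = "archicrat-ia/chapitre-4", ancre = "p-11-67c14c09"
    path.join(ANNO_DIR, ...pageKey.split("/"), `${ancre}.yml`)
    // -> src/annotations/archicrat-ia/chapitre-4/p-11-67c14c09.yml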
@@ -696,10 +751,13 @@ async function main() {

     const before = entry.comments_editorial.length;
     entry.comments_editorial = uniqUnion(entry.comments_editorial, [item], keyComment);
-    changed = changed || entry.comments_editorial.length !== before;
+    if (entry.comments_editorial.length !== before) {
+      changed = true;
+      notes.push(`+ comment added (len=${text.length})`);
+    } else {
+      notes.push(`~ comment already present (dedup)`);
+    }
     stableSortByTs(entry.comments_editorial);
-    notes.push(changed ? `+ comment added (len=${text.length})` : `~ comment already present (dedup)`);
   }

   else if (type === "type/reference") {
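Tying the note to this item's length delta, rather than to the cumulative changed flag, is what keeps the log honest: previously a deduped comment could still be reported as "added" if an earlier mutation had already set changed. A quick sketch of the dedup itself (uniqUnion and keyComment are defined later in this diff; values illustrative):

    // entry.comments_editorial = [{ text: "Yeaha", ts: "2026-02-27T12:40:39.462Z" }]
    // item = { text: "Yeaha", ts: "2026-02-28T00:00:00.000Z" }  // same text, newer ts
    // uniqUnion keys on the trimmed text, so the duplicate is dropped, `before`
    // is unchanged, and notes records "~ comment already present (dedup)".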
@@ -722,15 +780,24 @@ async function main() {

     const before = entry.refs.length;
     entry.refs = uniqUnion(entry.refs, [item], keyRef);
-    changed = changed || entry.refs.length !== before;
+    if (entry.refs.length !== before) {
+      changed = true;
+      notes.push(`+ reference added (${item.url ? "url" : "label"})`);
+    } else {
+      notes.push(`~ reference already present (dedup)`);
+    }
     stableSortByTs(entry.refs);
-    notes.push(changed ? `+ reference added (${item.url ? "url" : "label"})` : `~ reference already present (dedup)`);
   }

   else if (type === "type/media") {
     if (!Array.isArray(entry.media)) entry.media = [];

+    const caption = (title || "").trim();
+    if (STRICT && !caption) {
+      throw Object.assign(new Error("Ticket media (strict): caption vide (titre de ticket requis)."), { __exitCode: 2 });
+    }
+    const captionFinal = caption || ".";

     const atts = NO_DOWNLOAD ? [] : await fetchIssueAssets({ forgeApiBase, owner, repo, token, issueNum });
     if (!atts.length) notes.push("! no assets found (nothing to download).");
@@ -739,13 +806,7 @@ async function main() {
       const dl = a?.browser_download_url || a?.download_url || "";
       if (!dl) { notes.push(`! asset missing download url: ${name}`); continue; }

-      const caption = (title || "").trim();
-      if (STRICT && !caption) {
-        throw Object.assign(new Error("Ticket media (strict): caption vide (titre de ticket requis)."), { __exitCode: 2 });
-      }
-      const captionFinal = caption || ".";
-
-      const mediaDirAbs = path.join(PUBLIC_DIR, "media", pageKey, ancre);
+      const mediaDirAbs = path.join(PUBLIC_DIR, "media", ...pageKey.split("/"), ancre);
       const destAbs = path.join(mediaDirAbs, name);
       const urlPath = `${MEDIA_URL_ROOT}/${pageKey}/${ancre}/${name}`.replace(/\/{2,}/g, "/");
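Hoisting the caption check out of the per-asset loop means a strict run now fails before any download starts, instead of part-way through. The resulting public URL, using values from the chapitre-3 shard added below (assuming MEDIA_URL_ROOT is "/media"):

    // urlPath -> /media/archicrat-ia/chapitre-3/p-0-ace27175/Capture_d_e_cran_2025-05-05_a_19.20.40.png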
@@ -790,7 +851,7 @@ async function main() {

   if (DRY_RUN) {
     console.log("\n--- DRY RUN (no write) ---");
-    console.log(`Would update: ${annoShardFileRel}`);
+    console.log(`Would update: ${shardRel}`);
     for (const n of notes) console.log(" ", n);
     console.log("\nExcerpt (resulting entry):");
     console.log(YAML.stringify({ [ancre]: doc.paras[ancre] }).trimEnd());
@@ -798,10 +859,10 @@ async function main() {
     return;
   }

-  await saveAnnoDocYaml(annoShardFileAbs, doc, paraOrder);
-  touchedFiles.unshift(annoShardFileRel);
+  await saveAnnoDocYaml(shardAbs, doc, paraOrder);
+  touchedFiles.unshift(shardRel);

-  console.log(`✅ Updated: ${annoShardFileRel}`);
+  console.log(`✅ Updated: ${shardRel}`);
   for (const n of notes) console.log(" ", n);

   if (DO_COMMIT) {
@@ -1,28 +1,106 @@
 #!/usr/bin/env node
 // scripts/build-annotations-index.mjs
+// Construit dist/annotations-index.json à partir de src/annotations/**/*.yml
+// Supporte:
+//   - monolith : src/annotations/<pageKey>.yml
+//   - shard    : src/annotations/<pageKey>/<paraId>.yml  (paraId = p-<n>-...)
+// Invariants:
+//   - doc.schema === 1
+//   - doc.page (si présent) == pageKey déduit du chemin
+//   - shard: doc.paras doit contenir EXACTEMENT la clé paraId (sinon fail)
+//
+// Deep-merge non destructif (media/refs/comments dédupliqués), tri stable.

 import fs from "node:fs/promises";
 import path from "node:path";
 import YAML from "yaml";

-function parseArgs(argv) {
-  const out = {
-    inDir: "src/annotations",
-    outFile: "dist/annotations-index.json",
-  };
-  for (let i = 0; i < argv.length; i++) {
-    const a = argv[i];
-    if (a === "--in" && argv[i + 1]) out.inDir = argv[++i];
-    else if (a.startsWith("--in=")) out.inDir = a.slice("--in=".length);
-    if (a === "--out" && argv[i + 1]) out.outFile = argv[++i];
-    else if (a.startsWith("--out=")) out.outFile = a.slice("--out=".length);
-  }
-  return out;
-}
-
-async function exists(p) {
-  try { await fs.access(p); return true; } catch { return false; }
-}
+const ROOT = process.cwd();
+const ANNO_ROOT = path.join(ROOT, "src", "annotations");
+const DIST_DIR = path.join(ROOT, "dist");
+const OUT = path.join(DIST_DIR, "annotations-index.json");
+
+function assert(cond, msg) {
+  if (!cond) throw new Error(msg);
+}
+
+function isObj(x) {
+  return !!x && typeof x === "object" && !Array.isArray(x);
+}
+function isArr(x) {
+  return Array.isArray(x);
+}
+
+function normPath(s) {
+  return String(s || "")
+    .replace(/\\/g, "/")
+    .replace(/^\/+|\/+$/g, "");
+}
+
+function paraNum(pid) {
+  const m = String(pid).match(/^p-(\d+)-/i);
+  return m ? Number(m[1]) : Number.POSITIVE_INFINITY;
+}
+
+function stableSortByTs(arr) {
+  if (!Array.isArray(arr)) return;
+  arr.sort((a, b) => {
+    const ta = Date.parse(a?.ts || "") || 0;
+    const tb = Date.parse(b?.ts || "") || 0;
+    if (ta !== tb) return ta - tb;
+    return JSON.stringify(a).localeCompare(JSON.stringify(b));
+  });
+}
+
+function keyMedia(x) { return String(x?.src || ""); }
+function keyRef(x) {
+  return `${x?.url || ""}||${x?.label || ""}||${x?.kind || ""}||${x?.citation || ""}`;
+}
+function keyComment(x) { return String(x?.text || "").trim(); }
+
+function uniqUnion(dst, src, keyFn) {
+  const out = isArr(dst) ? [...dst] : [];
+  const seen = new Set(out.map((x) => keyFn(x)));
+  for (const it of (isArr(src) ? src : [])) {
+    const k = keyFn(it);
+    if (!k) continue;
+    if (!seen.has(k)) {
+      seen.add(k);
+      out.push(it);
+    }
+  }
+  return out;
+}
+
+function deepMergeEntry(dst, src) {
+  if (!isObj(dst) || !isObj(src)) return;
+
+  for (const [k, v] of Object.entries(src)) {
+    if (k === "media" && isArr(v)) { dst.media = uniqUnion(dst.media, v, keyMedia); continue; }
+    if (k === "refs" && isArr(v)) { dst.refs = uniqUnion(dst.refs, v, keyRef); continue; }
+    if (k === "comments_editorial" && isArr(v)) { dst.comments_editorial = uniqUnion(dst.comments_editorial, v, keyComment); continue; }
+
+    if (isObj(v)) {
+      if (!isObj(dst[k])) dst[k] = {};
+      deepMergeEntry(dst[k], v);
+      continue;
+    }
+
+    if (isArr(v)) {
+      const cur = isArr(dst[k]) ? dst[k] : [];
+      const seen = new Set(cur.map((x) => JSON.stringify(x)));
+      const out = [...cur];
+      for (const it of v) {
+        const s = JSON.stringify(it);
+        if (!seen.has(s)) { seen.add(s); out.push(it); }
+      }
+      dst[k] = out;
+      continue;
+    }
+
+    // scalar: set only if missing/empty
+    if (!(k in dst) || dst[k] == null || dst[k] === "") dst[k] = v;
+  }
+}

 async function walk(dir) {
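A short sketch of the non-destructive merge semantics, run against deepMergeEntry as defined above (the values are invented for illustration):

    const dst = { media: [{ src: "/media/a.png", credit: "x" }], note: "" };
    const src = { media: [{ src: "/media/a.png" }, { src: "/media/b.png" }], note: "kept" };
    deepMergeEntry(dst, src);
    // dst.media -> a.png once (deduped by keyMedia on the src string), plus b.png appended
    // dst.note  -> "kept" (scalars are only filled when missing or empty, never overwritten)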
@@ -30,111 +108,116 @@ async function walk(dir) {
   const ents = await fs.readdir(dir, { withFileTypes: true });
   for (const e of ents) {
     const p = path.join(dir, e.name);
-    if (e.isDirectory()) out.push(...(await walk(p)));
-    else out.push(p);
+    if (e.isDirectory()) out.push(...await walk(p));
+    else if (e.isFile() && /\.ya?ml$/i.test(e.name)) out.push(p);
   }
   return out;
 }

-function inferPageKeyFromFile(inDirAbs, fileAbs) {
-  // src/annotations/<page>.yml -> "<page>"
-  const rel = path.relative(inDirAbs, fileAbs).replace(/\\/g, "/");
-  return rel.replace(/\.(ya?ml|json)$/i, "");
-}
-
-function assert(cond, msg) {
-  if (!cond) throw new Error(msg);
-}
-
-function isPlainObject(x) {
-  return !!x && typeof x === "object" && !Array.isArray(x);
-}
-
-function normalizePageKey(s) {
-  // pas de / en tête/fin
-  return String(s || "").replace(/^\/+/, "").replace(/\/+$/, "");
-}
-
-function validateAndNormalizeDoc(doc, pageKey, fileRel) {
-  assert(isPlainObject(doc), `${fileRel}: document must be an object`);
-  assert(doc.schema === 1, `${fileRel}: schema must be 1`);
-  if (doc.page != null) {
-    assert(
-      normalizePageKey(doc.page) === pageKey,
-      `${fileRel}: page mismatch (page="${doc.page}" vs path="${pageKey}")`
-    );
-  }
-  assert(isPlainObject(doc.paras), `${fileRel}: missing object key "paras"`);
-
-  const parasOut = Object.create(null);
-  for (const [paraId, entry] of Object.entries(doc.paras)) {
-    assert(/^p-\d+-/i.test(paraId), `${fileRel}: invalid para id "${paraId}"`);
-
-    // entry peut être vide, mais doit être un objet si présent
-    assert(entry == null || isPlainObject(entry), `${fileRel}: paras.${paraId} must be an object`);
-
-    const e = entry ? { ...entry } : {};
-
-    // Sanity checks (non destructifs : on n’écrase pas, on vérifie juste les types)
-    if (e.refs != null) assert(Array.isArray(e.refs), `${fileRel}: paras.${paraId}.refs must be an array`);
-    if (e.authors != null) assert(Array.isArray(e.authors), `${fileRel}: paras.${paraId}.authors must be an array`);
-    if (e.quotes != null) assert(Array.isArray(e.quotes), `${fileRel}: paras.${paraId}.quotes must be an array`);
-    if (e.media != null) assert(Array.isArray(e.media), `${fileRel}: paras.${paraId}.media must be an array`);
-    if (e.comments_editorial != null) assert(Array.isArray(e.comments_editorial), `${fileRel}: paras.${paraId}.comments_editorial must be an array`);
-
-    parasOut[paraId] = e;
-  }
-
-  return parasOut;
-}
-
-async function readDoc(fileAbs) {
-  const raw = await fs.readFile(fileAbs, "utf8");
-  if (/\.json$/i.test(fileAbs)) return JSON.parse(raw);
-  return YAML.parse(raw);
-}
+function inferExpectedFromRel(relNoExt) {
+  const parts = relNoExt.split("/").filter(Boolean);
+  const last = parts.at(-1) || "";
+  const isShard = parts.length > 1 && /^p-\d+-/i.test(last); // ✅ durcissement
+  const pageKey = isShard ? parts.slice(0, -1).join("/") : relNoExt;
+  const paraId = isShard ? last : null;
+  return { isShard, pageKey, paraId };
+}
+
+function validateAndNormalizeDoc(doc, relFile, expectedPageKey, expectedParaId) {
+  assert(isObj(doc), `${relFile}: doc must be an object`);
+  assert(doc.schema === 1, `${relFile}: schema must be 1`);
+  assert(isObj(doc.paras), `${relFile}: missing object key "paras"`);
+
+  const gotPage = doc.page != null ? normPath(doc.page) : "";
+  const expPage = normPath(expectedPageKey);
+
+  if (gotPage) {
+    assert(
+      gotPage === expPage,
+      `${relFile}: page mismatch (page="${doc.page}" vs path="${expectedPageKey}")`
+    );
+  } else {
+    doc.page = expPage;
+  }
+
+  if (expectedParaId) {
+    const keys = Object.keys(doc.paras || {}).map(String);
+    assert(
+      keys.includes(expectedParaId),
+      `${relFile}: shard mismatch: must contain paras["${expectedParaId}"]`
+    );
+    assert(
+      keys.length === 1 && keys[0] === expectedParaId,
+      `${relFile}: shard invariant violated: shard file must contain ONLY paras["${expectedParaId}"] (got: ${keys.join(", ")})`
+    );
+  }
+
+  return doc;
+}

 async function main() {
-  const { inDir, outFile } = parseArgs(process.argv.slice(2));
-  const CWD = process.cwd();
-
-  const inDirAbs = path.isAbsolute(inDir) ? inDir : path.join(CWD, inDir);
-  const outAbs = path.isAbsolute(outFile) ? outFile : path.join(CWD, outFile);
-
-  // antifragile
-  if (!(await exists(inDirAbs))) {
-    console.log(`ℹ️ annotations-index: skip (input missing): ${inDir}`);
-    process.exit(0);
-  }
-
-  const files = (await walk(inDirAbs)).filter((p) => /\.(ya?ml|json)$/i.test(p));
-  if (!files.length) {
-    console.log(`ℹ️ annotations-index: skip (no .yml/.yaml/.json found in): ${inDir}`);
-    process.exit(0);
-  }
-
-  const pages = Object.create(null);
-  let paraCount = 0;
-
-  for (const f of files) {
-    const fileRel = path.relative(CWD, f).replace(/\\/g, "/");
-    const pageKey = normalizePageKey(inferPageKeyFromFile(inDirAbs, f));
-    assert(pageKey, `${fileRel}: cannot infer page key`);
-
-    let doc;
+  const pages = {};
+  const errors = [];
+
+  await fs.mkdir(DIST_DIR, { recursive: true });
+
+  const files = await walk(ANNO_ROOT);
+
+  for (const fp of files) {
+    const rel = normPath(path.relative(ANNO_ROOT, fp));
+    const relNoExt = rel.replace(/\.ya?ml$/i, "");
+    const { isShard, pageKey, paraId } = inferExpectedFromRel(relNoExt);
+
     try {
-      doc = await readDoc(f);
+      const raw = await fs.readFile(fp, "utf8");
+      const doc = YAML.parse(raw) || {};
+
+      if (!isObj(doc) || doc.schema !== 1) continue;
+
+      validateAndNormalizeDoc(
+        doc,
+        `src/annotations/${rel}`,
+        pageKey,
+        isShard ? paraId : null
+      );
+
+      const pg = (pages[pageKey] ??= { paras: {} });
+
+      if (isShard) {
+        const entry = doc.paras[paraId];
+        if (!isObj(pg.paras[paraId])) pg.paras[paraId] = {};
+        if (isObj(entry)) deepMergeEntry(pg.paras[paraId], entry);
+
+        stableSortByTs(pg.paras[paraId].media);
+        stableSortByTs(pg.paras[paraId].refs);
+        stableSortByTs(pg.paras[paraId].comments_editorial);
+      } else {
+        for (const [pid, entry] of Object.entries(doc.paras || {})) {
+          const p = String(pid);
+          if (!isObj(pg.paras[p])) pg.paras[p] = {};
+          if (isObj(entry)) deepMergeEntry(pg.paras[p], entry);
+
+          stableSortByTs(pg.paras[p].media);
+          stableSortByTs(pg.paras[p].refs);
+          stableSortByTs(pg.paras[p].comments_editorial);
+        }
+      }
     } catch (e) {
-      throw new Error(`${fileRel}: parse failed: ${String(e?.message ?? e)}`);
+      errors.push({ file: `src/annotations/${rel}`, error: String(e?.message || e) });
     }
   }

-    const paras = validateAndNormalizeDoc(doc, pageKey, fileRel);
-
-    // 1 fichier = 1 page (canon)
-    assert(!pages[pageKey], `${fileRel}: duplicate page "${pageKey}" (only one file per page)`);
-    pages[pageKey] = { paras };
-    paraCount += Object.keys(paras).length;
-  }
+  for (const [pageKey, pg] of Object.entries(pages)) {
+    const keys = Object.keys(pg.paras || {});
+    keys.sort((a, b) => {
+      const ia = paraNum(a);
+      const ib = paraNum(b);
+      if (Number.isFinite(ia) && Number.isFinite(ib) && ia !== ib) return ia - ib;
+      return String(a).localeCompare(String(b));
+    });
+    const next = {};
+    for (const k of keys) next[k] = pg.paras[k];
+    pg.paras = next;
+  }

   const out = {
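Worked examples of the shard/monolith inference (paths taken from the files added later in this diff); the parts.length > 1 guard is what keeps a top-level file that merely looks like a paraId from being misread as a shard:

    inferExpectedFromRel("archicrat-ia/chapitre-1/p-0-8d27a7f5")
    // -> { isShard: true, pageKey: "archicrat-ia/chapitre-1", paraId: "p-0-8d27a7f5" }
    inferExpectedFromRel("archicrat-ia/chapitre-1")
    // -> { isShard: false, pageKey: "archicrat-ia/chapitre-1", paraId: null }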
@@ -143,17 +226,21 @@ async function main() {
     pages,
     stats: {
       pages: Object.keys(pages).length,
-      paras: paraCount,
+      paras: Object.values(pages).reduce((n, p) => n + Object.keys(p.paras || {}).length, 0),
+      errors: errors.length,
     },
+    errors,
   };

-  await fs.mkdir(path.dirname(outAbs), { recursive: true });
-  await fs.writeFile(outAbs, JSON.stringify(out), "utf8");
-
-  console.log(`✅ annotations-index: pages=${out.stats.pages} paras=${out.stats.paras} -> ${path.relative(CWD, outAbs)}`);
+  if (errors.length) {
+    throw new Error(`${errors[0].file}: ${errors[0].error}`);
+  }
+
+  await fs.writeFile(OUT, JSON.stringify(out), "utf8");
+  console.log(`✅ annotations-index: pages=${out.stats.pages} paras=${out.stats.paras} -> dist/annotations-index.json`);
 }

 main().catch((e) => {
-  console.error("FAIL: build-annotations-index crashed:", e);
+  console.error(`FAIL: build-annotations-index crashed: ${e?.stack || e?.message || e}`);
   process.exit(1);
 });
@@ -48,6 +48,9 @@ async function main() {
   let missing = 0;
   const notes = [];

+  // Optim: éviter de vérifier 100 fois le même fichier media
+  const seenMedia = new Set(); // src string
+
   for (const f of files) {
     const rel = path.relative(CWD, f).replace(/\\/g, "/");
     const raw = await fs.readFile(f, "utf8");
@@ -70,6 +73,10 @@ async function main() {
       const src = String(m?.src || "");
       if (!src.startsWith("/media/")) continue; // externes ok, ou autres conventions futures

+      // dédupe
+      if (seenMedia.has(src)) continue;
+      seenMedia.add(src);
+
       checked++;
       const p = toPublicPathFromUrl(src);
       if (!p) continue;
@@ -94,4 +101,4 @@ async function main() {
 main().catch((e) => {
   console.error("FAIL: check-annotations-media crashed:", e);
   process.exit(1);
 });
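The seenMedia set turns repeated references to the same asset into a single filesystem check, which matters now that shards multiply the YAML files per page. A sketch of the path mapping being checked (toPublicPathFromUrl is defined elsewhere in this script; the "public/" prefix shown here is an assumption):

    // "/media/archicrat-ia/chapitre-3/p-0-ace27175/Capture_d_e_cran_2025-05-05_a_19.20.40.png"
    //   -> public/media/archicrat-ia/chapitre-3/p-0-ace27175/Capture_d_e_cran_2025-05-05_a_19.20.40.png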
@@ -27,11 +27,6 @@ function escRe(s) {
   return String(s).replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
 }

-function inferPageKeyFromFile(fileAbs) {
-  const rel = path.relative(ANNO_DIR, fileAbs).replace(/\\/g, "/");
-  return rel.replace(/\.(ya?ml|json)$/i, "");
-}
-
 function normalizePageKey(s) {
   return String(s || "").replace(/^\/+/, "").replace(/\/+$/, "");
 }
@@ -40,6 +35,31 @@ function isPlainObject(x) {
   return !!x && typeof x === "object" && !Array.isArray(x);
 }

+function isParaId(s) {
+  return /^p-\d+-/i.test(String(s || ""));
+}
+
+/**
+ * Supporte:
+ *  - monolith: src/annotations/<pageKey>.yml          -> pageKey = rel sans ext
+ *  - shard   : src/annotations/<pageKey>/<paraId>.yml -> pageKey = dirname(rel), paraId = basename
+ *
+ * shard seulement si le fichier est dans un sous-dossier (anti cas pathologique).
+ */
+function inferFromFile(fileAbs) {
+  const rel = path.relative(ANNO_DIR, fileAbs).replace(/\\/g, "/");
+  const relNoExt = rel.replace(/\.(ya?ml|json)$/i, "");
+  const parts = relNoExt.split("/").filter(Boolean);
+  const base = parts[parts.length - 1] || "";
+  const dirParts = parts.slice(0, -1);
+
+  const isShard = dirParts.length > 0 && isParaId(base);
+  const pageKey = isShard ? dirParts.join("/") : relNoExt;
+  const paraId = isShard ? base : "";
+
+  return { pageKey: normalizePageKey(pageKey), paraId };
+}
+
 async function loadAliases() {
   if (!(await exists(ALIASES_PATH))) return {};
   try {
@@ -83,7 +103,11 @@ async function main() {
   const aliases = await loadAliases();
   const files = (await walk(ANNO_DIR)).filter((p) => /\.(ya?ml|json)$/i.test(p));

-  let pages = 0;
+  // perf: cache HTML par page (shards = beaucoup de fichiers pour 1 page)
+  const htmlCache = new Map(); // pageKey -> html
+  const missingDistPage = new Set(); // pageKey
+
+  let pagesSeen = new Set();
   let checked = 0;
   let failures = 0;
   const notes = [];
@@ -107,7 +131,7 @@ async function main() {
       continue;
     }

-    const pageKey = normalizePageKey(inferPageKeyFromFile(f));
+    const { pageKey, paraId: shardParaId } = inferFromFile(f);

     if (doc.page != null && normalizePageKey(doc.page) !== pageKey) {
       failures++;
@@ -121,20 +145,44 @@ async function main() {
       continue;
     }

+    // shard invariant (fort) : doit contenir paras[paraId]
+    if (shardParaId) {
+      if (!Object.prototype.hasOwnProperty.call(doc.paras, shardParaId)) {
+        failures++;
+        notes.push(`- SHARD MISMATCH: ${rel} (expected paras["${shardParaId}"] present)`);
+        continue;
+      }
+      // si extras -> warning (non destructif)
+      const keys = Object.keys(doc.paras);
+      if (!(keys.length === 1 && keys[0] === shardParaId)) {
+        notes.push(`- WARN shard has extra paras: ${rel} (expected only "${shardParaId}", got ${keys.join(", ")})`);
+      }
+    }
+
+    pagesSeen.add(pageKey);

     const distFile = path.join(DIST_DIR, pageKey, "index.html");
     if (!(await exists(distFile))) {
-      failures++;
-      notes.push(`- MISSING PAGE: dist/${pageKey}/index.html (from ${rel})`);
+      if (!missingDistPage.has(pageKey)) {
+        missingDistPage.add(pageKey);
+        failures++;
+        notes.push(`- MISSING PAGE: dist/${pageKey}/index.html (from ${rel})`);
+      } else {
+        notes.push(`- WARN missing page already reported: dist/${pageKey}/index.html (from ${rel})`);
+      }
       continue;
     }

-    pages++;
-    const html = await fs.readFile(distFile, "utf8");
+    let html = htmlCache.get(pageKey);
+    if (!html) {
+      html = await fs.readFile(distFile, "utf8");
+      htmlCache.set(pageKey, html);
+    }

     for (const paraId of Object.keys(doc.paras)) {
       checked++;

-      if (!/^p-\d+-/i.test(paraId)) {
+      if (!isParaId(paraId)) {
         failures++;
         notes.push(`- INVALID ID: ${rel} (${paraId})`);
         continue;
@@ -158,6 +206,7 @@ async function main() {
   }

   const warns = notes.filter((x) => x.startsWith("- WARN"));
+  const pages = pagesSeen.size;

   if (failures > 0) {
     console.error(`FAIL: annotations invalid (pages=${pages} checked=${checked} failures=${failures})`);
@@ -172,4 +221,4 @@ async function main() {
 main().catch((e) => {
   console.error("FAIL: annotations check crashed:", e);
   process.exit(1);
 });
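End to end, a shard is checked roughly like this (paths taken from the first new file below):

    // src/annotations/archicrat-ia/chapitre-1/p-0-8d27a7f5.yml
    //   inferFromFile -> pageKey "archicrat-ia/chapitre-1", paraId "p-0-8d27a7f5"
    //   must contain paras["p-0-8d27a7f5"]; extra keys only warn here (the builder fails hard)
    //   page HTML read once from dist/archicrat-ia/chapitre-1/index.html, then served from htmlCache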
src/annotations/archicrat-ia/chapitre-1/p-0-8d27a7f5.yml (Normal file, 10 lines)
@@ -0,0 +1,10 @@
+schema: 1
+page: archicrat-ia/chapitre-1
+paras:
+  p-0-8d27a7f5:
+    refs:
+      - url: https://auth.archicratie.trans-hands.synology.me/authenticated
+        label: Lien web
+        kind: (livre / article / vidéo / site / autre) Site
+        ts: 2026-02-27T12:34:31.704Z
+        fromIssue: 142
src/annotations/archicrat-ia/chapitre-1/p-1-8a6c18bf.yml (Normal file, 9 lines)
@@ -0,0 +1,9 @@
+schema: 1
+page: archicrat-ia/chapitre-1
+paras:
+  p-1-8a6c18bf:
+    comments_editorial:
+      - text: Yeaha
+        status: new
+        ts: 2026-02-27T12:40:39.462Z
+        fromIssue: 143
src/annotations/archicrat-ia/chapitre-3/p-0-ace27175.yml (Normal file, 12 lines)
@@ -0,0 +1,12 @@
+schema: 1
+page: archicrat-ia/chapitre-3
+paras:
+  p-0-ace27175:
+    media:
+      - type: image
+        src: /media/archicrat-ia/chapitre-3/p-0-ace27175/Capture_d_e_cran_2025-05-05_a_19.20.40.png
+        caption: "[Media] p-0-ace27175 — Chapitre 3 — Philosophies du pouvoir et
+          archicration"
+        credit: ""
+        ts: 2026-02-27T12:43:14.259Z
+        fromIssue: 144
src/annotations/archicrat-ia/chapitre-4/p-11-67c14c09.yml (Normal file, 19 lines)
@@ -0,0 +1,19 @@
+schema: 1
+page: archicrat-ia/chapitre-4
+paras:
+  p-11-67c14c09:
+    media:
+      - type: image
+        src: /media/archicrat-ia/chapitre-4/p-11-67c14c09/Capture_d_e_cran_2026-02-16_a_13.07.35.png
+        caption: "[Media] p-11-67c14c09 — Chapitre 4 — Histoire archicratique des
+          révolutions industrielles"
+        credit: ""
+        ts: 2026-02-26T13:17:41.286Z
+        fromIssue: 129
+      - type: image
+        src: /media/archicrat-ia/chapitre-4/p-11-67c14c09/Capture_d_e_cran_2025-05-05_a_19.20.40.png
+        caption: "[Media] p-11-67c14c09 — Chapitre 4 — Histoire archicratique des
+          révolutions industrielles"
+        credit: ""
+        ts: 2026-02-27T09:17:04.386Z
+        fromIssue: 127
@@ -1,23 +1,80 @@
+// src/pages/annotations-index.json.ts
 import type { APIRoute } from "astro";
-import * as fs from "node:fs/promises";
-import * as path from "node:path";
-import { parse as parseYAML } from "yaml";
+import fs from "node:fs/promises";
+import path from "node:path";
+import YAML from "yaml";

 const CWD = process.cwd();
-const ANNO_DIR = path.join(CWD, "src", "annotations");
+const ANNO_ROOT = path.join(CWD, "src", "annotations");

-// Strict en CI (ou override explicite)
-const STRICT =
-  process.env.ANNOTATIONS_STRICT === "1" ||
-  process.env.CI === "1" ||
-  process.env.CI === "true";
-
-async function exists(p: string): Promise<boolean> {
-  try {
-    await fs.access(p);
-    return true;
-  } catch {
-    return false;
-  }
-}
+const isObj = (x: any) => !!x && typeof x === "object" && !Array.isArray(x);
+const isArr = (x: any) => Array.isArray(x);
+
+function normPath(s: string) {
+  return String(s || "").replace(/\\/g, "/").replace(/^\/+|\/+$/g, "");
+}
+function paraNum(pid: string) {
+  const m = String(pid).match(/^p-(\d+)-/i);
+  return m ? Number(m[1]) : Number.POSITIVE_INFINITY;
+}
+function toIso(v: any) {
+  if (v instanceof Date) return v.toISOString();
+  return typeof v === "string" ? v : "";
+}
+function stableSortByTs(arr: any[]) {
+  if (!Array.isArray(arr)) return;
+  arr.sort((a, b) => {
+    const ta = Date.parse(toIso(a?.ts)) || 0;
+    const tb = Date.parse(toIso(b?.ts)) || 0;
+    if (ta !== tb) return ta - tb;
+    return JSON.stringify(a).localeCompare(JSON.stringify(b));
+  });
+}
+
+function keyMedia(x: any) { return String(x?.src || ""); }
+function keyRef(x: any) {
+  return `${x?.url || ""}||${x?.label || ""}||${x?.kind || ""}||${x?.citation || ""}`;
+}
+function keyComment(x: any) { return String(x?.text || "").trim(); }
+
+function uniqUnion(dst: any[], src: any[], keyFn: (x: any) => string) {
+  const out = isArr(dst) ? [...dst] : [];
+  const seen = new Set(out.map((x) => keyFn(x)));
+  for (const it of (isArr(src) ? src : [])) {
+    const k = keyFn(it);
+    if (!k) continue;
+    if (!seen.has(k)) { seen.add(k); out.push(it); }
+  }
+  return out;
+}
+
+function deepMergeEntry(dst: any, src: any) {
+  if (!isObj(dst) || !isObj(src)) return;
+
+  for (const [k, v] of Object.entries(src)) {
+    if (k === "media" && isArr(v)) { dst.media = uniqUnion(dst.media, v, keyMedia); continue; }
+    if (k === "refs" && isArr(v)) { dst.refs = uniqUnion(dst.refs, v, keyRef); continue; }
+    if (k === "comments_editorial" && isArr(v)) { dst.comments_editorial = uniqUnion(dst.comments_editorial, v, keyComment); continue; }
+
+    if (isObj(v)) {
+      if (!isObj((dst as any)[k])) (dst as any)[k] = {};
+      deepMergeEntry((dst as any)[k], v);
+      continue;
+    }
+
+    if (isArr(v)) {
+      const cur = isArr((dst as any)[k]) ? (dst as any)[k] : [];
+      const seen = new Set(cur.map((x: any) => JSON.stringify(x)));
+      const out = [...cur];
+      for (const it of v) {
+        const s = JSON.stringify(it);
+        if (!seen.has(s)) { seen.add(s); out.push(it); }
+      }
+      (dst as any)[k] = out;
+      continue;
+    }
+
+    if (!(k in (dst as any)) || (dst as any)[k] == null || (dst as any)[k] === "") (dst as any)[k] = v;
+  }
+}
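toIso exists because a ts value may reach stableSortByTs either as a plain string or as a Date object (for instance if the YAML parser's schema resolves timestamps); normalizing both keeps Date.parse happy and the sort stable:

    toIso(new Date("2026-02-27T12:40:39.462Z"))  // -> "2026-02-27T12:40:39.462Z"
    toIso("2026-02-27T12:40:39.462Z")            // -> same string, unchanged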
@@ -26,154 +83,98 @@ async function walk(dir: string): Promise<string[]> {
   const ents = await fs.readdir(dir, { withFileTypes: true });
   for (const e of ents) {
     const p = path.join(dir, e.name);
-    if (e.isDirectory()) out.push(...(await walk(p)));
-    else out.push(p);
+    if (e.isDirectory()) out.push(...await walk(p));
+    else if (e.isFile() && /\.ya?ml$/i.test(e.name)) out.push(p);
   }
   return out;
 }

-function isPlainObject(x: unknown): x is Record<string, unknown> {
-  return !!x && typeof x === "object" && !Array.isArray(x);
-}
-
-function normalizePageKey(s: unknown): string {
-  return String(s ?? "")
-    .replace(/^\/+/, "")
-    .replace(/\/+$/, "")
-    .trim();
-}
-
-function inferPageKeyFromFile(inDirAbs: string, fileAbs: string): string {
-  const rel = path.relative(inDirAbs, fileAbs).replace(/\\/g, "/");
-  return rel.replace(/\.(ya?ml|json)$/i, "");
-}
-
-function parseDoc(raw: string, fileAbs: string): unknown {
-  if (/\.json$/i.test(fileAbs)) return JSON.parse(raw);
-  return parseYAML(raw);
-}
-
-function hardFailOrCollect(errors: string[], msg: string): void {
-  if (STRICT) throw new Error(msg);
-  errors.push(msg);
-}
-
-function sanitizeEntry(
-  fileRel: string,
-  paraId: string,
-  entry: unknown,
-  errors: string[]
-): Record<string, unknown> {
-  if (entry == null) return {};
-
-  if (!isPlainObject(entry)) {
-    hardFailOrCollect(errors, `${fileRel}: paras.${paraId} must be an object`);
-    return {};
-  }
-
-  const e: Record<string, unknown> = { ...entry };
-
-  const arrayFields = [
-    "refs",
-    "authors",
-    "quotes",
-    "media",
-    "comments_editorial",
-  ] as const;
-
-  for (const k of arrayFields) {
-    if (e[k] == null) continue;
-    if (!Array.isArray(e[k])) {
-      errors.push(`${fileRel}: paras.${paraId}.${k} must be an array (coerced to [])`);
-      e[k] = [];
-    }
-  }
-
-  return e;
-}
+function inferExpected(relNoExt: string) {
+  const parts = relNoExt.split("/").filter(Boolean);
+  const last = parts.at(-1) || "";
+  const isShard = parts.length > 1 && /^p-\d+-/i.test(last); // ✅ durcissement
+  const pageKey = isShard ? parts.slice(0, -1).join("/") : relNoExt;
+  const paraId = isShard ? last : null;
+  return { isShard, pageKey, paraId };
+}

 export const GET: APIRoute = async () => {
-  if (!(await exists(ANNO_DIR))) {
-    const out = {
-      schema: 1,
-      generatedAt: new Date().toISOString(),
-      pages: {},
-      stats: { pages: 0, paras: 0, errors: 0 },
-      errors: [] as string[],
-    };
-
-    return new Response(JSON.stringify(out), {
-      headers: {
-        "Content-Type": "application/json; charset=utf-8",
-        "Cache-Control": "no-store",
-      },
-    });
-  }
-
-  const files = (await walk(ANNO_DIR)).filter((p) => /\.(ya?ml|json)$/i.test(p));
-
-  const pages: Record<string, { paras: Record<string, Record<string, unknown>> }> =
-    Object.create(null);
-
-  const errors: string[] = [];
-  let paraCount = 0;
-
-  for (const f of files) {
-    const fileRel = path.relative(CWD, f).replace(/\\/g, "/");
-    const pageKey = normalizePageKey(inferPageKeyFromFile(ANNO_DIR, f));
-
-    if (!pageKey) {
-      hardFailOrCollect(errors, `${fileRel}: cannot infer page key`);
-      continue;
-    }
-
-    let doc: unknown;
+  const pages: Record<string, { paras: Record<string, any> }> = {};
+  const errors: Array<{ file: string; error: string }> = [];
+
+  let files: string[] = [];
+  try {
+    files = await walk(ANNO_ROOT);
+  } catch (e: any) {
+    throw new Error(`Missing annotations root: ${ANNO_ROOT} (${e?.message || e})`);
+  }
+
+  for (const fp of files) {
+    const rel = normPath(path.relative(ANNO_ROOT, fp));
+    const relNoExt = rel.replace(/\.ya?ml$/i, "");
+    const { isShard, pageKey, paraId } = inferExpected(relNoExt);
+
     try {
-      const raw = await fs.readFile(f, "utf8");
-      doc = parseDoc(raw, f);
-    } catch (e) {
-      hardFailOrCollect(errors, `${fileRel}: parse failed: ${String((e as any)?.message ?? e)}`);
-      continue;
-    }
-
-    if (!isPlainObject(doc) || (doc as any).schema !== 1) {
-      hardFailOrCollect(errors, `${fileRel}: schema must be 1`);
-      continue;
-    }
-
-    if ((doc as any).page != null) {
-      const declared = normalizePageKey((doc as any).page);
-      if (declared !== pageKey) {
-        hardFailOrCollect(
-          errors,
-          `${fileRel}: page mismatch (page="${declared}" vs path="${pageKey}")`
-        );
-      }
-    }
-
-    const parasAny = (doc as any).paras;
-    if (!isPlainObject(parasAny)) {
-      hardFailOrCollect(errors, `${fileRel}: missing object key "paras"`);
-      continue;
-    }
-
-    if (pages[pageKey]) {
-      hardFailOrCollect(errors, `${fileRel}: duplicate page "${pageKey}" (only one file per page)`);
-      continue;
-    }
-
-    const parasOut: Record<string, Record<string, unknown>> = Object.create(null);
-
-    for (const [paraId, entry] of Object.entries(parasAny)) {
-      if (!/^p-\d+-/i.test(paraId)) {
-        hardFailOrCollect(errors, `${fileRel}: invalid para id "${paraId}"`);
-        continue;
-      }
-      parasOut[paraId] = sanitizeEntry(fileRel, paraId, entry, errors);
-    }
-
-    pages[pageKey] = { paras: parasOut };
-    paraCount += Object.keys(parasOut).length;
-  }
+      const raw = await fs.readFile(fp, "utf8");
+      const doc = YAML.parse(raw) || {};
+
+      if (!isObj(doc) || doc.schema !== 1) continue;
+
+      const docPage = normPath(doc.page || "");
+      if (docPage && docPage !== pageKey) {
+        throw new Error(`page mismatch (page="${doc.page}" vs path="${pageKey}")`);
+      }
+      if (!doc.page) doc.page = pageKey;
+
+      if (!isObj(doc.paras)) throw new Error(`missing object key "paras"`);
+
+      const pg = pages[pageKey] ??= { paras: {} };
+
+      if (isShard) {
+        if (!paraId) throw new Error("internal: missing paraId");
+        if (!(paraId in doc.paras)) {
+          throw new Error(`shard mismatch: file must contain paras["${paraId}"]`);
+        }
+        // ✅ invariant aligné avec build-annotations-index
+        const keys = Object.keys(doc.paras).map(String);
+        if (!(keys.length === 1 && keys[0] === paraId)) {
+          throw new Error(`shard invariant violated: shard must contain ONLY paras["${paraId}"] (got: ${keys.join(", ")})`);
+        }
+
+        const entry = doc.paras[paraId];
+        if (!isObj(pg.paras[paraId])) pg.paras[paraId] = {};
+        if (isObj(entry)) deepMergeEntry(pg.paras[paraId], entry);
+
+        stableSortByTs(pg.paras[paraId].media);
+        stableSortByTs(pg.paras[paraId].refs);
+        stableSortByTs(pg.paras[paraId].comments_editorial);
+      } else {
+        for (const [pid, entry] of Object.entries(doc.paras)) {
+          const p = String(pid);
+          if (!isObj(pg.paras[p])) pg.paras[p] = {};
+          if (isObj(entry)) deepMergeEntry(pg.paras[p], entry);
+
+          stableSortByTs(pg.paras[p].media);
+          stableSortByTs(pg.paras[p].refs);
+          stableSortByTs(pg.paras[p].comments_editorial);
+        }
+      }
+    } catch (e: any) {
+      errors.push({ file: `src/annotations/${rel}`, error: String(e?.message || e) });
+    }
+  }
+
+  for (const [pk, pg] of Object.entries(pages)) {
+    const keys = Object.keys(pg.paras || {});
+    keys.sort((a, b) => {
+      const ia = paraNum(a);
+      const ib = paraNum(b);
+      if (Number.isFinite(ia) && Number.isFinite(ib) && ia !== ib) return ia - ib;
+      return String(a).localeCompare(String(b));
+    });
+    const next: Record<string, any> = {};
+    for (const k of keys) next[k] = pg.paras[k];
+    pg.paras = next;
+  }

   const out = {
@@ -182,16 +183,17 @@ export const GET: APIRoute = async () => {
     pages,
     stats: {
       pages: Object.keys(pages).length,
-      paras: paraCount,
+      paras: Object.values(pages).reduce((n, p) => n + Object.keys(p.paras || {}).length, 0),
       errors: errors.length,
     },
     errors,
   };

+  if (errors.length) {
+    throw new Error(`${errors[0].file}: ${errors[0].error}`);
+  }
+
   return new Response(JSON.stringify(out), {
-    headers: {
-      "Content-Type": "application/json; charset=utf-8",
-      "Cache-Control": "no-store",
-    },
+    headers: { "Content-Type": "application/json; charset=utf-8" },
   });
 };