Compare commits
43 Commits
chore/fix-
...
chore/fix-
| Author | SHA1 | Date | |
|---|---|---|---|
| 225368a952 | |||
| 3574695041 | |||
| ea68025a1d | |||
| 3a08698003 | |||
| 3d583608c2 | |||
|
|
01ae95ab43 | ||
|
|
0d5821c640 | ||
|
|
2bcea39558 | ||
| af85970d4a | |||
| 210f621487 | |||
| 8ad960dc69 | |||
| d45a8b285f | |||
| b6e04a9138 | |||
| dcf1fc2d0b | |||
| 41b0517c6c | |||
| 6b43eb199d | |||
| d40f24e92d | |||
| 480a61b071 | |||
| a5d68d6a7e | |||
| 390f2c33e5 | |||
| 16485dc4a9 | |||
| a43ce5f188 | |||
| 0519ae2dd0 | |||
| 0d5b790e52 | |||
| 342e21b9ea | |||
| 4dec9e182b | |||
| c7ae883c6a | |||
| 9b4584f70a | |||
| 7b64fb7401 | |||
|
|
57cb23ce8b | ||
| 708b87ff35 | |||
| 577cfd08e8 | |||
| de9edbe532 | |||
| 5e95dc9898 | |||
| 006fec7efd | |||
| 2b612214bb | |||
| 29a6c349aa | |||
|
|
33a227c401 | ||
| 396ad4df7c | |||
|
|
0b39427090 | ||
| 8fcb18cb46 | |||
| d03fc519de | |||
| 97dd3797d6 |
@@ -16,6 +16,10 @@ defaults:
|
||||
run:
|
||||
shell: bash
|
||||
|
||||
concurrency:
|
||||
group: anno-apply-${{ github.event.issue.number || inputs.issue || 'manual' }}
|
||||
cancel-in-progress: true
|
||||
|
||||
jobs:
|
||||
apply-approved:
|
||||
runs-on: ubuntu-latest
|
||||
@@ -29,7 +33,6 @@ jobs:
|
||||
git --version
|
||||
node --version
|
||||
npm --version
|
||||
npm ping --registry=https://registry.npmjs.org
|
||||
|
||||
- name: Derive context (event.json / workflow_dispatch)
|
||||
env:
|
||||
@@ -44,8 +47,8 @@ jobs:
|
||||
import fs from "node:fs";
|
||||
|
||||
const ev = JSON.parse(fs.readFileSync(process.env.EVENT_JSON, "utf8"));
|
||||
|
||||
const repoObj = ev?.repository || {};
|
||||
|
||||
const cloneUrl =
|
||||
repoObj?.clone_url ||
|
||||
(repoObj?.html_url ? (repoObj.html_url.replace(/\/$/,"") + ".git") : "");
|
||||
@@ -110,13 +113,101 @@ jobs:
|
||||
run: |
|
||||
set -euo pipefail
|
||||
source /tmp/anno.env
|
||||
|
||||
if [[ "$LABEL_NAME" != "state/approved" && "$LABEL_NAME" != "workflow_dispatch" ]]; then
|
||||
echo "ℹ️ label=$LABEL_NAME => skip"
|
||||
echo 'SKIP=1' >> /tmp/anno.env
|
||||
echo "SKIP=1" >> /tmp/anno.env
|
||||
exit 0
|
||||
fi
|
||||
echo "✅ proceed (issue=$ISSUE_NUMBER)"
|
||||
|
||||
- name: Fetch issue + gate on Type (skip Proposer)
|
||||
env:
|
||||
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
source /tmp/anno.env
|
||||
[[ "${SKIP:-0}" != "1" ]] || { echo "ℹ️ skipped"; exit 0; }
|
||||
|
||||
test -n "${FORGE_TOKEN:-}" || { echo "❌ Missing secret FORGE_TOKEN"; exit 1; }
|
||||
|
||||
ISSUE_JSON="$(curl -fsS \
|
||||
-H "Authorization: token $FORGE_TOKEN" \
|
||||
-H "Accept: application/json" \
|
||||
"$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE_NUMBER")"
|
||||
|
||||
node --input-type=module - <<'NODE' "$ISSUE_JSON" >> /tmp/anno.env
|
||||
const issue = JSON.parse(process.argv[1] || "{}");
|
||||
const title = String(issue.title || "");
|
||||
const body = String(issue.body || "").replace(/\r\n/g, "\n");
|
||||
|
||||
function pickLine(key) {
|
||||
const re = new RegExp(`^\\s*${key}\\s*:\\s*([^\\n\\r]+)`, "mi");
|
||||
const m = body.match(re);
|
||||
return m ? m[1].trim() : "";
|
||||
}
|
||||
|
||||
const typeRaw = pickLine("Type");
|
||||
const type = String(typeRaw || "").trim().toLowerCase();
|
||||
|
||||
const allowed = new Set(["type/media","type/reference","type/comment"]);
|
||||
const proposer = new Set(["type/correction","type/fact-check"]);
|
||||
|
||||
const out = [];
|
||||
out.push(`ISSUE_TITLE=${JSON.stringify(title)}`);
|
||||
out.push(`ISSUE_TYPE=${JSON.stringify(type)}`);
|
||||
|
||||
if (!type) {
|
||||
out.push(`SKIP=1`);
|
||||
out.push(`SKIP_REASON=${JSON.stringify("missing_type")}`);
|
||||
} else if (allowed.has(type)) {
|
||||
// proceed
|
||||
} else if (proposer.has(type)) {
|
||||
out.push(`SKIP=1`);
|
||||
out.push(`SKIP_REASON=${JSON.stringify("proposer_type:"+type)}`);
|
||||
} else {
|
||||
out.push(`SKIP=1`);
|
||||
out.push(`SKIP_REASON=${JSON.stringify("unsupported_type:"+type)}`);
|
||||
}
|
||||
|
||||
process.stdout.write(out.join("\n") + "\n");
|
||||
NODE
|
||||
|
||||
echo "✅ issue type gating:"
|
||||
grep -E '^(ISSUE_TYPE|SKIP|SKIP_REASON)=' /tmp/anno.env || true
|
||||
|
||||
- name: Comment issue if skipped (Proposer / unsupported / missing Type)
|
||||
if: ${{ always() }}
|
||||
env:
|
||||
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
source /tmp/anno.env || true
|
||||
|
||||
[[ "${SKIP:-0}" == "1" ]] || exit 0
|
||||
[[ "$LABEL_NAME" == "state/approved" || "$LABEL_NAME" == "workflow_dispatch" ]] || exit 0
|
||||
|
||||
# message différent si Proposer
|
||||
REASON="${SKIP_REASON:-}"
|
||||
TYPE="${ISSUE_TYPE:-}"
|
||||
TITLE="${ISSUE_TITLE:-}"
|
||||
|
||||
if [[ "$REASON" == proposer_type:* ]]; then
|
||||
MSG="ℹ️ Ticket #${ISSUE_NUMBER} détecté comme **Proposer** (${TYPE}).\n\n- Ce type est **traité manuellement par les editors** (correction/fact-check + cat/*).\n- Le bot n'applique **jamais** Proposer et n'ajoute **jamais** state/approved automatiquement.\n\n✅ Action : traitement éditorial manuel."
|
||||
elif [[ "$REASON" == unsupported_type:* ]]; then
|
||||
MSG="ℹ️ Ticket #${ISSUE_NUMBER} ignoré : Type non supporté par le bot (${TYPE}).\n\nTypes supportés : type/media, type/reference, type/comment.\n✅ Action : traitement manuel si nécessaire."
|
||||
else
|
||||
MSG="ℹ️ Ticket #${ISSUE_NUMBER} ignoré : champ 'Type:' manquant ou illisible.\n\n✅ Action : corriger le ticket (ajouter 'Type: type/media|type/reference|type/comment') ou traiter manuellement."
|
||||
fi
|
||||
|
||||
PAYLOAD="$(node --input-type=module -e 'console.log(JSON.stringify({body: process.argv[1]||""}))' "$MSG")"
|
||||
|
||||
curl -fsS -X POST \
|
||||
-H "Authorization: token $FORGE_TOKEN" \
|
||||
-H "Content-Type: application/json" \
|
||||
"$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE_NUMBER/comments" \
|
||||
--data-binary "$PAYLOAD"
|
||||
|
||||
- name: Checkout default branch
|
||||
run: |
|
||||
set -euo pipefail
|
||||
@@ -128,30 +219,43 @@ jobs:
|
||||
git remote add origin "$CLONE_URL"
|
||||
git fetch --depth 1 origin "$DEFAULT_BRANCH"
|
||||
git -c advice.detachedHead=false checkout -q FETCH_HEAD
|
||||
git log -1 --oneline
|
||||
|
||||
- name: Install deps
|
||||
run: |
|
||||
set -euo pipefail
|
||||
source /tmp/anno.env
|
||||
[[ "${SKIP:-0}" != "1" ]] || { echo "ℹ️ skipped"; exit 0; }
|
||||
npm ci
|
||||
|
||||
test -f scripts/apply-annotation-ticket.mjs || { echo "❌ missing scripts/apply-annotation-ticket.mjs on main"; exit 1; }
|
||||
npm ci --no-audit --no-fund
|
||||
|
||||
- name: Check apply script exists
|
||||
run: |
|
||||
set -euo pipefail
|
||||
source /tmp/anno.env
|
||||
[[ "${SKIP:-0}" != "1" ]] || { echo "ℹ️ skipped"; exit 0; }
|
||||
test -f scripts/apply-annotation-ticket.mjs || {
|
||||
echo "❌ missing scripts/apply-annotation-ticket.mjs on $DEFAULT_BRANCH"
|
||||
ls -la scripts | sed -n '1,200p' || true
|
||||
exit 1
|
||||
}
|
||||
|
||||
- name: Build dist (needed for --verify)
|
||||
run: |
|
||||
set -euo pipefail
|
||||
# génère dist + para-index.json (via postbuild)
|
||||
npm run build:clean
|
||||
source /tmp/anno.env
|
||||
[[ "${SKIP:-0}" != "1" ]] || { echo "ℹ️ skipped"; exit 0; }
|
||||
|
||||
npm run build
|
||||
|
||||
test -f dist/para-index.json || {
|
||||
echo "❌ missing dist/para-index.json after build"
|
||||
ls -la dist | sed -n '1,160p'
|
||||
ls -la dist | sed -n '1,200p' || true
|
||||
exit 1
|
||||
}
|
||||
echo "✅ dist/para-index.json present"
|
||||
|
||||
|
||||
- name: Apply ticket on bot branch (strict+verify, commit)
|
||||
continue-on-error: true
|
||||
env:
|
||||
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
|
||||
BOT_GIT_NAME: ${{ secrets.BOT_GIT_NAME }}
|
||||
@@ -160,6 +264,7 @@ jobs:
|
||||
set -euo pipefail
|
||||
source /tmp/anno.env
|
||||
[[ "${SKIP:-0}" != "1" ]] || { echo "ℹ️ skipped"; exit 0; }
|
||||
|
||||
test -n "${FORGE_TOKEN:-}" || { echo "❌ Missing secret FORGE_TOKEN"; exit 1; }
|
||||
|
||||
git config user.name "${BOT_GIT_NAME:-archicratie-bot}"
|
||||
@@ -181,12 +286,16 @@ jobs:
|
||||
RC=$?
|
||||
set -e
|
||||
|
||||
tail -n 160 "$LOG" || true
|
||||
echo "APPLY_RC=$RC" >> /tmp/anno.env
|
||||
|
||||
echo "== apply log (tail) =="
|
||||
tail -n 180 "$LOG" || true
|
||||
|
||||
END_SHA="$(git rev-parse HEAD)"
|
||||
|
||||
if [[ "$RC" -ne 0 ]]; then
|
||||
echo "APPLY_RC=$RC" >> /tmp/anno.env
|
||||
exit "$RC"
|
||||
echo "NOOP=0" >> /tmp/anno.env
|
||||
exit 0
|
||||
fi
|
||||
|
||||
if [[ "$START_SHA" == "$END_SHA" ]]; then
|
||||
@@ -196,13 +305,46 @@ jobs:
|
||||
echo "END_SHA=$END_SHA" >> /tmp/anno.env
|
||||
fi
|
||||
|
||||
- name: Comment issue if no-op (already applied)
|
||||
- name: Comment issue on failure (strict/verify/etc)
|
||||
if: ${{ always() }}
|
||||
env:
|
||||
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
source /tmp/anno.env
|
||||
source /tmp/anno.env || true
|
||||
[[ "${SKIP:-0}" != "1" ]] || { echo "ℹ️ skipped"; exit 0; }
|
||||
|
||||
RC="${APPLY_RC:-0}"
|
||||
if [[ "$RC" == "0" ]]; then
|
||||
echo "ℹ️ no failure detected"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
if [[ -f /tmp/apply.log ]]; then
|
||||
BODY="$(tail -n 160 /tmp/apply.log | sed 's/\r$//')"
|
||||
else
|
||||
BODY="(no apply log found)"
|
||||
fi
|
||||
|
||||
MSG="❌ apply-annotation-ticket a échoué (rc=${RC}).\n\n\`\`\`\n${BODY}\n\`\`\`\n"
|
||||
PAYLOAD="$(node --input-type=module -e 'console.log(JSON.stringify({body: process.argv[1]||""}))' "$MSG")"
|
||||
|
||||
curl -fsS -X POST \
|
||||
-H "Authorization: token $FORGE_TOKEN" \
|
||||
-H "Content-Type: application/json" \
|
||||
"$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE_NUMBER/comments" \
|
||||
--data-binary "$PAYLOAD"
|
||||
|
||||
- name: Comment issue if no-op (already applied)
|
||||
if: ${{ always() }}
|
||||
env:
|
||||
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
source /tmp/anno.env || true
|
||||
[[ "${SKIP:-0}" != "1" ]] || exit 0
|
||||
|
||||
[[ "${APPLY_RC:-0}" == "0" ]] || exit 0
|
||||
[[ "${NOOP:-0}" == "1" ]] || exit 0
|
||||
|
||||
MSG="ℹ️ Ticket #${ISSUE_NUMBER} : rien à appliquer (déjà présent / dédupliqué)."
|
||||
@@ -215,12 +357,15 @@ jobs:
|
||||
--data-binary "$PAYLOAD"
|
||||
|
||||
- name: Push bot branch
|
||||
if: ${{ always() }}
|
||||
env:
|
||||
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
source /tmp/anno.env
|
||||
source /tmp/anno.env || true
|
||||
[[ "${SKIP:-0}" != "1" ]] || exit 0
|
||||
|
||||
[[ "${APPLY_RC:-0}" == "0" ]] || { echo "ℹ️ apply failed -> skip push"; exit 0; }
|
||||
[[ "${NOOP:-0}" == "0" ]] || { echo "ℹ️ no-op -> skip push"; exit 0; }
|
||||
|
||||
AUTH_URL="$(node --input-type=module -e '
|
||||
@@ -235,12 +380,15 @@ jobs:
|
||||
git push -u origin "$BRANCH"
|
||||
|
||||
- name: Create PR + comment issue
|
||||
if: ${{ always() }}
|
||||
env:
|
||||
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
source /tmp/anno.env
|
||||
source /tmp/anno.env || true
|
||||
[[ "${SKIP:-0}" != "1" ]] || exit 0
|
||||
|
||||
[[ "${APPLY_RC:-0}" == "0" ]] || { echo "ℹ️ apply failed -> skip PR"; exit 0; }
|
||||
[[ "${NOOP:-0}" == "0" ]] || { echo "ℹ️ no-op -> skip PR"; exit 0; }
|
||||
|
||||
PR_TITLE="anno: apply ticket #${ISSUE_NUMBER}"
|
||||
@@ -271,4 +419,21 @@ jobs:
|
||||
-H "Authorization: token $FORGE_TOKEN" \
|
||||
-H "Content-Type: application/json" \
|
||||
"$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE_NUMBER/comments" \
|
||||
--data-binary "$C_PAYLOAD"
|
||||
--data-binary "$C_PAYLOAD"
|
||||
|
||||
echo "✅ PR: $PR_URL"
|
||||
|
||||
- name: Finalize (fail job if apply failed)
|
||||
if: ${{ always() }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
source /tmp/anno.env || true
|
||||
|
||||
[[ "${SKIP:-0}" != "1" ]] || { echo "ℹ️ skipped"; exit 0; }
|
||||
|
||||
RC="${APPLY_RC:-0}"
|
||||
if [[ "$RC" != "0" ]]; then
|
||||
echo "❌ apply failed (rc=$RC)"
|
||||
exit "$RC"
|
||||
fi
|
||||
echo "✅ apply ok"
|
||||
503
.gitea/workflows/deploy-staging-live.yml
Normal file
503
.gitea/workflows/deploy-staging-live.yml
Normal file
@@ -0,0 +1,503 @@
|
||||
name: Deploy staging+live (annotations)
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [main]
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
force:
|
||||
description: "Force FULL deploy (rebuild+restart) even if gate would hotpatch-only (1=yes, 0=no)"
|
||||
required: false
|
||||
default: "0"
|
||||
|
||||
env:
|
||||
NODE_OPTIONS: --dns-result-order=ipv4first
|
||||
DOCKER_API_VERSION: "1.43"
|
||||
COMPOSE_VERSION: "2.29.7"
|
||||
ASTRO_TELEMETRY_DISABLED: "1"
|
||||
|
||||
defaults:
|
||||
run:
|
||||
shell: bash
|
||||
|
||||
concurrency:
|
||||
group: deploy-staging-live-main
|
||||
cancel-in-progress: false
|
||||
|
||||
jobs:
|
||||
deploy:
|
||||
runs-on: ubuntu-latest
|
||||
container:
|
||||
image: mcr.microsoft.com/devcontainers/javascript-node:22-bookworm
|
||||
|
||||
steps:
|
||||
- name: Tools sanity
|
||||
run: |
|
||||
set -euo pipefail
|
||||
git --version
|
||||
node --version
|
||||
npm --version
|
||||
|
||||
- name: Checkout (push or workflow_dispatch, no external actions)
|
||||
env:
|
||||
EVENT_JSON: /var/run/act/workflow/event.json
|
||||
run: |
|
||||
set -euo pipefail
|
||||
test -f "$EVENT_JSON" || { echo "❌ Missing $EVENT_JSON"; exit 1; }
|
||||
|
||||
node --input-type=module <<'NODE'
|
||||
import fs from "node:fs";
|
||||
const ev = JSON.parse(fs.readFileSync(process.env.EVENT_JSON, "utf8"));
|
||||
const repoObj = ev?.repository || {};
|
||||
const cloneUrl =
|
||||
repoObj?.clone_url ||
|
||||
(repoObj?.html_url ? (repoObj.html_url.replace(/\/$/,"") + ".git") : "");
|
||||
if (!cloneUrl) throw new Error("No repository clone_url/html_url in event.json");
|
||||
|
||||
const defaultBranch = repoObj?.default_branch || "main";
|
||||
const sha =
|
||||
(process.env.GITHUB_SHA && String(process.env.GITHUB_SHA).trim()) ||
|
||||
ev?.after ||
|
||||
ev?.sha ||
|
||||
ev?.head_commit?.id ||
|
||||
ev?.pull_request?.head?.sha ||
|
||||
"";
|
||||
|
||||
const shq = (s) => "'" + String(s).replace(/'/g, "'\\''") + "'";
|
||||
fs.writeFileSync("/tmp/deploy.env", [
|
||||
`REPO_URL=${shq(cloneUrl)}`,
|
||||
`DEFAULT_BRANCH=${shq(defaultBranch)}`,
|
||||
`SHA=${shq(sha)}`
|
||||
].join("\n") + "\n");
|
||||
NODE
|
||||
|
||||
source /tmp/deploy.env
|
||||
echo "Repo URL: $REPO_URL"
|
||||
echo "Default branch: $DEFAULT_BRANCH"
|
||||
echo "SHA: ${SHA:-<empty>}"
|
||||
|
||||
rm -rf .git
|
||||
git init -q
|
||||
git remote add origin "$REPO_URL"
|
||||
|
||||
if [[ -n "${SHA:-}" ]]; then
|
||||
git fetch --depth 1 origin "$SHA"
|
||||
git -c advice.detachedHead=false checkout -q FETCH_HEAD
|
||||
else
|
||||
git fetch --depth 1 origin "$DEFAULT_BRANCH"
|
||||
git -c advice.detachedHead=false checkout -q "origin/$DEFAULT_BRANCH"
|
||||
SHA="$(git rev-parse HEAD)"
|
||||
echo "SHA='$SHA'" >> /tmp/deploy.env
|
||||
echo "Resolved SHA: $SHA"
|
||||
fi
|
||||
|
||||
git log -1 --oneline
|
||||
|
||||
- name: Gate — decide HOTPATCH vs FULL rebuild
|
||||
env:
|
||||
INPUT_FORCE: ${{ inputs.force }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
source /tmp/deploy.env
|
||||
|
||||
FORCE="${INPUT_FORCE:-0}"
|
||||
|
||||
# liste fichiers touchés (utile pour copier les médias)
|
||||
CHANGED="$(git show --name-only --pretty="" "$SHA" | sed '/^$/d' || true)"
|
||||
printf "%s\n" "$CHANGED" > /tmp/changed.txt
|
||||
|
||||
echo "== changed files =="
|
||||
echo "$CHANGED" | sed -n '1,260p'
|
||||
|
||||
if [[ "$FORCE" == "1" ]]; then
|
||||
echo "GO=1" >> /tmp/deploy.env
|
||||
echo "MODE='full'" >> /tmp/deploy.env
|
||||
echo "✅ force=1 -> MODE=full (rebuild+restart)"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
# Auto mode: uniquement annotations/media => hotpatch only
|
||||
if echo "$CHANGED" | grep -qE '^(src/annotations/|public/media/)'; then
|
||||
echo "GO=1" >> /tmp/deploy.env
|
||||
echo "MODE='hotpatch'" >> /tmp/deploy.env
|
||||
echo "✅ annotations/media change -> MODE=hotpatch"
|
||||
else
|
||||
echo "GO=0" >> /tmp/deploy.env
|
||||
echo "MODE='skip'" >> /tmp/deploy.env
|
||||
echo "ℹ️ no annotations/media change -> skip deploy"
|
||||
fi
|
||||
|
||||
- name: Install docker client + docker compose plugin (v2) + python yaml
|
||||
run: |
|
||||
set -euo pipefail
|
||||
source /tmp/deploy.env
|
||||
[[ "${GO:-0}" == "1" ]] || { echo "ℹ️ skipped"; exit 0; }
|
||||
|
||||
apt-get -o Acquire::Retries=5 -o Acquire::ForceIPv4=true update
|
||||
apt-get install -y --no-install-recommends ca-certificates curl docker.io python3 python3-yaml
|
||||
rm -rf /var/lib/apt/lists/*
|
||||
|
||||
mkdir -p /usr/local/lib/docker/cli-plugins
|
||||
curl -fsSL \
|
||||
"https://github.com/docker/compose/releases/download/v${COMPOSE_VERSION}/docker-compose-linux-x86_64" \
|
||||
-o /usr/local/lib/docker/cli-plugins/docker-compose
|
||||
chmod +x /usr/local/lib/docker/cli-plugins/docker-compose
|
||||
|
||||
docker version
|
||||
docker compose version
|
||||
python3 --version
|
||||
|
||||
# Reuse existing compose project name if containers already exist
|
||||
PROJ="$(docker inspect archicratie-web-blue --format '{{ index .Config.Labels "com.docker.compose.project" }}' 2>/dev/null || true)"
|
||||
if [[ -z "${PROJ:-}" ]]; then
|
||||
PROJ="$(docker inspect archicratie-web-green --format '{{ index .Config.Labels "com.docker.compose.project" }}' 2>/dev/null || true)"
|
||||
fi
|
||||
if [[ -z "${PROJ:-}" ]]; then PROJ="archicratie-web"; fi
|
||||
echo "COMPOSE_PROJECT_NAME='$PROJ'" >> /tmp/deploy.env
|
||||
echo "✅ Using COMPOSE_PROJECT_NAME=$PROJ"
|
||||
|
||||
# Assert target containers exist (hotpatch needs them)
|
||||
for c in archicratie-web-blue archicratie-web-green; do
|
||||
docker inspect "$c" >/dev/null 2>&1 || { echo "❌ missing container $c"; exit 5; }
|
||||
done
|
||||
|
||||
- name: Assert required vars (PUBLIC_GITEA_*) — only needed for MODE=full
|
||||
env:
|
||||
PUBLIC_GITEA_BASE: ${{ vars.PUBLIC_GITEA_BASE }}
|
||||
PUBLIC_GITEA_OWNER: ${{ vars.PUBLIC_GITEA_OWNER }}
|
||||
PUBLIC_GITEA_REPO: ${{ vars.PUBLIC_GITEA_REPO }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
source /tmp/deploy.env
|
||||
[[ "${GO:-0}" == "1" ]] || { echo "ℹ️ skipped"; exit 0; }
|
||||
[[ "${MODE:-hotpatch}" == "full" ]] || { echo "ℹ️ hotpatch mode -> vars not required"; exit 0; }
|
||||
|
||||
test -n "${PUBLIC_GITEA_BASE:-}" || { echo "❌ missing repo var PUBLIC_GITEA_BASE"; exit 2; }
|
||||
test -n "${PUBLIC_GITEA_OWNER:-}" || { echo "❌ missing repo var PUBLIC_GITEA_OWNER"; exit 2; }
|
||||
test -n "${PUBLIC_GITEA_REPO:-}" || { echo "❌ missing repo var PUBLIC_GITEA_REPO"; exit 2; }
|
||||
echo "✅ vars OK"
|
||||
|
||||
- name: Assert deploy files exist — only needed for MODE=full
|
||||
run: |
|
||||
set -euo pipefail
|
||||
source /tmp/deploy.env
|
||||
[[ "${GO:-0}" == "1" ]] || { echo "ℹ️ skipped"; exit 0; }
|
||||
[[ "${MODE:-hotpatch}" == "full" ]] || { echo "ℹ️ hotpatch mode -> files not required"; exit 0; }
|
||||
|
||||
test -f docker-compose.yml
|
||||
test -f Dockerfile
|
||||
test -f nginx.conf
|
||||
echo "✅ deploy files OK"
|
||||
|
||||
- name: FULL — Build + deploy staging (blue) then warmup+smoke
|
||||
env:
|
||||
PUBLIC_GITEA_BASE: ${{ vars.PUBLIC_GITEA_BASE }}
|
||||
PUBLIC_GITEA_OWNER: ${{ vars.PUBLIC_GITEA_OWNER }}
|
||||
PUBLIC_GITEA_REPO: ${{ vars.PUBLIC_GITEA_REPO }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
source /tmp/deploy.env
|
||||
[[ "${GO:-0}" == "1" ]] || { echo "ℹ️ skipped"; exit 0; }
|
||||
[[ "${MODE:-hotpatch}" == "full" ]] || { echo "ℹ️ MODE=$MODE -> skip full rebuild"; exit 0; }
|
||||
|
||||
PROJ="${COMPOSE_PROJECT_NAME:-archicratie-web}"
|
||||
|
||||
wait_url() {
|
||||
local url="$1"
|
||||
local label="$2"
|
||||
local tries="${3:-60}"
|
||||
for i in $(seq 1 "$tries"); do
|
||||
if curl -fsS --max-time 4 "$url" >/dev/null; then
|
||||
echo "✅ $label OK ($url)"
|
||||
return 0
|
||||
fi
|
||||
echo "… warmup $label ($i/$tries)"
|
||||
sleep 1
|
||||
done
|
||||
echo "❌ timeout $label ($url)"
|
||||
return 1
|
||||
}
|
||||
|
||||
TS="$(date -u +%Y%m%d-%H%M%S)"
|
||||
echo "TS='$TS'" >> /tmp/deploy.env
|
||||
docker image tag archicratie-web:blue "archicratie-web:blue.BAK.${TS}" || true
|
||||
docker image tag archicratie-web:green "archicratie-web:green.BAK.${TS}" || true
|
||||
|
||||
docker compose -p "$PROJ" -f docker-compose.yml build web_blue
|
||||
docker rm -f archicratie-web-blue || true
|
||||
docker compose -p "$PROJ" -f docker-compose.yml up -d --force-recreate --remove-orphans web_blue
|
||||
|
||||
# warmup endpoints
|
||||
wait_url "http://127.0.0.1:8081/para-index.json" "blue para-index"
|
||||
wait_url "http://127.0.0.1:8081/annotations-index.json" "blue annotations-index"
|
||||
wait_url "http://127.0.0.1:8081/pagefind/pagefind.js" "blue pagefind.js"
|
||||
|
||||
CANON="$(curl -fsS --max-time 6 "http://127.0.0.1:8081/archicrat-ia/chapitre-1/" | grep -oE 'rel="canonical" href="[^"]+"' | head -n1 || true)"
|
||||
echo "canonical(blue)=$CANON"
|
||||
echo "$CANON" | grep -q 'https://staging\.archicratie\.trans-hands\.synology\.me/' || {
|
||||
echo "❌ staging canonical mismatch"
|
||||
docker logs --tail 120 archicratie-web-blue || true
|
||||
exit 3
|
||||
}
|
||||
|
||||
echo "✅ staging OK"
|
||||
|
||||
- name: FULL — Build + deploy live (green) then warmup+smoke + rollback if needed
|
||||
env:
|
||||
PUBLIC_GITEA_BASE: ${{ vars.PUBLIC_GITEA_BASE }}
|
||||
PUBLIC_GITEA_OWNER: ${{ vars.PUBLIC_GITEA_OWNER }}
|
||||
PUBLIC_GITEA_REPO: ${{ vars.PUBLIC_GITEA_REPO }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
source /tmp/deploy.env
|
||||
[[ "${GO:-0}" == "1" ]] || { echo "ℹ️ skipped"; exit 0; }
|
||||
[[ "${MODE:-hotpatch}" == "full" ]] || { echo "ℹ️ MODE=$MODE -> skip full rebuild"; exit 0; }
|
||||
|
||||
PROJ="${COMPOSE_PROJECT_NAME:-archicratie-web}"
|
||||
TS="${TS:-$(date -u +%Y%m%d-%H%M%S)}"
|
||||
|
||||
wait_url() {
|
||||
local url="$1"
|
||||
local label="$2"
|
||||
local tries="${3:-60}"
|
||||
for i in $(seq 1 "$tries"); do
|
||||
if curl -fsS --max-time 4 "$url" >/dev/null; then
|
||||
echo "✅ $label OK ($url)"
|
||||
return 0
|
||||
fi
|
||||
echo "… warmup $label ($i/$tries)"
|
||||
sleep 1
|
||||
done
|
||||
echo "❌ timeout $label ($url)"
|
||||
return 1
|
||||
}
|
||||
|
||||
rollback() {
|
||||
echo "⚠️ rollback green -> previous image tag (best effort)"
|
||||
docker image tag "archicratie-web:green.BAK.${TS}" archicratie-web:green || true
|
||||
docker rm -f archicratie-web-green || true
|
||||
docker compose -p "$PROJ" -f docker-compose.yml up -d --force-recreate --remove-orphans web_green || true
|
||||
}
|
||||
|
||||
# build/restart green
|
||||
if ! docker compose -p "$PROJ" -f docker-compose.yml build web_green; then
|
||||
echo "❌ build green failed"; rollback; exit 4
|
||||
fi
|
||||
|
||||
docker rm -f archicratie-web-green || true
|
||||
docker compose -p "$PROJ" -f docker-compose.yml up -d --force-recreate --remove-orphans web_green
|
||||
|
||||
# warmup endpoints
|
||||
if ! wait_url "http://127.0.0.1:8082/para-index.json" "green para-index"; then rollback; exit 4; fi
|
||||
if ! wait_url "http://127.0.0.1:8082/annotations-index.json" "green annotations-index"; then rollback; exit 4; fi
|
||||
if ! wait_url "http://127.0.0.1:8082/pagefind/pagefind.js" "green pagefind.js"; then rollback; exit 4; fi
|
||||
|
||||
CANON="$(curl -fsS --max-time 6 "http://127.0.0.1:8082/archicrat-ia/chapitre-1/" | grep -oE 'rel="canonical" href="[^"]+"' | head -n1 || true)"
|
||||
echo "canonical(green)=$CANON"
|
||||
echo "$CANON" | grep -q 'https://archicratie\.trans-hands\.synology\.me/' || {
|
||||
echo "❌ live canonical mismatch"
|
||||
docker logs --tail 120 archicratie-web-green || true
|
||||
rollback
|
||||
exit 4
|
||||
}
|
||||
|
||||
echo "✅ live OK"
|
||||
|
||||
- name: HOTPATCH — deep merge shards -> annotations-index + copy changed media into blue+green
|
||||
run: |
|
||||
set -euo pipefail
|
||||
source /tmp/deploy.env
|
||||
[[ "${GO:-0}" == "1" ]] || { echo "ℹ️ skipped"; exit 0; }
|
||||
|
||||
python3 - <<'PY'
|
||||
import os, re, json, glob
|
||||
import yaml
|
||||
import datetime as dt
|
||||
|
||||
ROOT = os.getcwd()
|
||||
ANNO_ROOT = os.path.join(ROOT, "src", "annotations")
|
||||
|
||||
def is_obj(x): return isinstance(x, dict)
|
||||
def is_arr(x): return isinstance(x, list)
|
||||
|
||||
def iso_dt(x):
|
||||
if isinstance(x, dt.datetime):
|
||||
if x.tzinfo is None:
|
||||
return x.isoformat()
|
||||
return x.astimezone(dt.timezone.utc).isoformat().replace("+00:00","Z")
|
||||
if isinstance(x, dt.date):
|
||||
return x.isoformat()
|
||||
return None
|
||||
|
||||
def normalize(x):
|
||||
s = iso_dt(x)
|
||||
if s is not None: return s
|
||||
if isinstance(x, dict):
|
||||
return {str(k): normalize(v) for k, v in x.items()}
|
||||
if isinstance(x, list):
|
||||
return [normalize(v) for v in x]
|
||||
return x
|
||||
|
||||
def key_media(it): return str((it or {}).get("src",""))
|
||||
def key_ref(it):
|
||||
it = it or {}
|
||||
return "||".join([str(it.get("url","")), str(it.get("label","")), str(it.get("kind","")), str(it.get("citation",""))])
|
||||
def key_comment(it): return str((it or {}).get("text","")).strip()
|
||||
|
||||
def dedup_extend(dst_list, src_list, key_fn):
|
||||
seen = set(); out = []
|
||||
for x in (dst_list or []):
|
||||
x = normalize(x); k = key_fn(x)
|
||||
if k and k not in seen: seen.add(k); out.append(x)
|
||||
for x in (src_list or []):
|
||||
x = normalize(x); k = key_fn(x)
|
||||
if k and k not in seen: seen.add(k); out.append(x)
|
||||
return out
|
||||
|
||||
def deep_merge(dst, src):
|
||||
src = normalize(src)
|
||||
for k, v in (src or {}).items():
|
||||
if k in ("media","refs","comments_editorial") and is_arr(v):
|
||||
if k == "media": dst[k] = dedup_extend(dst.get(k, []), v, key_media)
|
||||
elif k == "refs": dst[k] = dedup_extend(dst.get(k, []), v, key_ref)
|
||||
else: dst[k] = dedup_extend(dst.get(k, []), v, key_comment)
|
||||
continue
|
||||
|
||||
if is_obj(v):
|
||||
if not is_obj(dst.get(k)): dst[k] = {}
|
||||
deep_merge(dst[k], v)
|
||||
continue
|
||||
|
||||
if is_arr(v):
|
||||
cur = dst.get(k, [])
|
||||
if not is_arr(cur): cur = []
|
||||
seen = set(); out = []
|
||||
for x in cur:
|
||||
x = normalize(x)
|
||||
s = json.dumps(x, sort_keys=True, ensure_ascii=False)
|
||||
if s not in seen: seen.add(s); out.append(x)
|
||||
for x in v:
|
||||
x = normalize(x)
|
||||
s = json.dumps(x, sort_keys=True, ensure_ascii=False)
|
||||
if s not in seen: seen.add(s); out.append(x)
|
||||
dst[k] = out
|
||||
continue
|
||||
|
||||
v = normalize(v)
|
||||
if k not in dst or dst.get(k) in (None, ""):
|
||||
dst[k] = v
|
||||
|
||||
def para_num(pid):
|
||||
m = re.match(r"^p-(\d+)-", str(pid))
|
||||
return int(m.group(1)) if m else 10**9
|
||||
|
||||
def sort_lists(entry):
|
||||
for k in ("media","refs","comments_editorial"):
|
||||
arr = entry.get(k)
|
||||
if not is_arr(arr): continue
|
||||
def ts(x):
|
||||
x = normalize(x)
|
||||
try:
|
||||
s = str((x or {}).get("ts",""))
|
||||
return dt.datetime.fromisoformat(s.replace("Z","+00:00")).timestamp() if s else 0
|
||||
except Exception:
|
||||
return 0
|
||||
arr = [normalize(x) for x in arr]
|
||||
arr.sort(key=lambda x: (ts(x), json.dumps(x, sort_keys=True, ensure_ascii=False)))
|
||||
entry[k] = arr
|
||||
|
||||
if not os.path.isdir(ANNO_ROOT):
|
||||
raise SystemExit(f"Missing annotations root: {ANNO_ROOT}")
|
||||
|
||||
pages = {}
|
||||
errors = []
|
||||
|
||||
files = sorted(glob.glob(os.path.join(ANNO_ROOT, "**", "*.yml"), recursive=True))
|
||||
for fp in files:
|
||||
try:
|
||||
with open(fp, "r", encoding="utf-8") as f:
|
||||
doc = yaml.safe_load(f) or {}
|
||||
doc = normalize(doc)
|
||||
if not isinstance(doc, dict) or doc.get("schema") != 1:
|
||||
continue
|
||||
|
||||
page = str(doc.get("page","")).strip().strip("/")
|
||||
paras = doc.get("paras") or {}
|
||||
if not page or not isinstance(paras, dict):
|
||||
continue
|
||||
|
||||
pg = pages.setdefault(page, {"paras": {}})
|
||||
for pid, entry in paras.items():
|
||||
pid = str(pid)
|
||||
if pid not in pg["paras"] or not isinstance(pg["paras"].get(pid), dict):
|
||||
pg["paras"][pid] = {}
|
||||
if isinstance(entry, dict):
|
||||
deep_merge(pg["paras"][pid], entry)
|
||||
sort_lists(pg["paras"][pid])
|
||||
|
||||
except Exception as e:
|
||||
errors.append({"file": os.path.relpath(fp, ROOT), "error": str(e)})
|
||||
|
||||
for page, obj in pages.items():
|
||||
keys = list((obj.get("paras") or {}).keys())
|
||||
keys.sort(key=lambda k: (para_num(k), k))
|
||||
obj["paras"] = {k: obj["paras"][k] for k in keys}
|
||||
|
||||
out = {
|
||||
"schema": 1,
|
||||
"generatedAt": dt.datetime.utcnow().replace(tzinfo=dt.timezone.utc).isoformat().replace("+00:00","Z"),
|
||||
"pages": pages,
|
||||
"stats": {
|
||||
"pages": len(pages),
|
||||
"paras": sum(len(v.get("paras") or {}) for v in pages.values()),
|
||||
"errors": len(errors),
|
||||
},
|
||||
"errors": errors,
|
||||
}
|
||||
|
||||
with open("/tmp/annotations-index.json", "w", encoding="utf-8") as f:
|
||||
json.dump(out, f, ensure_ascii=False)
|
||||
|
||||
print("OK: wrote /tmp/annotations-index.json pages=", out["stats"]["pages"], "paras=", out["stats"]["paras"], "errors=", out["stats"]["errors"])
|
||||
PY
|
||||
|
||||
# patch JSON into running containers
|
||||
for c in archicratie-web-blue archicratie-web-green; do
|
||||
echo "== patch annotations-index.json into $c =="
|
||||
docker cp /tmp/annotations-index.json "${c}:/usr/share/nginx/html/annotations-index.json"
|
||||
done
|
||||
|
||||
# copy changed media files into containers (so new media appears without rebuild)
|
||||
if [[ -s /tmp/changed.txt ]]; then
|
||||
while IFS= read -r f; do
|
||||
[[ -n "$f" ]] || continue
|
||||
if [[ "$f" == public/media/* ]]; then
|
||||
dest="/usr/share/nginx/html/${f#public/}" # => /usr/share/nginx/html/media/...
|
||||
for c in archicratie-web-blue archicratie-web-green; do
|
||||
echo "== copy media into $c: $f -> $dest =="
|
||||
docker exec "$c" sh -lc "mkdir -p \"$(dirname "$dest")\""
|
||||
docker cp "$f" "$c:$dest"
|
||||
done
|
||||
fi
|
||||
done < /tmp/changed.txt
|
||||
fi
|
||||
|
||||
# smoke after patch
|
||||
for p in 8081 8082; do
|
||||
echo "== smoke annotations-index on $p =="
|
||||
curl -fsS --max-time 6 "http://127.0.0.1:${p}/annotations-index.json" \
|
||||
| python3 -c 'import sys,json; j=json.load(sys.stdin); print("generatedAt:", j.get("generatedAt")); print("pages:", len(j.get("pages") or {})); print("paras:", j.get("stats",{}).get("paras"))'
|
||||
done
|
||||
|
||||
echo "✅ hotpatch done"
|
||||
|
||||
- name: Debug on failure (containers status/logs)
|
||||
if: ${{ failure() }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
echo "== docker ps =="
|
||||
docker ps --format 'table {{.Names}}\t{{.Status}}\t{{.Image}}' | sed -n '1,80p' || true
|
||||
for c in archicratie-web-blue archicratie-web-green; do
|
||||
echo "== logs $c (tail 200) =="
|
||||
docker logs --tail 200 "$c" || true
|
||||
done
|
||||
Binary file not shown.
|
After Width: | Height: | Size: 61 KiB |
Binary file not shown.
|
After Width: | Height: | Size: 61 KiB |
Binary file not shown.
|
After Width: | Height: | Size: 816 KiB |
Binary file not shown.
|
After Width: | Height: | Size: 822 KiB |
Binary file not shown.
|
After Width: | Height: | Size: 822 KiB |
@@ -1,9 +1,18 @@
|
||||
#!/usr/bin/env node
|
||||
// scripts/apply-annotation-ticket.mjs
|
||||
// Applique un ticket Gitea "type/media | type/reference | type/comment" vers src/annotations + public/media
|
||||
// Robuste, idempotent, non destructif
|
||||
//
|
||||
// DRY RUN par défaut si --dry-run
|
||||
// Applique un ticket Gitea "type/media | type/reference | type/comment" vers:
|
||||
//
|
||||
// ✅ src/annotations/<oeuvre>/<chapitre>/<paraId>.yml (sharding par paragraphe)
|
||||
// ✅ public/media/<oeuvre>/<chapitre>/<paraId>/<file>
|
||||
//
|
||||
// Compat rétro : lit (si présent) l'ancien monolithe:
|
||||
// src/annotations/<oeuvre>/<chapitre>.yml
|
||||
// et deep-merge NON destructif dans le shard lors d'une nouvelle application,
|
||||
// pour permettre une migration progressive sans perte.
|
||||
//
|
||||
// Robuste, idempotent, non destructif.
|
||||
// DRY RUN si --dry-run
|
||||
// Options: --dry-run --no-download --verify --strict --commit --close
|
||||
//
|
||||
// Env requis:
|
||||
@@ -36,7 +45,7 @@ import YAML from "yaml";
|
||||
|
||||
function usage(exitCode = 0) {
|
||||
console.log(`
|
||||
apply-annotation-ticket — applique un ticket SidePanel (media/ref/comment) vers src/annotations/
|
||||
apply-annotation-ticket — applique un ticket SidePanel (media/ref/comment) vers src/annotations/ (shard par paragraphe)
|
||||
|
||||
Usage:
|
||||
node scripts/apply-annotation-ticket.mjs <issue_number> [--dry-run] [--no-download] [--verify] [--strict] [--commit] [--close]
|
||||
@@ -44,9 +53,9 @@ Usage:
|
||||
Flags:
|
||||
--dry-run : n'écrit rien (affiche un aperçu)
|
||||
--no-download : n'essaie pas de télécharger les pièces jointes (media)
|
||||
--verify : tente de vérifier que (page, ancre) existent (baseline/dist si dispo)
|
||||
--strict : refuse si URL ref invalide (http/https) OU caption media vide
|
||||
--commit : git add + git commit (le script commit dans la branche courante)
|
||||
--verify : vérifie que (page, ancre) existent (dist/para-index.json si dispo, sinon baseline)
|
||||
--strict : refuse si URL ref invalide (http/https) OU caption media vide OU verify impossible
|
||||
--commit : git add + git commit (commit dans la branche courante)
|
||||
--close : ferme le ticket (nécessite --commit)
|
||||
|
||||
Env requis:
|
||||
@@ -57,7 +66,7 @@ Env optionnel:
|
||||
GITEA_OWNER / GITEA_REPO (sinon auto-détecté via git remote)
|
||||
ANNO_DIR (défaut: src/annotations)
|
||||
PUBLIC_DIR (défaut: public)
|
||||
MEDIA_ROOT (défaut URL: /media) -> écrit dans public/media/...
|
||||
MEDIA_ROOT (défaut URL: /media)
|
||||
|
||||
Exit codes:
|
||||
0 ok
|
||||
@@ -102,6 +111,8 @@ const ANNO_DIR = path.join(CWD, process.env.ANNO_DIR || "src", "annotations");
|
||||
const PUBLIC_DIR = path.join(CWD, process.env.PUBLIC_DIR || "public");
|
||||
const MEDIA_URL_ROOT = String(process.env.MEDIA_ROOT || "/media").replace(/\/+$/, "");
|
||||
|
||||
/* --------------------------------- helpers -------------------------------- */
|
||||
|
||||
function getEnv(name, fallback = "") {
|
||||
return (process.env[name] ?? fallback).trim();
|
||||
}
|
||||
@@ -123,7 +134,12 @@ function runQuiet(cmd, args, opts = {}) {
|
||||
}
|
||||
|
||||
async function exists(p) {
|
||||
try { await fs.access(p); return true; } catch { return false; }
|
||||
try {
|
||||
await fs.access(p);
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
function inferOwnerRepoFromGit() {
|
||||
@@ -140,6 +156,371 @@ function gitHasStagedChanges() {
|
||||
return r.status === 1;
|
||||
}
|
||||
|
||||
function escapeRegExp(s) {
|
||||
return String(s).replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
|
||||
}
|
||||
|
||||
function pickLine(body, key) {
|
||||
const re = new RegExp(`^\\s*${escapeRegExp(key)}\\s*:\\s*([^\\n\\r]+)`, "mi");
|
||||
const m = String(body || "").match(re);
|
||||
return m ? m[1].trim() : "";
|
||||
}
|
||||
|
||||
function pickSection(body, markers) {
|
||||
const text = String(body || "").replace(/\r\n/g, "\n");
|
||||
const idx = markers
|
||||
.map((m) => ({ m, i: text.toLowerCase().indexOf(m.toLowerCase()) }))
|
||||
.filter((x) => x.i >= 0)
|
||||
.sort((a, b) => a.i - b.i)[0];
|
||||
if (!idx) return "";
|
||||
|
||||
const start = idx.i + idx.m.length;
|
||||
const tail = text.slice(start);
|
||||
|
||||
const stops = ["\n## ", "\n---", "\nJustification", "\nProposition", "\nSources"];
|
||||
let end = tail.length;
|
||||
for (const s of stops) {
|
||||
const j = tail.toLowerCase().indexOf(s.toLowerCase());
|
||||
if (j >= 0 && j < end) end = j;
|
||||
}
|
||||
return tail.slice(0, end).trim();
|
||||
}
|
||||
|
||||
function normalizeChemin(chemin) {
|
||||
let c = String(chemin || "").trim();
|
||||
if (!c) return "";
|
||||
if (!c.startsWith("/")) c = "/" + c;
|
||||
if (!c.endsWith("/")) c = c + "/";
|
||||
c = c.replace(/\/{2,}/g, "/");
|
||||
return c;
|
||||
}
|
||||
|
||||
function normalizePageKeyFromChemin(chemin) {
|
||||
// ex: /archicrat-ia/chapitre-4/ => archicrat-ia/chapitre-4
|
||||
return normalizeChemin(chemin).replace(/^\/+|\/+$/g, "");
|
||||
}
|
||||
|
||||
function normalizeAnchorId(s) {
|
||||
let a = String(s || "").trim();
|
||||
if (a.startsWith("#")) a = a.slice(1);
|
||||
return a;
|
||||
}
|
||||
|
||||
function assert(cond, msg, code = 1) {
|
||||
if (!cond) {
|
||||
const e = new Error(msg);
|
||||
e.__exitCode = code;
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
function isPlainObject(x) {
|
||||
return !!x && typeof x === "object" && !Array.isArray(x);
|
||||
}
|
||||
|
||||
function paraIndexFromId(id) {
|
||||
const m = String(id).match(/^p-(\d+)-/i);
|
||||
return m ? Number(m[1]) : Number.NaN;
|
||||
}
|
||||
|
||||
function isHttpUrl(u) {
|
||||
try {
|
||||
const x = new URL(String(u));
|
||||
return x.protocol === "http:" || x.protocol === "https:";
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
function stableSortByTs(arr) {
|
||||
if (!Array.isArray(arr)) return;
|
||||
arr.sort((a, b) => {
|
||||
const ta = Date.parse(a?.ts || "") || 0;
|
||||
const tb = Date.parse(b?.ts || "") || 0;
|
||||
if (ta !== tb) return ta - tb;
|
||||
return JSON.stringify(a).localeCompare(JSON.stringify(b));
|
||||
});
|
||||
}
|
||||
|
||||
function normPage(s) {
|
||||
let x = String(s || "").trim();
|
||||
if (!x) return "";
|
||||
// retire origin si on a une URL complète
|
||||
x = x.replace(/^https?:\/\/[^/]+/i, "");
|
||||
// enlève query/hash
|
||||
x = x.split("#")[0].split("?")[0];
|
||||
// enlève index.html
|
||||
x = x.replace(/index\.html$/i, "");
|
||||
// enlève slashs de bord
|
||||
x = x.replace(/^\/+/, "").replace(/\/+$/, "");
|
||||
return x;
|
||||
}
|
||||
|
||||
/* ------------------------------ para-index (verify + order) ------------------------------ */
|
||||
|
||||
async function loadParaOrderFromDist(pageKey) {
|
||||
const distIdx = path.join(CWD, "dist", "para-index.json");
|
||||
if (!(await exists(distIdx))) return null;
|
||||
|
||||
let j;
|
||||
try {
|
||||
j = JSON.parse(await fs.readFile(distIdx, "utf8"));
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
|
||||
const want = normPage(pageKey);
|
||||
|
||||
// Support A) { items:[{id,page,...}, ...] } (ou variantes)
|
||||
const items = Array.isArray(j?.items)
|
||||
? j.items
|
||||
: Array.isArray(j?.index?.items)
|
||||
? j.index.items
|
||||
: null;
|
||||
|
||||
if (items) {
|
||||
const ids = [];
|
||||
for (const it of items) {
|
||||
// page peut être dans plein de clés différentes
|
||||
const pageCand = normPage(
|
||||
it?.page ??
|
||||
it?.pageKey ??
|
||||
it?.path ??
|
||||
it?.route ??
|
||||
it?.href ??
|
||||
it?.url ??
|
||||
""
|
||||
);
|
||||
|
||||
// id peut être dans plein de clés différentes
|
||||
let id = String(it?.id ?? it?.paraId ?? it?.anchorId ?? it?.anchor ?? "");
|
||||
if (id.startsWith("#")) id = id.slice(1);
|
||||
|
||||
if (pageCand === want && id) ids.push(id);
|
||||
}
|
||||
if (ids.length) return ids;
|
||||
}
|
||||
|
||||
// Support B) { byId: { "p-...": { page:"...", ... }, ... } }
|
||||
if (j?.byId && typeof j.byId === "object") {
|
||||
const ids = Object.keys(j.byId)
|
||||
.filter((id) => {
|
||||
const meta = j.byId[id] || {};
|
||||
const pageCand = normPage(meta.page ?? meta.pageKey ?? meta.path ?? meta.route ?? meta.url ?? "");
|
||||
return pageCand === want;
|
||||
});
|
||||
|
||||
if (ids.length) {
|
||||
ids.sort((a, b) => {
|
||||
const ia = paraIndexFromId(a);
|
||||
const ib = paraIndexFromId(b);
|
||||
if (Number.isFinite(ia) && Number.isFinite(ib) && ia !== ib) return ia - ib;
|
||||
return String(a).localeCompare(String(b));
|
||||
});
|
||||
return ids;
|
||||
}
|
||||
}
|
||||
|
||||
// Support C) { pages: { "archicrat-ia/chapitre-4": { ids:[...] } } } (ou variantes)
|
||||
if (j?.pages && typeof j.pages === "object") {
|
||||
// essaie de trouver la bonne clé même si elle est /.../ ou .../index.html
|
||||
const keys = Object.keys(j.pages);
|
||||
const hit = keys.find((k) => normPage(k) === want);
|
||||
if (hit) {
|
||||
const pg = j.pages[hit];
|
||||
if (Array.isArray(pg?.ids)) return pg.ids.map(String);
|
||||
if (Array.isArray(pg?.paras)) return pg.paras.map(String);
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
async function tryVerifyAnchor(pageKey, anchorId) {
|
||||
// 1) dist/para-index.json : order complet si possible
|
||||
const order = await loadParaOrderFromDist(pageKey);
|
||||
if (order) return order.includes(anchorId);
|
||||
|
||||
// 1bis) dist/para-index.json : fallback “best effort” => recherche brute (IDs quasi uniques)
|
||||
const distIdx = path.join(CWD, "dist", "para-index.json");
|
||||
if (await exists(distIdx)) {
|
||||
try {
|
||||
const raw = await fs.readFile(distIdx, "utf8");
|
||||
if (raw.includes(`"${anchorId}"`) || raw.includes(`"#${anchorId}"`)) {
|
||||
return true;
|
||||
}
|
||||
} catch {
|
||||
// ignore
|
||||
}
|
||||
}
|
||||
|
||||
// 2) tests/anchors-baseline.json (fallback)
|
||||
const base = path.join(CWD, "tests", "anchors-baseline.json");
|
||||
if (await exists(base)) {
|
||||
try {
|
||||
const j = JSON.parse(await fs.readFile(base, "utf8"));
|
||||
const candidates = [];
|
||||
if (j?.pages && typeof j.pages === "object") {
|
||||
for (const [k, v] of Object.entries(j.pages)) {
|
||||
if (!Array.isArray(v)) continue;
|
||||
if (normPage(k).includes(normPage(pageKey))) candidates.push(...v);
|
||||
}
|
||||
}
|
||||
if (Array.isArray(j?.entries)) {
|
||||
for (const it of j.entries) {
|
||||
const p = String(it?.page || "");
|
||||
const ids = it?.ids;
|
||||
if (Array.isArray(ids) && normPage(p).includes(normPage(pageKey))) candidates.push(...ids);
|
||||
}
|
||||
}
|
||||
if (candidates.length) return candidates.some((x) => String(x) === anchorId);
|
||||
} catch {
|
||||
// ignore
|
||||
}
|
||||
}
|
||||
|
||||
return null; // cannot verify
|
||||
}
|
||||
|
||||
/* ----------------------------- deep merge helpers (non destructive) ----------------------------- */
|
||||
|
||||
function keyMedia(x) {
|
||||
return String(x?.src || "");
|
||||
}
|
||||
function keyRef(x) {
|
||||
return `${x?.url || ""}||${x?.label || ""}||${x?.kind || ""}||${x?.citation || ""}`;
|
||||
}
|
||||
function keyComment(x) {
|
||||
return String(x?.text || "").trim();
|
||||
}
|
||||
|
||||
function uniqUnion(dstArr, srcArr, keyFn) {
|
||||
const out = Array.isArray(dstArr) ? [...dstArr] : [];
|
||||
const seen = new Set(out.map((x) => keyFn(x)));
|
||||
for (const it of (Array.isArray(srcArr) ? srcArr : [])) {
|
||||
const k = keyFn(it);
|
||||
if (!k) continue;
|
||||
if (!seen.has(k)) {
|
||||
seen.add(k);
|
||||
out.push(it);
|
||||
}
|
||||
}
|
||||
return out;
|
||||
}
|
||||
|
||||
function deepMergeEntry(dst, src) {
|
||||
if (!isPlainObject(dst) || !isPlainObject(src)) return;
|
||||
|
||||
for (const [k, v] of Object.entries(src)) {
|
||||
if (k === "media" && Array.isArray(v)) {
|
||||
dst.media = uniqUnion(dst.media, v, keyMedia);
|
||||
continue;
|
||||
}
|
||||
if (k === "refs" && Array.isArray(v)) {
|
||||
dst.refs = uniqUnion(dst.refs, v, keyRef);
|
||||
continue;
|
||||
}
|
||||
if (k === "comments_editorial" && Array.isArray(v)) {
|
||||
dst.comments_editorial = uniqUnion(dst.comments_editorial, v, keyComment);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (isPlainObject(v)) {
|
||||
if (!isPlainObject(dst[k])) dst[k] = {};
|
||||
deepMergeEntry(dst[k], v);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (Array.isArray(v)) {
|
||||
const cur = Array.isArray(dst[k]) ? dst[k] : [];
|
||||
const seen = new Set(cur.map((x) => JSON.stringify(x)));
|
||||
const out = [...cur];
|
||||
for (const it of v) {
|
||||
const s = JSON.stringify(it);
|
||||
if (!seen.has(s)) {
|
||||
seen.add(s);
|
||||
out.push(it);
|
||||
}
|
||||
}
|
||||
dst[k] = out;
|
||||
continue;
|
||||
}
|
||||
|
||||
// scalar: set only if missing/empty
|
||||
if (!(k in dst) || dst[k] == null || dst[k] === "") {
|
||||
dst[k] = v;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/* ----------------------------- annotations I/O ----------------------------- */
|
||||
|
||||
async function loadAnnoDocYaml(fileAbs, pageKey) {
|
||||
if (!(await exists(fileAbs))) {
|
||||
return { schema: 1, page: pageKey, paras: {} };
|
||||
}
|
||||
|
||||
const raw = await fs.readFile(fileAbs, "utf8");
|
||||
let doc;
|
||||
try {
|
||||
doc = YAML.parse(raw);
|
||||
} catch (e) {
|
||||
throw new Error(`${path.relative(CWD, fileAbs)}: parse failed: ${String(e?.message ?? e)}`);
|
||||
}
|
||||
|
||||
assert(isPlainObject(doc), `${path.relative(CWD, fileAbs)}: doc must be an object`, 2);
|
||||
assert(doc.schema === 1, `${path.relative(CWD, fileAbs)}: schema must be 1`, 2);
|
||||
assert(isPlainObject(doc.paras), `${path.relative(CWD, fileAbs)}: missing object key "paras"`, 2);
|
||||
|
||||
if (doc.page != null) {
|
||||
const got = String(doc.page).replace(/^\/+/, "").replace(/\/+$/, "");
|
||||
assert(got === pageKey, `${path.relative(CWD, fileAbs)}: page mismatch (page="${doc.page}" vs path="${pageKey}")`, 2);
|
||||
} else {
|
||||
doc.page = pageKey;
|
||||
}
|
||||
|
||||
return doc;
|
||||
}
|
||||
|
||||
function sortParasObject(paras, order) {
|
||||
const keys = Object.keys(paras || {});
|
||||
const idx = new Map();
|
||||
if (Array.isArray(order)) order.forEach((id, i) => idx.set(String(id), i));
|
||||
|
||||
keys.sort((a, b) => {
|
||||
const ha = idx.has(a);
|
||||
const hb = idx.has(b);
|
||||
if (ha && hb) return idx.get(a) - idx.get(b);
|
||||
if (ha && !hb) return -1;
|
||||
if (!ha && hb) return 1;
|
||||
|
||||
const ia = paraIndexFromId(a);
|
||||
const ib = paraIndexFromId(b);
|
||||
if (Number.isFinite(ia) && Number.isFinite(ib) && ia !== ib) return ia - ib;
|
||||
return String(a).localeCompare(String(b));
|
||||
});
|
||||
|
||||
const out = {};
|
||||
for (const k of keys) out[k] = paras[k];
|
||||
return out;
|
||||
}
|
||||
|
||||
async function saveAnnoDocYaml(fileAbs, doc, order = null) {
|
||||
await fs.mkdir(path.dirname(fileAbs), { recursive: true });
|
||||
|
||||
doc.paras = sortParasObject(doc.paras, order);
|
||||
|
||||
for (const e of Object.values(doc.paras || {})) {
|
||||
if (!isPlainObject(e)) continue;
|
||||
stableSortByTs(e.media);
|
||||
stableSortByTs(e.refs);
|
||||
stableSortByTs(e.comments_editorial);
|
||||
}
|
||||
|
||||
const out = YAML.stringify(doc);
|
||||
await fs.writeFile(fileAbs, out, "utf8");
|
||||
}
|
||||
|
||||
/* ------------------------------ gitea helpers ------------------------------ */
|
||||
|
||||
function apiBaseNorm(forgeApiBase) {
|
||||
@@ -167,7 +548,7 @@ async function fetchIssue({ forgeApiBase, owner, repo, token, issueNum }) {
|
||||
}
|
||||
|
||||
async function fetchIssueAssets({ forgeApiBase, owner, repo, token, issueNum }) {
|
||||
// ✅ Gitea: /issues/{index}/assets
|
||||
// Gitea: /issues/{index}/assets
|
||||
const url = `${apiBaseNorm(forgeApiBase)}/api/v1/repos/${owner}/${repo}/issues/${issueNum}/assets`;
|
||||
try {
|
||||
const json = await giteaGET(url, token);
|
||||
@@ -215,200 +596,43 @@ async function closeIssue({ forgeApiBase, owner, repo, token, issueNum, comment
|
||||
}
|
||||
}
|
||||
|
||||
/* ------------------------------ parsing helpers ---------------------------- */
|
||||
/* ------------------------------ media helpers ------------------------------ */
|
||||
|
||||
function escapeRegExp(s) {
|
||||
return String(s).replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
|
||||
function inferMediaTypeFromFilename(name) {
|
||||
const n = String(name || "").toLowerCase();
|
||||
if (/\.(png|jpe?g|webp|gif|svg)$/.test(n)) return "image";
|
||||
if (/\.(mp4|webm|mov|m4v)$/.test(n)) return "video";
|
||||
if (/\.(mp3|wav|ogg|m4a)$/.test(n)) return "audio";
|
||||
return "link";
|
||||
}
|
||||
|
||||
function pickLine(body, key) {
|
||||
const re = new RegExp(`^\\s*${escapeRegExp(key)}\\s*:\\s*([^\\n\\r]+)`, "mi");
|
||||
const m = String(body || "").match(re);
|
||||
return m ? m[1].trim() : "";
|
||||
function sanitizeFilename(name) {
|
||||
return String(name || "file")
|
||||
.replace(/[\/\\]/g, "_")
|
||||
.replace(/[^\w.\-]+/g, "_")
|
||||
.replace(/_+/g, "_")
|
||||
.slice(0, 180);
|
||||
}
|
||||
|
||||
function pickSection(body, markers) {
|
||||
const text = String(body || "").replace(/\r\n/g, "\n");
|
||||
const idx = markers
|
||||
.map((m) => ({ m, i: text.toLowerCase().indexOf(m.toLowerCase()) }))
|
||||
.filter((x) => x.i >= 0)
|
||||
.sort((a, b) => a.i - b.i)[0];
|
||||
if (!idx) return "";
|
||||
|
||||
const start = idx.i + idx.m.length;
|
||||
const tail = text.slice(start);
|
||||
|
||||
const stops = [
|
||||
"\n## ",
|
||||
"\n---",
|
||||
"\nJustification",
|
||||
"\nProposition",
|
||||
"\nSources",
|
||||
];
|
||||
let end = tail.length;
|
||||
for (const s of stops) {
|
||||
const j = tail.toLowerCase().indexOf(s.toLowerCase());
|
||||
if (j >= 0 && j < end) end = j;
|
||||
}
|
||||
return tail.slice(0, end).trim();
|
||||
}
|
||||
|
||||
function normalizeChemin(chemin) {
|
||||
let c = String(chemin || "").trim();
|
||||
if (!c) return "";
|
||||
if (!c.startsWith("/")) c = "/" + c;
|
||||
if (!c.endsWith("/")) c = c + "/";
|
||||
c = c.replace(/\/{2,}/g, "/");
|
||||
return c;
|
||||
}
|
||||
|
||||
function normalizePageKeyFromChemin(chemin) {
|
||||
return normalizeChemin(chemin).replace(/^\/+|\/+$/g, "");
|
||||
}
|
||||
|
||||
function normalizeAnchorId(s) {
|
||||
let a = String(s || "").trim();
|
||||
if (a.startsWith("#")) a = a.slice(1);
|
||||
return a;
|
||||
}
|
||||
|
||||
function assert(cond, msg, code = 1) {
|
||||
if (!cond) {
|
||||
const e = new Error(msg);
|
||||
e.__exitCode = code;
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
function isPlainObject(x) {
|
||||
return !!x && typeof x === "object" && !Array.isArray(x);
|
||||
}
|
||||
|
||||
/* ----------------------------- verify helpers ------------------------------ */
|
||||
|
||||
function paraIndexFromId(id) {
|
||||
const m = String(id).match(/^p-(\d+)-/i);
|
||||
return m ? Number(m[1]) : Number.NaN;
|
||||
}
|
||||
|
||||
async function tryVerifyAnchor(pageKey, anchorId) {
|
||||
// 1) dist/para-index.json (si build déjà faite)
|
||||
const distIdx = path.join(CWD, "dist", "para-index.json");
|
||||
if (await exists(distIdx)) {
|
||||
const raw = await fs.readFile(distIdx, "utf8");
|
||||
const idx = JSON.parse(raw);
|
||||
const byId = idx?.byId;
|
||||
if (byId && typeof byId === "object" && byId[anchorId] != null) return true;
|
||||
}
|
||||
|
||||
// 2) tests/anchors-baseline.json (si dispo)
|
||||
const base = path.join(CWD, "tests", "anchors-baseline.json");
|
||||
if (await exists(base)) {
|
||||
const raw = await fs.readFile(base, "utf8");
|
||||
const j = JSON.parse(raw);
|
||||
|
||||
// tolérant: cherche un array d'ids associé à la page
|
||||
const candidates = [];
|
||||
|
||||
// cas 1: j.pages[...]
|
||||
if (j?.pages && typeof j.pages === "object") {
|
||||
for (const [k, v] of Object.entries(j.pages)) {
|
||||
if (!Array.isArray(v)) continue;
|
||||
// on matche large: pageKey inclus dans le path
|
||||
if (String(k).includes(pageKey)) candidates.push(...v);
|
||||
}
|
||||
}
|
||||
|
||||
// cas 2: j.entries = [{page, ids}]
|
||||
if (Array.isArray(j?.entries)) {
|
||||
for (const it of j.entries) {
|
||||
const p = String(it?.page || "");
|
||||
const ids = it?.ids;
|
||||
if (Array.isArray(ids) && p.includes(pageKey)) candidates.push(...ids);
|
||||
}
|
||||
}
|
||||
|
||||
if (candidates.length) {
|
||||
return candidates.some((x) => String(x) === anchorId);
|
||||
}
|
||||
}
|
||||
|
||||
// impossible à vérifier
|
||||
return null;
|
||||
}
|
||||
|
||||
/* ----------------------------- annotations I/O ----------------------------- */
|
||||
|
||||
async function loadAnnoDoc(fileAbs, pageKey) {
|
||||
if (!(await exists(fileAbs))) {
|
||||
return { schema: 1, page: pageKey, paras: {} };
|
||||
}
|
||||
|
||||
const raw = await fs.readFile(fileAbs, "utf8");
|
||||
let doc;
|
||||
try {
|
||||
doc = YAML.parse(raw);
|
||||
} catch (e) {
|
||||
throw new Error(`${path.relative(CWD, fileAbs)}: parse failed: ${String(e?.message ?? e)}`);
|
||||
}
|
||||
|
||||
assert(isPlainObject(doc), `${path.relative(CWD, fileAbs)}: doc must be an object`);
|
||||
assert(doc.schema === 1, `${path.relative(CWD, fileAbs)}: schema must be 1`);
|
||||
assert(isPlainObject(doc.paras), `${path.relative(CWD, fileAbs)}: missing object key "paras"`);
|
||||
|
||||
if (doc.page != null) {
|
||||
const got = String(doc.page).replace(/^\/+/, "").replace(/\/+$/, "");
|
||||
assert(got === pageKey, `${path.relative(CWD, fileAbs)}: page mismatch (page="${doc.page}" vs path="${pageKey}")`);
|
||||
} else {
|
||||
doc.page = pageKey;
|
||||
}
|
||||
|
||||
return doc;
|
||||
}
|
||||
|
||||
function sortParasObject(paras) {
|
||||
const keys = Object.keys(paras || {});
|
||||
keys.sort((a, b) => {
|
||||
const ia = paraIndexFromId(a);
|
||||
const ib = paraIndexFromId(b);
|
||||
if (Number.isFinite(ia) && Number.isFinite(ib) && ia !== ib) return ia - ib;
|
||||
return String(a).localeCompare(String(b));
|
||||
async function downloadToFile(url, token, destAbs) {
|
||||
const res = await fetch(url, {
|
||||
headers: {
|
||||
Authorization: `token ${token}`,
|
||||
"User-Agent": "archicratie-apply-annotation/1.0",
|
||||
},
|
||||
redirect: "follow",
|
||||
});
|
||||
const out = {};
|
||||
for (const k of keys) out[k] = paras[k];
|
||||
return out;
|
||||
if (!res.ok) {
|
||||
const t = await res.text().catch(() => "");
|
||||
throw new Error(`download failed HTTP ${res.status}: ${url}\n${t}`);
|
||||
}
|
||||
const buf = Buffer.from(await res.arrayBuffer());
|
||||
await fs.mkdir(path.dirname(destAbs), { recursive: true });
|
||||
await fs.writeFile(destAbs, buf);
|
||||
return buf.length;
|
||||
}
|
||||
|
||||
async function saveAnnoDocYaml(fileAbs, doc) {
|
||||
await fs.mkdir(path.dirname(fileAbs), { recursive: true });
|
||||
doc.paras = sortParasObject(doc.paras);
|
||||
const out = YAML.stringify(doc);
|
||||
await fs.writeFile(fileAbs, out, "utf8");
|
||||
}
|
||||
|
||||
/* ------------------------------ apply per type ----------------------------- */
|
||||
|
||||
function ensureEntry(doc, paraId) {
|
||||
if (!doc.paras[paraId] || !isPlainObject(doc.paras[paraId])) doc.paras[paraId] = {};
|
||||
return doc.paras[paraId];
|
||||
}
|
||||
|
||||
function uniqPush(arr, item, keyFn) {
|
||||
const k = keyFn(item);
|
||||
const exists = arr.some((x) => keyFn(x) === k);
|
||||
if (!exists) arr.push(item);
|
||||
return !exists;
|
||||
}
|
||||
|
||||
function stableSortByTs(arr) {
|
||||
if (!Array.isArray(arr)) return;
|
||||
arr.sort((a, b) => {
|
||||
const ta = Date.parse(a?.ts || "") || 0;
|
||||
const tb = Date.parse(b?.ts || "") || 0;
|
||||
if (ta !== tb) return ta - tb;
|
||||
return JSON.stringify(a).localeCompare(JSON.stringify(b));
|
||||
});
|
||||
}
|
||||
/* ------------------------------ type parsers ------------------------------ */
|
||||
|
||||
function parseReferenceBlock(body) {
|
||||
const block =
|
||||
@@ -431,50 +655,6 @@ function parseReferenceBlock(body) {
|
||||
};
|
||||
}
|
||||
|
||||
function inferMediaTypeFromFilename(name) {
|
||||
const n = String(name || "").toLowerCase();
|
||||
if (/\.(png|jpe?g|webp|gif|svg)$/.test(n)) return "image";
|
||||
if (/\.(mp4|webm|mov|m4v)$/.test(n)) return "video";
|
||||
if (/\.(mp3|wav|ogg|m4a)$/.test(n)) return "audio";
|
||||
return "link";
|
||||
}
|
||||
|
||||
function sanitizeFilename(name) {
|
||||
return String(name || "file")
|
||||
.replace(/[\/\\]/g, "_")
|
||||
.replace(/[^\w.\-]+/g, "_")
|
||||
.replace(/_+/g, "_")
|
||||
.slice(0, 180);
|
||||
}
|
||||
|
||||
function isHttpUrl(u) {
|
||||
try {
|
||||
const x = new URL(String(u));
|
||||
return x.protocol === "http:" || x.protocol === "https:";
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}

async function downloadToFile(url, token, destAbs) {
  const res = await fetch(url, {
    headers: {
      // most /attachments are public, but we keep the token just in case
      Authorization: `token ${token}`,
      "User-Agent": "archicratie-apply-annotation/1.0",
    },
    redirect: "follow",
  });
  if (!res.ok) {
    const t = await res.text().catch(() => "");
    throw new Error(`download failed HTTP ${res.status}: ${url}\n${t}`);
  }
  const buf = Buffer.from(await res.arrayBuffer());
  await fs.mkdir(path.dirname(destAbs), { recursive: true });
  await fs.writeFile(destAbs, buf);
  return buf.length;
}

/* ----------------------------------- main ---------------------------------- */

async function main() {
@@ -511,29 +691,53 @@ async function main() {
  const pageKey = normalizePageKeyFromChemin(chemin);
  assert(pageKey, "Ticket: impossible de dériver pageKey.", 2);

  const paraOrder = DO_VERIFY ? await loadParaOrderFromDist(pageKey) : null;

  if (DO_VERIFY) {
    const ok = await tryVerifyAnchor(pageKey, ancre);
    if (ok === false) {
      throw Object.assign(new Error(`Ticket verify: ancre introuvable pour page "${pageKey}" => ${ancre}`), { __exitCode: 2 });
    }
    if (ok === null) {
      // no source of truth available
      if (STRICT) throw Object.assign(new Error(`Ticket verify (strict): impossible de vérifier (pas de baseline/dist)`), { __exitCode: 2 });
      console.warn("⚠️ verify: impossible de vérifier (pas de baseline/dist) — on continue.");
      if (STRICT) {
        throw Object.assign(
          new Error(`Ticket verify (strict): impossible de vérifier (pas de dist/para-index.json ou baseline)`),
          { __exitCode: 2 }
        );
      }
      console.warn("⚠️ verify: impossible de vérifier (pas de dist/para-index.json ou baseline) — on continue.");
    }
  }

  const annoFileAbs = path.join(ANNO_DIR, `${pageKey}.yml`);
  const annoFileRel = path.relative(CWD, annoFileAbs).replace(/\\/g, "/");
  // ✅ shard path: src/annotations/<pageKey>/<paraId>.yml
  const shardAbs = path.join(ANNO_DIR, ...pageKey.split("/"), `${ancre}.yml`);
  const shardRel = path.relative(CWD, shardAbs).replace(/\\/g, "/");

  console.log("✅ Parsed:", { type, chemin, ancre: `#${ancre}`, pageKey, annoFile: annoFileRel });
  // legacy monolith: src/annotations/<pageKey>.yml (read-only, for migration)
  const legacyAbs = path.join(ANNO_DIR, `${pageKey}.yml`);

  const doc = await loadAnnoDoc(annoFileAbs, pageKey);
  const entry = ensureEntry(doc, ancre);
  console.log("✅ Parsed:", { type, chemin, ancre: `#${ancre}`, pageKey, annoFile: shardRel });

  // load shard doc
  const doc = await loadAnnoDocYaml(shardAbs, pageKey);
  if (!isPlainObject(doc.paras[ancre])) doc.paras[ancre] = {};
  const entry = doc.paras[ancre];

  // merge legacy entry into shard in-memory (non-destructive) to keep compat + enable progressive migration
  if (await exists(legacyAbs)) {
    try {
      const legacy = await loadAnnoDocYaml(legacyAbs, pageKey);
      const legacyEntry = legacy?.paras?.[ancre];
      if (isPlainObject(legacyEntry)) {
        deepMergeEntry(entry, legacyEntry);
      }
    } catch {
      // ignore legacy parse issues; shard still applies new data
    }
  }

  const touchedFiles = [];
  const notes = [];

  let changed = false;
  const nowIso = new Date().toISOString();

@@ -545,16 +749,19 @@ async function main() {
    if (!Array.isArray(entry.comments_editorial)) entry.comments_editorial = [];
    const item = { text, status: "new", ts: nowIso, fromIssue: issueNum };

    const added = uniqPush(entry.comments_editorial, item, (x) => `${(x?.text || "").trim()}`);
    if (added) { changed = true; notes.push(`+ comment added (len=${text.length})`); }
    else notes.push(`~ comment already present (dedup)`);

    const before = entry.comments_editorial.length;
    entry.comments_editorial = uniqUnion(entry.comments_editorial, [item], keyComment);
    if (entry.comments_editorial.length !== before) {
      changed = true;
      notes.push(`+ comment added (len=${text.length})`);
    } else {
      notes.push(`~ comment already present (dedup)`);
    }
    stableSortByTs(entry.comments_editorial);
  }

  else if (type === "type/reference") {
    const ref = parseReferenceBlock(body);

    assert(ref.url || ref.label, "Ticket reference: renseigne au moins - URL: ou - Label: dans le ticket.", 2);

    if (STRICT && ref.url && !isHttpUrl(ref.url)) {
@@ -571,34 +778,35 @@ async function main() {
    };
    if (ref.citation) item.citation = ref.citation;

    const added = uniqPush(entry.refs, item, (x) => `${x?.url || ""}||${x?.label || ""}||${x?.kind || ""}||${x?.citation || ""}`);
    if (added) { changed = true; notes.push(`+ reference added (${item.url ? "url" : "label"})`); }
    else notes.push(`~ reference already present (dedup)`);

    const before = entry.refs.length;
    entry.refs = uniqUnion(entry.refs, [item], keyRef);
    if (entry.refs.length !== before) {
      changed = true;
      notes.push(`+ reference added (${item.url ? "url" : "label"})`);
    } else {
      notes.push(`~ reference already present (dedup)`);
    }
    stableSortByTs(entry.refs);
  }

  else if (type === "type/media") {
    if (!Array.isArray(entry.media)) entry.media = [];

    const atts = NO_DOWNLOAD ? [] : await fetchIssueAssets({ forgeApiBase, owner, repo, token, issueNum });

    if (!atts.length) {
      notes.push("! no assets found (nothing to download).");
    const caption = (title || "").trim();
    if (STRICT && !caption) {
      throw Object.assign(new Error("Ticket media (strict): caption vide (titre de ticket requis)."), { __exitCode: 2 });
    }
    const captionFinal = caption || ".";

    const atts = NO_DOWNLOAD ? [] : await fetchIssueAssets({ forgeApiBase, owner, repo, token, issueNum });
    if (!atts.length) notes.push("! no assets found (nothing to download).");

    for (const a of atts) {
      const name = sanitizeFilename(a?.name || `asset-${a?.id || "x"}`);
      const dl = a?.browser_download_url || a?.download_url || "";
      if (!dl) { notes.push(`! asset missing download url: ${name}`); continue; }

      // caption = ticket title (fallback ".")
      const caption = (title || "").trim() || ".";
      if (STRICT && !caption.trim()) {
        throw Object.assign(new Error("Ticket media (strict): caption vide."), { __exitCode: 2 });
      }

      const mediaDirAbs = path.join(PUBLIC_DIR, "media", pageKey, ancre);
      const mediaDirAbs = path.join(PUBLIC_DIR, "media", ...pageKey.split("/"), ancre);
      const destAbs = path.join(mediaDirAbs, name);
      const urlPath = `${MEDIA_URL_ROOT}/${pageKey}/${ancre}/${name}`.replace(/\/{2,}/g, "/");

@@ -608,21 +816,24 @@ async function main() {
        const bytes = await downloadToFile(dl, token, destAbs);
        notes.push(`+ downloaded ${name} (${bytes} bytes) -> ${urlPath}`);
        touchedFiles.push(path.relative(CWD, destAbs).replace(/\\/g, "/"));
        changed = true;
      } else {
        notes.push(`(dry) would download ${name} -> ${urlPath}`);
        changed = true;
      }

      const item = {
        type: inferMediaTypeFromFilename(name),
        src: urlPath,
        caption,
        caption: captionFinal,
        credit: "",
        ts: nowIso,
        fromIssue: issueNum,
      };

      const added = uniqPush(entry.media, item, (x) => String(x?.src || ""));
      if (added) changed = true;
      const before = entry.media.length;
      entry.media = uniqUnion(entry.media, [item], keyMedia);
      if (entry.media.length !== before) changed = true;
    }

    stableSortByTs(entry.media);
@@ -640,7 +851,7 @@ async function main() {

  if (DRY_RUN) {
    console.log("\n--- DRY RUN (no write) ---");
    console.log(`Would update: ${annoFileRel}`);
    console.log(`Would update: ${shardRel}`);
    for (const n of notes) console.log(" ", n);
    console.log("\nExcerpt (resulting entry):");
    console.log(YAML.stringify({ [ancre]: doc.paras[ancre] }).trimEnd());
@@ -648,10 +859,10 @@ async function main() {
    return;
  }

  await saveAnnoDocYaml(annoFileAbs, doc);
  touchedFiles.unshift(annoFileRel);
  await saveAnnoDocYaml(shardAbs, doc, paraOrder);
  touchedFiles.unshift(shardRel);

  console.log(`✅ Updated: ${annoFileRel}`);
  console.log(`✅ Updated: ${shardRel}`);
  for (const n of notes) console.log(" ", n);

  if (DO_COMMIT) {
@@ -685,4 +896,4 @@ main().catch((e) => {
  const code = e?.__exitCode || 1;
  console.error("💥", e?.message || e);
  process.exit(code);
});
@@ -1,28 +1,106 @@
#!/usr/bin/env node
// scripts/build-annotations-index.mjs
// Builds dist/annotations-index.json from src/annotations/**/*.yml
// Supports:
//   - monolith : src/annotations/<pageKey>.yml
//   - shard    : src/annotations/<pageKey>/<paraId>.yml (paraId = p-<n>-...)
// Invariants:
//   - doc.schema === 1
//   - doc.page (if present) == pageKey derived from the file path
//   - shard: doc.paras must contain EXACTLY the paraId key (otherwise fail)
//
// Non-destructive deep-merge (media/refs/comments deduplicated), stable sorting.
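// Example layout (drawn from the annotation files added in this change set):
//   src/annotations/archicrat-ia/chapitre-4.yml               -> monolith, pageKey "archicrat-ia/chapitre-4"
//   src/annotations/archicrat-ia/chapitre-4/p-11-67c14c09.yml -> shard,    pageKey "archicrat-ia/chapitre-4", paraId "p-11-67c14c09"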

import fs from "node:fs/promises";
import path from "node:path";
import YAML from "yaml";

function parseArgs(argv) {
  const out = {
    inDir: "src/annotations",
    outFile: "dist/annotations-index.json",
  };
const ROOT = process.cwd();
const ANNO_ROOT = path.join(ROOT, "src", "annotations");
const DIST_DIR = path.join(ROOT, "dist");
const OUT = path.join(DIST_DIR, "annotations-index.json");

  for (let i = 0; i < argv.length; i++) {
    const a = argv[i];
function assert(cond, msg) {
  if (!cond) throw new Error(msg);
}

    if (a === "--in" && argv[i + 1]) out.inDir = argv[++i];
    else if (a.startsWith("--in=")) out.inDir = a.slice("--in=".length);
function isObj(x) {
  return !!x && typeof x === "object" && !Array.isArray(x);
}
function isArr(x) {
  return Array.isArray(x);
}

    if (a === "--out" && argv[i + 1]) out.outFile = argv[++i];
    else if (a.startsWith("--out=")) out.outFile = a.slice("--out=".length);
function normPath(s) {
  return String(s || "")
    .replace(/\\/g, "/")
    .replace(/^\/+|\/+$/g, "");
}

function paraNum(pid) {
  const m = String(pid).match(/^p-(\d+)-/i);
  return m ? Number(m[1]) : Number.POSITIVE_INFINITY;
}
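// e.g. paraNum("p-11-67c14c09") -> 11, paraNum("chapitre-4") -> Infinity (sorts last)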

function stableSortByTs(arr) {
  if (!Array.isArray(arr)) return;
  arr.sort((a, b) => {
    const ta = Date.parse(a?.ts || "") || 0;
    const tb = Date.parse(b?.ts || "") || 0;
    if (ta !== tb) return ta - tb;
    return JSON.stringify(a).localeCompare(JSON.stringify(b));
  });
}

function keyMedia(x) { return String(x?.src || ""); }
function keyRef(x) {
  return `${x?.url || ""}||${x?.label || ""}||${x?.kind || ""}||${x?.citation || ""}`;
}
function keyComment(x) { return String(x?.text || "").trim(); }

function uniqUnion(dst, src, keyFn) {
  const out = isArr(dst) ? [...dst] : [];
  const seen = new Set(out.map((x) => keyFn(x)));
  for (const it of (isArr(src) ? src : [])) {
    const k = keyFn(it);
    if (!k) continue;
    if (!seen.has(k)) {
      seen.add(k);
      out.push(it);
    }
  }
  return out;
}
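// Dedupe sketch (illustrative): merging the same media src twice keeps one entry,
// and items whose key is empty ("") are skipped rather than merged:
//   uniqUnion([{ src: "/media/a.png" }], [{ src: "/media/a.png" }, { src: "/media/b.png" }], keyMedia)
//     -> [{ src: "/media/a.png" }, { src: "/media/b.png" }]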

async function exists(p) {
  try { await fs.access(p); return true; } catch { return false; }
function deepMergeEntry(dst, src) {
  if (!isObj(dst) || !isObj(src)) return;

  for (const [k, v] of Object.entries(src)) {
    if (k === "media" && isArr(v)) { dst.media = uniqUnion(dst.media, v, keyMedia); continue; }
    if (k === "refs" && isArr(v)) { dst.refs = uniqUnion(dst.refs, v, keyRef); continue; }
    if (k === "comments_editorial" && isArr(v)) { dst.comments_editorial = uniqUnion(dst.comments_editorial, v, keyComment); continue; }

    if (isObj(v)) {
      if (!isObj(dst[k])) dst[k] = {};
      deepMergeEntry(dst[k], v);
      continue;
    }

    if (isArr(v)) {
      const cur = isArr(dst[k]) ? dst[k] : [];
      const seen = new Set(cur.map((x) => JSON.stringify(x)));
      const out = [...cur];
      for (const it of v) {
        const s = JSON.stringify(it);
        if (!seen.has(s)) { seen.add(s); out.push(it); }
      }
      dst[k] = out;
      continue;
    }

    // scalar: set only if missing/empty
    if (!(k in dst) || dst[k] == null || dst[k] === "") dst[k] = v;
  }
}
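// Merge semantics at a glance (illustrative): the known arrays dedupe by key,
// unknown arrays dedupe by JSON identity, nested objects recurse, and scalars
// never overwrite an existing non-empty value:
//   const dst = { credit: "X", media: [{ src: "/media/a.png" }] };
//   deepMergeEntry(dst, { credit: "Y", media: [{ src: "/media/a.png" }], note: "n" });
//   // -> dst.credit === "X", dst.media.length === 1, dst.note === "n"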

async function walk(dir) {
@@ -30,111 +108,116 @@ async function walk(dir) {
  const ents = await fs.readdir(dir, { withFileTypes: true });
  for (const e of ents) {
    const p = path.join(dir, e.name);
    if (e.isDirectory()) out.push(...(await walk(p)));
    else out.push(p);
    if (e.isDirectory()) out.push(...await walk(p));
    else if (e.isFile() && /\.ya?ml$/i.test(e.name)) out.push(p);
  }
  return out;
}

function inferPageKeyFromFile(inDirAbs, fileAbs) {
  // src/annotations/<page>.yml -> "<page>"
  const rel = path.relative(inDirAbs, fileAbs).replace(/\\/g, "/");
  return rel.replace(/\.(ya?ml|json)$/i, "");
function inferExpectedFromRel(relNoExt) {
  const parts = relNoExt.split("/").filter(Boolean);
  const last = parts.at(-1) || "";
  const isShard = parts.length > 1 && /^p-\d+-/i.test(last); // ✅ hardening
  const pageKey = isShard ? parts.slice(0, -1).join("/") : relNoExt;
  const paraId = isShard ? last : null;
  return { isShard, pageKey, paraId };
}
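// Examples (using paths that exist in this change set):
//   inferExpectedFromRel("archicrat-ia/chapitre-4")
//     -> { isShard: false, pageKey: "archicrat-ia/chapitre-4", paraId: null }
//   inferExpectedFromRel("archicrat-ia/chapitre-4/p-11-67c14c09")
//     -> { isShard: true, pageKey: "archicrat-ia/chapitre-4", paraId: "p-11-67c14c09" }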

function assert(cond, msg) {
  if (!cond) throw new Error(msg);
}
function validateAndNormalizeDoc(doc, relFile, expectedPageKey, expectedParaId) {
  assert(isObj(doc), `${relFile}: doc must be an object`);
  assert(doc.schema === 1, `${relFile}: schema must be 1`);
  assert(isObj(doc.paras), `${relFile}: missing object key "paras"`);

function isPlainObject(x) {
  return !!x && typeof x === "object" && !Array.isArray(x);
}
  const gotPage = doc.page != null ? normPath(doc.page) : "";
  const expPage = normPath(expectedPageKey);

function normalizePageKey(s) {
  // no leading/trailing slash
  return String(s || "").replace(/^\/+/, "").replace(/\/+$/, "");
}

function validateAndNormalizeDoc(doc, pageKey, fileRel) {
  assert(isPlainObject(doc), `${fileRel}: document must be an object`);
  assert(doc.schema === 1, `${fileRel}: schema must be 1`);
  if (doc.page != null) {
  if (gotPage) {
    assert(
      normalizePageKey(doc.page) === pageKey,
      `${fileRel}: page mismatch (page="${doc.page}" vs path="${pageKey}")`
      gotPage === expPage,
      `${relFile}: page mismatch (page="${doc.page}" vs path="${expectedPageKey}")`
    );
  } else {
    doc.page = expPage;
  }

  if (expectedParaId) {
    const keys = Object.keys(doc.paras || {}).map(String);
    assert(
      keys.includes(expectedParaId),
      `${relFile}: shard mismatch: must contain paras["${expectedParaId}"]`
    );
    assert(
      keys.length === 1 && keys[0] === expectedParaId,
      `${relFile}: shard invariant violated: shard file must contain ONLY paras["${expectedParaId}"] (got: ${keys.join(", ")})`
    );
  }
  assert(isPlainObject(doc.paras), `${fileRel}: missing object key "paras"`);

  const parasOut = Object.create(null);

  for (const [paraId, entry] of Object.entries(doc.paras)) {
    assert(/^p-\d+-/i.test(paraId), `${fileRel}: invalid para id "${paraId}"`);

    // entry may be empty, but must be an object if present
    assert(entry == null || isPlainObject(entry), `${fileRel}: paras.${paraId} must be an object`);

    const e = entry ? { ...entry } : {};

    // Sanity checks (non-destructive: nothing is overwritten, we only check types)
    if (e.refs != null) assert(Array.isArray(e.refs), `${fileRel}: paras.${paraId}.refs must be an array`);
    if (e.authors != null) assert(Array.isArray(e.authors), `${fileRel}: paras.${paraId}.authors must be an array`);
    if (e.quotes != null) assert(Array.isArray(e.quotes), `${fileRel}: paras.${paraId}.quotes must be an array`);
    if (e.media != null) assert(Array.isArray(e.media), `${fileRel}: paras.${paraId}.media must be an array`);
    if (e.comments_editorial != null) assert(Array.isArray(e.comments_editorial), `${fileRel}: paras.${paraId}.comments_editorial must be an array`);

    parasOut[paraId] = e;
  }

  return parasOut;
}

async function readDoc(fileAbs) {
  const raw = await fs.readFile(fileAbs, "utf8");
  if (/\.json$/i.test(fileAbs)) return JSON.parse(raw);
  return YAML.parse(raw);
  return doc;
}

async function main() {
  const { inDir, outFile } = parseArgs(process.argv.slice(2));
  const CWD = process.cwd();
  const pages = {};
  const errors = [];

  const inDirAbs = path.isAbsolute(inDir) ? inDir : path.join(CWD, inDir);
  const outAbs = path.isAbsolute(outFile) ? outFile : path.join(CWD, outFile);
  await fs.mkdir(DIST_DIR, { recursive: true });

  // antifragile: a missing input dir is a skip, not an error
  if (!(await exists(inDirAbs))) {
    console.log(`ℹ️ annotations-index: skip (input missing): ${inDir}`);
    process.exit(0);
  }
  const files = await walk(ANNO_ROOT);

  const files = (await walk(inDirAbs)).filter((p) => /\.(ya?ml|json)$/i.test(p));
  if (!files.length) {
    console.log(`ℹ️ annotations-index: skip (no .yml/.yaml/.json found in): ${inDir}`);
    process.exit(0);
  }
  for (const fp of files) {
    const rel = normPath(path.relative(ANNO_ROOT, fp));
    const relNoExt = rel.replace(/\.ya?ml$/i, "");
    const { isShard, pageKey, paraId } = inferExpectedFromRel(relNoExt);

    const pages = Object.create(null);
    let paraCount = 0;

  for (const f of files) {
    const fileRel = path.relative(CWD, f).replace(/\\/g, "/");
    const pageKey = normalizePageKey(inferPageKeyFromFile(inDirAbs, f));
    assert(pageKey, `${fileRel}: cannot infer page key`);

    let doc;
    try {
      doc = await readDoc(f);
      const raw = await fs.readFile(fp, "utf8");
      const doc = YAML.parse(raw) || {};

      if (!isObj(doc) || doc.schema !== 1) continue;

      validateAndNormalizeDoc(
        doc,
        `src/annotations/${rel}`,
        pageKey,
        isShard ? paraId : null
      );

      const pg = (pages[pageKey] ??= { paras: {} });

      if (isShard) {
        const entry = doc.paras[paraId];
        if (!isObj(pg.paras[paraId])) pg.paras[paraId] = {};
        if (isObj(entry)) deepMergeEntry(pg.paras[paraId], entry);

        stableSortByTs(pg.paras[paraId].media);
        stableSortByTs(pg.paras[paraId].refs);
        stableSortByTs(pg.paras[paraId].comments_editorial);
      } else {
        for (const [pid, entry] of Object.entries(doc.paras || {})) {
          const p = String(pid);
          if (!isObj(pg.paras[p])) pg.paras[p] = {};
          if (isObj(entry)) deepMergeEntry(pg.paras[p], entry);

          stableSortByTs(pg.paras[p].media);
          stableSortByTs(pg.paras[p].refs);
          stableSortByTs(pg.paras[p].comments_editorial);
        }
      }
    } catch (e) {
      throw new Error(`${fileRel}: parse failed: ${String(e?.message ?? e)}`);
      errors.push({ file: `src/annotations/${rel}`, error: String(e?.message || e) });
    }
  }

    const paras = validateAndNormalizeDoc(doc, pageKey, fileRel);

    // 1 file = 1 page (canonical)
    assert(!pages[pageKey], `${fileRel}: duplicate page "${pageKey}" (only one file per page)`);
    pages[pageKey] = { paras };
    paraCount += Object.keys(paras).length;
  for (const [pageKey, pg] of Object.entries(pages)) {
    const keys = Object.keys(pg.paras || {});
    keys.sort((a, b) => {
      const ia = paraNum(a);
      const ib = paraNum(b);
      if (Number.isFinite(ia) && Number.isFinite(ib) && ia !== ib) return ia - ib;
      return String(a).localeCompare(String(b));
    });
    const next = {};
    for (const k of keys) next[k] = pg.paras[k];
    pg.paras = next;
  }

  const out = {
@@ -143,17 +226,21 @@ async function main() {
    pages,
    stats: {
      pages: Object.keys(pages).length,
      paras: paraCount,
      paras: Object.values(pages).reduce((n, p) => n + Object.keys(p.paras || {}).length, 0),
      errors: errors.length,
    },
    errors,
  };

  await fs.mkdir(path.dirname(outAbs), { recursive: true });
  await fs.writeFile(outAbs, JSON.stringify(out), "utf8");
  if (errors.length) {
    throw new Error(`${errors[0].file}: ${errors[0].error}`);
  }

  console.log(`✅ annotations-index: pages=${out.stats.pages} paras=${out.stats.paras} -> ${path.relative(CWD, outAbs)}`);
  await fs.writeFile(OUT, JSON.stringify(out), "utf8");
  console.log(`✅ annotations-index: pages=${out.stats.pages} paras=${out.stats.paras} -> dist/annotations-index.json`);
}

main().catch((e) => {
  console.error("FAIL: build-annotations-index crashed:", e);
  console.error(`FAIL: build-annotations-index crashed: ${e?.stack || e?.message || e}`);
  process.exit(1);
});
@@ -48,6 +48,9 @@ async function main() {
  let missing = 0;
  const notes = [];

  // Optimization: avoid checking the same media file a hundred times over
  const seenMedia = new Set(); // src string

  for (const f of files) {
    const rel = path.relative(CWD, f).replace(/\\/g, "/");
    const raw = await fs.readFile(f, "utf8");
@@ -70,6 +73,10 @@ async function main() {
      const src = String(m?.src || "");
      if (!src.startsWith("/media/")) continue; // external URLs are ok, as are other future conventions

      // dedupe
      if (seenMedia.has(src)) continue;
      seenMedia.add(src);

      checked++;
      const p = toPublicPathFromUrl(src);
      if (!p) continue;
@@ -94,4 +101,4 @@ async function main() {
main().catch((e) => {
  console.error("FAIL: check-annotations-media crashed:", e);
  process.exit(1);
});
@@ -27,11 +27,6 @@ function escRe(s) {
  return String(s).replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
}

function inferPageKeyFromFile(fileAbs) {
  const rel = path.relative(ANNO_DIR, fileAbs).replace(/\\/g, "/");
  return rel.replace(/\.(ya?ml|json)$/i, "");
}

function normalizePageKey(s) {
  return String(s || "").replace(/^\/+/, "").replace(/\/+$/, "");
}
@@ -40,6 +35,31 @@ function isPlainObject(x) {
  return !!x && typeof x === "object" && !Array.isArray(x);
}

function isParaId(s) {
  return /^p-\d+-/i.test(String(s || ""));
}

/**
 * Supports:
 * - monolith: src/annotations/<pageKey>.yml -> pageKey = rel without extension
 * - shard   : src/annotations/<pageKey>/<paraId>.yml -> pageKey = dirname(rel), paraId = basename
 *
 * A file counts as a shard only when it sits in a subdirectory (guards against the pathological case).
 */
function inferFromFile(fileAbs) {
  const rel = path.relative(ANNO_DIR, fileAbs).replace(/\\/g, "/");
  const relNoExt = rel.replace(/\.(ya?ml|json)$/i, "");
  const parts = relNoExt.split("/").filter(Boolean);
  const base = parts[parts.length - 1] || "";
  const dirParts = parts.slice(0, -1);

  const isShard = dirParts.length > 0 && isParaId(base);
  const pageKey = isShard ? dirParts.join("/") : relNoExt;
  const paraId = isShard ? base : "";

  return { pageKey: normalizePageKey(pageKey), paraId };
}
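// e.g. inferFromFile(".../src/annotations/archicrat-ia/chapitre-1/p-0-8d27a7f5.yml")
//   -> { pageKey: "archicrat-ia/chapitre-1", paraId: "p-0-8d27a7f5" }
// whereas a top-level "p-0-x.yml" stays a monolith page (dirParts is empty).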

async function loadAliases() {
  if (!(await exists(ALIASES_PATH))) return {};
  try {
@@ -83,7 +103,11 @@ async function main() {
  const aliases = await loadAliases();
  const files = (await walk(ANNO_DIR)).filter((p) => /\.(ya?ml|json)$/i.test(p));

  let pages = 0;
  // perf: cache HTML per page (shards mean many files for a single page)
  const htmlCache = new Map(); // pageKey -> html
  const missingDistPage = new Set(); // pageKey

  let pagesSeen = new Set();
  let checked = 0;
  let failures = 0;
  const notes = [];
@@ -107,7 +131,7 @@ async function main() {
      continue;
    }

    const pageKey = normalizePageKey(inferPageKeyFromFile(f));
    const { pageKey, paraId: shardParaId } = inferFromFile(f);

    if (doc.page != null && normalizePageKey(doc.page) !== pageKey) {
      failures++;
@@ -121,20 +145,44 @@ async function main() {
      continue;
    }

    // shard invariant (strong): the doc must contain paras[paraId]
    if (shardParaId) {
      if (!Object.prototype.hasOwnProperty.call(doc.paras, shardParaId)) {
        failures++;
        notes.push(`- SHARD MISMATCH: ${rel} (expected paras["${shardParaId}"] present)`);
        continue;
      }
      // extra keys -> warning (non-destructive)
      const keys = Object.keys(doc.paras);
      if (!(keys.length === 1 && keys[0] === shardParaId)) {
        notes.push(`- WARN shard has extra paras: ${rel} (expected only "${shardParaId}", got ${keys.join(", ")})`);
      }
    }

    pagesSeen.add(pageKey);

    const distFile = path.join(DIST_DIR, pageKey, "index.html");
    if (!(await exists(distFile))) {
      failures++;
      notes.push(`- MISSING PAGE: dist/${pageKey}/index.html (from ${rel})`);
      if (!missingDistPage.has(pageKey)) {
        missingDistPage.add(pageKey);
        failures++;
        notes.push(`- MISSING PAGE: dist/${pageKey}/index.html (from ${rel})`);
      } else {
        notes.push(`- WARN missing page already reported: dist/${pageKey}/index.html (from ${rel})`);
      }
      continue;
    }

    pages++;
    const html = await fs.readFile(distFile, "utf8");
    let html = htmlCache.get(pageKey);
    if (!html) {
      html = await fs.readFile(distFile, "utf8");
      htmlCache.set(pageKey, html);
    }

    for (const paraId of Object.keys(doc.paras)) {
      checked++;

      if (!/^p-\d+-/i.test(paraId)) {
      if (!isParaId(paraId)) {
        failures++;
        notes.push(`- INVALID ID: ${rel} (${paraId})`);
        continue;
@@ -158,6 +206,7 @@ async function main() {
      }

  const warns = notes.filter((x) => x.startsWith("- WARN"));
  const pages = pagesSeen.size;

  if (failures > 0) {
    console.error(`FAIL: annotations invalid (pages=${pages} checked=${checked} failures=${failures})`);
@@ -172,4 +221,4 @@ async function main() {
main().catch((e) => {
  console.error("FAIL: annotations check crashed:", e);
  process.exit(1);
});
10 src/annotations/archicrat-ia/chapitre-1/p-0-8d27a7f5.yml Normal file
@@ -0,0 +1,10 @@
schema: 1
page: archicrat-ia/chapitre-1
paras:
  p-0-8d27a7f5:
    refs:
      - url: https://auth.archicratie.trans-hands.synology.me/authenticated
        label: Lien web
        kind: (livre / article / vidéo / site / autre) Site
        ts: 2026-02-27T12:34:31.704Z
        fromIssue: 142
9 src/annotations/archicrat-ia/chapitre-1/p-1-8a6c18bf.yml Normal file
@@ -0,0 +1,9 @@
schema: 1
page: archicrat-ia/chapitre-1
paras:
  p-1-8a6c18bf:
    comments_editorial:
      - text: Yeaha
        status: new
        ts: 2026-02-27T12:40:39.462Z
        fromIssue: 143
12 src/annotations/archicrat-ia/chapitre-3/p-0-ace27175.yml Normal file
@@ -0,0 +1,12 @@
schema: 1
page: archicrat-ia/chapitre-3
paras:
  p-0-ace27175:
    media:
      - type: image
        src: /media/archicrat-ia/chapitre-3/p-0-ace27175/Capture_d_e_cran_2025-05-05_a_19.20.40.png
        caption: "[Media] p-0-ace27175 — Chapitre 3 — Philosophies du pouvoir et
          archicration"
        credit: ""
        ts: 2026-02-27T12:43:14.259Z
        fromIssue: 144
30 src/annotations/archicrat-ia/chapitre-4.yml Normal file
@@ -0,0 +1,30 @@
schema: 1
page: archicrat-ia/chapitre-4
paras:
  p-2-31b12529:
    media:
      - type: image
        src: /media/archicrat-ia/chapitre-4/p-2-31b12529/Capture_d_e_cran_2026-02-16_a_13.05.58.png
        caption: "[Media] p-2-31b12529 — Chapitre 4 — Histoire archicratique des
          révolutions industrielles"
        credit: ""
        ts: 2026-02-25T18:58:32.359Z
        fromIssue: 115
  p-7-1da4a458:
    media:
      - type: image
        src: /media/archicrat-ia/chapitre-4/p-7-1da4a458/Capture_d_e_cran_2026-02-16_a_13.05.58.png
        caption: "[Media] p-7-1da4a458 — Chapitre 4 — Histoire archicratique des
          révolutions industrielles"
        credit: ""
        ts: 2026-02-25T19:11:32.634Z
        fromIssue: 121
  p-11-67c14c09:
    media:
      - type: image
        src: /media/archicrat-ia/chapitre-4/p-11-67c14c09/Capture_d_e_cran_2026-02-16_a_13.07.35.png
        caption: "[Media] p-11-67c14c09 — Chapitre 4 — Histoire archicratique des
          révolutions industrielles"
        credit: ""
        ts: 2026-02-26T13:17:41.286Z
        fromIssue: 129
19 src/annotations/archicrat-ia/chapitre-4/p-11-67c14c09.yml Normal file
@@ -0,0 +1,19 @@
schema: 1
page: archicrat-ia/chapitre-4
paras:
  p-11-67c14c09:
    media:
      - type: image
        src: /media/archicrat-ia/chapitre-4/p-11-67c14c09/Capture_d_e_cran_2026-02-16_a_13.07.35.png
        caption: "[Media] p-11-67c14c09 — Chapitre 4 — Histoire archicratique des
          révolutions industrielles"
        credit: ""
        ts: 2026-02-26T13:17:41.286Z
        fromIssue: 129
      - type: image
        src: /media/archicrat-ia/chapitre-4/p-11-67c14c09/Capture_d_e_cran_2025-05-05_a_19.20.40.png
        caption: "[Media] p-11-67c14c09 — Chapitre 4 — Histoire archicratique des
          révolutions industrielles"
        credit: ""
        ts: 2026-02-27T09:17:04.386Z
        fromIssue: 127
@@ -1,23 +1,80 @@
// src/pages/annotations-index.json.ts
import type { APIRoute } from "astro";
import * as fs from "node:fs/promises";
import * as path from "node:path";
import { parse as parseYAML } from "yaml";
import fs from "node:fs/promises";
import path from "node:path";
import YAML from "yaml";

const CWD = process.cwd();
const ANNO_DIR = path.join(CWD, "src", "annotations");
const ANNO_ROOT = path.join(CWD, "src", "annotations");

// Strict in CI (or explicit override)
const STRICT =
  process.env.ANNOTATIONS_STRICT === "1" ||
  process.env.CI === "1" ||
  process.env.CI === "true";
const isObj = (x: any) => !!x && typeof x === "object" && !Array.isArray(x);
const isArr = (x: any) => Array.isArray(x);

async function exists(p: string): Promise<boolean> {
  try {
    await fs.access(p);
    return true;
  } catch {
    return false;
function normPath(s: string) {
  return String(s || "").replace(/\\/g, "/").replace(/^\/+|\/+$/g, "");
}
function paraNum(pid: string) {
  const m = String(pid).match(/^p-(\d+)-/i);
  return m ? Number(m[1]) : Number.POSITIVE_INFINITY;
}
function toIso(v: any) {
  if (v instanceof Date) return v.toISOString();
  return typeof v === "string" ? v : "";
}
function stableSortByTs(arr: any[]) {
  if (!Array.isArray(arr)) return;
  arr.sort((a, b) => {
    const ta = Date.parse(toIso(a?.ts)) || 0;
    const tb = Date.parse(toIso(b?.ts)) || 0;
    if (ta !== tb) return ta - tb;
    return JSON.stringify(a).localeCompare(JSON.stringify(b));
  });
}
function keyMedia(x: any) { return String(x?.src || ""); }
function keyRef(x: any) {
  return `${x?.url || ""}||${x?.label || ""}||${x?.kind || ""}||${x?.citation || ""}`;
}
function keyComment(x: any) { return String(x?.text || "").trim(); }

function uniqUnion(dst: any[], src: any[], keyFn: (x: any) => string) {
  const out = isArr(dst) ? [...dst] : [];
  const seen = new Set(out.map((x) => keyFn(x)));
  for (const it of (isArr(src) ? src : [])) {
    const k = keyFn(it);
    if (!k) continue;
    if (!seen.has(k)) { seen.add(k); out.push(it); }
  }
  return out;
}

function deepMergeEntry(dst: any, src: any) {
  if (!isObj(dst) || !isObj(src)) return;

  for (const [k, v] of Object.entries(src)) {
    if (k === "media" && isArr(v)) { dst.media = uniqUnion(dst.media, v, keyMedia); continue; }
    if (k === "refs" && isArr(v)) { dst.refs = uniqUnion(dst.refs, v, keyRef); continue; }
    if (k === "comments_editorial" && isArr(v)) { dst.comments_editorial = uniqUnion(dst.comments_editorial, v, keyComment); continue; }

    if (isObj(v)) {
      if (!isObj((dst as any)[k])) (dst as any)[k] = {};
      deepMergeEntry((dst as any)[k], v);
      continue;
    }

    if (isArr(v)) {
      const cur = isArr((dst as any)[k]) ? (dst as any)[k] : [];
      const seen = new Set(cur.map((x: any) => JSON.stringify(x)));
      const out = [...cur];
      for (const it of v) {
        const s = JSON.stringify(it);
        if (!seen.has(s)) { seen.add(s); out.push(it); }
      }
      (dst as any)[k] = out;
      continue;
    }

    if (!(k in (dst as any)) || (dst as any)[k] == null || (dst as any)[k] === "") (dst as any)[k] = v;
  }
}
@@ -26,154 +83,98 @@ async function walk(dir: string): Promise<string[]> {
  const ents = await fs.readdir(dir, { withFileTypes: true });
  for (const e of ents) {
    const p = path.join(dir, e.name);
    if (e.isDirectory()) out.push(...(await walk(p)));
    else out.push(p);
    if (e.isDirectory()) out.push(...await walk(p));
    else if (e.isFile() && /\.ya?ml$/i.test(e.name)) out.push(p);
  }
  return out;
}

function isPlainObject(x: unknown): x is Record<string, unknown> {
  return !!x && typeof x === "object" && !Array.isArray(x);
}

function normalizePageKey(s: unknown): string {
  return String(s ?? "")
    .replace(/^\/+/, "")
    .replace(/\/+$/, "")
    .trim();
}

function inferPageKeyFromFile(inDirAbs: string, fileAbs: string): string {
  const rel = path.relative(inDirAbs, fileAbs).replace(/\\/g, "/");
  return rel.replace(/\.(ya?ml|json)$/i, "");
}

function parseDoc(raw: string, fileAbs: string): unknown {
  if (/\.json$/i.test(fileAbs)) return JSON.parse(raw);
  return parseYAML(raw);
}

function hardFailOrCollect(errors: string[], msg: string): void {
  if (STRICT) throw new Error(msg);
  errors.push(msg);
}

function sanitizeEntry(
  fileRel: string,
  paraId: string,
  entry: unknown,
  errors: string[]
): Record<string, unknown> {
  if (entry == null) return {};

  if (!isPlainObject(entry)) {
    hardFailOrCollect(errors, `${fileRel}: paras.${paraId} must be an object`);
    return {};
  }

  const e: Record<string, unknown> = { ...entry };

  const arrayFields = [
    "refs",
    "authors",
    "quotes",
    "media",
    "comments_editorial",
  ] as const;

  for (const k of arrayFields) {
    if (e[k] == null) continue;
    if (!Array.isArray(e[k])) {
      errors.push(`${fileRel}: paras.${paraId}.${k} must be an array (coerced to [])`);
      e[k] = [];
    }
  }

  return e;
function inferExpected(relNoExt: string) {
  const parts = relNoExt.split("/").filter(Boolean);
  const last = parts.at(-1) || "";
  const isShard = parts.length > 1 && /^p-\d+-/i.test(last); // ✅ hardening
  const pageKey = isShard ? parts.slice(0, -1).join("/") : relNoExt;
  const paraId = isShard ? last : null;
  return { isShard, pageKey, paraId };
}

export const GET: APIRoute = async () => {
  if (!(await exists(ANNO_DIR))) {
    const out = {
      schema: 1,
      generatedAt: new Date().toISOString(),
      pages: {},
      stats: { pages: 0, paras: 0, errors: 0 },
      errors: [] as string[],
    };
  const pages: Record<string, { paras: Record<string, any> }> = {};
  const errors: Array<{ file: string; error: string }> = [];

    return new Response(JSON.stringify(out), {
      headers: {
        "Content-Type": "application/json; charset=utf-8",
        "Cache-Control": "no-store",
      },
    });
  let files: string[] = [];
  try {
    files = await walk(ANNO_ROOT);
  } catch (e: any) {
    throw new Error(`Missing annotations root: ${ANNO_ROOT} (${e?.message || e})`);
  }

  const files = (await walk(ANNO_DIR)).filter((p) => /\.(ya?ml|json)$/i.test(p));
  for (const fp of files) {
    const rel = normPath(path.relative(ANNO_ROOT, fp));
    const relNoExt = rel.replace(/\.ya?ml$/i, "");
    const { isShard, pageKey, paraId } = inferExpected(relNoExt);

  const pages: Record<string, { paras: Record<string, Record<string, unknown>> }> =
    Object.create(null);

  const errors: string[] = [];
  let paraCount = 0;

  for (const f of files) {
    const fileRel = path.relative(CWD, f).replace(/\\/g, "/");
    const pageKey = normalizePageKey(inferPageKeyFromFile(ANNO_DIR, f));

    if (!pageKey) {
      hardFailOrCollect(errors, `${fileRel}: cannot infer page key`);
      continue;
    }

    let doc: unknown;
    try {
      const raw = await fs.readFile(f, "utf8");
      doc = parseDoc(raw, f);
    } catch (e) {
      hardFailOrCollect(errors, `${fileRel}: parse failed: ${String((e as any)?.message ?? e)}`);
      continue;
    }
      const raw = await fs.readFile(fp, "utf8");
      const doc = YAML.parse(raw) || {};

    if (!isPlainObject(doc) || (doc as any).schema !== 1) {
      hardFailOrCollect(errors, `${fileRel}: schema must be 1`);
      continue;
    }
      if (!isObj(doc) || doc.schema !== 1) continue;

    if ((doc as any).page != null) {
      const declared = normalizePageKey((doc as any).page);
      if (declared !== pageKey) {
        hardFailOrCollect(
          errors,
          `${fileRel}: page mismatch (page="${declared}" vs path="${pageKey}")`
        );
      const docPage = normPath(doc.page || "");
      if (docPage && docPage !== pageKey) {
        throw new Error(`page mismatch (page="${doc.page}" vs path="${pageKey}")`);
      }
    }
      if (!doc.page) doc.page = pageKey;

    const parasAny = (doc as any).paras;
    if (!isPlainObject(parasAny)) {
      hardFailOrCollect(errors, `${fileRel}: missing object key "paras"`);
      continue;
    }
      if (!isObj(doc.paras)) throw new Error(`missing object key "paras"`);

    if (pages[pageKey]) {
      hardFailOrCollect(errors, `${fileRel}: duplicate page "${pageKey}" (only one file per page)`);
      continue;
    }
      const pg = pages[pageKey] ??= { paras: {} };

    const parasOut: Record<string, Record<string, unknown>> = Object.create(null);
      if (isShard) {
        if (!paraId) throw new Error("internal: missing paraId");
        if (!(paraId in doc.paras)) {
          throw new Error(`shard mismatch: file must contain paras["${paraId}"]`);
        }
        // ✅ invariant aligned with build-annotations-index
        const keys = Object.keys(doc.paras).map(String);
        if (!(keys.length === 1 && keys[0] === paraId)) {
          throw new Error(`shard invariant violated: shard must contain ONLY paras["${paraId}"] (got: ${keys.join(", ")})`);
        }

    for (const [paraId, entry] of Object.entries(parasAny)) {
      if (!/^p-\d+-/i.test(paraId)) {
        hardFailOrCollect(errors, `${fileRel}: invalid para id "${paraId}"`);
        continue;
        const entry = doc.paras[paraId];
        if (!isObj(pg.paras[paraId])) pg.paras[paraId] = {};
        if (isObj(entry)) deepMergeEntry(pg.paras[paraId], entry);

        stableSortByTs(pg.paras[paraId].media);
        stableSortByTs(pg.paras[paraId].refs);
        stableSortByTs(pg.paras[paraId].comments_editorial);
      } else {
        for (const [pid, entry] of Object.entries(doc.paras)) {
          const p = String(pid);
          if (!isObj(pg.paras[p])) pg.paras[p] = {};
          if (isObj(entry)) deepMergeEntry(pg.paras[p], entry);

          stableSortByTs(pg.paras[p].media);
          stableSortByTs(pg.paras[p].refs);
          stableSortByTs(pg.paras[p].comments_editorial);
        }
      }
      parasOut[paraId] = sanitizeEntry(fileRel, paraId, entry, errors);
    } catch (e: any) {
      errors.push({ file: `src/annotations/${rel}`, error: String(e?.message || e) });
    }
  }

    pages[pageKey] = { paras: parasOut };
    paraCount += Object.keys(parasOut).length;
  for (const [pk, pg] of Object.entries(pages)) {
    const keys = Object.keys(pg.paras || {});
    keys.sort((a, b) => {
      const ia = paraNum(a);
      const ib = paraNum(b);
      if (Number.isFinite(ia) && Number.isFinite(ib) && ia !== ib) return ia - ib;
      return String(a).localeCompare(String(b));
    });
    const next: Record<string, any> = {};
    for (const k of keys) next[k] = pg.paras[k];
    pg.paras = next;
  }

  const out = {
@@ -182,16 +183,17 @@ export const GET: APIRoute = async () => {
    pages,
    stats: {
      pages: Object.keys(pages).length,
      paras: paraCount,
      paras: Object.values(pages).reduce((n, p) => n + Object.keys(p.paras || {}).length, 0),
      errors: errors.length,
    },
    errors,
  };

  if (errors.length) {
    throw new Error(`${errors[0].file}: ${errors[0].error}`);
  }

  return new Response(JSON.stringify(out), {
    headers: {
      "Content-Type": "application/json; charset=utf-8",
      "Cache-Control": "no-store",
    },
    headers: { "Content-Type": "application/json; charset=utf-8" },
  });
};