Compare commits
379 Commits
chore/url-
...
chore/docs
| Author | SHA1 | Date | |
|---|---|---|---|
| 2467b334db | |||
| 0015b384ce | |||
| 25ce0409aa | |||
| eb358fb7f8 | |||
| afa4d84997 | |||
| bf0683bbb2 | |||
| c486a5c5eb | |||
| 82e0d5ba78 | |||
| 7910a3964a | |||
| d4df080f1d | |||
| d0d5e03afb | |||
| c3065e7116 | |||
| bfe8b3b45a | |||
| a5263d65ec | |||
| 9e01821278 | |||
| 51677811ab | |||
| dde3fc9a32 | |||
| c3bdde8f58 | |||
| 5858638134 | |||
| 1bec8ae9ce | |||
| abcba413f5 | |||
| 6ed2cd4284 | |||
| 404a45caa1 | |||
| c5ff82f58d | |||
| 894156c540 | |||
| 63e24020c7 | |||
| ec248e9f72 | |||
| 47836c8de2 | |||
| a66aa24b53 | |||
| 84164d0f2a | |||
| 242aeac07a | |||
| 1bdaf3e986 | |||
| 7583f89384 | |||
| 03742db4e1 | |||
| 9a922ffed3 | |||
| 02a7ea403d | |||
| 63feddb01c | |||
| 36bb47f9c6 | |||
| b2ca1f17a4 | |||
| ad545b52af | |||
| 467c07232e | |||
| 531576452d | |||
| 4235ad85b0 | |||
| fd5c979339 | |||
| 7e8c94df6e | |||
| 6b2fd25d23 | |||
| f1c5bb0d26 | |||
| 2e4bc8f583 | |||
| 8a14ea1d7a | |||
| 9f88112aca | |||
| 689619d14d | |||
| 64e56e8abc | |||
| 8605b7198f | |||
| d41aed040f | |||
| bf01a83268 | |||
| 5b427d5602 | |||
| fa46971e76 | |||
| c313587b26 | |||
| 4976ddcc16 | |||
| 17e11f0322 | |||
| 7df18adfa8 | |||
| 535c5108e2 | |||
| 20705f6c90 | |||
| eabd2f5f29 | |||
| 482151c31c | |||
| 6d9d5a460e | |||
| 89d06ade16 | |||
| 69b35df10c | |||
| b5475e9be1 | |||
| fdd3aace5a | |||
| f86704d67e | |||
| ec8e29a313 | |||
| 1dc9a60580 | |||
| ee18b26d03 | |||
| 5f4a0f74db | |||
| 6b17df7320 | |||
| 0c33495342 | |||
| d8a09b1def | |||
| 39af501ea0 | |||
| 4c821d9e83 | |||
| deb4a91348 | |||
| 5b36b8e54e | |||
| eda5a877ef | |||
| 5b615a6999 | |||
| 99cf0947da | |||
| dbd1e14e4e | |||
| 7033354011 | |||
| 7345730e3c | |||
| cea94c56db | |||
| c1e24736e3 | |||
| 24bbfbc17f | |||
| a11e2f1d18 | |||
| 630b146d02 | |||
| 551360db83 | |||
| a96c282780 | |||
| d2e0f147c2 | |||
| ad95364021 | |||
| e48e322363 | |||
| a9f2a5bbd4 | |||
| 0cba8f868e | |||
| f8e3ee4cca | |||
| 92e0ad01c6 | |||
| e6c18d6b16 | |||
| a3092f5d5b | |||
| 7187b69935 | |||
| 4ba4453661 | |||
| ee42e391e3 | |||
| f7756be59e | |||
| 4abe70e10e | |||
| b2b4ec35c0 | |||
| b255436958 | |||
| ad06b34a85 | |||
| a38f585f3d | |||
| bf0dc125d1 | |||
| f61dc15b47 | |||
| 1ac3d91a19 | |||
| 100ba10409 | |||
| 5f14785abb | |||
| c7043ae9d5 | |||
| bd1235f8c3 | |||
| 7ae7b4dca3 | |||
| f088db57d4 | |||
| 311e94ed91 | |||
| e078f3f9ab | |||
| 7c4bb5a2cf | |||
| 214e174635 | |||
| f1b2f4605f | |||
| 87955adf5d | |||
| e39a0c547d | |||
| c89ddf7237 | |||
| 615effe8bf | |||
| e952b344a0 | |||
| bb0572cc1a | |||
| f6a2347278 | |||
| d902c2bf98 | |||
| baa2082f51 | |||
| 2f249b420f | |||
| d6b4eb82f4 | |||
| bfa44fecda | |||
| e329235aa9 | |||
| 8cbaa5117c | |||
| 3086f333ed | |||
| c1c3c19d13 | |||
| ddcd0acd4d | |||
| 9bc4eeb3e7 | |||
| 7a9a5319ac | |||
| 7d75de5c9f | |||
| 69c91cb661 | |||
| a1bfbf4405 | |||
| be26b425d8 | |||
| abf88e7037 | |||
| 04fee32fdb | |||
| fbddf5c3fc | |||
| bad748df3a | |||
| 0066cf8601 | |||
| 5d3473d66c | |||
| f9d34110e4 | |||
| 84e9c3ead4 | |||
| 72e59175fc | |||
| 81b69ac6d5 | |||
| 513ae72e85 | |||
| 4c4dd1c515 | |||
| 46b15ed6ab | |||
| a015e72f7c | |||
| d5df7d77a0 | |||
| ec3ceee862 | |||
| 867475c3ff | |||
| b024c5557c | |||
| 93306f360d | |||
| 52847d999d | |||
| b9629b43ff | |||
| 06482a9f8d | |||
| f2e4ae5ac2 | |||
| 71baf0f6da | |||
| d02b6fc347 | |||
| 431f1e347b | |||
| ab6f45ed5c | |||
| 02c060d239 | |||
| be2029de82 | |||
| e148eaeaf3 | |||
| c63a1e6ce4 | |||
| b3a73a7781 | |||
| 1968585d0f | |||
| b33c758411 | |||
| afa543125c | |||
| 0d0252cac0 | |||
| a8bd9aeed5 | |||
| d277c61afd | |||
| 86479952d1 | |||
| c94024a8ae | |||
| 70611d16f8 | |||
| 354db231b8 | |||
| 9d8d60d00f | |||
| f5d25abbec | |||
| 8e9f7314f5 | |||
| 03b88b944d | |||
| 385c36f660 | |||
| cfa092cd38 | |||
| 1a762f8f54 | |||
| fbdaf72775 | |||
| 67128a9ca1 | |||
| 898759db3d | |||
| 4f009a9557 | |||
| 378d0981f0 | |||
| 8f3702f803 | |||
| cfd303fc85 | |||
| 0fc0976f8a | |||
| e247ea8ead | |||
| 0c57c4bc6d | |||
| 9b7998e1c3 | |||
| 8997a00413 | |||
| a2e6f6185f | |||
| c2715b01d7 | |||
| 6f09dfcd12 | |||
| bb9f55a3b5 | |||
| 298ee7492c | |||
| 37cb836246 | |||
| 19e3318125 | |||
| 683b02f4a0 | |||
| 20aecc30b1 | |||
| daf57aa152 | |||
| bfd693de92 | |||
| ea2ad0017b | |||
| 82e7473cac | |||
| 315523e80f | |||
| 569b6de154 | |||
| 95f8159554 | |||
| 5698c494f1 | |||
| e640e66b8d | |||
| 9be7d170c6 | |||
| c2c98c516b | |||
| 32554f5998 | |||
| 308f4f92bc | |||
| 4dfd3b026b | |||
| c93f274f41 | |||
| dfa311fb5b | |||
| 3ef1dc2801 | |||
| 435e41ed4d | |||
| 8825932159 | |||
| b55decbea4 | |||
| 414a848db3 | |||
| cbd4f3a57f | |||
| 49f8d6a95e | |||
| 5afa5cbfda | |||
| a1b1df38ba | |||
| d3f7d74da7 | |||
| 6919190107 | |||
| 021ef5abd7 | |||
| 76cdc85f9c | |||
| f2f6df2127 | |||
| dfe13757f7 | |||
| 148ac997df | |||
| 84492d2741 | |||
| 81baadd57f | |||
| 63d0ffc5fc | |||
| 24143fc2c4 | |||
| 55370b704f | |||
| b8a3ce1337 | |||
| 7f9baedf41 | |||
| 1adbe1c7a3 | |||
| 107a26352f | |||
| 1c2b9ddbb6 | |||
| be99460d4d | |||
| 9e1b704aa6 | |||
| 941fbf5845 | |||
| 0b4a31a432 | |||
| c617dc3979 | |||
| 1b95161de0 | |||
| ebd976bd46 | |||
| f8d57d8fe0 | |||
| 09a4d2c472 | |||
| 1f6dc874d0 | |||
| 4dd63945ee | |||
| ba64b0694b | |||
| 58e5ceda59 | |||
| 08f826ee01 | |||
| 3358d280ec | |||
| 9cb0d5e416 | |||
| a46f058917 | |||
| 604b2199da | |||
| d153f71be6 | |||
| 8f64e4b098 | |||
| 459bf195d8 | |||
| 0c46b0d19b | |||
| bfbdc7b688 | |||
| 8fd53dd4d2 | |||
| c8bbee4f74 | |||
| 04cdf54eb7 | |||
| d6bf645ae9 | |||
| 1ca6bcbd81 | |||
| dec5f8eba7 | |||
| 716c887045 | |||
| 9b1789a164 | |||
| 17fa39c7ff | |||
| 8132e315f4 | |||
| 8d993915d7 | |||
| 497bddd05d | |||
| 7c8e49c1a9 | |||
| 901d28b89b | |||
| 43e2862c89 | |||
| 73fb38c4d1 | |||
| a81d206aba | |||
| 9801ea3cea | |||
| c11189fe11 | |||
| b47edb24cf | |||
| be191b09a0 | |||
| e06587478d | |||
| 402ffb04cd | |||
| 1cbfc02670 | |||
| 28d2fbbd2f | |||
| 225368a952 | |||
| 3574695041 | |||
| ea68025a1d | |||
| 3a08698003 | |||
| 3d583608c2 | |||
| 01ae95ab43 | |||
| 0d5821c640 | |||
| 2bcea39558 | |||
| af85970d4a | |||
| 210f621487 | |||
| 8ad960dc69 | |||
| d45a8b285f | |||
| b6e04a9138 | |||
| dcf1fc2d0b | |||
| 41b0517c6c | |||
| 6b43eb199d | |||
| d40f24e92d | |||
| 480a61b071 | |||
| a5d68d6a7e | |||
| 390f2c33e5 | |||
| 16485dc4a9 | |||
| a43ce5f188 | |||
| 0519ae2dd0 | |||
| 0d5b790e52 | |||
| 342e21b9ea | |||
| 4dec9e182b | |||
| c7ae883c6a | |||
| 9b4584f70a | |||
| 7b64fb7401 | |||
| 57cb23ce8b | |||
| 708b87ff35 | |||
| 577cfd08e8 | |||
| de9edbe532 | |||
| 5e95dc9898 | |||
| 006fec7efd | |||
| 2b612214bb | |||
| 29a6c349aa | |||
| 33a227c401 | |||
| 396ad4df7c | |||
| 0b39427090 | |||
| 8fcb18cb46 | |||
| d03fc519de | |||
| 97dd3797d6 | |||
| 6c7b7ab6a0 | |||
| 105dfe1b5b | |||
| 82f6453538 | |||
| fe862102d3 | |||
| 6ef538a0c4 | |||
| 689612ff7f | |||
| 7b135a4707 | |||
| 0cb8a54195 | |||
| a7a333397d | |||
| eb1d444776 | |||
| 68c3416594 | |||
| ae809e0152 | |||
| 7444eeb532 | |||
| 9bbebf5886 | |||
| fe7810671d | |||
| 53562025ac | |||
| 2b35315466 | |||
| 1b7f23d0a6 | |||
| 3d1d4d7952 | |||
| 3320563e1b | |||
| 798b2ddd0b | |||
| 31d4896f5d | |||
| 3fda37491d | |||
| 488c02b8b5 | |||
| f9ea3760e2 | |||
| 00e1a1d4b0 | |||
@@ -3,7 +3,7 @@ name: "Correction paragraphe"
about: "Proposer une correction ciblée (un paragraphe) avec justification."
---

## Chemin (ex: /archicratie/prologue/)
## Chemin (ex: /archicrat-ia/prologue/)
<!-- obligatoire -->
/...

@@ -3,7 +3,7 @@ name: "Vérification factuelle / sources"
about: "Signaler une assertion à sourcer ou à corriger (preuves, références)."
---

## Chemin (ex: /archicratie/prologue/)
## Chemin (ex: /archicrat-ia/prologue/)
<!-- obligatoire -->
/...
.gitea/workflows/anno-apply-pr.yml (new file, 450 lines)
@@ -0,0 +1,450 @@
name: Anno Apply (PR)

on:
  issues:
    types: [labeled]
  workflow_dispatch:
    inputs:
      issue:
        description: "Issue number to apply"
        required: true

env:
  NODE_OPTIONS: --dns-result-order=ipv4first

defaults:
  run:
    shell: bash

concurrency:
  group: anno-apply-${{ github.event.issue.number || github.event.issue.index || inputs.issue || 'manual' }}
  cancel-in-progress: true

jobs:
  apply-approved:
    runs-on: mac-ci
    container:
      image: mcr.microsoft.com/devcontainers/javascript-node:22-bookworm

    steps:
      - name: Tools sanity
        run: |
          set -euo pipefail
          git --version
          node --version
          npm --version

      - name: Derive context (event.json / workflow_dispatch)
        env:
          INPUT_ISSUE: ${{ inputs.issue }}
          FORGE_API: ${{ vars.FORGE_API || vars.FORGE_BASE || vars.FORGE_BASE_URL }}
        run: |
          set -euo pipefail
          export EVENT_JSON="/var/run/act/workflow/event.json"
          test -f "$EVENT_JSON" || { echo "Missing $EVENT_JSON"; exit 1; }

          node --input-type=module - <<'NODE' > /tmp/anno.env
          import fs from "node:fs";

          const ev = JSON.parse(fs.readFileSync(process.env.EVENT_JSON, "utf8"));
          const repoObj = ev?.repository || {};

          const cloneUrl =
            repoObj?.clone_url ||
            (repoObj?.html_url ? (repoObj.html_url.replace(/\/$/,"") + ".git") : "");

          if (!cloneUrl) throw new Error("No repository clone_url/html_url in event.json");

          let owner =
            repoObj?.owner?.login ||
            repoObj?.owner?.username ||
            (repoObj?.full_name ? repoObj.full_name.split("/")[0] : "");

          let repo =
            repoObj?.name ||
            (repoObj?.full_name ? repoObj.full_name.split("/")[1] : "");

          if (!owner || !repo) {
            const m = cloneUrl.match(/[:/](?<o>[^/]+)\/(?<r>[^/]+?)(?:\.git)?$/);
            if (m?.groups) {
              owner = owner || m.groups.o;
              repo = repo || m.groups.r;
            }
          }
          if (!owner || !repo) throw new Error("Cannot infer owner/repo");

          const defaultBranch = repoObj?.default_branch || "main";

          const issueNumber =
            ev?.issue?.number ||
            ev?.issue?.index ||
            (process.env.INPUT_ISSUE ? Number(process.env.INPUT_ISSUE) : 0);

          if (!issueNumber || !Number.isFinite(Number(issueNumber))) {
            throw new Error("No issue number in event.json or workflow_dispatch input");
          }

          let labelName = "workflow_dispatch";
          const lab = ev?.label;
          if (typeof lab === "string") labelName = lab;
          else if (lab && typeof lab === "object" && typeof lab.name === "string") labelName = lab.name;
          else if (ev?.label?.name) labelName = ev.label.name;

          const u = new URL(cloneUrl);
          const origin = u.origin;

          const apiBase = (process.env.FORGE_API && String(process.env.FORGE_API).trim())
            ? String(process.env.FORGE_API).trim().replace(/\/+$/,"")
            : origin;

          function sh(s) { return JSON.stringify(String(s)); }

          process.stdout.write([
            `CLONE_URL=${sh(cloneUrl)}`,
            `OWNER=${sh(owner)}`,
            `REPO=${sh(repo)}`,
            `DEFAULT_BRANCH=${sh(defaultBranch)}`,
            `ISSUE_NUMBER=${sh(issueNumber)}`,
            `LABEL_NAME=${sh(labelName)}`,
            `API_BASE=${sh(apiBase)}`
          ].join("\n") + "\n");
          NODE

          echo "context:"
          sed -n '1,120p' /tmp/anno.env

      - name: Early gate (label event fast-skip, but tolerant)
        run: |
          set -euo pipefail
          source /tmp/anno.env

          echo "event label = $LABEL_NAME"

          if [[ "$LABEL_NAME" != "state/approved" && "$LABEL_NAME" != "workflow_dispatch" && "$LABEL_NAME" != "" && "$LABEL_NAME" != "[object Object]" ]]; then
            echo "label=$LABEL_NAME => skip early"
            echo "SKIP=1" >> /tmp/anno.env
            echo "SKIP_REASON=\"label_not_approved_event\"" >> /tmp/anno.env
            exit 0
          fi

          echo "continue to API gating (issue=$ISSUE_NUMBER)"

      - name: Fetch issue + hard gate on labels + Type
        env:
          FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
        run: |
          set -euo pipefail
          source /tmp/anno.env
          [[ "${SKIP:-0}" != "1" ]] || { echo "skipped"; exit 0; }

          test -n "${FORGE_TOKEN:-}" || { echo "Missing secret FORGE_TOKEN"; exit 1; }

          curl -fsS \
            -H "Authorization: token $FORGE_TOKEN" \
            -H "Accept: application/json" \
            "$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE_NUMBER" \
            -o /tmp/issue.json

          node --input-type=module - <<'NODE' >> /tmp/anno.env
          import fs from "node:fs";

          const issue = JSON.parse(fs.readFileSync("/tmp/issue.json", "utf8"));
          const body = String(issue.body || "").replace(/\r\n/g, "\n");

          const labels = Array.isArray(issue.labels)
            ? issue.labels.map(l => String(l.name || "")).filter(Boolean)
            : [];
          const hasApproved = labels.includes("state/approved");

          function pickLine(key) {
            const re = new RegExp(`^\\s*${key}\\s*:\\s*([^\\n\\r]+)`, "mi");
            const m = body.match(re);
            return m ? m[1].trim() : "";
          }

          const typeRaw = pickLine("Type");
          const type = String(typeRaw || "").trim().toLowerCase();

          const allowedAnno = new Set(["type/media", "type/reference", "type/comment"]);
          const proposerTypes = new Set(["type/correction", "type/fact-check"]);

          const out = [];
          out.push(`ISSUE_TYPE=${JSON.stringify(type)}`);

          if (!hasApproved) {
            out.push(`SKIP=1`);
            out.push(`SKIP_REASON=${JSON.stringify("not_approved_label_present")}`);
            process.stdout.write(out.join("\n") + "\n");
            process.exit(0);
          }

          if (!type) {
            out.push(`SKIP=1`);
            out.push(`SKIP_REASON=${JSON.stringify("missing_type")}`);
          } else if (allowedAnno.has(type)) {
            // proceed
          } else if (proposerTypes.has(type)) {
            out.push(`SKIP=1`);
            out.push(`SKIP_REASON=${JSON.stringify("proposer_type:" + type)}`);
          } else {
            out.push(`SKIP=1`);
            out.push(`SKIP_REASON=${JSON.stringify("unsupported_type:" + type)}`);
          }

          process.stdout.write(out.join("\n") + "\n");
          NODE

          echo "gating result:"
          grep -E '^(ISSUE_TYPE|SKIP|SKIP_REASON)=' /tmp/anno.env || true

      - name: Comment issue if skipped (unsupported / missing Type only)
        if: ${{ always() }}
        env:
          FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
        run: |
          set -euo pipefail
          source /tmp/anno.env || true

          [[ "${SKIP:-0}" == "1" ]] || exit 0

          if [[ "${SKIP_REASON:-}" == "not_approved_label_present" || "${SKIP_REASON:-}" == "label_not_approved_event" ]]; then
            echo "skip reason=${SKIP_REASON} -> no comment"
            exit 0
          fi

          if [[ "${SKIP_REASON:-}" == proposer_type:* ]]; then
            echo "proposer ticket detected -> anno stays silent"
            exit 0
          fi

          test -n "${FORGE_TOKEN:-}" || exit 0

          REASON="${SKIP_REASON:-}"
          TYPE="${ISSUE_TYPE:-}"

          if [[ "$REASON" == unsupported_type:* ]]; then
            MSG="Ticket #${ISSUE_NUMBER} ignored: unsupported Type (${TYPE}). Supported types: type/media, type/reference, type/comment."
          else
            MSG="Ticket #${ISSUE_NUMBER} ignored: missing or unreadable 'Type:'. Expected: type/media|type/reference|type/comment"
          fi

          PAYLOAD="$(node --input-type=module -e 'console.log(JSON.stringify({body: process.argv[1] || ""}))' "$MSG")"

          curl -fsS -X POST \
            -H "Authorization: token $FORGE_TOKEN" \
            -H "Content-Type: application/json" \
            "$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE_NUMBER/comments" \
            --data-binary "$PAYLOAD"

      - name: Checkout default branch
        run: |
          set -euo pipefail
          source /tmp/anno.env
          [[ "${SKIP:-0}" != "1" ]] || { echo "skipped"; exit 0; }

          rm -rf .git
          git init -q
          git remote add origin "$CLONE_URL"
          git fetch --depth 1 origin "$DEFAULT_BRANCH"
          git -c advice.detachedHead=false checkout -q FETCH_HEAD
          git log -1 --oneline

      - name: Install deps
        run: |
          set -euo pipefail
          source /tmp/anno.env
          [[ "${SKIP:-0}" != "1" ]] || { echo "skipped"; exit 0; }
          npm ci --no-audit --no-fund

      - name: Check apply script exists
        run: |
          set -euo pipefail
          source /tmp/anno.env
          [[ "${SKIP:-0}" != "1" ]] || { echo "skipped"; exit 0; }
          test -f scripts/apply-annotation-ticket.mjs || {
            echo "missing scripts/apply-annotation-ticket.mjs on $DEFAULT_BRANCH"
            ls -la scripts | sed -n '1,200p' || true
            exit 1
          }

      - name: Build dist (needed for --verify)
        run: |
          set -euo pipefail
          source /tmp/anno.env
          [[ "${SKIP:-0}" != "1" ]] || { echo "skipped"; exit 0; }

          npm run build

          test -f dist/para-index.json || {
            echo "missing dist/para-index.json after build"
            ls -la dist | sed -n '1,200p' || true
            exit 1
          }
          echo "dist/para-index.json present"

      - name: Apply ticket on bot branch (strict+verify, commit)
        continue-on-error: true
        env:
          FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
          BOT_GIT_NAME: ${{ secrets.BOT_GIT_NAME }}
          BOT_GIT_EMAIL: ${{ secrets.BOT_GIT_EMAIL }}
        run: |
          set -euo pipefail
          source /tmp/anno.env
          [[ "${SKIP:-0}" != "1" ]] || { echo "skipped"; exit 0; }
          test -d .git || { echo "not a git repo (checkout failed)"; echo "APPLY_RC=90" >> /tmp/anno.env; exit 0; }

          test -n "${FORGE_TOKEN:-}" || { echo "Missing secret FORGE_TOKEN"; exit 1; }

          git config user.name "${BOT_GIT_NAME:-archicratie-bot}"
          git config user.email "${BOT_GIT_EMAIL:-bot@archicratie.local}"

          START_SHA="$(git rev-parse HEAD)"
          TS="$(date -u +%Y%m%d-%H%M%S)"
          BR="bot/anno-${ISSUE_NUMBER}-${TS}"
          echo "BRANCH=$BR" >> /tmp/anno.env
          git checkout -b "$BR"

          export FORGE_API="$API_BASE"
          export GITEA_OWNER="$OWNER"
          export GITEA_REPO="$REPO"

          LOG="/tmp/apply.log"
          set +e
          node scripts/apply-annotation-ticket.mjs "$ISSUE_NUMBER" --strict --verify --commit >"$LOG" 2>&1
          RC=$?
          set -e

          echo "APPLY_RC=$RC" >> /tmp/anno.env

          echo "== apply log (tail) =="
          tail -n 180 "$LOG" || true

          END_SHA="$(git rev-parse HEAD)"

          if [[ "$RC" -ne 0 ]]; then
            echo "NOOP=0" >> /tmp/anno.env
            exit 0
          fi

          if [[ "$START_SHA" == "$END_SHA" ]]; then
            echo "NOOP=1" >> /tmp/anno.env
          else
            echo "NOOP=0" >> /tmp/anno.env
            echo "END_SHA=$END_SHA" >> /tmp/anno.env
          fi

      - name: Comment issue on failure (strict/verify/etc)
        if: ${{ always() }}
        env:
          FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
        run: |
          set -euo pipefail
          source /tmp/anno.env || true
          [[ "${SKIP:-0}" != "1" ]] || { echo "skipped"; exit 0; }

          RC="${APPLY_RC:-0}"
          if [[ "$RC" == "0" ]]; then
            echo "no failure detected"
            exit 0
          fi

          test -n "${FORGE_TOKEN:-}" || exit 0

          if [[ -f /tmp/apply.log ]]; then
            BODY="$(tail -n 160 /tmp/apply.log | sed 's/\r$//')"
          else
            BODY="(no apply log found)"
          fi

          MSG="apply-annotation-ticket failed (rc=${RC}).\n\n\`\`\`\n${BODY}\n\`\`\`\n"
          PAYLOAD="$(node --input-type=module -e 'console.log(JSON.stringify({body: process.argv[1] || ""}))' "$MSG")"

          curl -fsS -X POST \
            -H "Authorization: token $FORGE_TOKEN" \
            -H "Content-Type: application/json" \
            "$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE_NUMBER/comments" \
            --data-binary "$PAYLOAD"

      - name: Push bot branch
        if: ${{ always() }}
        env:
          FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
        run: |
          set -euo pipefail
          source /tmp/anno.env || true
          [[ "${SKIP:-0}" != "1" ]] || exit 0

          [[ "${APPLY_RC:-0}" == "0" ]] || { echo "apply failed -> skip push"; exit 0; }
          [[ "${NOOP:-0}" == "0" ]] || { echo "no-op -> skip push"; exit 0; }
          test -d .git || { echo "no git repo -> skip push"; exit 0; }

          AUTH_URL="$(node --input-type=module -e '
            const [clone, tok] = process.argv.slice(1);
            const u = new URL(clone);
            u.username = "oauth2";
            u.password = tok;
            console.log(u.toString());
          ' "$CLONE_URL" "$FORGE_TOKEN")"

          git remote set-url origin "$AUTH_URL"
          git push -u origin "$BRANCH"

      - name: Create PR + comment issue
        if: ${{ always() }}
        env:
          FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
        run: |
          set -euo pipefail
          source /tmp/anno.env || true
          [[ "${SKIP:-0}" != "1" ]] || exit 0

          [[ "${APPLY_RC:-0}" == "0" ]] || { echo "apply failed -> skip PR"; exit 0; }
          [[ "${NOOP:-0}" == "0" ]] || { echo "no-op -> skip PR"; exit 0; }

          PR_TITLE="anno: apply ticket #${ISSUE_NUMBER}"
          PR_BODY="PR auto depuis ticket #${ISSUE_NUMBER} (state/approved).\n\n- Branche: ${BRANCH}\n- Commit: ${END_SHA}\n\nMerge si CI OK."

          PR_PAYLOAD="$(node --input-type=module -e '
            const [title, body, base, head] = process.argv.slice(1);
            console.log(JSON.stringify({ title, body, base, head, allow_maintainer_edit: true }));
          ' "$PR_TITLE" "$PR_BODY" "$DEFAULT_BRANCH" "${OWNER}:${BRANCH}")"

          PR_JSON="$(curl -fsS -X POST \
            -H "Authorization: token $FORGE_TOKEN" \
            -H "Content-Type: application/json" \
            "$API_BASE/api/v1/repos/$OWNER/$REPO/pulls" \
            --data-binary "$PR_PAYLOAD")"

          PR_URL="$(node --input-type=module -e '
            const pr = JSON.parse(process.argv[1] || "{}");
            console.log(pr.html_url || pr.url || "");
          ' "$PR_JSON")"

          test -n "$PR_URL" || { echo "PR URL missing. Raw: $PR_JSON"; exit 1; }

          MSG="PR created for ticket #${ISSUE_NUMBER}: ${PR_URL}"
          C_PAYLOAD="$(node --input-type=module -e 'console.log(JSON.stringify({body: process.argv[1] || ""}))' "$MSG")"

          curl -fsS -X POST \
            -H "Authorization: token $FORGE_TOKEN" \
            -H "Content-Type: application/json" \
            "$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE_NUMBER/comments" \
            --data-binary "$C_PAYLOAD"

          echo "PR: $PR_URL"

      - name: Finalize (fail job if apply failed)
        if: ${{ always() }}
        run: |
          set -euo pipefail
          source /tmp/anno.env || true

          [[ "${SKIP:-0}" != "1" ]] || { echo "skipped"; exit 0; }

          RC="${APPLY_RC:-0}"
          if [[ "$RC" != "0" ]]; then
            echo "apply failed (rc=$RC)"
            exit "$RC"
          fi
          echo "apply ok"
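The job above only acts on a ticket that already carries the state/approved label (or on a manual workflow_dispatch with an issue number); the gating step then re-fetches the issue and refuses to continue unless the label is really present. A minimal sketch of how a maintainer might drive it from the command line, assuming the same Gitea API base and FORGE_TOKEN the workflow uses; the OWNER/REPO/ISSUE variables here are placeholders and the label-ID lookup is illustrative, not part of the workflow itself:

```bash
#!/usr/bin/env bash
# Sketch only: add state/approved to a ticket so the `issues: [labeled]` trigger fires.
# Assumes API_BASE, OWNER, REPO, ISSUE and a FORGE_TOKEN with issue-write scope are exported.
set -euo pipefail

# Gitea's "add labels to issue" endpoint takes label IDs, so resolve the ID first.
LABEL_ID="$(curl -fsS -H "Authorization: token $FORGE_TOKEN" \
  "$API_BASE/api/v1/repos/$OWNER/$REPO/labels?limit=1000" \
  | python3 -c 'import sys, json; ls = json.load(sys.stdin); print(next((l["id"] for l in ls if l["name"] == "state/approved"), ""))')"
test -n "$LABEL_ID" || { echo "state/approved label not found"; exit 1; }

curl -fsS -X POST \
  -H "Authorization: token $FORGE_TOKEN" \
  -H "Content-Type: application/json" \
  "$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE/labels" \
  --data-binary "{\"labels\": [$LABEL_ID]}"
```

The same effect can be had from the Actions tab by dispatching the workflow with the issue number as input; either path lands in the same hard gate on labels and Type.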
.gitea/workflows/anno-reject.yml (new file, 181 lines)
@@ -0,0 +1,181 @@
name: Anno Reject (close issue)

on:
  issues:
    types: [labeled]
  workflow_dispatch:
    inputs:
      issue:
        description: "Issue number to reject/close"
        required: true

env:
  NODE_OPTIONS: --dns-result-order=ipv4first

defaults:
  run:
    shell: bash

concurrency:
  group: anno-reject-${{ github.event.issue.number || github.event.issue.index || inputs.issue || 'manual' }}
  cancel-in-progress: true

jobs:
  reject:
    runs-on: mac-ci
    container:
      image: mcr.microsoft.com/devcontainers/javascript-node:22-bookworm

    steps:
      - name: Tools sanity
        run: |
          set -euo pipefail
          node --version

      - name: Derive context (event.json / workflow_dispatch)
        env:
          INPUT_ISSUE: ${{ inputs.issue }}
          FORGE_API: ${{ vars.FORGE_API || vars.FORGE_BASE || vars.FORGE_BASE_URL }}
        run: |
          set -euo pipefail
          export EVENT_JSON="/var/run/act/workflow/event.json"
          test -f "$EVENT_JSON" || { echo "❌ Missing $EVENT_JSON"; exit 1; }

          node --input-type=module - <<'NODE' > /tmp/reject.env
          import fs from "node:fs";

          const ev = JSON.parse(fs.readFileSync(process.env.EVENT_JSON, "utf8"));
          const repoObj = ev?.repository || {};

          const cloneUrl =
            repoObj?.clone_url ||
            (repoObj?.html_url ? (repoObj.html_url.replace(/\/$/,"") + ".git") : "");

          let owner =
            repoObj?.owner?.login ||
            repoObj?.owner?.username ||
            (repoObj?.full_name ? repoObj.full_name.split("/")[0] : "");

          let repo =
            repoObj?.name ||
            (repoObj?.full_name ? repoObj.full_name.split("/")[1] : "");

          if ((!owner || !repo) && cloneUrl) {
            const m = cloneUrl.match(/[:/](?<o>[^/]+)\/(?<r>[^/]+?)(?:\.git)?$/);
            if (m?.groups) { owner = owner || m.groups.o; repo = repo || m.groups.r; }
          }
          if (!owner || !repo) throw new Error("Cannot infer owner/repo");

          const issueNumber =
            ev?.issue?.number ||
            ev?.issue?.index ||
            (process.env.INPUT_ISSUE ? Number(process.env.INPUT_ISSUE) : 0);

          if (!issueNumber || !Number.isFinite(Number(issueNumber))) {
            throw new Error("No issue number in event.json or workflow_dispatch input");
          }

          // label name: best-effort (non-bloquant)
          let labelName = "workflow_dispatch";
          const lab = ev?.label;
          if (typeof lab === "string") labelName = lab;
          else if (lab && typeof lab === "object" && typeof lab.name === "string") labelName = lab.name;

          let apiBase = "";
          if (process.env.FORGE_API && String(process.env.FORGE_API).trim()) {
            apiBase = String(process.env.FORGE_API).trim().replace(/\/+$/,"");
          } else if (cloneUrl) {
            apiBase = new URL(cloneUrl).origin;
          } else {
            apiBase = "";
          }

          function sh(s){ return JSON.stringify(String(s)); }

          process.stdout.write([
            `OWNER=${sh(owner)}`,
            `REPO=${sh(repo)}`,
            `ISSUE_NUMBER=${sh(issueNumber)}`,
            `LABEL_NAME=${sh(labelName)}`,
            `API_BASE=${sh(apiBase)}`
          ].join("\n") + "\n");
          NODE

          echo "✅ context:"
          sed -n '1,120p' /tmp/reject.env

      - name: Early gate (fast-skip, tolerant)
        run: |
          set -euo pipefail
          source /tmp/reject.env
          echo "ℹ️ event label = $LABEL_NAME"

          if [[ "$LABEL_NAME" != "state/rejected" && "$LABEL_NAME" != "workflow_dispatch" && "$LABEL_NAME" != "" && "$LABEL_NAME" != "[object Object]" ]]; then
            echo "ℹ️ label=$LABEL_NAME => skip early"
            echo "SKIP=1" >> /tmp/reject.env
            echo "SKIP_REASON=\"label_not_rejected_event\"" >> /tmp/reject.env
            exit 0
          fi

      - name: Comment + close (only if label state/rejected is PRESENT now, and no conflict)
        env:
          FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
        run: |
          set -euo pipefail
          source /tmp/reject.env
          [[ "${SKIP:-0}" != "1" ]] || { echo "ℹ️ skipped"; exit 0; }

          test -n "${FORGE_TOKEN:-}" || { echo "❌ Missing secret FORGE_TOKEN"; exit 1; }
          test -n "${API_BASE:-}" || { echo "❌ Missing API_BASE"; exit 1; }

          curl -fsS \
            -H "Authorization: token $FORGE_TOKEN" \
            -H "Accept: application/json" \
            "$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE_NUMBER" \
            -o /tmp/reject.issue.json

          node --input-type=module - <<'NODE' > /tmp/reject.flags
          import fs from "node:fs";
          const issue = JSON.parse(fs.readFileSync("/tmp/reject.issue.json","utf8"));
          const labels = Array.isArray(issue.labels) ? issue.labels.map(l => String(l.name || "")).filter(Boolean) : [];
          const hasApproved = labels.includes("state/approved");
          const hasRejected = labels.includes("state/rejected");
          process.stdout.write(`HAS_APPROVED=${hasApproved ? "1":"0"}\nHAS_REJECTED=${hasRejected ? "1":"0"}\n`);
          NODE

          source /tmp/reject.flags

          # Do nothing unless state/rejected is truly present now (anti payload weird)
          if [[ "${HAS_REJECTED:-0}" != "1" ]]; then
            echo "ℹ️ state/rejected not present -> skip"
            exit 0
          fi

          if [[ "${HAS_APPROVED:-0}" == "1" && "${HAS_REJECTED:-0}" == "1" ]]; then
            MSG="⚠️ Conflit d'état sur le ticket #${ISSUE_NUMBER} : labels **state/approved** et **state/rejected** présents.\n\n➡️ Action manuelle requise : retirer l'un des deux labels avant relance."
            PAYLOAD="$(node --input-type=module -e 'console.log(JSON.stringify({body: process.argv[1]||""}))' "$MSG")"
            curl -fsS -X POST \
              -H "Authorization: token $FORGE_TOKEN" \
              -H "Content-Type: application/json" \
              "$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE_NUMBER/comments" \
              --data-binary "$PAYLOAD"
            echo "ℹ️ conflict => stop"
            exit 0
          fi

          MSG="❌ Ticket #${ISSUE_NUMBER} refusé (label state/rejected)."
          PAYLOAD="$(node --input-type=module -e 'console.log(JSON.stringify({body: process.argv[1]||""}))' "$MSG")"

          curl -fsS -X POST \
            -H "Authorization: token $FORGE_TOKEN" \
            -H "Content-Type: application/json" \
            "$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE_NUMBER/comments" \
            --data-binary "$PAYLOAD"

          curl -fsS -X PATCH \
            -H "Authorization: token $FORGE_TOKEN" \
            -H "Content-Type: application/json" \
            "$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE_NUMBER" \
            --data-binary '{"state":"closed"}'

          echo "✅ rejected+closed"
@@ -4,22 +4,37 @@ on:
  issues:
    types: [opened, edited]

concurrency:
  group: auto-label-${{ github.event.issue.number || github.event.issue.index || 'manual' }}
  cancel-in-progress: true

jobs:
  label:
    runs-on: ubuntu-latest
    runs-on: mac-ci
    container:
      image: mcr.microsoft.com/devcontainers/javascript-node:22-bookworm

    steps:
      - name: Apply labels from Type/State/Category
        env:
          FORGE_BASE: ${{ vars.FORGE_API || vars.FORGE_BASE }}
          # IMPORTANT: préfère FORGE_BASE (LAN) si défini, sinon FORGE_API
          FORGE_BASE: ${{ vars.FORGE_BASE || vars.FORGE_API || vars.FORGE_API_BASE }}
          FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
          REPO_FULL: ${{ gitea.repository }}
          EVENT_PATH: ${{ github.event_path }}
          NODE_OPTIONS: --dns-result-order=ipv4first
        run: |
          python3 - <<'PY'
          import json, os, re, urllib.request, urllib.error
          import json, os, re, time, urllib.request, urllib.error, socket

          forge = (os.environ.get("FORGE_BASE") or "").rstrip("/")
          if not forge:
              raise SystemExit("Missing FORGE_BASE/FORGE_API repo variable (e.g. http://192.168.1.20:3000)")

          token = os.environ.get("FORGE_TOKEN") or ""
          if not token:
              raise SystemExit("Missing secret FORGE_TOKEN")

          forge = os.environ["FORGE_BASE"].rstrip("/")
          token = os.environ["FORGE_TOKEN"]
          owner, repo = os.environ["REPO_FULL"].split("/", 1)
          event_path = os.environ["EVENT_PATH"]

@@ -46,12 +61,9 @@ jobs:
          print("PARSED:", {"Type": t, "State": s, "Category": c})

          # 1) explicite depuis le body
          if t:
              desired.add(t)
          if s:
              desired.add(s)
          if c:
              desired.add(c)
          if t: desired.add(t)
          if s: desired.add(s)
          if c: desired.add(c)

          # 2) fallback depuis le titre si Type absent
          if not t:
@@ -76,42 +88,56 @@ jobs:
              "Authorization": f"token {token}",
              "Accept": "application/json",
              "Content-Type": "application/json",
              "User-Agent": "archicratie-auto-label/1.0",
              "User-Agent": "archicratie-auto-label/1.1",
          }

          def jreq(method, url, payload=None):
          def jreq(method, url, payload=None, timeout=60, retries=4, backoff=2.0):
              data = None if payload is None else json.dumps(payload).encode("utf-8")
              req = urllib.request.Request(url, data=data, headers=headers, method=method)
              try:
                  with urllib.request.urlopen(req, timeout=20) as r:
                      b = r.read()
                      return json.loads(b.decode("utf-8")) if b else None
              except urllib.error.HTTPError as e:
                  b = e.read().decode("utf-8", errors="replace")
                  raise RuntimeError(f"HTTP {e.code} {method} {url}\n{b}") from e
              last_err = None
              for i in range(retries):
                  req = urllib.request.Request(url, data=data, headers=headers, method=method)
                  try:
                      with urllib.request.urlopen(req, timeout=timeout) as r:
                          b = r.read()
                          return json.loads(b.decode("utf-8")) if b else None
                  except urllib.error.HTTPError as e:
                      b = e.read().decode("utf-8", errors="replace")
                      raise RuntimeError(f"HTTP {e.code} {method} {url}\n{b}") from e
                  except (TimeoutError, socket.timeout, urllib.error.URLError) as e:
                      last_err = e
                      # retry only on network/timeout
                      time.sleep(backoff * (i + 1))
              raise RuntimeError(f"Network/timeout after retries: {method} {url}\n{last_err}")

          # labels repo
          labels = jreq("GET", f"{api}/repos/{owner}/{repo}/labels?limit=1000") or []
          labels = jreq("GET", f"{api}/repos/{owner}/{repo}/labels?limit=1000", timeout=60) or []
          name_to_id = {x.get("name"): x.get("id") for x in labels}

          missing = [x for x in desired if x not in name_to_id]
          if missing:
              raise SystemExit("Missing labels in repo: " + ", ".join(sorted(missing)))

          wanted_ids = [name_to_id[x] for x in desired]
          wanted_ids = sorted({int(name_to_id[x]) for x in desired})

          # labels actuels de l'issue
          current = jreq("GET", f"{api}/repos/{owner}/{repo}/issues/{number}/labels") or []
          current_ids = {x.get("id") for x in current if x.get("id") is not None}
          current = jreq("GET", f"{api}/repos/{owner}/{repo}/issues/{number}/labels", timeout=60) or []
          current_ids = {int(x.get("id")) for x in current if x.get("id") is not None}

          final_ids = sorted(current_ids.union(wanted_ids))

          # set labels = union (n'enlève rien)
          # Replace labels = union (n'enlève rien)
          url = f"{api}/repos/{owner}/{repo}/issues/{number}/labels"
          try:
              jreq("PUT", url, {"labels": final_ids})
          except Exception:
              jreq("PUT", url, final_ids)

          # IMPORTANT: on n'envoie JAMAIS une liste brute ici (ça a causé le 422)
          jreq("PUT", url, {"labels": final_ids}, timeout=90, retries=4)

          # vérif post-apply (anti "timeout mais appliqué")
          post = jreq("GET", f"{api}/repos/{owner}/{repo}/issues/{number}/labels", timeout=60) or []
          post_ids = {int(x.get("id")) for x in post if x.get("id") is not None}

          missing_ids = [i for i in wanted_ids if i not in post_ids]
          if missing_ids:
              raise RuntimeError(f"Labels not applied after PUT (missing ids): {missing_ids}")

          print(f"OK labels #{number}: {sorted(desired)}")
          PY
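Both the auto-label script above and the apply gate earlier read plain `Key: value` lines out of the issue body (the Node gate via pickLine("Type"), the Python script via its PARSED Type/State/Category print). A minimal sketch of opening a ticket those parsers will understand, assuming the same API base and token; the State and Category values are placeholders, since the repo's full label set is not part of this compare:

```bash
#!/usr/bin/env bash
# Sketch only: create a ticket whose body carries the Type/State/Category lines the
# workflows parse. Assumes API_BASE, OWNER, REPO and FORGE_TOKEN are exported.
set -euo pipefail

# Type must be type/media, type/reference or type/comment for the apply job to act;
# type/correction and type/fact-check are left to the proposer flow.
BODY=$'Type: type/media\nState: state/proposed\nCategory: cat/media\n\n## Chemin (ex: /archicrat-ia/prologue/)\n/archicrat-ia/prologue/\n'

PAYLOAD="$(python3 -c 'import json, sys; print(json.dumps({"title": sys.argv[1], "body": sys.argv[2]}))' \
  "media: illustration du prologue" "$BODY")"

curl -fsS -X POST \
  -H "Authorization: token $FORGE_TOKEN" \
  -H "Content-Type: application/json" \
  "$API_BASE/api/v1/repos/$OWNER/$REPO/issues" \
  --data-binary "$PAYLOAD"
```

The labeler only adds labels that already exist in the repository (it exits with "Missing labels in repo" otherwise), so the placeholder values would need to match real labels before this sketch does anything useful.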
@@ -3,7 +3,7 @@ name: CI
on:
  push:
  pull_request:
    branches: [master]
    branches: [main]
  workflow_dispatch:

env:
@@ -15,7 +15,7 @@ defaults:

jobs:
  build-and-anchors:
    runs-on: ubuntu-latest
    runs-on: mac-ci
    container:
      image: mcr.microsoft.com/devcontainers/javascript-node:22-bookworm

@@ -79,22 +79,7 @@ jobs:
          set -euo pipefail
          npm ci

      - name: Inline scripts syntax check
      - name: Full test suite (CI=1)
        run: |
          set -euo pipefail
          node scripts/check-inline-js.mjs

      - name: Build (includes postbuild injection + pagefind)
        run: |
          set -euo pipefail
          npm run build

      - name: Anchors contract
        run: |
          set -euo pipefail
          npm run test:anchors

      - name: Verify anchor aliases injected in dist
        run: |
          set -euo pipefail
          node scripts/verify-anchor-aliases-in-dist.mjs
          npm run ci
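The updated CI job delegates everything after `npm ci` to a single `npm run ci`. That script's definition lives in package.json and is not shown in this compare; judging from the steps it replaces, it presumably chains roughly the following (a sketch under that assumption, not the actual script):

```bash
# Hypothetical equivalent of `npm run ci`, reconstructed from the removed steps:
node scripts/check-inline-js.mjs \
  && npm run build \
  && npm run test:anchors \
  && node scripts/verify-anchor-aliases-in-dist.mjs
```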
@@ -1,103 +0,0 @@
name: CI

on:
  push: {}
  pull_request:
    branches: ["master"]
  workflow_dispatch: {}

env:
  NODE_OPTIONS: --dns-result-order=ipv4first

defaults:
  run:
    shell: bash

jobs:
  build-and-anchors:
    runs-on: ubuntu-latest
    container:
      image: mcr.microsoft.com/devcontainers/javascript-node:22-bookworm

    steps:
      - name: Tools sanity
        run: |
          set -euo pipefail
          git --version
          node --version
          npm --version
          npm ping --registry=https://registry.npmjs.org

      - name: Checkout (from event.json, no external actions)
        run: |
          set -euo pipefail
          EVENT_JSON="/var/run/act/workflow/event.json"
          test -f "$EVENT_JSON" || (echo "❌ Missing $EVENT_JSON" && exit 1)

          eval "$(node - <<'NODE'
          import fs from "node:fs";
          const ev = JSON.parse(fs.readFileSync("/var/run/act/workflow/event.json","utf8"));
          const repo =
            ev?.repository?.clone_url ||
            (ev?.repository?.html_url ? (ev.repository.html_url.replace(/\/$/,'') + ".git") : "");
          const sha =
            ev?.after ||
            ev?.pull_request?.head?.sha ||
            ev?.head_commit?.id ||
            ev?.sha ||
            "";
          if (!repo) { console.error("No repository.clone_url/html_url in event.json"); process.exit(1); }
          if (!sha) { console.error("No sha/after/pull_request.head.sha in event.json"); process.exit(1); }
          console.log(`REPO_URL=${JSON.stringify(repo)}`);
          console.log(`SHA=${JSON.stringify(sha)}`);
          NODE
          )"

          echo "Repo URL: $REPO_URL"
          echo "SHA: $SHA"

          rm -rf .git
          git init
          git remote add origin "$REPO_URL"
          git fetch --depth 1 origin "$SHA"
          git checkout -q FETCH_HEAD
          git log -1 --oneline

      - name: Anchor aliases schema
        run: |
          set -euo pipefail
          node scripts/check-anchor-aliases.mjs

      - name: NPM harden
        run: |
          set -euo pipefail
          npm config set fetch-retries 5
          npm config set fetch-retry-mintimeout 20000
          npm config set fetch-retry-maxtimeout 120000
          npm config set registry https://registry.npmjs.org
          npm config get registry

      - name: Install deps
        run: |
          set -euo pipefail
          npm ci

      - name: Inline scripts syntax check
        run: |
          set -euo pipefail
          node scripts/check-inline-js.mjs

      - name: Build (includes postbuild injection + pagefind)
        run: |
          set -euo pipefail
          npm run build

      - name: Anchors contract
        run: |
          set -euo pipefail
          npm run test:anchors

      - name: Verify anchor aliases injected in dist
        run: |
          set -euo pipefail
          node scripts/verify-anchor-aliases-in-dist.mjs
613
.gitea/workflows/deploy-staging-live.yml
Normal file
@@ -0,0 +1,613 @@
|
||||
name: Deploy staging+live (annotations)
|
||||
|
||||
on:
|
||||
push:
|
||||
branches: [main]
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
force:
|
||||
description: "Force FULL deploy (rebuild+restart) even if gate would hotpatch-only (1=yes, 0=no)"
|
||||
required: false
|
||||
default: "0"
|
||||
|
||||
env:
|
||||
NODE_OPTIONS: --dns-result-order=ipv4first
|
||||
DOCKER_API_VERSION: "1.43"
|
||||
COMPOSE_VERSION: "2.29.7"
|
||||
ASTRO_TELEMETRY_DISABLED: "1"
|
||||
|
||||
defaults:
|
||||
run:
|
||||
shell: bash
|
||||
|
||||
concurrency:
|
||||
group: deploy-staging-live-main
|
||||
cancel-in-progress: false
|
||||
|
||||
jobs:
|
||||
deploy:
|
||||
runs-on: nas-deploy
|
||||
container:
|
||||
image: localhost:5000/archicratie/nas-deploy-node22@sha256:fefa8bb307005cebec07796661ab25528dc319c33a8f1e480e1d66f90cd5cff6
|
||||
|
||||
steps:
|
||||
- name: Tools sanity
|
||||
run: |
|
||||
set -euo pipefail
|
||||
git --version
|
||||
node --version
|
||||
npm --version
|
||||
|
||||
- name: Checkout (push or workflow_dispatch, no external actions)
|
||||
env:
|
||||
EVENT_JSON: /var/run/act/workflow/event.json
|
||||
run: |
|
||||
set -euo pipefail
|
||||
test -f "$EVENT_JSON" || { echo "❌ Missing $EVENT_JSON"; exit 1; }
|
||||
|
||||
node --input-type=module <<'NODE'
|
||||
import fs from "node:fs";
|
||||
|
||||
const ev = JSON.parse(fs.readFileSync(process.env.EVENT_JSON, "utf8"));
|
||||
const repoObj = ev?.repository || {};
|
||||
|
||||
const cloneUrl =
|
||||
repoObj?.clone_url ||
|
||||
(repoObj?.html_url ? (repoObj.html_url.replace(/\/$/,"") + ".git") : "");
|
||||
if (!cloneUrl) throw new Error("No repository clone_url/html_url in event.json");
|
||||
|
||||
const defaultBranch = repoObj?.default_branch || "main";
|
||||
|
||||
// Push-range (most reliable for change detection)
|
||||
const before = String(ev?.before || "").trim();
|
||||
const after =
|
||||
(process.env.GITHUB_SHA && String(process.env.GITHUB_SHA).trim()) ||
|
||||
String(ev?.after || ev?.sha || ev?.head_commit?.id || ev?.pull_request?.head?.sha || "").trim();
|
||||
|
||||
const shq = (s) => "'" + String(s).replace(/'/g, "'\\''") + "'";
|
||||
|
||||
fs.writeFileSync("/tmp/deploy.env", [
|
||||
`REPO_URL=${shq(cloneUrl)}`,
|
||||
`DEFAULT_BRANCH=${shq(defaultBranch)}`,
|
||||
`BEFORE=${shq(before)}`,
|
||||
`AFTER=${shq(after)}`
|
||||
].join("\n") + "\n");
|
||||
NODE
|
||||
|
||||
source /tmp/deploy.env
|
||||
echo "Repo URL: $REPO_URL"
|
||||
echo "Default branch: $DEFAULT_BRANCH"
|
||||
echo "BEFORE: ${BEFORE:-<empty>}"
|
||||
echo "AFTER: ${AFTER:-<empty>}"
|
||||
|
||||
rm -rf .git
|
||||
git init -q
|
||||
git remote add origin "$REPO_URL"
|
||||
|
||||
# Checkout AFTER (or default branch if missing)
|
||||
if [[ -n "${AFTER:-}" ]]; then
|
||||
git fetch --depth 50 origin "$AFTER"
|
||||
git -c advice.detachedHead=false checkout -q FETCH_HEAD
|
||||
else
|
||||
git fetch --depth 50 origin "$DEFAULT_BRANCH"
|
||||
git -c advice.detachedHead=false checkout -q "origin/$DEFAULT_BRANCH"
|
||||
AFTER="$(git rev-parse HEAD)"
|
||||
echo "AFTER='$AFTER'" >> /tmp/deploy.env
|
||||
echo "Resolved AFTER: $AFTER"
|
||||
fi
|
||||
|
||||
git log -1 --oneline
|
||||
|
||||
- name: Gate — decide SKIP vs HOTPATCH vs FULL rebuild
|
||||
env:
|
||||
INPUT_FORCE: ${{ inputs.force }}
|
||||
EVENT_JSON: /var/run/act/workflow/event.json
|
||||
run: |
|
||||
set -euo pipefail
|
||||
source /tmp/deploy.env
|
||||
|
||||
FORCE="${INPUT_FORCE:-0}"
|
||||
|
||||
# Lire before/after du push depuis event.json (merge-proof)
|
||||
node --input-type=module <<'NODE'
|
||||
import fs from "node:fs";
|
||||
const ev = JSON.parse(fs.readFileSync(process.env.EVENT_JSON, "utf8"));
|
||||
const before = ev?.before || "";
|
||||
const after = ev?.after || ev?.sha || "";
|
||||
const shq = (s) => "'" + String(s).replace(/'/g, "'\\''") + "'";
|
||||
fs.writeFileSync("/tmp/gate.env", [
|
||||
`EV_BEFORE=${shq(before)}`,
|
||||
`EV_AFTER=${shq(after)}`
|
||||
].join("\n") + "\n");
|
||||
NODE
|
||||
|
||||
source /tmp/gate.env
|
||||
|
||||
BEFORE="${EV_BEFORE:-}"
|
||||
AFTER="${EV_AFTER:-}"
|
||||
if [[ -z "${AFTER:-}" ]]; then
|
||||
AFTER="${SHA:-}"
|
||||
fi
|
||||
|
||||
echo "Gate ctx: BEFORE=${BEFORE:-<empty>} AFTER=${AFTER:-<empty>} FORCE=${FORCE}"
|
||||
|
||||
# Produire une liste CHANGED fiable :
|
||||
# - si BEFORE/AFTER valides -> git diff before..after
|
||||
# - sinon fallback -> diff parent1..after ou show after
|
||||
CHANGED=""
|
||||
Z40="0000000000000000000000000000000000000000"
|
||||
|
||||
if [[ -n "${BEFORE:-}" && "${BEFORE}" != "${Z40}" ]] \
|
||||
&& git cat-file -e "${BEFORE}^{commit}" 2>/dev/null \
|
||||
&& git cat-file -e "${AFTER}^{commit}" 2>/dev/null; then
|
||||
CHANGED="$(git diff --name-only "${BEFORE}" "${AFTER}" || true)"
|
||||
else
|
||||
P1="$(git rev-parse "${AFTER}^" 2>/dev/null || true)"
|
||||
if [[ -n "${P1:-}" ]] && git cat-file -e "${P1}^{commit}" 2>/dev/null; then
|
||||
CHANGED="$(git diff --name-only "${P1}" "${AFTER}" || true)"
|
||||
else
|
||||
CHANGED="$(git show --name-only --pretty="" "${AFTER}" | sed '/^$/d' || true)"
|
||||
fi
|
||||
fi
|
||||
|
||||
printf "%s\n" "${CHANGED}" > /tmp/changed.txt
|
||||
|
||||
echo "== changed files (first 200) =="
|
||||
sed -n '1,200p' /tmp/changed.txt || true
|
||||
|
||||
# Flags
|
||||
HAS_FULL=0
|
||||
HAS_HOTPATCH=0
|
||||
|
||||
# HOTPATCH si annotations/media touchés
|
||||
if grep -qE '^(src/annotations/|public/media/)' /tmp/changed.txt; then
|
||||
HAS_HOTPATCH=1
|
||||
fi
|
||||
|
||||
# FULL si build-impacting (robuste)
|
||||
# 1) Tout src/ SAUF src/annotations/
|
||||
if grep -qE '^src/' /tmp/changed.txt && grep -qEv '^src/annotations/' /tmp/changed.txt; then
|
||||
HAS_FULL=1
|
||||
fi
|
||||
|
||||
# 2) scripts/
|
||||
if grep -qE '^scripts/' /tmp/changed.txt; then
|
||||
HAS_FULL=1
|
||||
fi
|
||||
|
||||
# 3) Tout public/ SAUF public/media/
|
||||
if grep -qE '^public/' /tmp/changed.txt && grep -qEv '^public/media/' /tmp/changed.txt; then
|
||||
HAS_FULL=1
|
||||
fi
|
||||
|
||||
# 4) fichiers racine qui changent le build / l’image
|
||||
if grep -qE '^(package\.json|package-lock\.json|astro\.config\.mjs|tsconfig\.json|\.npmrc|\.nvmrc|Dockerfile|docker-compose\.yml|nginx\.conf)$' /tmp/changed.txt; then
|
||||
HAS_FULL=1
|
||||
fi
|
||||
|
||||
echo "Gate flags: HAS_FULL=${HAS_FULL} HAS_HOTPATCH=${HAS_HOTPATCH}"
|
||||
|
||||
# Décision
|
||||
if [[ "${FORCE}" == "1" ]]; then
|
||||
GO=1
|
||||
MODE="full"
|
||||
echo "✅ force=1 -> MODE=full (rebuild+restart)"
|
||||
elif [[ "${HAS_FULL}" == "1" ]]; then
|
||||
GO=1
|
||||
MODE="full"
|
||||
echo "✅ build-impacting change -> MODE=full (rebuild+restart)"
|
||||
elif [[ "${HAS_HOTPATCH}" == "1" ]]; then
|
||||
GO=1
|
||||
MODE="hotpatch"
|
||||
echo "✅ annotations/media change -> MODE=hotpatch"
|
||||
else
|
||||
GO=0
|
||||
MODE="skip"
|
||||
echo "ℹ️ no relevant change -> skip deploy"
|
||||
fi
|
||||
|
||||
echo "GO=${GO}" >> /tmp/deploy.env
|
||||
echo "MODE='${MODE}'" >> /tmp/deploy.env
|
||||
|
||||
- name: Toolchain sanity + resolve COMPOSE_PROJECT_NAME
|
||||
run: |
|
||||
set -euo pipefail
|
||||
source /tmp/deploy.env
|
||||
[[ "${GO:-0}" == "1" ]] || { echo "ℹ️ skipped"; exit 0; }
|
||||
|
||||
# tools are prebaked in the image
|
||||
git --version
|
||||
docker version
|
||||
docker compose version
|
||||
python3 -c 'import yaml; print("PyYAML OK")'
|
||||
|
||||
# Reuse existing compose project name if containers already exist
|
||||
PROJ="$(docker inspect archicratie-web-blue --format '{{ index .Config.Labels "com.docker.compose.project" }}' 2>/dev/null || true)"
|
||||
if [[ -z "${PROJ:-}" ]]; then
|
||||
PROJ="$(docker inspect archicratie-web-green --format '{{ index .Config.Labels "com.docker.compose.project" }}' 2>/dev/null || true)"
|
||||
fi
|
||||
if [[ -z "${PROJ:-}" ]]; then PROJ="archicratie-web"; fi
|
||||
echo "COMPOSE_PROJECT_NAME='$PROJ'" >> /tmp/deploy.env
|
||||
echo "✅ Using COMPOSE_PROJECT_NAME=$PROJ"
|
||||
|
||||
# Assert target containers exist (hotpatch needs them)
|
||||
for c in archicratie-web-blue archicratie-web-green; do
|
||||
docker inspect "$c" >/dev/null 2>&1 || { echo "❌ missing container $c"; exit 5; }
|
||||
done
|
||||
|
||||
- name: Assert required vars (PUBLIC_GITEA_*) — only needed for MODE=full
|
||||
env:
|
||||
PUBLIC_GITEA_BASE: ${{ vars.PUBLIC_GITEA_BASE }}
|
||||
PUBLIC_GITEA_OWNER: ${{ vars.PUBLIC_GITEA_OWNER }}
|
||||
PUBLIC_GITEA_REPO: ${{ vars.PUBLIC_GITEA_REPO }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
source /tmp/deploy.env
|
||||
[[ "${GO:-0}" == "1" ]] || { echo "ℹ️ skipped"; exit 0; }
|
||||
[[ "${MODE:-hotpatch}" == "full" ]] || { echo "ℹ️ hotpatch mode -> vars not required"; exit 0; }
|
||||
|
||||
test -n "${PUBLIC_GITEA_BASE:-}" || { echo "❌ missing repo var PUBLIC_GITEA_BASE"; exit 2; }
|
||||
test -n "${PUBLIC_GITEA_OWNER:-}" || { echo "❌ missing repo var PUBLIC_GITEA_OWNER"; exit 2; }
|
||||
test -n "${PUBLIC_GITEA_REPO:-}" || { echo "❌ missing repo var PUBLIC_GITEA_REPO"; exit 2; }
|
||||
echo "✅ vars OK"
|
||||
|
||||
- name: Assert deploy files exist — only needed for MODE=full
|
||||
run: |
|
||||
set -euo pipefail
|
||||
source /tmp/deploy.env
|
||||
[[ "${GO:-0}" == "1" ]] || { echo "ℹ️ skipped"; exit 0; }
|
||||
[[ "${MODE:-hotpatch}" == "full" ]] || { echo "ℹ️ hotpatch mode -> files not required"; exit 0; }
|
||||
|
||||
test -f docker-compose.yml
|
||||
test -f Dockerfile
|
||||
test -f nginx.conf
|
||||
echo "✅ deploy files OK"
|
||||
|
||||
- name: FULL — Build + deploy staging (blue) then warmup+smoke
|
||||
env:
|
||||
PUBLIC_GITEA_BASE: ${{ vars.PUBLIC_GITEA_BASE }}
|
||||
PUBLIC_GITEA_OWNER: ${{ vars.PUBLIC_GITEA_OWNER }}
|
||||
PUBLIC_GITEA_REPO: ${{ vars.PUBLIC_GITEA_REPO }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
source /tmp/deploy.env
|
||||
[[ "${GO:-0}" == "1" ]] || { echo "ℹ️ skipped"; exit 0; }
|
||||
[[ "${MODE:-hotpatch}" == "full" ]] || { echo "ℹ️ MODE=$MODE -> skip full rebuild"; exit 0; }
|
||||
|
||||
PROJ="${COMPOSE_PROJECT_NAME:-archicratie-web}"
|
||||
|
||||
wait_url() {
|
||||
local url="$1"
|
||||
local label="$2"
|
||||
local tries="${3:-60}"
|
||||
for i in $(seq 1 "$tries"); do
|
||||
if curl -fsS --max-time 4 "$url" >/dev/null; then
|
||||
echo "✅ $label OK ($url)"
|
||||
return 0
|
||||
fi
|
||||
echo "… warmup $label ($i/$tries)"
|
||||
sleep 1
|
||||
done
|
||||
echo "❌ timeout $label ($url)"
|
||||
return 1
|
||||
}
|
||||
|
||||
TS="$(date -u +%Y%m%d-%H%M%S)"
|
||||
echo "TS='$TS'" >> /tmp/deploy.env
|
||||
docker image tag archicratie-web:blue "archicratie-web:blue.BAK.${TS}" || true
|
||||
docker image tag archicratie-web:green "archicratie-web:green.BAK.${TS}" || true
|
||||
|
||||
BUILD_TIME_RAW="$(TZ=Europe/Paris date '+%Y-%m-%dT%H:%M:%S%z')"
|
||||
BUILD_TIME="${BUILD_TIME_RAW:0:${#BUILD_TIME_RAW}-2}:${BUILD_TIME_RAW:${#BUILD_TIME_RAW}-2}"
|
||||
|
||||
PUBLIC_OPS_ENV=staging \
|
||||
PUBLIC_OPS_UPSTREAM=web_blue \
|
||||
PUBLIC_BUILD_SHA="${AFTER}" \
|
||||
PUBLIC_BUILD_TIME="${BUILD_TIME}" \
|
||||
node scripts/write-ops-health.mjs
|
||||
|
||||
test -f public/__ops/health.json
|
||||
echo "=== public/__ops/health.json (blue/staging) ==="
|
||||
cat public/__ops/health.json
|
||||
|
||||
docker compose -p "$PROJ" -f docker-compose.yml build web_blue
|
||||
docker rm -f archicratie-web-blue || true
|
||||
docker compose -p "$PROJ" -f docker-compose.yml up -d --force-recreate --remove-orphans web_blue
|
||||
|
||||
# warmup endpoints
|
||||
wait_url "http://127.0.0.1:8081/para-index.json" "blue para-index"
|
||||
wait_url "http://127.0.0.1:8081/annotations-index.json" "blue annotations-index"
|
||||
wait_url "http://127.0.0.1:8081/pagefind/pagefind.js" "blue pagefind.js"
|
||||
|
||||
wait_url "http://127.0.0.1:8081/__ops/health.json" "blue ops health"
|
||||
|
||||
curl -fsS --max-time 6 "http://127.0.0.1:8081/__ops/health.json" \
|
||||
| python3 -c 'import sys, json; j=json.load(sys.stdin); print("env=", j.get("env")); print("upstream=", j.get("upstream")); print("buildSha=", j.get("buildSha")); print("builtAt=", j.get("builtAt"))'
|
||||
|
||||
CANON="$(curl -fsS --max-time 6 "http://127.0.0.1:8081/archicrat-ia/chapitre-1/" | grep -oE 'rel="canonical" href="[^"]+"' | head -n1 || true)"
|
||||
echo "canonical(blue)=$CANON"
|
||||
echo "$CANON" | grep -q 'https://staging\.archicratie\.trans-hands\.synology\.me/' || {
|
||||
echo "❌ staging canonical mismatch"
|
||||
docker logs --tail 120 archicratie-web-blue || true
|
||||
exit 3
|
||||
}
|
||||
|
||||
echo "✅ staging OK"
|
||||
|
||||
- name: FULL — Build + deploy live (green) then warmup+smoke + rollback if needed
|
||||
env:
|
||||
PUBLIC_GITEA_BASE: ${{ vars.PUBLIC_GITEA_BASE }}
|
||||
PUBLIC_GITEA_OWNER: ${{ vars.PUBLIC_GITEA_OWNER }}
|
||||
PUBLIC_GITEA_REPO: ${{ vars.PUBLIC_GITEA_REPO }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
source /tmp/deploy.env
|
||||
[[ "${GO:-0}" == "1" ]] || { echo "ℹ️ skipped"; exit 0; }
|
||||
[[ "${MODE:-hotpatch}" == "full" ]] || { echo "ℹ️ MODE=$MODE -> skip full rebuild"; exit 0; }
|
||||
|
||||
PROJ="${COMPOSE_PROJECT_NAME:-archicratie-web}"
|
||||
TS="${TS:-$(date -u +%Y%m%d-%H%M%S)}"
|
||||
|
||||
wait_url() {
|
||||
local url="$1"
|
||||
local label="$2"
|
||||
local tries="${3:-60}"
|
||||
for i in $(seq 1 "$tries"); do
|
||||
if curl -fsS --max-time 4 "$url" >/dev/null; then
|
||||
echo "✅ $label OK ($url)"
|
||||
return 0
|
||||
fi
|
||||
echo "… warmup $label ($i/$tries)"
|
||||
sleep 1
|
||||
done
|
||||
echo "❌ timeout $label ($url)"
|
||||
return 1
|
||||
}
|
||||
|
||||
rollback() {
|
||||
echo "⚠️ rollback green -> previous image tag (best effort)"
|
||||
docker image tag "archicratie-web:green.BAK.${TS}" archicratie-web:green || true
|
||||
docker rm -f archicratie-web-green || true
|
||||
docker compose -p "$PROJ" -f docker-compose.yml up -d --force-recreate --remove-orphans web_green || true
|
||||
}
|
||||
|
||||
BUILD_TIME_RAW="$(TZ=Europe/Paris date '+%Y-%m-%dT%H:%M:%S%z')"
|
||||
BUILD_TIME="${BUILD_TIME_RAW:0:${#BUILD_TIME_RAW}-2}:${BUILD_TIME_RAW:${#BUILD_TIME_RAW}-2}"
|
||||
|
||||
PUBLIC_OPS_ENV=prod \
|
||||
PUBLIC_OPS_UPSTREAM=web_green \
|
||||
PUBLIC_BUILD_SHA="${AFTER}" \
|
||||
PUBLIC_BUILD_TIME="${BUILD_TIME}" \
|
||||
node scripts/write-ops-health.mjs
|
||||
|
||||
test -f public/__ops/health.json
|
||||
echo "=== public/__ops/health.json (green/prod) ==="
|
||||
cat public/__ops/health.json
|
||||
|
||||
# build/restart green
|
||||
if ! docker compose -p "$PROJ" -f docker-compose.yml build web_green; then
|
||||
echo "❌ build green failed"; rollback; exit 4
|
||||
fi
|
||||
|
||||
docker rm -f archicratie-web-green || true
|
||||
docker compose -p "$PROJ" -f docker-compose.yml up -d --force-recreate --remove-orphans web_green
|
||||
|
||||
# warmup endpoints
|
||||
if ! wait_url "http://127.0.0.1:8082/para-index.json" "green para-index"; then rollback; exit 4; fi
|
||||
if ! wait_url "http://127.0.0.1:8082/annotations-index.json" "green annotations-index"; then rollback; exit 4; fi
|
||||
if ! wait_url "http://127.0.0.1:8082/pagefind/pagefind.js" "green pagefind.js"; then rollback; exit 4; fi
|
||||
|
||||
if ! wait_url "http://127.0.0.1:8082/__ops/health.json" "green ops health"; then rollback; exit 4; fi
|
||||
|
||||
curl -fsS --max-time 6 "http://127.0.0.1:8082/__ops/health.json" \
|
||||
| python3 -c 'import sys, json; j=json.load(sys.stdin); print("env=", j.get("env")); print("upstream=", j.get("upstream")); print("buildSha=", j.get("buildSha")); print("builtAt=", j.get("builtAt"))'
|
||||
|
||||
CANON="$(curl -fsS --max-time 6 "http://127.0.0.1:8082/archicrat-ia/chapitre-1/" | grep -oE 'rel="canonical" href="[^"]+"' | head -n1 || true)"
|
||||
echo "canonical(green)=$CANON"
|
||||
echo "$CANON" | grep -q 'https://archicratie\.trans-hands\.synology\.me/' || {
|
||||
echo "❌ live canonical mismatch"
|
||||
docker logs --tail 120 archicratie-web-green || true
|
||||
rollback
|
||||
exit 4
|
||||
}
|
||||
|
||||
echo "✅ live OK"
|
||||
|
||||
- name: HOTPATCH — deep merge shards -> annotations-index + copy changed media into blue+green
|
||||
run: |
|
||||
set -euo pipefail
|
||||
source /tmp/deploy.env
|
||||
[[ "${GO:-0}" == "1" ]] || { echo "ℹ️ skipped"; exit 0; }
|
||||
|
||||
python3 - <<'PY'
|
||||
import os, re, json, glob
|
||||
import yaml
|
||||
import datetime as dt
|
||||
|
||||
ROOT = os.getcwd()
|
||||
ANNO_ROOT = os.path.join(ROOT, "src", "annotations")
|
||||
|
||||
def is_obj(x): return isinstance(x, dict)
|
||||
def is_arr(x): return isinstance(x, list)
|
||||
|
||||
def iso_dt(x):
|
||||
if isinstance(x, dt.datetime):
|
||||
if x.tzinfo is None:
|
||||
return x.isoformat()
|
||||
return x.astimezone(dt.timezone.utc).isoformat().replace("+00:00","Z")
|
||||
if isinstance(x, dt.date):
|
||||
return x.isoformat()
|
||||
return None
|
||||
|
||||
def normalize(x):
|
||||
s = iso_dt(x)
|
||||
if s is not None: return s
|
||||
if isinstance(x, dict):
|
||||
return {str(k): normalize(v) for k, v in x.items()}
|
||||
if isinstance(x, list):
|
||||
return [normalize(v) for v in x]
|
||||
return x
|
||||
|
||||
def key_media(it): return str((it or {}).get("src",""))
|
||||
def key_ref(it):
|
||||
it = it or {}
|
||||
return "||".join([str(it.get("url","")), str(it.get("label","")), str(it.get("kind","")), str(it.get("citation",""))])
|
||||
def key_comment(it): return str((it or {}).get("text","")).strip()
|
||||
|
||||
def dedup_extend(dst_list, src_list, key_fn):
|
||||
seen = set(); out = []
|
||||
for x in (dst_list or []):
|
||||
x = normalize(x); k = key_fn(x)
|
||||
if k and k not in seen: seen.add(k); out.append(x)
|
||||
for x in (src_list or []):
|
||||
x = normalize(x); k = key_fn(x)
|
||||
if k and k not in seen: seen.add(k); out.append(x)
|
||||
return out
|
||||
|
||||
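# deep_merge strategy:
#   - "media" / "refs" / "comments_editorial": union, deduplicated with a type-specific key
#   - nested dicts: merged recursively
#   - other lists: union, deduplicated on their canonical JSON serialization
#   - scalars: the existing value wins; src only fills missing or empty fields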
def deep_merge(dst, src):
|
||||
src = normalize(src)
|
||||
for k, v in (src or {}).items():
|
||||
if k in ("media","refs","comments_editorial") and is_arr(v):
|
||||
if k == "media": dst[k] = dedup_extend(dst.get(k, []), v, key_media)
|
||||
elif k == "refs": dst[k] = dedup_extend(dst.get(k, []), v, key_ref)
|
||||
else: dst[k] = dedup_extend(dst.get(k, []), v, key_comment)
|
||||
continue
|
||||
|
||||
if is_obj(v):
|
||||
if not is_obj(dst.get(k)): dst[k] = {}
|
||||
deep_merge(dst[k], v)
|
||||
continue
|
||||
|
||||
if is_arr(v):
|
||||
cur = dst.get(k, [])
|
||||
if not is_arr(cur): cur = []
|
||||
seen = set(); out = []
|
||||
for x in cur:
|
||||
x = normalize(x)
|
||||
s = json.dumps(x, sort_keys=True, ensure_ascii=False)
|
||||
if s not in seen: seen.add(s); out.append(x)
|
||||
for x in v:
|
||||
x = normalize(x)
|
||||
s = json.dumps(x, sort_keys=True, ensure_ascii=False)
|
||||
if s not in seen: seen.add(s); out.append(x)
|
||||
dst[k] = out
|
||||
continue
|
||||
|
||||
v = normalize(v)
|
||||
if k not in dst or dst.get(k) in (None, ""):
|
||||
dst[k] = v
|
||||
|
||||
def para_num(pid):
|
||||
m = re.match(r"^p-(\d+)-", str(pid))
|
||||
return int(m.group(1)) if m else 10**9
|
||||
|
||||
def sort_lists(entry):
|
||||
for k in ("media","refs","comments_editorial"):
|
||||
arr = entry.get(k)
|
||||
if not is_arr(arr): continue
|
||||
def ts(x):
|
||||
x = normalize(x)
|
||||
try:
|
||||
s = str((x or {}).get("ts",""))
|
||||
return dt.datetime.fromisoformat(s.replace("Z","+00:00")).timestamp() if s else 0
|
||||
except Exception:
|
||||
return 0
|
||||
arr = [normalize(x) for x in arr]
|
||||
arr.sort(key=lambda x: (ts(x), json.dumps(x, sort_keys=True, ensure_ascii=False)))
|
||||
entry[k] = arr
|
||||
|
||||
if not os.path.isdir(ANNO_ROOT):
|
||||
raise SystemExit(f"Missing annotations root: {ANNO_ROOT}")
|
||||
|
||||
pages = {}
|
||||
errors = []
|
||||
|
||||
files = sorted(glob.glob(os.path.join(ANNO_ROOT, "**", "*.yml"), recursive=True))
|
||||
for fp in files:
|
||||
try:
|
||||
with open(fp, "r", encoding="utf-8") as f:
|
||||
doc = yaml.safe_load(f) or {}
|
||||
doc = normalize(doc)
|
||||
if not isinstance(doc, dict) or doc.get("schema") != 1:
|
||||
continue
|
||||
|
||||
page = str(doc.get("page","")).strip().strip("/")
|
||||
paras = doc.get("paras") or {}
|
||||
if not page or not isinstance(paras, dict):
|
||||
continue
|
||||
|
||||
pg = pages.setdefault(page, {"paras": {}})
|
||||
for pid, entry in paras.items():
|
||||
pid = str(pid)
|
||||
if pid not in pg["paras"] or not isinstance(pg["paras"].get(pid), dict):
|
||||
pg["paras"][pid] = {}
|
||||
if isinstance(entry, dict):
|
||||
deep_merge(pg["paras"][pid], entry)
|
||||
sort_lists(pg["paras"][pid])
|
||||
|
||||
except Exception as e:
|
||||
errors.append({"file": os.path.relpath(fp, ROOT), "error": str(e)})
|
||||
|
||||
for page, obj in pages.items():
|
||||
keys = list((obj.get("paras") or {}).keys())
|
||||
keys.sort(key=lambda k: (para_num(k), k))
|
||||
obj["paras"] = {k: obj["paras"][k] for k in keys}
|
||||
|
||||
out = {
|
||||
"schema": 1,
|
||||
"generatedAt": dt.datetime.utcnow().replace(tzinfo=dt.timezone.utc).isoformat().replace("+00:00","Z"),
|
||||
"pages": pages,
|
||||
"stats": {
|
||||
"pages": len(pages),
|
||||
"paras": sum(len(v.get("paras") or {}) for v in pages.values()),
|
||||
"errors": len(errors),
|
||||
},
|
||||
"errors": errors,
|
||||
}
|
||||
|
||||
with open("/tmp/annotations-index.json", "w", encoding="utf-8") as f:
|
||||
json.dump(out, f, ensure_ascii=False)
|
||||
|
||||
print("OK: wrote /tmp/annotations-index.json pages=", out["stats"]["pages"], "paras=", out["stats"]["paras"], "errors=", out["stats"]["errors"])
|
||||
PY
|
||||
|
||||
# patch JSON into running containers
|
||||
for c in archicratie-web-blue archicratie-web-green; do
|
||||
echo "== patch annotations-index.json into $c =="
|
||||
docker cp /tmp/annotations-index.json "${c}:/usr/share/nginx/html/annotations-index.json"
|
||||
done
|
||||
|
||||
# copy changed media files into containers (so new media appears without rebuild)
|
||||
if [[ -s /tmp/changed.txt ]]; then
|
||||
while IFS= read -r f; do
|
||||
[[ -n "$f" ]] || continue
|
||||
if [[ "$f" == public/media/* ]]; then
|
||||
dest="/usr/share/nginx/html/${f#public/}" # => /usr/share/nginx/html/media/...
|
||||
for c in archicratie-web-blue archicratie-web-green; do
|
||||
echo "== copy media into $c: $f -> $dest =="
|
||||
docker exec "$c" sh -lc "mkdir -p \"$(dirname "$dest")\""
|
||||
docker cp "$f" "$c:$dest"
|
||||
done
|
||||
fi
|
||||
done < /tmp/changed.txt
|
||||
fi
|
||||
|
||||
# smoke after patch
|
||||
for p in 8081 8082; do
|
||||
echo "== smoke annotations-index on $p =="
|
||||
curl -fsS --max-time 6 "http://127.0.0.1:${p}/annotations-index.json" \
|
||||
| python3 -c 'import sys,json; j=json.load(sys.stdin); print("generatedAt:", j.get("generatedAt")); print("pages:", len(j.get("pages") or {})); print("paras:", j.get("stats",{}).get("paras"))'
|
||||
done
|
||||
|
||||
echo "✅ hotpatch done"
|
||||
|
||||
- name: Debug on failure (containers status/logs)
|
||||
if: ${{ failure() }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
echo "== docker ps =="
|
||||
docker ps --format 'table {{.Names}}\t{{.Status}}\t{{.Image}}' | sed -n '1,80p' || true
|
||||
for c in archicratie-web-blue archicratie-web-green; do
|
||||
echo "== logs $c (tail 200) =="
|
||||
docker logs --tail 200 "$c" || true
|
||||
done
|
||||
788
.gitea/workflows/proposer-apply-pr.yml
Normal file
@@ -0,0 +1,788 @@
|
||||
name: Proposer Apply (Queue)
|
||||
|
||||
on:
|
||||
issues:
|
||||
types: [labeled]
|
||||
push:
|
||||
branches: [main]
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
issue:
|
||||
description: "Issue number to prioritize (optional)"
|
||||
required: false
|
||||
default: ""
|
||||
|
||||
env:
|
||||
NODE_OPTIONS: --dns-result-order=ipv4first
|
||||
|
||||
defaults:
|
||||
run:
|
||||
shell: bash
|
||||
|
||||
concurrency:
|
||||
group: proposer-queue-main
|
||||
cancel-in-progress: false
|
||||
|
||||
jobs:
|
||||
apply-proposer:
|
||||
runs-on: mac-ci
|
||||
container:
|
||||
image: mcr.microsoft.com/devcontainers/javascript-node:22-bookworm
|
||||
|
||||
steps:
|
||||
- name: Tools sanity
|
||||
run: |
|
||||
set -euo pipefail
|
||||
git --version
|
||||
node --version
|
||||
npm --version
|
||||
|
||||
- name: Derive context (event.json / workflow_dispatch / push)
|
||||
env:
|
||||
INPUT_ISSUE: ${{ inputs.issue }}
|
||||
EVENT_NAME_IN: ${{ github.event_name }}
|
||||
FORGE_API: ${{ vars.FORGE_API || vars.FORGE_BASE }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
export EVENT_JSON="/var/run/act/workflow/event.json"
|
||||
test -f "$EVENT_JSON" || { echo "Missing $EVENT_JSON"; exit 1; }
|
||||
|
||||
node --input-type=module - <<'NODE' > /tmp/proposer.env
|
||||
import fs from "node:fs";
|
||||
|
||||
const ev = JSON.parse(fs.readFileSync(process.env.EVENT_JSON, "utf8"));
|
||||
const repoObj = ev?.repository || {};
|
||||
|
||||
const cloneUrl =
|
||||
repoObj?.clone_url ||
|
||||
(repoObj?.html_url ? (repoObj.html_url.replace(/\/$/, "") + ".git") : "");
|
||||
|
||||
if (!cloneUrl) throw new Error("No repository clone_url/html_url in event.json");
|
||||
|
||||
let owner =
|
||||
repoObj?.owner?.login ||
|
||||
repoObj?.owner?.username ||
|
||||
(repoObj?.full_name ? repoObj.full_name.split("/")[0] : "");
|
||||
|
||||
let repo =
|
||||
repoObj?.name ||
|
||||
(repoObj?.full_name ? repoObj.full_name.split("/")[1] : "");
|
||||
|
||||
if (!owner || !repo) {
|
||||
const m = cloneUrl.match(/[:/](?<o>[^/]+)\/(?<r>[^/]+?)(?:\.git)?$/);
|
||||
if (m?.groups) {
|
||||
owner = owner || m.groups.o;
|
||||
repo = repo || m.groups.r;
|
||||
}
|
||||
}
|
||||
|
||||
if (!owner || !repo) throw new Error("Cannot infer owner/repo");
|
||||
|
||||
const defaultBranch = repoObj?.default_branch || "main";
|
||||
|
||||
const issueNumber =
|
||||
ev?.issue?.number ||
|
||||
ev?.issue?.index ||
|
||||
(process.env.INPUT_ISSUE ? Number(process.env.INPUT_ISSUE) : 0) ||
|
||||
0;
|
||||
|
||||
const labelName =
|
||||
ev?.label?.name ||
|
||||
(typeof ev?.label === "string" ? ev.label : "") ||
|
||||
"";
|
||||
|
||||
const eventName =
|
||||
String(process.env.EVENT_NAME_IN || "").trim() ||
|
||||
(ev?.issue ? "issues" : (ev?.before || ev?.after ? "push" : "workflow_dispatch"));
|
||||
|
||||
const u = new URL(cloneUrl);
|
||||
const origin = u.origin;
|
||||
|
||||
const apiBase =
|
||||
(process.env.FORGE_API && String(process.env.FORGE_API).trim())
|
||||
? String(process.env.FORGE_API).trim().replace(/\/+$/, "")
|
||||
: origin;
|
||||
|
||||
function sh(s) {
|
||||
return JSON.stringify(String(s));
|
||||
}
|
||||
|
||||
process.stdout.write([
|
||||
`CLONE_URL=${sh(cloneUrl)}`,
|
||||
`OWNER=${sh(owner)}`,
|
||||
`REPO=${sh(repo)}`,
|
||||
`DEFAULT_BRANCH=${sh(defaultBranch)}`,
|
||||
`ISSUE_NUMBER=${sh(issueNumber)}`,
|
||||
`LABEL_NAME=${sh(labelName)}`,
|
||||
`EVENT_NAME=${sh(eventName)}`,
|
||||
`API_BASE=${sh(apiBase)}`
|
||||
].join("\n") + "\n");
|
||||
NODE
|
||||
|
||||
echo "Context:"
|
||||
sed -n '1,200p' /tmp/proposer.env
|
||||
|
||||
- name: Early gate (tolerant on empty issue label payload)
|
||||
run: |
|
||||
set -euo pipefail
|
||||
source /tmp/proposer.env
|
||||
|
||||
echo "event=$EVENT_NAME label=${LABEL_NAME:-<empty>}"
|
||||
|
||||
if [[ "$EVENT_NAME" == "issues" ]]; then
|
||||
if [[ -n "${LABEL_NAME:-}" && "$LABEL_NAME" != "state/approved" ]]; then
|
||||
echo "issues/labeled with explicit non-approved label=$LABEL_NAME -> skip"
|
||||
echo 'SKIP=1' >> /tmp/proposer.env
|
||||
echo 'SKIP_REASON="label_not_state_approved_event"' >> /tmp/proposer.env
|
||||
exit 0
|
||||
fi
|
||||
fi
|
||||
|
||||
echo "Proceed to API-based selection/gating"
|
||||
|
||||
- name: Checkout default branch
|
||||
run: |
|
||||
set -euo pipefail
|
||||
source /tmp/proposer.env
|
||||
[[ "${SKIP:-0}" != "1" ]] || { echo "Skipped"; exit 0; }
|
||||
|
||||
rm -rf .git
|
||||
git init -q
|
||||
git remote add origin "$CLONE_URL"
|
||||
git fetch --depth 1 origin "$DEFAULT_BRANCH"
|
||||
git -c advice.detachedHead=false checkout -q FETCH_HEAD
|
||||
git log -1 --oneline
|
||||
|
||||
- name: Detect app dir (repo-root vs ./site)
|
||||
run: |
|
||||
set -euo pipefail
|
||||
source /tmp/proposer.env
|
||||
[[ "${SKIP:-0}" != "1" ]] || { echo "Skipped"; exit 0; }
|
||||
|
||||
APP_DIR="."
|
||||
if [[ -d "site" && -f "site/package.json" ]]; then
|
||||
APP_DIR="site"
|
||||
fi
|
||||
|
||||
echo "APP_DIR=$APP_DIR" >> /tmp/proposer.env
|
||||
echo "APP_DIR=$APP_DIR"
|
||||
|
||||
test -f "$APP_DIR/package.json" || {
|
||||
echo "package.json missing in APP_DIR=$APP_DIR"
|
||||
exit 1
|
||||
}
|
||||
|
||||
test -d "$APP_DIR/scripts" || {
|
||||
echo "scripts/ missing in APP_DIR=$APP_DIR"
|
||||
exit 1
|
||||
}
|
||||
|
||||
- name: Select next proposer batch (by path)
|
||||
env:
|
||||
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
source /tmp/proposer.env
|
||||
[[ "${SKIP:-0}" != "1" ]] || { echo "Skipped"; exit 0; }
|
||||
|
||||
test -n "${FORGE_TOKEN:-}" || {
|
||||
echo "Missing secret FORGE_TOKEN"
|
||||
exit 1
|
||||
}
|
||||
|
||||
export GITEA_OWNER="$OWNER"
|
||||
export GITEA_REPO="$REPO"
|
||||
export FORGE_API="$API_BASE"
|
||||
|
||||
cd "$APP_DIR"
|
||||
|
||||
test -f scripts/pick-proposer-issue.mjs || {
|
||||
echo "missing scripts/pick-proposer-issue.mjs in APP_DIR=$APP_DIR"
|
||||
ls -la scripts | sed -n '1,200p' || true
|
||||
exit 1
|
||||
}
|
||||
|
||||
node scripts/pick-proposer-issue.mjs "${ISSUE_NUMBER:-0}" > /tmp/proposer.pick.env
|
||||
cat /tmp/proposer.pick.env >> /tmp/proposer.env
|
||||
source /tmp/proposer.pick.env
|
||||
|
||||
if [[ "${TARGET_FOUND:-0}" != "1" ]]; then
|
||||
echo 'SKIP=1' >> /tmp/proposer.env
|
||||
echo "SKIP_REASON=${TARGET_REASON:-no_target}" >> /tmp/proposer.env
|
||||
echo "No target batch"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
echo "Target batch:"
|
||||
grep -E '^(TARGET_PRIMARY_ISSUE|TARGET_ISSUES|TARGET_COUNT|TARGET_CHEMIN)=' /tmp/proposer.env
|
||||
|
||||
- name: Derive deterministic batch identity
|
||||
run: |
|
||||
set -euo pipefail
|
||||
source /tmp/proposer.env
|
||||
[[ "${SKIP:-0}" != "1" ]] || { echo "Skipped"; exit 0; }
|
||||
|
||||
export TARGET_ISSUES TARGET_CHEMIN
|
||||
|
||||
node --input-type=module - <<'NODE'
|
||||
import fs from "node:fs";
|
||||
import crypto from "node:crypto";
|
||||
|
||||
const issues = String(process.env.TARGET_ISSUES || "")
|
||||
.trim()
|
||||
.split(/\s+/)
|
||||
.filter(Boolean)
|
||||
.sort((a, b) => Number(a) - Number(b));
|
||||
|
||||
const chemin = String(process.env.TARGET_CHEMIN || "").trim();
|
||||
const keySource = `${chemin}::${issues.join(",")}`;
|
||||
const hash = crypto.createHash("sha1").update(keySource).digest("hex").slice(0, 12);
|
||||
const primary = issues[0] || "0";
|
||||
const batchBranch = `bot/proposer-${primary}-${hash}`;
|
||||
|
||||
fs.appendFileSync(
|
||||
"/tmp/proposer.env",
|
||||
[
|
||||
`BATCH_KEY=${JSON.stringify(keySource)}`,
|
||||
`BATCH_HASH=${JSON.stringify(hash)}`,
|
||||
`BATCH_BRANCH=${JSON.stringify(batchBranch)}`
|
||||
].join("\n") + "\n"
|
||||
);
|
||||
NODE
|
||||
|
||||
echo "Batch identity:"
|
||||
grep -E '^(BATCH_KEY|BATCH_HASH|BATCH_BRANCH)=' /tmp/proposer.env
|
||||
|
||||
- name: Inspect open proposer PRs
|
||||
env:
|
||||
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
source /tmp/proposer.env
|
||||
[[ "${SKIP:-0}" != "1" ]] || { echo "Skipped"; exit 0; }
|
||||
|
||||
curl -fsS \
|
||||
-H "Authorization: token $FORGE_TOKEN" \
|
||||
-H "Accept: application/json" \
|
||||
"$API_BASE/api/v1/repos/$OWNER/$REPO/pulls?state=open&limit=100" \
|
||||
-o /tmp/open_pulls.json
|
||||
|
||||
export TARGET_ISSUES="${TARGET_ISSUES:-}"
|
||||
export BATCH_BRANCH="${BATCH_BRANCH:-}"
|
||||
export BATCH_KEY="${BATCH_KEY:-}"
|
||||
|
||||
node --input-type=module - <<'NODE' >> /tmp/proposer.env
|
||||
import fs from "node:fs";
|
||||
|
||||
const pulls = JSON.parse(fs.readFileSync("/tmp/open_pulls.json", "utf8"));
|
||||
const issues = String(process.env.TARGET_ISSUES || "")
|
||||
.trim()
|
||||
.split(/\s+/)
|
||||
.filter(Boolean);
|
||||
|
||||
const batchBranch = String(process.env.BATCH_BRANCH || "");
|
||||
const batchKey = String(process.env.BATCH_KEY || "");
|
||||
|
||||
const proposerOpen = Array.isArray(pulls)
|
||||
? pulls.filter((pr) => String(pr?.head?.ref || "").startsWith("bot/proposer-"))
|
||||
: [];
|
||||
|
||||
const sameBatch = proposerOpen.find((pr) => {
|
||||
const ref = String(pr?.head?.ref || "");
|
||||
const title = String(pr?.title || "");
|
||||
const body = String(pr?.body || "");
|
||||
|
||||
if (batchBranch && ref === batchBranch) return true;
|
||||
if (batchKey && body.includes(`Batch-Key: ${batchKey}`)) return true;
|
||||
|
||||
return issues.some((n) =>
|
||||
ref.startsWith(`bot/proposer-${n}-`) ||
|
||||
title.includes(`#${n}`) ||
|
||||
body.includes(`#${n}`) ||
|
||||
body.includes(`ticket #${n}`)
|
||||
);
|
||||
});
|
||||
|
||||
const out = [];
|
||||
|
||||
if (sameBatch) {
|
||||
out.push("SKIP=1");
|
||||
out.push(`SKIP_REASON=${JSON.stringify("issue_already_has_open_pr")}`);
|
||||
out.push(`OPEN_PR_URL=${JSON.stringify(String(sameBatch.html_url || sameBatch.url || ""))}`);
|
||||
out.push(`OPEN_PR_BRANCH=${JSON.stringify(String(sameBatch?.head?.ref || ""))}`);
|
||||
} else if (proposerOpen.length > 0) {
|
||||
const first = proposerOpen[0];
|
||||
out.push("SKIP=1");
|
||||
out.push(`SKIP_REASON=${JSON.stringify("queue_busy_open_proposer_pr")}`);
|
||||
out.push(`OPEN_PR_URL=${JSON.stringify(String(first.html_url || first.url || ""))}`);
|
||||
out.push(`OPEN_PR_BRANCH=${JSON.stringify(String(first?.head?.ref || ""))}`);
|
||||
}
|
||||
|
||||
process.stdout.write(out.join("\n") + (out.length ? "\n" : ""));
|
||||
NODE
|
||||
|
||||
- name: Guard on remote batch branch before heavy work
|
||||
run: |
|
||||
set -euo pipefail
|
||||
source /tmp/proposer.env
|
||||
[[ "${SKIP:-0}" != "1" ]] || { echo "Skipped"; exit 0; }
|
||||
|
||||
if git ls-remote --exit-code --heads origin "$BATCH_BRANCH" >/dev/null 2>&1; then
|
||||
echo 'SKIP=1' >> /tmp/proposer.env
|
||||
echo 'SKIP_REASON="batch_branch_exists_without_pr"' >> /tmp/proposer.env
|
||||
echo "OPEN_PR_BRANCH=${BATCH_BRANCH}" >> /tmp/proposer.env
|
||||
echo "Remote batch branch already exists -> skip duplicate materialization"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
echo "Remote batch branch is free"
|
||||
|
||||
- name: Comment issue if queued / skipped
|
||||
if: ${{ always() }}
|
||||
env:
|
||||
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
source /tmp/proposer.env || true
|
||||
|
||||
[[ "${SKIP:-0}" == "1" ]] || exit 0
|
||||
[[ "${EVENT_NAME:-}" != "push" ]] || exit 0
|
||||
|
||||
if [[ "${SKIP_REASON:-}" == "label_not_state_approved_event" || "${SKIP_REASON:-}" == "label_not_state_approved" ]]; then
|
||||
echo "Skip reason=${SKIP_REASON} -> no comment"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
test -n "${FORGE_TOKEN:-}" || exit 0
|
||||
|
||||
ISSUE_TO_COMMENT="${ISSUE_NUMBER:-0}"
|
||||
if [[ "$ISSUE_TO_COMMENT" == "0" || -z "$ISSUE_TO_COMMENT" ]]; then
|
||||
ISSUE_TO_COMMENT="${TARGET_PRIMARY_ISSUE:-0}"
|
||||
fi
|
||||
[[ "$ISSUE_TO_COMMENT" != "0" ]] || exit 0
|
||||
|
||||
case "${SKIP_REASON:-}" in
|
||||
queue_busy_open_proposer_pr)
|
||||
MSG="Ticket queued in proposer queue. An open proposer PR already exists: ${OPEN_PR_URL:-"(URL unavailable)"}. The workflow will resume after merge on main."
|
||||
;;
|
||||
issue_already_has_open_pr)
|
||||
MSG="This batch already has an open proposer PR: ${OPEN_PR_URL:-"(URL unavailable)"}"
|
||||
;;
|
||||
batch_branch_exists_without_pr)
|
||||
MSG="This batch already has a remote batch branch (${OPEN_PR_BRANCH:-"(unknown branch)"}). Manual inspection is required before any new proposer PR is created."
|
||||
;;
|
||||
batch_branch_already_materialized)
|
||||
MSG="This batch was already materialized by another run on branch ${OPEN_PR_BRANCH:-"(unknown branch)"}. No duplicate PR was created."
|
||||
;;
|
||||
explicit_issue_missing_chemin)
|
||||
MSG="Proposer Apply: cannot process this ticket automatically because field Chemin is missing or unreadable."
|
||||
;;
|
||||
explicit_issue_missing_type)
|
||||
MSG="Proposer Apply: cannot process this ticket automatically because field Type is missing or unreadable."
|
||||
;;
|
||||
explicit_issue_not_approved)
|
||||
MSG="Proposer Apply: this ticket is not currently labeled state/approved."
|
||||
;;
|
||||
explicit_issue_rejected)
|
||||
MSG="Proposer Apply: this ticket has state/rejected and is not eligible for the proposer queue."
|
||||
;;
|
||||
no_open_approved_proposer_issue)
|
||||
MSG="No approved proposer ticket is currently waiting."
|
||||
;;
|
||||
*)
|
||||
MSG="Proposer Apply: skip - ${SKIP_REASON:-unspecified reason}."
|
||||
;;
|
||||
esac
|
||||
|
||||
export MSG
|
||||
node --input-type=module - <<'NODE' > /tmp/proposer.skip.comment.json
|
||||
const msg = process.env.MSG || "";
|
||||
process.stdout.write(JSON.stringify({ body: msg }));
|
||||
NODE
|
||||
|
||||
curl -fsS -X POST \
|
||||
-H "Authorization: token $FORGE_TOKEN" \
|
||||
-H "Content-Type: application/json" \
|
||||
"$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE_TO_COMMENT/comments" \
|
||||
--data-binary @/tmp/proposer.skip.comment.json || true
|
||||
|
||||
- name: NPM harden
|
||||
run: |
|
||||
set -euo pipefail
|
||||
source /tmp/proposer.env
|
||||
[[ "${SKIP:-0}" != "1" ]] || exit 0
|
||||
|
||||
cd "$APP_DIR"
|
||||
npm config set fetch-retries 5
|
||||
npm config set fetch-retry-mintimeout 20000
|
||||
npm config set fetch-retry-maxtimeout 120000
|
||||
npm config set registry https://registry.npmjs.org
|
||||
|
||||
- name: Install deps
|
||||
run: |
|
||||
set -euo pipefail
|
||||
source /tmp/proposer.env
|
||||
[[ "${SKIP:-0}" != "1" ]] || exit 0
|
||||
|
||||
cd "$APP_DIR"
|
||||
npm ci --no-audit --no-fund
|
||||
|
||||
- name: Build dist baseline
|
||||
run: |
|
||||
set -euo pipefail
|
||||
source /tmp/proposer.env
|
||||
[[ "${SKIP:-0}" != "1" ]] || exit 0
|
||||
|
||||
cd "$APP_DIR"
|
||||
npm run build
|
||||
|
||||
- name: Apply proposer batch on bot branch
|
||||
continue-on-error: true
|
||||
env:
|
||||
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
|
||||
BOT_GIT_NAME: ${{ secrets.BOT_GIT_NAME }}
|
||||
BOT_GIT_EMAIL: ${{ secrets.BOT_GIT_EMAIL }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
source /tmp/proposer.env
|
||||
[[ "${SKIP:-0}" != "1" ]] || { echo "Skipped"; exit 0; }
|
||||
|
||||
git config user.name "${BOT_GIT_NAME:-archicratie-bot}"
|
||||
git config user.email "${BOT_GIT_EMAIL:-bot@archicratie.local}"
|
||||
|
||||
START_SHA="$(git rev-parse HEAD)"
|
||||
BR="$BATCH_BRANCH"
|
||||
echo "BRANCH=$BR" >> /tmp/proposer.env
|
||||
git checkout -b "$BR"
|
||||
|
||||
export GITEA_OWNER="$OWNER"
|
||||
export GITEA_REPO="$REPO"
|
||||
export FORGE_API="$API_BASE"
|
||||
|
||||
LOG="/tmp/proposer-apply.log"
|
||||
: > "$LOG"
|
||||
|
||||
RC=0
|
||||
FAILED_ISSUE=""
|
||||
|
||||
for ISSUE in $TARGET_ISSUES; do
|
||||
echo "" >> "$LOG"
|
||||
echo "== ticket #$ISSUE ==" >> "$LOG"
|
||||
|
||||
set +e
|
||||
(cd "$APP_DIR" && node scripts/apply-ticket.mjs "$ISSUE" --alias --commit) >> "$LOG" 2>&1
|
||||
STEP_RC=$?
|
||||
set -e
|
||||
|
||||
if [[ "$STEP_RC" -ne 0 ]]; then
|
||||
RC="$STEP_RC"
|
||||
FAILED_ISSUE="$ISSUE"
|
||||
break
|
||||
fi
|
||||
done
|
||||
|
||||
echo "APPLY_RC=$RC" >> /tmp/proposer.env
|
||||
echo "FAILED_ISSUE=${FAILED_ISSUE}" >> /tmp/proposer.env
|
||||
|
||||
echo "Apply log (tail):"
|
||||
tail -n 220 "$LOG" || true
|
||||
|
||||
END_SHA="$(git rev-parse HEAD)"
|
||||
|
||||
if [[ "$RC" -ne 0 ]]; then
|
||||
echo "NOOP=0" >> /tmp/proposer.env
|
||||
exit 0
|
||||
fi
|
||||
|
||||
if [[ "$START_SHA" == "$END_SHA" ]]; then
|
||||
echo "NOOP=1" >> /tmp/proposer.env
|
||||
else
|
||||
echo "NOOP=0" >> /tmp/proposer.env
|
||||
echo "END_SHA=$END_SHA" >> /tmp/proposer.env
|
||||
fi
|
||||
|
||||
- name: Rebase bot branch on latest main
|
||||
continue-on-error: true
|
||||
run: |
|
||||
set -euo pipefail
|
||||
source /tmp/proposer.env || true
|
||||
[[ "${SKIP:-0}" != "1" ]] || exit 0
|
||||
[[ "${APPLY_RC:-0}" == "0" ]] || exit 0
|
||||
[[ "${NOOP:-0}" == "0" ]] || exit 0
|
||||
|
||||
LOG="/tmp/proposer-apply.log"
|
||||
|
||||
git fetch origin "$DEFAULT_BRANCH"
|
||||
|
||||
set +e
|
||||
git rebase "origin/$DEFAULT_BRANCH" >> "$LOG" 2>&1
|
||||
RC=$?
|
||||
set -e
|
||||
|
||||
if [[ "$RC" -ne 0 ]]; then
|
||||
git rebase --abort || true
|
||||
fi
|
||||
|
||||
echo "REBASE_RC=$RC" >> /tmp/proposer.env
|
||||
|
||||
echo "Rebase log (tail):"
|
||||
tail -n 220 "$LOG" || true
|
||||
|
||||
- name: Comment issues on failure
|
||||
if: ${{ always() }}
|
||||
env:
|
||||
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
source /tmp/proposer.env || true
|
||||
[[ "${SKIP:-0}" != "1" ]] || exit 0
|
||||
|
||||
APPLY_RC="${APPLY_RC:-0}"
|
||||
REBASE_RC="${REBASE_RC:-0}"
|
||||
|
||||
if [[ "$APPLY_RC" == "0" && "$REBASE_RC" == "0" ]]; then
|
||||
echo "No failure detected"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
test -n "${FORGE_TOKEN:-}" || exit 0
|
||||
|
||||
if [[ -f /tmp/proposer-apply.log ]]; then
|
||||
BODY="$(tail -n 160 /tmp/proposer-apply.log | sed 's/\r$//')"
|
||||
else
|
||||
BODY="(no proposer log found)"
|
||||
fi
|
||||
|
||||
export BODY APPLY_RC REBASE_RC FAILED_ISSUE
|
||||
|
||||
if [[ "$APPLY_RC" != "0" ]]; then
|
||||
export FAILURE_KIND="apply"
|
||||
else
|
||||
export FAILURE_KIND="rebase"
|
||||
fi
|
||||
|
||||
node --input-type=module - <<'NODE' > /tmp/proposer.failure.comment.json
|
||||
const body = process.env.BODY || "";
|
||||
const applyRc = process.env.APPLY_RC || "0";
|
||||
const rebaseRc = process.env.REBASE_RC || "0";
|
||||
const failedIssue = process.env.FAILED_ISSUE || "unknown";
|
||||
const kind = process.env.FAILURE_KIND || "apply";
|
||||
|
||||
const msg =
|
||||
kind === "apply"
|
||||
? `Batch proposer failed on ticket #${failedIssue} (rc=${applyRc}).\n\n\`\`\`\n${body}\n\`\`\`\n`
|
||||
: `Rebase proposer failed on main (rc=${rebaseRc}).\n\n\`\`\`\n${body}\n\`\`\`\n`;
|
||||
|
||||
process.stdout.write(JSON.stringify({ body: msg }));
|
||||
NODE
|
||||
|
||||
for ISSUE in ${TARGET_ISSUES:-}; do
|
||||
curl -fsS -X POST \
|
||||
-H "Authorization: token $FORGE_TOKEN" \
|
||||
-H "Content-Type: application/json" \
|
||||
"$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE/comments" \
|
||||
--data-binary @/tmp/proposer.failure.comment.json || true
|
||||
done
|
||||
|
||||
- name: Late guard against duplicate batch materialization
|
||||
run: |
|
||||
set -euo pipefail
|
||||
source /tmp/proposer.env || true
|
||||
[[ "${SKIP:-0}" != "1" ]] || exit 0
|
||||
[[ "${APPLY_RC:-0}" == "0" ]] || exit 0
|
||||
[[ "${REBASE_RC:-0}" == "0" ]] || exit 0
|
||||
[[ "${NOOP:-0}" == "0" ]] || exit 0
|
||||
|
||||
REMOTE_SHA="$(git ls-remote --heads origin "$BATCH_BRANCH" | awk 'NR==1 {print $1}')"
|
||||
|
||||
if [[ -n "${REMOTE_SHA:-}" && "${REMOTE_SHA}" != "${END_SHA:-}" ]]; then
|
||||
echo 'SKIP=1' >> /tmp/proposer.env
|
||||
echo 'SKIP_REASON="batch_branch_already_materialized"' >> /tmp/proposer.env
|
||||
echo "OPEN_PR_BRANCH=${BATCH_BRANCH}" >> /tmp/proposer.env
|
||||
echo "Remote batch branch already exists at $REMOTE_SHA -> skip duplicate push/PR"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
echo "Late guard OK"
|
||||
|
||||
- name: Push bot branch
|
||||
if: ${{ always() }}
|
||||
env:
|
||||
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
source /tmp/proposer.env || true
|
||||
[[ "${SKIP:-0}" != "1" ]] || exit 0
|
||||
[[ "${APPLY_RC:-0}" == "0" ]] || { echo "Apply failed -> skip push"; exit 0; }
|
||||
[[ "${REBASE_RC:-0}" == "0" ]] || { echo "Rebase failed -> skip push"; exit 0; }
|
||||
[[ "${NOOP:-0}" == "0" ]] || { echo "No-op -> skip push"; exit 0; }
|
||||
[[ -n "${BRANCH:-}" ]] || { echo "BRANCH unset -> skip push"; exit 0; }
|
||||
|
||||
AUTH_URL="$(node --input-type=module -e '
|
||||
const [clone, tok] = process.argv.slice(1);
|
||||
const u = new URL(clone);
|
||||
u.username = "oauth2";
|
||||
u.password = tok;
|
||||
console.log(u.toString());
|
||||
' "$CLONE_URL" "$FORGE_TOKEN")"
|
||||
|
||||
git remote set-url origin "$AUTH_URL"
|
||||
git push -u origin "$BRANCH"
|
||||
|
||||
- name: Create PR + comment issues + close issues
|
||||
if: ${{ always() }}
|
||||
env:
|
||||
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
source /tmp/proposer.env || true
|
||||
[[ "${SKIP:-0}" != "1" ]] || exit 0
|
||||
[[ "${APPLY_RC:-0}" == "0" ]] || exit 0
|
||||
[[ "${REBASE_RC:-0}" == "0" ]] || exit 0
|
||||
[[ "${NOOP:-0}" == "0" ]] || exit 0
|
||||
[[ -n "${BRANCH:-}" ]] || { echo "BRANCH unset -> skip PR"; exit 0; }
|
||||
|
||||
test -n "${FORGE_TOKEN:-}" || { echo "Missing FORGE_TOKEN"; exit 1; }
|
||||
|
||||
OPEN_PRS_JSON="$(curl -fsS \
|
||||
-H "Authorization: token $FORGE_TOKEN" \
|
||||
-H "Accept: application/json" \
|
||||
"$API_BASE/api/v1/repos/$OWNER/$REPO/pulls?state=open&limit=100")"
|
||||
|
||||
export OPEN_PRS_JSON BATCH_BRANCH BATCH_KEY
|
||||
|
||||
EXISTING_PR_URL="$(node --input-type=module -e '
|
||||
const pulls = JSON.parse(process.env.OPEN_PRS_JSON || "[]");
|
||||
const branch = String(process.env.BATCH_BRANCH || "");
|
||||
const key = String(process.env.BATCH_KEY || "");
|
||||
const current = Array.isArray(pulls)
|
||||
? pulls.find((pr) => {
|
||||
const ref = String(pr?.head?.ref || "");
|
||||
const body = String(pr?.body || "");
|
||||
return (branch && ref === branch) || (key && body.includes(`Batch-Key: ${key}`));
|
||||
})
|
||||
: null;
|
||||
process.stdout.write(current ? String(current.html_url || current.url || "") : "");
|
||||
')"
|
||||
|
||||
if [[ -n "${EXISTING_PR_URL:-}" ]]; then
|
||||
echo "PR already exists for this batch: $EXISTING_PR_URL"
|
||||
exit 0
|
||||
fi
|
||||
|
||||
if [[ "${TARGET_COUNT:-0}" == "1" ]]; then
|
||||
PR_TITLE="proposer: apply ticket #${TARGET_PRIMARY_ISSUE}"
|
||||
else
|
||||
PR_TITLE="proposer: apply ${TARGET_COUNT} tickets on ${TARGET_CHEMIN}"
|
||||
fi
|
||||
|
||||
export PR_TITLE TARGET_CHEMIN TARGET_ISSUES BRANCH END_SHA DEFAULT_BRANCH OWNER BATCH_KEY
|
||||
|
||||
node --input-type=module -e '
|
||||
import fs from "node:fs";
|
||||
|
||||
const issues = String(process.env.TARGET_ISSUES || "")
|
||||
.trim()
|
||||
.split(/\s+/)
|
||||
.filter(Boolean);
|
||||
|
||||
const body = [
|
||||
`PR auto depuis ticket${issues.length > 1 ? "s" : ""} ${issues.map((n) => `#${n}`).join(", ")} (state/approved).`,
|
||||
"",
|
||||
`- Chemin: ${process.env.TARGET_CHEMIN || "(inconnu)"}`,
|
||||
"- Tickets:",
|
||||
...issues.map((n) => ` - #${n}`),
|
||||
`- Branche: ${process.env.BRANCH || ""}`,
|
||||
`- Commit: ${process.env.END_SHA || "unknown"}`,
|
||||
`- Batch-Key: ${process.env.BATCH_KEY || ""}`,
|
||||
"",
|
||||
"Merge si CI OK."
|
||||
].join("\n");
|
||||
|
||||
fs.writeFileSync(
|
||||
"/tmp/proposer.pr.json",
|
||||
JSON.stringify({
|
||||
title: process.env.PR_TITLE || "proposer: apply tickets",
|
||||
body,
|
||||
base: process.env.DEFAULT_BRANCH || "main",
|
||||
head: `${process.env.OWNER}:${process.env.BRANCH}`,
|
||||
allow_maintainer_edit: true
|
||||
})
|
||||
);
|
||||
'
|
||||
|
||||
PR_JSON="$(curl -fsS -X POST \
|
||||
-H "Authorization: token $FORGE_TOKEN" \
|
||||
-H "Content-Type: application/json" \
|
||||
"$API_BASE/api/v1/repos/$OWNER/$REPO/pulls" \
|
||||
--data-binary @/tmp/proposer.pr.json)"
|
||||
|
||||
PR_URL="$(node --input-type=module -e 'const pr = JSON.parse(process.argv[1] || "{}"); console.log(pr.html_url || pr.url || "");' "$PR_JSON")"
|
||||
|
||||
test -n "$PR_URL" || {
|
||||
echo "PR URL missing. Raw: $PR_JSON"
|
||||
exit 1
|
||||
}
|
||||
|
||||
for ISSUE in $TARGET_ISSUES; do
|
||||
export ISSUE PR_URL
|
||||
|
||||
node --input-type=module -e '
|
||||
import fs from "node:fs";
|
||||
|
||||
const issue = process.env.ISSUE || "";
|
||||
const url = process.env.PR_URL || "";
|
||||
const msg =
|
||||
`PR proposer creee pour le ticket #${issue} : ${url}\n\n` +
|
||||
`Le ticket est cloture automatiquement ; la discussion peut se poursuivre dans la PR.`;
|
||||
|
||||
fs.writeFileSync(
|
||||
"/tmp/proposer.issue.close.comment.json",
|
||||
JSON.stringify({ body: msg })
|
||||
);
|
||||
'
|
||||
|
||||
curl -fsS -X POST \
|
||||
-H "Authorization: token $FORGE_TOKEN" \
|
||||
-H "Content-Type: application/json" \
|
||||
"$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE/comments" \
|
||||
--data-binary @/tmp/proposer.issue.close.comment.json
|
||||
|
||||
curl -fsS -X PATCH \
|
||||
-H "Authorization: token $FORGE_TOKEN" \
|
||||
-H "Content-Type: application/json" \
|
||||
"$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE" \
|
||||
--data-binary '{"state":"closed"}'
|
||||
|
||||
ISSUE_STATE="$(curl -fsS \
|
||||
-H "Authorization: token $FORGE_TOKEN" \
|
||||
-H "Accept: application/json" \
|
||||
"$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE" | \
|
||||
node --input-type=module -e 'let s=""; process.stdin.on("data", d => s += d); process.stdin.on("end", () => { const j = JSON.parse(s || "{}"); process.stdout.write(String(j.state || "")); });')"
|
||||
|
||||
[[ "$ISSUE_STATE" == "closed" ]] || {
|
||||
echo "Issue #$ISSUE is still not closed after PATCH"
|
||||
exit 1
|
||||
}
|
||||
done
|
||||
|
||||
echo "PR: $PR_URL"
|
||||
|
||||
- name: Finalize
|
||||
if: ${{ always() }}
|
||||
run: |
|
||||
set -euo pipefail
|
||||
source /tmp/proposer.env || true
|
||||
[[ "${SKIP:-0}" != "1" ]] || exit 0
|
||||
|
||||
if [[ "${APPLY_RC:-0}" != "0" ]]; then
|
||||
echo "Apply failed (rc=${APPLY_RC})"
|
||||
exit "${APPLY_RC}"
|
||||
fi
|
||||
|
||||
if [[ "${REBASE_RC:-0}" != "0" ]]; then
|
||||
echo "Rebase failed (rc=${REBASE_RC})"
|
||||
exit "${REBASE_RC}"
|
||||
fi
|
||||
|
||||
echo "Proposer queue OK"
|
||||
@@ -3,7 +3,7 @@ on: [push, workflow_dispatch]
|
||||
|
||||
jobs:
|
||||
smoke:
|
||||
runs-on: ubuntu-latest
|
||||
runs-on: mac-ci
|
||||
steps:
|
||||
- run: node -v && npm -v
|
||||
- run: echo "runner OK"
|
||||
|
||||
11
.gitignore
vendored
@@ -3,6 +3,10 @@
|
||||
.env.*
|
||||
!.env.example
|
||||
|
||||
# dev-only
|
||||
public/_auth/whoami
|
||||
public/_auth/whoami/*
|
||||
|
||||
# --- local backups ---
|
||||
*.bak
|
||||
*.bak.*
|
||||
@@ -21,3 +25,10 @@ dist/
|
||||
# local backups
|
||||
Dockerfile.bak.*
|
||||
public/favicon_io.zip
|
||||
|
||||
# macOS
|
||||
.DS_Store
|
||||
|
||||
# local temp workspace
|
||||
.tmp/
|
||||
public/__ops/health.json
|
||||
|
||||
18
Dockerfile
@@ -12,7 +12,7 @@ ENV npm_config_update_notifier=false \
|
||||
# (Optionnel mais propre) git + certificats
|
||||
RUN apt-get -o Acquire::Retries=5 -o Acquire::ForceIPv4=true update \
|
||||
&& apt-get install -y --no-install-recommends ca-certificates git \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# Déps d’abord (cache Docker)
|
||||
COPY package.json package-lock.json ./
|
||||
@@ -25,9 +25,21 @@ COPY . .
|
||||
ARG PUBLIC_GITEA_BASE
|
||||
ARG PUBLIC_GITEA_OWNER
|
||||
ARG PUBLIC_GITEA_REPO
|
||||
|
||||
# ✅ Canonical + sitemap base (astro.config.mjs lit process.env.PUBLIC_SITE)
|
||||
ARG PUBLIC_SITE
|
||||
|
||||
# ✅ Garde-fou : si 1 → build fail si PUBLIC_SITE absent
|
||||
ARG REQUIRE_PUBLIC_SITE=0
|
||||
|
||||
ENV PUBLIC_GITEA_BASE=$PUBLIC_GITEA_BASE \
|
||||
PUBLIC_GITEA_OWNER=$PUBLIC_GITEA_OWNER \
|
||||
PUBLIC_GITEA_REPO=$PUBLIC_GITEA_REPO
|
||||
PUBLIC_GITEA_REPO=$PUBLIC_GITEA_REPO \
|
||||
PUBLIC_SITE=$PUBLIC_SITE \
|
||||
REQUIRE_PUBLIC_SITE=$REQUIRE_PUBLIC_SITE
|
||||
|
||||
# ✅ antifragile : refuse de builder sans PUBLIC_SITE quand on l’exige
|
||||
RUN node -e "if (process.env.REQUIRE_PUBLIC_SITE==='1' && !process.env.PUBLIC_SITE) { console.error('FATAL: PUBLIC_SITE is required (canonical/sitemap).'); process.exit(1) }"
|
||||
|
||||
# Build Astro (postbuild tourne via npm scripts)
|
||||
RUN npm run build
|
||||
@@ -38,4 +50,4 @@ COPY nginx.conf /etc/nginx/conf.d/default.conf
|
||||
COPY --from=build /app/dist/ /usr/share/nginx/html/
|
||||
RUN find /usr/share/nginx/html -type d -exec chmod 755 {} \; \
|
||||
&& find /usr/share/nginx/html -type f -exec chmod 644 {} \;
|
||||
EXPOSE 80
|
||||
EXPOSE 80
|
||||
@@ -86,6 +86,10 @@ function rehypeDedupeIds() {
|
||||
}
|
||||
|
||||
export default defineConfig({
|
||||
legacy: {
|
||||
collectionsBackwardsCompat: true,
|
||||
},
|
||||
|
||||
output: "static",
|
||||
trailingSlash: "always",
|
||||
site: process.env.PUBLIC_SITE ?? "http://localhost:4321",
|
||||
|
||||
14
config/anchor-churn-allowlist.json
Normal file
@@ -0,0 +1,14 @@
|
||||
{
|
||||
"accepted_resets": {
|
||||
"archicrat-ia/prologue/index.html": "Reset intentionnel des ancres après réimport DOCX et révision substantielle du prologue depuis la source officielle. Site neuf, sans annotations ni compatibilité descendante à préserver.",
|
||||
"archicrat-ia/chapitre-1/index.html": "Reset intentionnel des ancres après révision doctrinale substantielle du chapitre 1. Site neuf, sans annotations ni compatibilité descendante à préserver.",
|
||||
"archicrat-ia/chapitre-2/index.html": "Reset intentionnel des ancres après restauration doctrinale substantielle du chapitre 2 depuis la bonne source officielle. Site neuf, sans annotations ni compatibilité descendante à préserver.",
|
||||
"archicrat-ia/chapitre-3/index.html": "Reset intentionnel des ancres après réimport DOCX et perfectionnement doctrinal substantiel du chapitre 3 depuis la source officielle. Site neuf, sans annotations ni compatibilité descendante à préserver.",
|
||||
"archicrat-ia/chapitre-4/index.html": "Reset intentionnel des ancres après réimport DOCX et stabilisation doctrinale substantielle du chapitre 4 depuis la source officielle. Site neuf, sans annotations ni compatibilité descendante à préserver.",
|
||||
"archicrat-ia/chapitre-5/index.html": "Reset intentionnel des ancres après réimport DOCX et stabilisation doctrinale substantielle du chapitre 5 depuis la source officielle. Site neuf, sans annotations ni compatibilité descendante à préserver.",
|
||||
"archicrat-ia/conclusion/index.html": "Reset intentionnel des ancres après réimport DOCX et révision substantielle de la conclusion depuis la source officielle. Site neuf, sans annotations ni compatibilité descendante à préserver."
|
||||
},
|
||||
"accepted_prefixes": {
|
||||
"glossaire/": "Reset intentionnel des ancres après révision substantielle des fiches paradigmes et doctrines du glossaire. Site neuf, sans annotations ni compatibilité descendante à préserver."
|
||||
}
|
||||
}
|
||||
@@ -5,6 +5,8 @@ services:
|
||||
dockerfile: Dockerfile
|
||||
network: host
|
||||
args:
|
||||
REQUIRE_PUBLIC_SITE: "1"
|
||||
PUBLIC_SITE: "https://staging.archicratie.trans-hands.synology.me"
|
||||
PUBLIC_GITEA_BASE: ${PUBLIC_GITEA_BASE}
|
||||
PUBLIC_GITEA_OWNER: ${PUBLIC_GITEA_OWNER}
|
||||
PUBLIC_GITEA_REPO: ${PUBLIC_GITEA_REPO}
|
||||
@@ -20,6 +22,8 @@ services:
|
||||
dockerfile: Dockerfile
|
||||
network: host
|
||||
args:
|
||||
REQUIRE_PUBLIC_SITE: "1"
|
||||
PUBLIC_SITE: "https://archicratie.trans-hands.synology.me"
|
||||
PUBLIC_GITEA_BASE: ${PUBLIC_GITEA_BASE}
|
||||
PUBLIC_GITEA_OWNER: ${PUBLIC_GITEA_OWNER}
|
||||
PUBLIC_GITEA_REPO: ${PUBLIC_GITEA_REPO}
|
||||
@@ -27,4 +31,4 @@ services:
|
||||
container_name: archicratie-web-green
|
||||
ports:
|
||||
- "127.0.0.1:8082:80"
|
||||
restart: unless-stopped
|
||||
restart: unless-stopped
|
||||
327
docs/EDITORIAL-ANNOTATIONS-SPEC.md
Normal file
@@ -0,0 +1,327 @@
|
||||
# SPEC — Annotations éditoriales (YAML v1) + merge + anti-doublon
|
||||
> Objectif : permettre aux tickets (Gitea) de déposer “Références / Médias / Commentaires” dans `src/annotations/**`,
|
||||
> de façon univoque, stable, et sans régression.
|
||||
|
||||
## 0) Contexte et intention
|
||||
Le site est statique. L’édition collaborative se fait via :
|
||||
- un mode “proposition” (UI / modal)
|
||||
- un ticket Gitea (issue) standardisé
|
||||
- un script d’application côté éditeur (`apply-ticket.mjs` ou équivalent)
|
||||
- génération d’un YAML d’annotations versionné dans Git
|
||||
|
||||
La donnée d’annotation doit être :
|
||||
- **audit-able** (Git)
|
||||
- **merge-able** (sans tout casser)
|
||||
- **stable** (IDs paragraphes / liens / médias)
|
||||
- **scalable** (éviter YAML monstrueux à long terme)
|
||||
|
||||
## 1) Arborescence canonique
|
||||
### 1.1 Un workKey par “ouvrage / section du site”
|
||||
On veut une univocité entre :
|
||||
- SiteNav (Méthode, Essai-thèse, Traité, Cas IA, Glossaire, Atlas)
|
||||
et
|
||||
- l’arborescence annotations
|
||||
|
||||
Proposition canonique (workKey = route racine) :
|
||||
- `methode`
|
||||
- `archicrat-ia` (Essai-thèse ArchiCraT-IA)
|
||||
- `traite`
|
||||
- `ia`
|
||||
- `glossaire`
|
||||
- `atlas`
|
||||
|
||||
### 1.2 Règle de stockage “v1”
|
||||
**Par page**, un YAML unique :
|
||||
|
||||
src/annotations/<workKey>/<slugSansWorkKey>.yml
|
||||
|
||||
Exemples :
|
||||
- Page : `/archicrat-ia/prologue/`
|
||||
- slug content = `archicrat-ia/prologue`
|
||||
- fichier : `src/annotations/archicrat-ia/prologue.yml`
|
||||
|
||||
- Page : `/traite/00-demarrage/`
|
||||
- fichier : `src/annotations/traite/00-demarrage.yml`
|
||||
|
||||
> Note : “slugSansWorkKey” = la partie après `<workKey>/`.
|
||||
> S’il y a des sous-dossiers (chapitres), le chemin reflète la structure : `chapitre-1/section-a.yml` si on choisit du sharding.
|
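To make the rule concrete, here is a minimal Node sketch of the mapping from page slug to annotation file; the helper name `annotationFileFor` is illustrative and is not part of the existing `scripts/`.

```js
// annotation-path.mjs — sketch of the "1 YAML per page" storage rule (Option A)
import path from "node:path";

// "archicrat-ia/prologue" -> "src/annotations/archicrat-ia/prologue.yml"
export function annotationFileFor(pageSlug) {
  const slug = String(pageSlug).replace(/^\/+|\/+$/g, ""); // trim leading/trailing "/"
  const [workKey, ...rest] = slug.split("/");
  if (!workKey || rest.length === 0) throw new Error(`Unexpected page slug: ${pageSlug}`);
  return path.join("src", "annotations", workKey, `${rest.join("/")}.yml`);
}

// annotationFileFor("/traite/00-demarrage/") -> "src/annotations/traite/00-demarrage.yml"
```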
||||
|
||||
## 2) Question “gros YAML” : page unique vs sharding par paragraphe
|
||||
### 2.1 Option A (v1 recommandée) : 1 YAML par page
|
||||
Avantages :
|
||||
- simple
|
||||
- peu de fichiers
|
||||
- diff lisible si volume modéré
|
||||
- cohérent avec un modèle “annotations par page”
|
||||
|
||||
Inconvénients :
|
||||
- YAML peut grossir si milliers d’annotations
|
||||
|
||||
### 2.2 Option B (v2 future) : sharding par paragraphe
|
||||
|
||||
src/annotations/<workKey>/<slugSansWorkKey>/<paraId>.yml
|
||||
|
||||
Avantages :
|
||||
- fichiers petits
|
||||
- merges moins conflictuels
|
||||
Inconvénients :
|
||||
- plus de fichiers
|
||||
- tooling plus complexe (indexation + merge multi-fichiers)
|
||||
|
||||
### 2.3 Recommandation de mission (sans casser l’existant)
|
||||
- On démarre en **Option A**.
|
||||
- On se garde une migration future (v2) quand le volume réel le justifie.
|
||||
- On impose dès v1 : **clé unique + merge déterministe + anti-doublon**, ce qui rend la migration future possible.
|
||||
|
||||
## 3) Format YAML v1 (schéma complet)
|
||||
### 3.1 Top-level
|
||||
en yaml :
|
||||
|
||||
schema: 1
|
||||
|
||||
# Optionnel mais recommandé (doit matcher la page)
|
||||
page: "<workKey>/<slugSansWorkKey>"
|
||||
|
||||
meta:
|
||||
title: "Titre de la page (optionnel)"
|
||||
updatedAt: "2026-02-21T12:34:56Z" # ISO8601
|
||||
updatedBy: "username" # compte editor
|
||||
source:
|
||||
kind: "ticket"
|
||||
id: 123
|
||||
url: "https://gitea.../issues/123"
|
||||
|
||||
paras:
|
||||
"<paraId>":
|
||||
references: []
|
||||
media: []
|
||||
comments: []
|
||||
|
||||
### 3.2 paras : clé = paraId (ex: p-0-d7974f88)
|
||||
|
||||
Chaque paragraphe peut porter 3 types d’éléments :
|
||||
|
||||
references
|
||||
|
||||
media
|
||||
|
||||
comments
|
||||
|
||||
Règle : si une section est vide, elle peut être [] ou absente.
|
||||
Mais pour simplifier les merges, on recommande de garder la forme canonique avec [].
|
||||
|
||||
## 4) Formats des items + clés uniques
|
||||
### 4.1 References
|
||||
#### 4.1.1 Format
|
||||
|
||||
references:
|
||||
- id: "ref:doi:10.1234/abcd.efgh" # clé stable (voir 4.1.2)
|
||||
kind: "doi" # doi | url | isbn | arxiv | hal | other
|
||||
label: "Titre court"
|
||||
target: "https://doi.org/10.1234/abcd.efgh"
|
||||
note: "Pourquoi c’est pertinent (optionnel)"
|
||||
addedAt: "2026-02-21T12:34:56Z"
|
||||
addedBy: "username"
|
||||
|
||||
#### 4.1.2 Règle de clé unique (anti-doublon)
|
||||
|
||||
`id` doit être stable et déterministe :

- doi → `ref:doi:<doi>`
- isbn → `ref:isbn:<isbn>`
- url → `ref:url:<normalizedUrl>`

Normalisation URL (v1), au minimum :

- trim
- lowercase scheme/host
- retirer le trailing slash si non significatif
- conserver la query si importante
|
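A minimal sketch of these id rules, following the v1 normalisation above; `refId` and `normalizeUrl` are illustrative names, not existing repo code.

```js
// ref-id.mjs — sketch of deterministic reference ids (v1 rules above)
export function normalizeUrl(raw) {
  const u = new URL(String(raw).trim()); // WHATWG URL already lowercases scheme and host
  let s = u.toString();
  // drop the trailing slash only when it carries no information (bare path, no query, no fragment)
  if (u.pathname === "/" && !u.search && !u.hash) s = s.slice(0, -1);
  return s;
}

export function refId({ kind, target }) {
  if (kind === "doi")  return `ref:doi:${target}`;
  if (kind === "isbn") return `ref:isbn:${target}`;
  if (kind === "url")  return `ref:url:${normalizeUrl(target)}`;
  return `ref:${kind}:${target}`; // arxiv / hal / other: keyed as-is in this sketch
}

// refId({ kind: "doi", target: "10.1234/abcd.efgh" }) -> "ref:doi:10.1234/abcd.efgh"
```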
||||
|
||||
#### 4.1.3 Merge / précédence
|
||||
|
||||
Quand on merge deux listes `references` :

- union par `id` (clé unique)
- si le même `id` existe des deux côtés :
  - conserver `kind`/`target` de l'item le plus « riche » (target non vide gagne)
  - `note` : si les notes diffèrent, garder les deux en les séparant (ex. `noteA + "\n---\n" + noteB`)
  - `addedAt` : conserver le plus ancien
  - `addedBy` : conserver le premier (ou une liste si on veut, mais v1 simple : first)
|
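A sketch of this precedence logic, as one possible building block for the apply script; `mergeReferences` is an illustrative name.

```js
// merge-refs.mjs — union by id, "richer item wins", notes concatenated
export function mergeReferences(a = [], b = []) {
  const byId = new Map();
  for (const item of [...a, ...b]) {
    const prev = byId.get(item.id);
    if (!prev) { byId.set(item.id, { ...item }); continue; }
    // non-empty target (and its kind) wins over an empty one
    if (!prev.target && item.target) { prev.target = item.target; prev.kind = item.kind || prev.kind; }
    // different notes are both kept, separated as suggested above
    if (item.note && item.note !== prev.note) {
      prev.note = prev.note ? `${prev.note}\n---\n${item.note}` : item.note;
    }
    // oldest addedAt is preserved; addedBy is never overwritten (first wins)
    if (item.addedAt && (!prev.addedAt || item.addedAt < prev.addedAt)) prev.addedAt = item.addedAt;
  }
  return [...byId.values()];
}
```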
||||
|
||||
### 4.2 Media
|
||||
#### 4.2.1 Format
|
||||
|
||||
media:
|
||||
- id: "media:image:sha256:abcd..." # clé stable (voir 4.2.2)
|
||||
type: "image" # image | video | audio | file
|
||||
src: "/public/media/<workKey>/<slugSansWorkKey>/<paraId>/<filename>"
|
||||
caption: "Légende (optionnel)"
|
||||
credit: "Auteur/source (optionnel)"
|
||||
license: "CC-BY (optionnel)"
|
||||
addedAt: "2026-02-21T12:34:56Z"
|
||||
addedBy: "username"
|
||||
|
||||
#### 4.2.2 Règle de clé unique
|
||||
|
||||
id déterministe :
|
||||
|
||||
idéal : hash du fichier (sha256)
|
||||
|
||||
sinon : hash de type + src
|
||||
|
||||
v1 (si on ne calcule pas de hash fichier) :
|
||||
|
||||
media:<type>:<src>
|
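A sketch of both id strategies (hash of the file when it is available, `media:<type>:<src>` otherwise); `mediaId` is an illustrative name.

```js
// media-id.mjs — stable media ids: file hash preferred, src fallback (v1)
import { createHash } from "node:crypto";
import { readFileSync } from "node:fs";

export function mediaId({ type, src }, fileOnDisk = null) {
  if (fileOnDisk) {
    const sha = createHash("sha256").update(readFileSync(fileOnDisk)).digest("hex");
    return `media:${type}:sha256:${sha}`;
  }
  return `media:${type}:${src}`; // v1 fallback when no file hash is computed
}
```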
||||
|
||||
#### 4.2.3 Merge / précédence
|
||||
|
||||
union par id
|
||||
|
||||
si collision :
|
||||
|
||||
garder src identique (sinon c’est un bug)
|
||||
|
||||
fusionner caption/credit/license selon “non vide gagne”
|
||||
|
||||
addedAt : plus ancien
|
||||
|
||||
### 4.3 Comments
|
||||
#### 4.3.1 Format
|
||||
|
||||
comments:
|
||||
- id: "cmt:20260221T123456Z:username:0001"
|
||||
kind: "comment" # comment | question | objection | todo | validation
|
||||
text: "Texte du commentaire"
|
||||
status: "open" # open | resolved
|
||||
addedAt: "2026-02-21T12:34:56Z"
|
||||
addedBy: "username"
|
||||
source:
|
||||
kind: "ticket"
|
||||
id: 123
|
||||
|
||||
#### 4.3.2 Clé unique
|
||||
|
||||
Les commentaires sont “append-only” → id peut être générée (timestamp + user + compteur)
|
||||
|
||||
Anti-doublon : si on ré-applique un ticket, on refuse de dupliquer un id existant.
|
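A sketch of the id generation and of the anti-duplicate append (illustrative helper names):

```js
// comment-id.mjs — append-only comment ids (timestamp + user + counter)
export function commentId(user, counter, now = new Date()) {
  // "2026-02-21T12:34:56.000Z" -> "20260221T123456Z"
  const ts = now.toISOString().replace(/[-:]/g, "").replace(/\.\d{3}Z$/, "Z");
  return `cmt:${ts}:${user}:${String(counter).padStart(4, "0")}`;
}

// Anti-duplicate: re-applying a ticket must never duplicate an existing id.
export function appendComment(list, comment) {
  if (list.some((c) => c.id === comment.id)) return list;
  return [...list, comment];
}
```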
||||
|
||||
#### 4.3.3 Merge / précédence
|
||||
|
||||
union par id
|
||||
|
||||
collisions rares, mais si elles arrivent :
|
||||
|
||||
si textes différents → garder les deux (on renomme l’id du second)
|
||||
|
||||
## 5) Règles globales de merge (résumé)
|
||||
|
||||
Quand on applique un ticket sur un YAML existant :
|
||||
|
||||
- vérifier `schema == 1`
- vérifier `page` si présent : doit matcher `<workKey>/<slugSansWorkKey>`
- `paras` :
  - créer `paras[paraId]` si absent
  - pour chaque liste (`references` / `media` / `comments`) :
    - merge par `id` (anti-doublon)
    - appliquer les règles de précédence (non vide gagne / concat note / comments append-only)
|
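A sketch of this page-level flow; `applyTicketToPage` and `unionById` are illustrative names, and the per-list precedence rules from section 4 are reduced here to "union by id, first occurrence wins".

```js
// apply-annotations.mjs — page-level merge flow (simplified sketch)
function unionById(current = [], incoming = []) {
  const byId = new Map();
  for (const item of [...current, ...incoming]) {
    if (item?.id && !byId.has(item.id)) byId.set(item.id, item); // anti-doublon: an existing id is never duplicated
  }
  return [...byId.values()];
}

export function applyTicketToPage(doc, pageSlug, ticketParas) {
  if (doc.schema !== 1) throw new Error(`unsupported schema: ${doc.schema}`);
  if (doc.page && doc.page !== pageSlug) throw new Error(`page mismatch: ${doc.page} != ${pageSlug}`);
  doc.paras ??= {};
  for (const [paraId, incoming] of Object.entries(ticketParas ?? {})) {
    const entry = (doc.paras[paraId] ??= { references: [], media: [], comments: [] });
    for (const key of ["references", "media", "comments"]) {
      entry[key] = unionById(entry[key], incoming?.[key]);
    }
  }
  return doc;
}
```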
||||
|
||||
## 6) Table de correspondance “UI ticket → YAML”
|
||||
|
||||
Cette table permet à un successeur IA d’implémenter apply-ticket.mjs sans ambiguïté.
|
||||
|
||||
### 6.1 Champs UI minimaux
|
||||
|
||||
workKey (sélection implicite via page)
|
||||
|
||||
pagePath (ex: /archicrat-ia/prologue/)
|
||||
|
||||
pageSlug (ex: archicrat-ia/prologue)
|
||||
|
||||
paraId (ex: p-0-d7974f88)
|
||||
|
||||
kind :
|
||||
|
||||
reference
|
||||
|
||||
media
|
||||
|
||||
comment
|
||||
|
||||
### 6.2 Mapping exact
|
||||
|
||||
| UI kind | UI champs | YAML cible |
|
||||
| --------- | ----------------------------------------------------------- | ---------------------------- |
|
||||
| reference | kind(doi/url/isbn), target, label, note | `paras[paraId].references[]` |
|
||||
| media | type(image/video/audio/file), src, caption, credit, license | `paras[paraId].media[]` |
|
||||
| comment | kind(comment/question/objection/todo/validation), text | `paras[paraId].comments[]` |
|
||||
|
||||
### 6.3 Règles de génération d’ID (implémentation)
|
||||
|
||||
reference.id :
|
||||
|
||||
doi : ref:doi:${doi}
|
||||
|
||||
isbn : ref:isbn:${isbn}
|
||||
|
||||
url : ref:url:${normalize(url)}
|
||||
|
||||
media.id :
|
||||
|
||||
media:${type}:${src}
|
||||
|
||||
comment.id :
|
||||
|
||||
cmt:${timestamp}:${user}:${counter}
|
||||
|
||||
## 7) Validation YAML (sanity)
|
||||
|
||||
Avant commit (et en CI) :
|
||||
|
||||
- YAML parse OK
- `schema` OK
- `page`, si présent, cohérent
- `paras` est un mapping
- paraId match le pattern `^p-\d+-[a-f0-9]{8}$` (existant)
- `src` media pointe dans `/public/media/...` (ou `/media/...` si on choisit un alias, mais v1 canon : `/public/media/...`)
|
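A sketch of these checks; YAML parsing itself is left to the caller (e.g. js-yaml), and `validateAnnotationDoc` is an illustrative name, not the real `check-annotations*.mjs`.

```js
// check-annotation.mjs — sanity checks on a parsed annotation document
const PARA_ID_RE = /^p-\d+-[a-f0-9]{8}$/;

export function validateAnnotationDoc(doc, expectedPage) {
  const errors = [];
  if (doc?.schema !== 1) errors.push("schema must be 1");
  if (doc?.page && expectedPage && doc.page !== expectedPage) {
    errors.push(`page "${doc.page}" does not match "${expectedPage}"`);
  }
  const paras = doc?.paras ?? {};
  if (typeof paras !== "object" || paras === null || Array.isArray(paras)) {
    errors.push("paras must be a mapping");
    return errors;
  }
  for (const [paraId, entry] of Object.entries(paras)) {
    if (!PARA_ID_RE.test(paraId)) errors.push(`invalid paraId: ${paraId}`);
    for (const m of entry?.media ?? []) {
      if (!String(m?.src ?? "").startsWith("/public/media/")) errors.push(`media src outside /public/media/: ${m?.src}`);
    }
  }
  return errors;
}
```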
||||
|
||||
## 8) Notes de compatibilité
|
||||
|
||||
Les routes “Essai-thèse” ont été migrées vers /archicrat-ia/*.
|
||||
|
||||
Les anciennes routes /archicratie/archicrat-ia/* peuvent exister en legacy, mais la donnée canonique d’annotation doit suivre le workKey final (archicrat-ia).
|
||||
|
||||
## 9) Ce que l’étape 9 devra implémenter
|
||||
|
||||
pipeline : ticket → YAML (apply-ticket)
|
||||
|
||||
index : build-annotations-index + check-annotations
|
||||
|
||||
tooling : détection médias orphelins / liens cassés
|
||||
|
||||
éventuellement : migration vers sharding par paragraphe (v2) si volume réel le justifie
|
||||
152
docs/GLOSSARY-GRAPH-GOVERNANCE.md
Normal file
@@ -0,0 +1,152 @@
|
||||
# Gouvernance du graphe du glossaire
|
||||
|
||||
## Lois actuelles vérifiées automatiquement
|
||||
|
||||
- Aucune fiche sans navigation.
|
||||
- Aucun `primaryNext` mort.
|
||||
- Aucun cycle direct.
|
||||
- Aucun `primaryNext` vers soi-même.
|
||||
- Aucune famille sans defaults.
|
||||
- Aucun hub `primaryNext` au-dessus du seuil 5.
|
||||
- Les paths effectifs tiennent compte des defaults famille.
|
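A sketch of how part of these invariants could be checked automatically; the `primaryNext` field name is taken from this document, and the checks on family defaults and orphan pages are left out.

```js
// check-glossary-graph.mjs — subset of the automatic graph invariants (illustrative)
export function checkGraph(nodes /* Map<slug, { primaryNext?: string }> */) {
  const errors = [];
  const inbound = new Map();
  for (const [slug, node] of nodes) {
    const next = node.primaryNext;
    if (!next) continue;
    if (next === slug) errors.push(`self primaryNext: ${slug}`);
    else if (!nodes.has(next)) errors.push(`dead primaryNext: ${slug} -> ${next}`);
    else if (nodes.get(next).primaryNext === slug) errors.push(`direct cycle: ${slug} <-> ${next}`);
    inbound.set(next, (inbound.get(next) ?? 0) + 1);
  }
  for (const [slug, count] of inbound) {
    if (count > 5) errors.push(`primaryNext hub above threshold 5: ${slug} (${count})`);
  }
  return errors;
}
```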
||||
|
||||
## Liens `primaryNext` à surveiller qualitativement
|
||||
|
||||
- `contractualisme-hobbesien → droit-naturel-et-propriete`
|
||||
- `exception-souveraine → droit-naturel-et-propriete`
|
||||
- `regime-de-co-viabilite → gouvernance-des-communs`
|
||||
- `memoire-symbolique-et-instantaneite-computationnelle → meta-regime`
|
||||
- `scene-darchicration → co-viabilite`
|
||||
|
||||
## Critère de décision
|
||||
|
||||
Un `primaryNext` est bon s’il produit au moins l’un des effets suivants :
|
||||
|
||||
- déplier la notion ;
|
||||
- changer de niveau ;
|
||||
- rendre opératoire ;
|
||||
- mettre en tension ;
|
||||
- concrétiser.
|
||||
|
||||
Un `primaryNext` est faible s’il est seulement voisin, décoratif ou encyclopédique.
|
||||
|
||||
---
|
||||
|
||||
## Dynamique réelle du graphe (niveau avancé)
|
||||
|
||||
### 1. Attracteurs
|
||||
|
||||
L’audit de convergence effective met en évidence des nœuds fortement attractifs :
|
||||
|
||||
* dispositifs méthodologiques (audit, cartographie)
|
||||
* concepts fondamentaux (co-viabilité, tension, archicration)
|
||||
|
||||
Ces nœuds structurent la circulation globale du glossaire.
|
||||
|
||||
---
|
||||
|
||||
### 2. Bassins de convergence
|
||||
|
||||
Le graphe ne se distribue pas uniformément.
|
||||
|
||||
Il tend à s’organiser en bassins :
|
||||
|
||||
* bassin conceptuel central
|
||||
* bassin méthodologique
|
||||
* bassins secondaires (paradigmes, pathologies)
|
||||
|
||||
Un bassin est défini comme un ensemble de parcours convergeant vers un même noyau.
|
||||
|
||||
---
|
||||
|
||||
### 3. Risque de surconvergence
|
||||
|
||||
Lorsque trop de parcours convergent vers les mêmes nœuds :
|
||||
|
||||
* les parcours deviennent redondants
|
||||
* la navigation perd en différenciation
|
||||
* l’expérience de lecture devient monotone
|
||||
|
||||
Ce phénomène ne constitue pas une erreur technique, mais un déséquilibre structurel.
|
||||
|
||||
---
|
||||
|
||||
### 4. Bifurcation effective
|
||||
|
||||
Les nœuds à forte bifurcation jouent un rôle clé :
|
||||
|
||||
* ils ouvrent des trajectoires multiples
|
||||
* ils structurent la diversité des parcours
|
||||
|
||||
Ils doivent être préservés comme points d’expansion.
|
||||
|
||||
---
|
||||
|
||||
### 5. Principe de régulation avancée
|
||||
|
||||
L’objectif n’est pas de supprimer les attracteurs, mais de :
|
||||
|
||||
* limiter leur domination excessive
|
||||
* maintenir plusieurs bassins actifs
|
||||
* garantir des trajectoires réellement distinctes
|
||||
|
||||
---
|
||||
|
||||
### 6. Règle pratique
|
||||
|
||||
Avant toute modification :
|
||||
|
||||
* vérifier l’impact sur les convergences effectives
|
||||
* éviter d’ajouter un lien vers un nœud déjà dominant
|
||||
* privilégier l’ouverture de nouvelles zones de circulation
|
||||
|
||||
---
|
||||
|
||||
### 7. Nature du système
|
||||
|
||||
Le glossaire ne doit pas être compris comme :
|
||||
|
||||
* un index
|
||||
* ni un arbre
|
||||
|
||||
mais comme :
|
||||
|
||||
→ un graphe dynamique de circulation conceptuelle
|
||||
|
||||
dont la structure influence directement la pensée du lecteur.
|
||||
|
||||
|
||||
|
||||
La première désaturation a déplacé les attracteurs vers `scene-depreuve`, `journal-de-justification`, `regime-de-co-viabilite`.
|
||||
|
||||
## Attracteurs structurels du graphe
|
||||
|
||||
Certains nœuds présentent une forte convergence effective. Cela ne constitue pas un défaut, mais une propriété structurelle du modèle.
|
||||
|
||||
### Attracteurs assumés
|
||||
|
||||
- scene-depreuve (scène)
|
||||
→ cœur opératoire de mise à l’épreuve des régulations
|
||||
→ point de passage légitime pour une grande partie des parcours
|
||||
|
||||
- audit-archicratique (dispositif méthodologique)
|
||||
→ point d’entrée opératoire vers l’analyse des régulations
|
||||
|
||||
- cartographie-des-scenes-manquantes (dispositif méthodologique)
|
||||
→ prolongement naturel de l’audit vers l’identification des lacunes
|
||||
|
||||
- co-viabilite (concept fondamental)
|
||||
→ horizon de stabilisation des tensions
|
||||
|
||||
### Principe
|
||||
|
||||
Un attracteur est acceptable si :
|
||||
|
||||
- il correspond à un changement de niveau (concept → scène → dispositif)
|
||||
- il rend opératoire une notion
|
||||
- il constitue un passage obligé théoriquement justifié
|
||||
|
||||
Un attracteur devient problématique s’il :
|
||||
|
||||
- absorbe sans transformation
|
||||
- remplace une articulation par une répétition
|
||||
@@ -25,6 +25,19 @@ Objectif : déployer une nouvelle version du site sur le NAS (DS220+) sans jamai
|
||||
|
||||
➡️ Déploiement = `docs/DEPLOY_PROD_SYNOLOGY_DS220.md` (procédure détaillée, à jour).
|
||||
|
||||
## Mise à jour (2026-03-03) — Gate CI de déploiement (SKIP / HOTPATCH / FULL) + preuves A/B
|
||||
|
||||
La procédure de déploiement “vivante” est désormais pilotée par **Gitea Actions** via le workflow :
|
||||
- `.gitea/workflows/deploy-staging-live.yml`
|
||||
|
||||
Ce workflow décide automatiquement :
|
||||
- **FULL** (rebuild + restart blue + green) dès qu’un changement impacte le build (ex: `src/content/`, `src/pages/`, `scripts/`, `src/anchors/`, etc.)
|
||||
- **HOTPATCH** (patch JSON + copie media) quand le changement ne concerne que `src/annotations/` et/ou `public/media/`
|
||||
- **SKIP** sinon
|
||||
|
||||
Les preuves et la procédure de test reproductible A/B sont documentées dans :
|
||||
➡️ `docs/runbooks/DEPLOY-BLUE-GREEN.md` → section “CI Deploy gate (merge-proof) + Tests A/B + preuve alias injection”.
|
||||
|
||||
## Schéma (résumé, sans commandes)
|
||||
|
||||
- Ne jamais toucher au slot live.
|
||||
|
||||
1393
docs/OPS-LOCALHOST-AUTO-SYNC.md
Normal file
@@ -202,4 +202,33 @@ docker compose logs --tail=200 web_blue
|
||||
docker compose logs --tail=200 web_green
|
||||
|
||||
# Si tu veux suivre en live :
|
||||
docker compose logs -f web_green
|
||||
docker compose logs -f web_green
|
||||
|
||||
|
||||
## Historique synthétique (2026-03-03) — Stabilisation CI/CD “zéro surprise”
|
||||
|
||||
### Problème initial observé
|
||||
- Déploiement parfois lancé en “hotpatch” alors qu’un rebuild était nécessaire.
|
||||
- Sur merge commits, la détection de fichiers modifiés pouvait être ambiguë.
|
||||
- Résultat : besoin de `force=1` manuel pour éviter des incohérences.
|
||||
|
||||
### Correctif appliqué
|
||||
- Gate CI rendu **merge-proof** :
|
||||
- lecture de `BEFORE` et `AFTER` depuis `event.json`
|
||||
- calcul des fichiers modifiés via `git diff --name-only BEFORE AFTER`
|
||||
|
||||
- Politique de décision stabilisée :
|
||||
- FULL auto dès qu’un changement impacte build/runtime (content/pages/scripts/anchors/etc.)
|
||||
- HOTPATCH auto uniquement pour annotations/media
|
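A sketch of that decision, under the assumption that the path prefixes below match the policy described above; the authoritative lists live in `.gitea/workflows/deploy-staging-live.yml`.

```js
// deploy-gate.mjs — merge-proof FULL / HOTPATCH / SKIP decision (illustrative prefixes)
import { execSync } from "node:child_process";

const FULL_PREFIXES = ["src/content/", "src/pages/", "src/components/", "src/layouts/",
  "src/styles/", "src/anchors/", "scripts/", "Dockerfile", "docker-compose.yml",
  "package.json", "package-lock.json"];
const HOTPATCH_PREFIXES = ["src/annotations/", "public/media/"];

export function decideMode(before, after) {
  const changed = execSync(`git diff --name-only ${before} ${after}`, { encoding: "utf8" })
    .split("\n").filter(Boolean);
  const hasFull = changed.some((f) => FULL_PREFIXES.some((p) => f.startsWith(p)));
  const hasHotpatch = changed.some((f) => HOTPATCH_PREFIXES.some((p) => f.startsWith(p)));
  if (hasFull) return "full";        // any build/runtime-impacting change forces a rebuild
  if (hasHotpatch) return "hotpatch"; // annotations/media only -> patch the running containers
  return "skip";
}
```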
||||
|
||||
### Preuves
|
||||
- Test A (touch src/content) :
|
||||
- Gate flags: HAS_FULL=1 HAS_HOTPATCH=0 → MODE=full
|
||||
- Test B (touch src/annotations) :
|
||||
- Gate flags: HAS_FULL=0 HAS_HOTPATCH=1 → MODE=hotpatch
|
||||
|
||||
### Audit post-déploiement (preuves côté NAS)
|
||||
- 8081 + 8082 répondent HTTP 200
|
||||
- `/para-index.json` + `/annotations-index.json` OK
|
||||
- Aliases injectés visibles dans HTML via `.para-alias` quand alias présent
|
||||
|
||||
|
||||
683
docs/START-HERE.md
Normal file
@@ -0,0 +1,683 @@
|
||||
# START-HERE — Archicratie / Édition Web (v3)
|
||||
> Onboarding + exploitation “nickel chrome” (DEV → Gitea → CI → Release → Blue/Green → Edge/SSO → localhost auto-sync)
|
||||
|
||||
## 0) TL;DR (la règle d’or)
|
||||
|
||||
- **Gitea = source canonique**.
|
||||
- **`main` est protégée** : toute modification passe par **branche → PR → CI → merge**.
|
||||
- **Le NAS n’est pas la source** : si un hotfix est fait sur NAS, il doit être **backporté immédiatement** via PR.
|
||||
- **Le site est statique Astro** : la prod sert du HTML via nginx ; l’accès est contrôlé au niveau reverse-proxy (Traefik + Authelia).
|
||||
- **Le localhost automatique n’est pas le repo de dev** : il tourne depuis un **worktree dédié**, synchronisé sur `origin/main`.
|
||||
|
||||
---
|
||||
|
||||
## 1) Architecture mentale (ultra simple)
|
||||
|
||||
- **DEV canonique (Mac Studio)** : édition, dev, tests, commits, pushes
|
||||
- **Gitea** : dépôt canonique, PR, CI, workflows éditoriaux
|
||||
- **NAS (DS220+)** : déploiement blue/green
|
||||
- `web_blue` → staging upstream → `127.0.0.1:8081`
|
||||
- `web_green` → live upstream → `127.0.0.1:8082`
|
||||
- **Edge (Traefik)** : routage des hosts
|
||||
- `staging.archicratie...` → 8081
|
||||
- `archicratie...` → 8082
|
||||
- **Authelia** devant, via middleware `chain-auth@file`
|
||||
- **Localhost auto-sync**
|
||||
- un **repo canonique de développement**
|
||||
- un **worktree localhost miroir de `origin/main`**
|
||||
- un **agent de sync**
|
||||
- un **agent Astro**
|
||||
|
||||
---
|
||||
|
||||
## 2) Répertoires & conventions (repo)
|
||||
|
||||
### 2.1 Contenu canon (édition)
|
||||
|
||||
- `src/content/**` : contenu MD / MDX canon
|
||||
- `src/pages/**` : routes Astro
|
||||
- `src/components/**` : composants UI
|
||||
- `src/layouts/**` : layouts
|
||||
- `src/styles/**` : CSS global
|
||||
|
||||
### 2.2 Annotations (pré-Édition “tickets”)
|
||||
|
||||
- `src/annotations/<workKey>/<slug>.yml`
|
||||
- Exemple :
|
||||
`src/annotations/archicrat-ia/prologue.yml`
|
||||
|
||||
Objectif :
|
||||
stocker “Références / Médias / Commentaires” par page et par paragraphe (`p-...`).
|
||||
|
||||
### 2.3 Scripts (tooling / build)
|
||||
|
||||
- `scripts/inject-anchor-aliases.mjs` : injection aliases dans `dist`
|
||||
- `scripts/dedupe-ids-dist.mjs` : retrait IDs dupliqués
|
||||
- `scripts/build-para-index.mjs` : index paragraphes
|
||||
- `scripts/build-annotations-index.mjs` : index annotations
|
||||
- `scripts/check-anchors.mjs` : contrat stabilité d’ancres
|
||||
- `scripts/check-annotations*.mjs` : sanity YAML + médias
|
||||
|
||||
> Important : ces scripts ne sont pas accessoires.
|
||||
> Ils font partie du contrat de stabilité éditoriale.
|
||||
|
||||
---
|
||||
|
||||
## 3) Les trois espaces à ne jamais confondre
|
||||
|
||||
### 3.1 Repo canonique de développement
|
||||
|
||||
```text
|
||||
/Volumes/FunIA/dev/archicratie-edition/site
|
||||
```
|
||||
|
||||
Usage :
|
||||
|
||||
- développement normal
|
||||
- branches de travail
|
||||
- nouvelles fonctionnalités
|
||||
- corrections manuelles
|
||||
- commits
|
||||
- pushes
|
||||
- PR
|
||||
|
||||
### 3.2 Worktree localhost miroir de `main`
|
||||
|
||||
```text
|
||||
/Users/s-funia/ops-local/archicratie/localhost-worktree
|
||||
```
|
||||
|
||||
Branche attendue :
|
||||
|
||||
```text
|
||||
localhost-sync
|
||||
```
|
||||
|
||||
Usage :
|
||||
|
||||
- exécuter le localhost automatique
|
||||
- refléter `origin/main`
|
||||
- ne jamais servir d’espace de développement
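À titre indicatif, une création possible de ce worktree depuis le repo canonique (esquisse ; chemins et nom de branche repris des sections 3.1/3.2) :

```bash
# Créer (ou recréer) le worktree localhost aligné sur origin/main
cd /Volumes/FunIA/dev/archicratie-edition/site
git fetch origin
git worktree add -B localhost-sync \
  /Users/s-funia/ops-local/archicratie/localhost-worktree origin/main
```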
|
||||
|
||||
### 3.3 Ops local hors repo
|
||||
|
||||
```text
|
||||
/Users/s-funia/ops-local/archicratie
|
||||
```
|
||||
|
||||
Usage :
|
||||
|
||||
- scripts d’exploitation
|
||||
- état
|
||||
- logs
|
||||
- automatisation `launchd`
|
||||
|
||||
---
|
||||
|
||||
## 4) Pourquoi cette séparation existe
|
||||
|
||||
Il ne faut pas utiliser le repo canonique de développement comme serveur localhost permanent.
|
||||
|
||||
Sinon on mélange :
|
||||
|
||||
- travail en cours
|
||||
- commits non poussés
|
||||
- essais temporaires
|
||||
- état réellement publié sur `main`
|
||||
|
||||
Le résultat devient ambigu.
|
||||
|
||||
La séparation retenue est donc :
|
||||
|
||||
- **repo canonique** = espace de développement
|
||||
- **worktree localhost** = miroir exécutable de `origin/main`
|
||||
- **ops local** = scripts et automatisation
|
||||
|
||||
C’est cette séparation qui rend le système lisible, robuste et opérable.
|
||||
|
||||
---
|
||||
|
||||
## 5) Workflow Git “pro” (main protégée)
|
||||
|
||||
### 5.1 Cycle standard (toute modif)
|
||||
|
||||
```bash
|
||||
git checkout main
|
||||
git pull --ff-only
|
||||
|
||||
BR="chore/xxx-$(date +%Y%m%d)"
|
||||
git checkout -b "$BR"
|
||||
|
||||
# dev…
|
||||
npm i
|
||||
npm run build
|
||||
npm run test:anchors
|
||||
|
||||
git add -A
|
||||
git commit -m "xxx: description claire"
|
||||
git push -u origin "$BR"
|
||||
```
|
||||
|
||||
### 5.2 PR vers `main`
|
||||
|
||||
- ouvrir une PR dans Gitea
|
||||
- attendre une CI verte
|
||||
- merger
|
||||
- laisser les workflows faire le reste
|
||||
|
||||
### 5.3 Cas spécial : hotfix prod (NAS)
|
||||
|
||||
On peut faire un hotfix d’urgence côté NAS si nécessaire.
|
||||
|
||||
Mais l’état final doit toujours revenir dans Gitea :
|
||||
|
||||
- branche
|
||||
- PR
|
||||
- CI
|
||||
- merge
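Exemple de retour au canon après un hotfix NAS (esquisse ; le nom de branche est illustratif) :

```bash
# Backporter immédiatement l'état du hotfix dans Gitea
git checkout main && git pull --ff-only
git checkout -b "hotfix/backport-nas-$(date +%Y%m%d)"
# reporter ici les fichiers modifiés côté NAS, puis :
git add -A
git commit -m "hotfix: backport NAS"
git push -u origin HEAD
# ensuite : PR → CI verte → merge
```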
|
||||
|
||||
---
|
||||
|
||||
## 6) Déploiement (NAS) — principe
|
||||
|
||||
### 6.1 Release pack
|
||||
|
||||
On génère un pack reproductible, puis on déploie.
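À titre purement indicatif (le script réel `release-pack.sh` fait foi), le principe revient à valider le build puis à empaqueter les sources nécessaires :

```bash
# Esquisse hypothétique d'un pack reproductible (ne remplace pas release-pack.sh)
npm ci
npm run build
npm run test:anchors
tar -czf archicratie-web.tar.gz \
  --exclude=node_modules --exclude=.git --exclude=dist .
```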
|
||||
|
||||
### 6.2 Blue/Green
|
||||
|
||||
- `web_blue` = staging (`8081`)
|
||||
- `web_green` = live (`8082`)
|
||||
|
||||
Le reverse-proxy choisit l’upstream selon le host demandé.
|
||||
|
||||
---
|
||||
|
||||
## 7) Happy path complet
|
||||
|
||||
### 7.1 DEV (Mac)
|
||||
|
||||
```bash
|
||||
git checkout main && git pull --ff-only
|
||||
git checkout -b chore/my-change-$(date +%Y%m%d)
|
||||
|
||||
npm i
|
||||
rm -rf .astro node_modules/.vite dist
|
||||
npm run build
|
||||
npm run test:anchors
|
||||
npm run dev
|
||||
```
|
||||
|
||||
### 7.2 Push + PR
|
||||
|
||||
```bash
|
||||
git add -A
|
||||
git commit -m "chore: my change"
|
||||
git push -u origin chore/my-change-YYYYMMDD
|
||||
```
|
||||
|
||||
Puis ouvrir la PR dans Gitea.
|
||||
|
||||
### 7.3 Déploiement NAS
|
||||
|
||||
Voir :
|
||||
|
||||
```text
|
||||
docs/runbooks/DEPLOY-BLUE-GREEN.md
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 8) Localhost auto-sync — ce qu’il faut retenir
|
||||
|
||||
Le localhost automatique sert à voir **la vérité de `main`**, pas à développer du neuf.
|
||||
|
||||
### 8.1 Scripts principaux
|
||||
|
||||
#### Script de sync
|
||||
|
||||
```text
|
||||
~/ops-local/archicratie/auto-sync-localhost.sh
|
||||
```
|
||||
|
||||
Rôle :
|
||||
|
||||
- fetch `origin/main`
|
||||
- réaligner le worktree localhost
|
||||
- lancer `npm ci` si besoin
|
||||
- redéclencher l’agent Astro si nécessaire
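Le script réel fait foi ; dans l’esprit, son cœur ressemble à ceci (esquisse, chemins et noms repris des sections précédentes) :

```bash
# Réalignement du worktree sur origin/main (le vrai script ajoute logs et garde-fous)
cd /Users/s-funia/ops-local/archicratie/localhost-worktree
git fetch origin
git checkout -B localhost-sync origin/main
npm ci   # si package-lock.json a changé
launchctl kickstart -k "gui/$(id -u)/me.archicratie.localhost-astro"
```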
|
||||
|
||||
#### Script Astro
|
||||
|
||||
```text
|
||||
~/ops-local/archicratie/run-astro-localhost.sh
|
||||
```
|
||||
|
||||
Rôle :
|
||||
|
||||
- lancer `astro dev`
|
||||
- depuis le bon worktree
|
||||
- avec le bon runtime Node
|
||||
- sur `127.0.0.1:4321`
|
||||
|
||||
> Oui : ce script est nécessaire.
|
||||
> Il isole proprement le lancement du serveur Astro dans un contexte `launchd` stable.
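Son contenu, dans l’esprit (esquisse ; le chemin Homebrew de Node 22 est une hypothèse) :

```bash
# Lancer astro dev depuis le worktree, avec le bon Node, dans un contexte launchd
export PATH="/opt/homebrew/opt/node@22/bin:$PATH"   # hypothèse : Node 22 via Homebrew
cd /Users/s-funia/ops-local/archicratie/localhost-worktree
exec npx astro dev --host 127.0.0.1 --port 4321
```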
|
||||
|
||||
### 8.2 LaunchAgents
|
||||
|
||||
#### Agent sync
|
||||
|
||||
```text
|
||||
~/Library/LaunchAgents/me.archicratie.localhost-sync.plist
|
||||
```
|
||||
|
||||
#### Agent Astro
|
||||
|
||||
```text
|
||||
~/Library/LaunchAgents/me.archicratie.localhost-astro.plist
|
||||
```
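Pour (re)charger ces agents après modification des plist (esquisse) :

```bash
# Recharger proprement les deux agents dans la session GUI courante
for L in me.archicratie.localhost-sync me.archicratie.localhost-astro; do
  launchctl bootout "gui/$(id -u)/$L" 2>/dev/null || true
  launchctl bootstrap "gui/$(id -u)" "$HOME/Library/LaunchAgents/$L.plist"
done
```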
|
||||
|
||||
### 8.3 Document de référence
|
||||
|
||||
Pour tout le détail d’exploitation du localhost automatique, lire :
|
||||
|
||||
```text
|
||||
docs/OPS-LOCALHOST-AUTO-SYNC.md
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 9) Règle d’or : il y a deux usages locaux distincts
|
||||
|
||||
### 9.1 Voir ce qui est réellement sur `main`
|
||||
|
||||
Utiliser :
|
||||
|
||||
```text
|
||||
http://127.0.0.1:4321
|
||||
```
|
||||
|
||||
Ce localhost doit être considéré comme :
|
||||
|
||||
**un miroir local exécutable de `origin/main`**
|
||||
|
||||
### 9.2 Développer / tester une nouvelle fonctionnalité
|
||||
|
||||
Utiliser le repo canonique :
|
||||
|
||||
```bash
|
||||
cd /Volumes/FunIA/dev/archicratie-edition/site
|
||||
npm run dev
|
||||
```
|
||||
|
||||
Donc :
|
||||
|
||||
- **localhost auto-sync** = vérité de `main`
|
||||
- **localhost de dev manuel** = expérimentation en cours
|
||||
|
||||
Il ne faut pas les confondre.
|
||||
|
||||
---
|
||||
|
||||
## 10) Ce qu’il ne faut pas faire
|
||||
|
||||
### 10.1 Ne pas développer dans le worktree localhost
|
||||
|
||||
Le worktree localhost est piloté automatiquement.
|
||||
|
||||
Il peut être :
|
||||
|
||||
- réaligné
|
||||
- nettoyé
|
||||
- redémarré
|
||||
|
||||
Donc :
|
||||
|
||||
- pas de commits dedans
|
||||
- pas de dev feature dedans
|
||||
- pas d’expérimentation de fond dedans
|
||||
|
||||
### 10.2 Ne pas utiliser le repo canonique comme miroir auto-sync
|
||||
|
||||
Sinon on mélange :
|
||||
|
||||
- espace de dev
|
||||
- état publié
|
||||
- serveur local permanent
|
||||
|
||||
### 10.3 Ne pas remettre les scripts ops sur un volume externe
|
||||
|
||||
Les scripts d’ops doivent rester sous `HOME`.
|
||||
|
||||
Le fait de les mettre sous `/Volumes/...` a déjà provoqué des erreurs du type :
|
||||
|
||||
```text
|
||||
Operation not permitted
|
||||
```
|
||||
|
||||
### 10.4 Ne pas supprimer `run-astro-localhost.sh`
|
||||
|
||||
Ce script fait partie de l’architecture actuelle.
|
||||
Le supprimer reviendrait à réintroduire le flou entre sync Git et exécution d’Astro.
|
||||
|
||||
---
|
||||
|
||||
## 11) Commandes de contrôle essentielles
|
||||
|
||||
### 11.1 État global
|
||||
|
||||
```bash
|
||||
~/ops-local/archicratie/doctor-localhost.sh
|
||||
```
|
||||
|
||||
### 11.2 État Git
|
||||
|
||||
```bash
|
||||
git -C ~/ops-local/archicratie/localhost-worktree rev-parse HEAD
|
||||
git -C /Volumes/FunIA/dev/archicratie-edition/site ls-remote origin refs/heads/main
|
||||
git -C ~/ops-local/archicratie/localhost-worktree branch --show-current
|
||||
```
|
||||
|
||||
### 11.3 État LaunchAgents
|
||||
|
||||
```bash
|
||||
launchctl print "gui/$(id -u)/me.archicratie.localhost-sync" | sed -n '1,160p'
|
||||
launchctl print "gui/$(id -u)/me.archicratie.localhost-astro" | sed -n '1,160p'
|
||||
```
|
||||
|
||||
### 11.4 État logs
|
||||
|
||||
```bash
|
||||
tail -n 120 ~/ops-local/archicratie/logs/auto-sync-localhost.log
|
||||
tail -n 120 ~/ops-local/archicratie/logs/astro-localhost.log
|
||||
tail -n 80 ~/Library/Logs/archicratie-localhost-sync.err.log
|
||||
tail -n 80 ~/Library/Logs/archicratie-localhost-astro.err.log
|
||||
```
|
||||
|
||||
### 11.5 État serveur
|
||||
|
||||
```bash
|
||||
lsof -nP -iTCP:4321 -sTCP:LISTEN
|
||||
PID="$(lsof -tiTCP:4321 -sTCP:LISTEN | head -n 1)"
|
||||
ps -p "$PID" -o pid=,command=
|
||||
lsof -a -p "$PID" -d cwd
|
||||
```
|
||||
|
||||
### 11.6 Vérification contenu
|
||||
|
||||
```bash
|
||||
curl -s http://127.0.0.1:4321/archicrat-ia/prologue/ | grep -n "taxe Zucman"
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 12) Problèmes classiques + diagnostic
|
||||
|
||||
### 12.1 “Le staging ne ressemble pas au local”
|
||||
|
||||
Comparer les upstream directs :
|
||||
|
||||
```bash
|
||||
curl -sS http://127.0.0.1:8081/ | head -n 2
|
||||
curl -sS http://127.0.0.1:8082/ | head -n 2
|
||||
```
|
||||
|
||||
Vérifier le routeur edge :
|
||||
|
||||
```bash
|
||||
curl -sSI -H 'Host: staging.archicratie.trans-hands.synology.me' http://127.0.0.1:18080/ \
|
||||
| grep -iE 'HTTP/|location:|x-archi-router'
|
||||
```
|
||||
|
||||
Voir :
|
||||
|
||||
```text
|
||||
docs/runbooks/EDGE-TRAEFIK.md
|
||||
```
|
||||
|
||||
### 12.2 Canonical incorrect
|
||||
|
||||
Cause probable : `PUBLIC_SITE` mal injecté au build.
|
||||
|
||||
Test :
|
||||
|
||||
```bash
|
||||
curl -sS http://127.0.0.1:8082/ | grep -oE 'rel="canonical" href="[^"]+"' | head -1
|
||||
```
|
||||
|
||||
Voir :
|
||||
|
||||
```text
|
||||
docs/runbooks/ENV-PUBLIC_SITE.md
|
||||
```
|
||||
|
||||
### 12.3 Contrat anchors en échec après migration d’URL
|
||||
|
||||
Procédure safe :
|
||||
|
||||
```bash
|
||||
cp -a tests/anchors-baseline.json /tmp/anchors-baseline.json.bak.$(date +%F-%H%M%S)
|
||||
|
||||
node --input-type=module - <<'NODE'
|
||||
import fs from 'fs';
|
||||
const p='tests/anchors-baseline.json';
|
||||
const j=JSON.parse(fs.readFileSync(p,'utf8'));
|
||||
const out={};
|
||||
for (const [k,v] of Object.entries(j)) {
|
||||
const nk = k.replace(/^archicratie\/archicrat-ia\//, 'archicrat-ia/');
|
||||
out[nk]=v;
|
||||
}
|
||||
fs.writeFileSync(p, JSON.stringify(out,null,2)+'\n');
|
||||
console.log('updated keys:', Object.keys(j).length, '->', Object.keys(out).length);
|
||||
NODE
|
||||
|
||||
npm run test:anchors
|
||||
```
|
||||
|
||||
### 12.4 “Le localhost auto-sync ne montre pas les dernières modifs”
|
||||
|
||||
Commande réflexe :
|
||||
|
||||
```bash
|
||||
~/ops-local/archicratie/doctor-localhost.sh
|
||||
```
|
||||
|
||||
Puis :
|
||||
|
||||
```bash
|
||||
git -C ~/ops-local/archicratie/localhost-worktree rev-parse HEAD
|
||||
git -C /Volumes/FunIA/dev/archicratie-edition/site ls-remote origin refs/heads/main
|
||||
```
|
||||
|
||||
Si les SHA diffèrent :
|
||||
- le sync n’a pas tourné
|
||||
- ou l’agent sync a un problème
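Dans les deux cas, réflexe possible (esquisse) : forcer un sync manuel, puis relancer l’agent sync.

```bash
# Sync manuel immédiat, puis redémarrage de l'agent launchd
~/ops-local/archicratie/auto-sync-localhost.sh
launchctl kickstart -k "gui/$(id -u)/me.archicratie.localhost-sync"
```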
|
||||
|
||||
### 12.5 “Le SHA est bon mais le contenu web est faux”
|
||||
|
||||
Vérifier quel Astro écoute réellement :
|
||||
|
||||
```bash
|
||||
lsof -nP -iTCP:4321 -sTCP:LISTEN
|
||||
PID="$(lsof -tiTCP:4321 -sTCP:LISTEN | head -n 1)"
|
||||
ps -p "$PID" -o pid=,command=
|
||||
lsof -a -p "$PID" -d cwd
|
||||
```
|
||||
|
||||
Attendu :
|
||||
- commande contenant `astro dev`
|
||||
- cwd = `~/ops-local/archicratie/localhost-worktree`
|
||||
|
||||
### 12.6 Erreur `EBADENGINE`
|
||||
|
||||
Cause probable :
|
||||
- Node 23 utilisé au lieu de Node 22
|
||||
|
||||
Résolution :
|
||||
- forcer `node@22` dans les scripts et les LaunchAgents
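Exemple de verrouillage du runtime dans un script (esquisse ; le chemin Homebrew est une hypothèse) :

```bash
# Forcer Node 22 en tête de PATH pour les scripts lancés par launchd
export PATH="/opt/homebrew/opt/node@22/bin:$PATH"
node -v   # attendu : v22.x
```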
|
||||
|
||||
### 12.7 Erreur `Operation not permitted`
|
||||
|
||||
Cause probable :
|
||||
- scripts d’ops placés sous `/Volumes/...`
|
||||
|
||||
Résolution :
|
||||
- garder les scripts sous :
|
||||
|
||||
```text
|
||||
~/ops-local/archicratie
|
||||
```
|
||||
|
||||
### 12.8 Erreur `EPERM` sur `astro.mjs`
|
||||
|
||||
Cause probable :
|
||||
- ancien worktree sur volume externe
|
||||
- ancien chemin résiduel
|
||||
- Astro lancé depuis un mauvais emplacement
|
||||
|
||||
Résolution :
|
||||
- worktree localhost sous :
|
||||
|
||||
```text
|
||||
~/ops-local/archicratie/localhost-worktree
|
||||
```
|
||||
|
||||
- scripts cohérents avec ce chemin
|
||||
- réinstallation propre via :
|
||||
|
||||
```bash
|
||||
~/ops-local/archicratie/install-localhost-sync.sh
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 13) Redémarrage machine
|
||||
|
||||
Après reboot, le comportement attendu est :
|
||||
|
||||
1. le LaunchAgent sync se recharge
|
||||
2. le LaunchAgent Astro se recharge
|
||||
3. le worktree localhost est réaligné
|
||||
4. Astro redémarre sur `127.0.0.1:4321`
|
||||
|
||||
### Vérification rapide après reboot
|
||||
|
||||
```bash
|
||||
~/ops-local/archicratie/doctor-localhost.sh
|
||||
```
|
||||
|
||||
Si nécessaire :
|
||||
|
||||
```bash
|
||||
~/ops-local/archicratie/install-localhost-sync.sh
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 14) Procédure de secours manuelle
|
||||
|
||||
### Forcer un sync
|
||||
|
||||
```bash
|
||||
~/ops-local/archicratie/auto-sync-localhost.sh
|
||||
```
|
||||
|
||||
### Réinstaller proprement le dispositif local
|
||||
|
||||
```bash
|
||||
~/ops-local/archicratie/install-localhost-sync.sh
|
||||
```
|
||||
|
||||
### Diagnostic complet
|
||||
|
||||
```bash
|
||||
~/ops-local/archicratie/doctor-localhost.sh
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 15) Décision d’exploitation finale
|
||||
|
||||
La politique retenue est la suivante :
|
||||
|
||||
- **repo canonique** = espace de développement
|
||||
- **worktree localhost** = miroir automatique de `main`
|
||||
- **ops sous HOME** = scripts, logs, automation
|
||||
- **LaunchAgent sync** = réalignement Git
|
||||
- **LaunchAgent Astro** = exécution stable du serveur local
|
||||
- **Astro local** = lancé uniquement depuis le worktree localhost
|
||||
|
||||
Cette séparation rend le dispositif plus :
|
||||
|
||||
- lisible
|
||||
- robuste
|
||||
- opérable
|
||||
- antifragile
|
||||
|
||||
---
|
||||
|
||||
## 16) Résumé opératoire
|
||||
|
||||
### Pour voir la vérité de `main`
|
||||
|
||||
Ouvrir :
|
||||
|
||||
```text
|
||||
http://127.0.0.1:4321
|
||||
```
|
||||
|
||||
Le serveur doit provenir de :
|
||||
|
||||
```text
|
||||
/Users/s-funia/ops-local/archicratie/localhost-worktree
|
||||
```
|
||||
|
||||
### Pour développer
|
||||
|
||||
Travailler dans :
|
||||
|
||||
```text
|
||||
/Volumes/FunIA/dev/archicratie-edition/site
|
||||
```
|
||||
|
||||
avec les commandes habituelles.
|
||||
|
||||
### Pour réparer vite
|
||||
|
||||
```bash
|
||||
~/ops-local/archicratie/doctor-localhost.sh
|
||||
~/ops-local/archicratie/auto-sync-localhost.sh
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 17) Mémoire courte
|
||||
|
||||
Si un jour plus rien n’est clair, repartir de ces commandes :
|
||||
|
||||
```bash
|
||||
~/ops-local/archicratie/doctor-localhost.sh
|
||||
git -C ~/ops-local/archicratie/localhost-worktree rev-parse HEAD
|
||||
git -C /Volumes/FunIA/dev/archicratie-edition/site ls-remote origin refs/heads/main
|
||||
lsof -nP -iTCP:4321 -sTCP:LISTEN
|
||||
```
|
||||
|
||||
Puis lire :
|
||||
|
||||
```bash
|
||||
tail -n 120 ~/ops-local/archicratie/logs/auto-sync-localhost.log
|
||||
tail -n 120 ~/ops-local/archicratie/logs/astro-localhost.log
|
||||
```
|
||||
|
||||
---
|
||||
|
||||
## 18) Statut actuel visé
|
||||
|
||||
Quand tout fonctionne correctement :
|
||||
|
||||
- le worktree localhost pointe sur le même SHA que `origin/main`
|
||||
- `astro dev` écoute sur `127.0.0.1:4321`
|
||||
- son cwd est `~/ops-local/archicratie/localhost-worktree`
|
||||
- le contenu servi correspond au contenu mergé sur `main`
|
||||
|
||||
C’est l’état de référence à préserver.
|
||||
|
After Width: | Height: | Size: 221 KiB |
|
After Width: | Height: | Size: 187 KiB |
BIN
docs/diagrams/out/archicratie-web-edition-git-ci-workflow-v1.png
Normal file
|
After Width: | Height: | Size: 395 KiB |
BIN
docs/diagrams/out/archicratie-web-edition-global-verbatim-v2.png
Normal file
|
After Width: | Height: | Size: 284 KiB |
|
After Width: | Height: | Size: 304 KiB |
|
After Width: | Height: | Size: 360 KiB |
100
docs/graph.dot
Normal file
@@ -0,0 +1,100 @@
|
||||
digraph G {
|
||||
rankdir="LR";
|
||||
node [shape=box, style="rounded"];
|
||||
"agencement-machinique" -> "cratialite";
|
||||
"arcalite" -> "cratialite";
|
||||
"archeogenese" -> "meta-regime-archicratique";
|
||||
"archicratie" -> "arcalite";
|
||||
"archicration-obliteree" -> "hypercratialite";
|
||||
"archicration" -> "scene-depreuve";
|
||||
"archicrations-differentielles-et-formes-hybrides" -> "co-viabilisation";
|
||||
"archicrations-epistemiques" -> "archicrations-differentielles-et-formes-hybrides";
|
||||
"archicrations-esthetico-symboliques" -> "archicrations-historiographiques";
|
||||
"archicrations-guerrieres" -> "archicrations-normativo-politiques";
|
||||
"archicrations-historiographiques" -> "archicrations-epistemiques";
|
||||
"archicrations-marchandes" -> "archicrations-techno-logistiques";
|
||||
"archicrations-normativo-politiques" -> "archicrations-marchandes";
|
||||
"archicrations-proto-symboliques" -> "archicrations-sacrales-non-etatiques";
|
||||
"archicrations-sacrales-non-etatiques" -> "archicrations-scripturo-cosmologiques";
|
||||
"archicrations-scripturo-cosmologiques" -> "archicrations-scripturo-normatives";
|
||||
"archicrations-scripturo-normatives" -> "archicrations-normativo-politiques";
|
||||
"archicrations-techno-logistiques" -> "archicrations-differentielles-et-formes-hybrides";
|
||||
"archicrations-theologiques" -> "archicrations-normativo-politiques";
|
||||
"archicratisation" -> "co-viabilisation";
|
||||
"archidiagnostic" -> "cartographie-des-scenes-manquantes";
|
||||
"audit-archicratique" -> "cartographie-des-scenes-manquantes";
|
||||
"autarchicratie" -> "autarchicration";
|
||||
"autarchicration" -> "obliteration-archicratique";
|
||||
"biopolitique" -> "gouvernementalite-algorithmique";
|
||||
"budget-scenique" -> "visa-daffectation";
|
||||
"cartographie-des-scenes-manquantes" -> "scene-manquante";
|
||||
"co-viabilisation" -> "regime-de-co-viabilite";
|
||||
"co-viabilite" -> "tension";
|
||||
"coexistence-ontologique-et-necessite-regulatrice" -> "formes-de-vie-et-cadres-dhabitabilite";
|
||||
"conatus-et-multitude" -> "configuration-et-interdependance";
|
||||
"configuration-et-interdependance" -> "transduction-et-individuation";
|
||||
"contractualisme-hobbesien" -> "droit-naturel-et-propriete";
|
||||
"cosmopolitique" -> "technodiversite-et-cosmotechnie";
|
||||
"coupe-circuit-citoyen" -> "droit-au-differe-contradictoire";
|
||||
"cratialite" -> "archicration";
|
||||
"cybernetique" -> "gouvernementalite-algorithmique";
|
||||
"decisionnisme-souverain" -> "exception-souveraine";
|
||||
"democratie-deliberative" -> "dissensus-politique";
|
||||
"desarchicration" -> "desarchicratisation";
|
||||
"desarchicratisation" -> "autarchicratie";
|
||||
"dissensus-politique" -> "lieu-vide-du-pouvoir";
|
||||
"domination-legale-rationnelle" -> "democratie-deliberative";
|
||||
"droit-au-differe-contradictoire" -> "tribunal-de-lalgorithme";
|
||||
"droit-naturel-et-propriete" -> "volonte-generale";
|
||||
"egalisation-normative-et-differenciation-singuliere" -> "dissensus-politique";
|
||||
"exception-souveraine" -> "droit-naturel-et-propriete";
|
||||
"fait-social-total" -> "configuration-et-interdependance";
|
||||
"formes-de-vie-et-cadres-dhabitabilite" -> "subsistance-vivante-et-captation-capitalistique";
|
||||
"gouvernance-des-communs" -> "co-viabilite";
|
||||
"gouvernementalite-algorithmique" -> "preemption-algorithmique";
|
||||
"gouvernementalite" -> "biopolitique";
|
||||
"grammatisation-et-proletarisation-cognitive" -> "pharmacologie-technique";
|
||||
"habitus-et-violence-symbolique" -> "obliteration-archicratique";
|
||||
"hyperarcalite" -> "desarchicration";
|
||||
"hypercratialite" -> "hyperarcalite";
|
||||
"inertie-sociale-symbolique" -> "habitus-et-violence-symbolique";
|
||||
"institution-invisible" -> "scene-depreuve";
|
||||
"journal-de-justification" -> "droit-au-differe-contradictoire";
|
||||
"liberte-daction-et-regimes-de-securite-algorithmique" -> "preemption-algorithmique";
|
||||
"lieu-vide-du-pouvoir" -> "visibilite-mediatique-et-reconnaissance-symbolique";
|
||||
"memoire-symbolique-et-instantaneite-computationnelle" -> "meta-regime";
|
||||
"meta-regime-archicratique" -> "archicrations-differentielles-et-formes-hybrides";
|
||||
"meta-regime" -> "meta-regime-archicratique";
|
||||
"monde-instituable" -> "scene-manquante";
|
||||
"obliteration-archicratique" -> "archicration-obliteree";
|
||||
"pensee-complexe" -> "configuration-et-interdependance";
|
||||
"pharmacologie-technique" -> "technodiversite-et-cosmotechnie";
|
||||
"pluralite-natalite-action" -> "dissensus-politique";
|
||||
"preemption-algorithmique" -> "droit-au-differe-contradictoire";
|
||||
"regime-de-co-viabilite" -> "gouvernance-des-communs";
|
||||
"regulation-morphogenetique-des-interdependances" -> "transduction-et-individuation";
|
||||
"regulation-technique-et-legitimation-democratique" -> "tribunal-de-lalgorithme";
|
||||
"regulations-fondatrices" -> "regulations-incorporees";
|
||||
"regulations-incorporees" -> "regulations-procedurales";
|
||||
"regulations-procedurales" -> "regulations-techniques";
|
||||
"regulations-relationnelles" -> "regime-de-co-viabilite";
|
||||
"regulations-techniques" -> "regulations-relationnelles";
|
||||
"resonance-sociale" -> "visibilite-mediatique-et-reconnaissance-symbolique";
|
||||
"scene-darchicration" -> "co-viabilite";
|
||||
"scene-depreuve" -> "scene-darchicration";
|
||||
"scene-empechee" -> "institution-invisible";
|
||||
"scene-manquante" -> "scene-empechee";
|
||||
"souverainetes-territoriales-et-interdependances-globales" -> "gouvernance-des-communs";
|
||||
"subsistance-vivante-et-captation-capitalistique" -> "travail-vivant-et-abstraction-de-la-valeur";
|
||||
"technodiversite-et-cosmotechnie" -> "regime-de-co-viabilite";
|
||||
"tension" -> "regime-de-co-viabilite";
|
||||
"theorie-de-la-justification" -> "journal-de-justification";
|
||||
"theorie-de-la-resonance" -> "resonance-sociale";
|
||||
"theorie-de-lacteur-reseau" -> "agencement-machinique";
|
||||
"transduction-et-individuation" -> "archeogenese";
|
||||
"travail-vivant-et-abstraction-de-la-valeur" -> "grammatisation-et-proletarisation-cognitive";
|
||||
"tribunal-de-lalgorithme" -> "budget-scenique";
|
||||
"visa-daffectation" -> "scene-depreuve";
|
||||
"visibilite-mediatique-et-reconnaissance-symbolique" -> "cartographie-des-scenes-manquantes";
|
||||
"volonte-generale" -> "democratie-deliberative";
|
||||
}
|
||||
BIN
docs/graph.png
Normal file
|
After Width: | Height: | Size: 359 KiB |
546
docs/runbooks/DEPLOY-BLUE-GREEN.md
Normal file
@@ -0,0 +1,546 @@
|
||||
# RUNBOOK — Déploiement Blue/Green (NAS DS220+)
|
||||
> Objectif : déployer une release **sans casser**, avec rollback immédiat.
|
||||
|
||||
## 0) Portée
|
||||
Ce runbook décrit le déploiement de l’édition web Archicratie sur NAS (Synology), en mode blue/green :
|
||||
- `web_blue` : upstream staging → `127.0.0.1:8081`
|
||||
- `web_green` : upstream live → `127.0.0.1:8082`
|
||||
- Edge Traefik publie :
|
||||
- `staging.archicratie.trans-hands.synology.me` → 8081
|
||||
- `archicratie.trans-hands.synology.me` → 8082
|
||||
|
||||
## 1) Pré-requis
|
||||
- Accès shell NAS (user `archicratia`) + `sudo`
|
||||
- Docker Compose Synology nécessite souvent :
|
||||
- `sudo env DOCKER_API_VERSION=1.43 docker compose ...`
|
||||
- Les fichiers edge Traefik sont dans :
|
||||
- `/volume2/docker/edge/config/dynamic/`
|
||||
|
||||
## 2) Répertoires canon (NAS)
|
||||
On considère ces chemins (adapter si besoin, mais rester cohérent) :
|
||||
- Base : `/volume2/docker/archicratie-web`
|
||||
- Releases : `/volume2/docker/archicratie-web/releases/YYYYMMDD-HHMMSS/app`
|
||||
- Symlink actif : `/volume2/docker/archicratie-web/current` → pointe vers le `.../app` actif
|
||||
|
||||
## 3) Garde-fous (AVANT toute action)
|
||||
### 3.1 Snapshot de l’état actuel
|
||||
en bash :
|
||||
|
||||
cd /volume2/docker/archicratie-web
|
||||
ls -la current || true
|
||||
readlink current || true
|
||||
|
||||
### 3.2 Vérifier l’état live/staging upstream direct
|
||||
|
||||
curl -sSI http://127.0.0.1:8081/ | head -n 12
|
||||
curl -sSI http://127.0.0.1:8082/ | head -n 12
|
||||
|
||||
### 3.3 Vérifier l’état edge (host routing)
|
||||
|
||||
curl -sSI -H 'Host: staging.archicratie.trans-hands.synology.me' http://127.0.0.1:18080/ \
|
||||
| grep -iE 'HTTP/|location:|x-archi-router' | head -n 30
|
||||
|
||||
curl -sSI -H 'Host: archicratie.trans-hands.synology.me' http://127.0.0.1:18080/ \
|
||||
| grep -iE 'HTTP/|location:|x-archi-router' | head -n 30
|
||||
|
||||
Si tu n’es pas authentifié, tu verras un 302 vers auth... : c’est normal.
|
||||
|
||||
## 4) Procédure de déploiement (release pack → nouvelle release)
|
||||
### 4.1 Déposer le pack
|
||||
|
||||
Hypothèse : tu as un .tgz “release pack” (issu de release-pack.sh) dans incoming/ :
|
||||
|
||||
cd /volume2/docker/archicratie-web
|
||||
ls -la incoming | tail -n 20
|
||||
|
||||
### 4.2 Créer un répertoire release
|
||||
|
||||
TS="$(date +%Y%m%d-%H%M%S)"
|
||||
REL="/volume2/docker/archicratie-web/releases/$TS"
|
||||
APP="$REL/app"
|
||||
sudo mkdir -p "$APP"
|
||||
|
||||
### 4.3 Extraire le pack
|
||||
|
||||
PKG="/volume2/docker/archicratie-web/incoming/archicratie-web.tar.gz" # adapter au nom réel
|
||||
sudo tar -xzf "$PKG" -C "$APP"
|
||||
|
||||
### 4.4 Sanity check (fichiers attendus)
|
||||
|
||||
sudo test -f "$APP/Dockerfile" && echo "OK Dockerfile"
|
||||
sudo test -f "$APP/docker-compose.yml" && echo "OK compose"
|
||||
sudo test -f "$APP/astro.config.mjs" && echo "OK astro config"
|
||||
sudo test -f "$APP/src/layouts/EditionLayout.astro" && echo "OK layout"
|
||||
sudo test -f "$APP/src/pages/archicrat-ia/index.astro" && echo "OK archicrat-ia index"
|
||||
sudo test -f "$APP/docs/diagrams/archicratie-web-edition-global-verbatim-v2.svg" && echo "OK diagrams"
|
||||
|
||||
### 4.5 Permissions (crucial sur Synology)
|
||||
|
||||
But : archicratia:users doit pouvoir traverser le parent + lire le contenu.
|
||||
|
||||
sudo chown -R archicratia:users "$REL"
|
||||
sudo chmod -R u+rwX,g+rX,o-rwx "$REL"
|
||||
sudo chmod 750 "$REL" "$APP"
|
||||
|
||||
Vérifier :
|
||||
|
||||
ls -ld "$REL" "$APP"
|
||||
ls -la "$APP" | head
|
||||
|
||||
## 5) Activation : basculer current vers la nouvelle release
|
||||
### 5.1 Backup du current existant
|
||||
|
||||
cd /volume2/docker/archicratie-web
|
||||
TS2="$(date +%F-%H%M%S)"
|
||||
|
||||
# on backup "current" (symlink ou dossier)
|
||||
if [ -e current ] || [ -L current ]; then
|
||||
sudo mv -f current "current.BAK.$TS2"
|
||||
echo "✅ backup: current.BAK.$TS2"
|
||||
fi
|
||||
|
||||
### 5.2 Recréer current (symlink propre)
|
||||
|
||||
sudo ln -s "$APP" current
|
||||
|
||||
ls -la current
|
||||
readlink current
|
||||
sudo test -f current/docker-compose.yml && echo "✅ OK: current/docker-compose.yml"
|
||||
|
||||
Si `cd current` échoue, c’est que `current` n’est pas un symlink correct OU que le parent n’est pas traversable (permissions).
|
||||
|
||||
## 6) Build & run : (re)construire web_blue/web_green
|
||||
### 6.1 Vérifier la config compose
|
||||
|
||||
cd /volume2/docker/archicratie-web/current
|
||||
sudo env DOCKER_API_VERSION=1.43 docker compose -f docker-compose.yml config \
|
||||
| grep -nE 'services:|web_blue:|web_green:|context:|dockerfile:|PUBLIC_SITE|REQUIRE_PUBLIC_SITE' \
|
||||
| sed -n '1,220p'
|
||||
|
||||
### 6.2 Build propre (recommandé si changement de code/config)
|
||||
|
||||
sudo env DOCKER_API_VERSION=1.43 docker compose build --no-cache web_blue web_green
|
||||
|
||||
### 6.3 Up (force recreate)
|
||||
|
||||
sudo env DOCKER_API_VERSION=1.43 docker compose up -d --force-recreate web_blue web_green
|
||||
|
||||
### 6.4 Vérifier upstream direct (8081/8082)
|
||||
|
||||
curl -sSI http://127.0.0.1:8081/ | head -n 12
|
||||
curl -sSI http://127.0.0.1:8082/ | head -n 12
|
||||
|
||||
## 7) Tests de non-régression (MINIMAL CHECKLIST)
|
||||
|
||||
À exécuter systématiquement après up.
|
||||
|
||||
### 7.1 Upstreams directs
|
||||
|
||||
curl -sSI http://127.0.0.1:8081/ | head -n 12
|
||||
curl -sSI http://127.0.0.1:8082/ | head -n 12
|
||||
|
||||
### 7.2 Canonical (anti “localhost en prod”)
|
||||
|
||||
curl -sS http://127.0.0.1:8081/ | grep -oE 'rel="canonical" href="[^"]+"' | head -n 1
|
||||
curl -sS http://127.0.0.1:8082/ | grep -oE 'rel="canonical" href="[^"]+"' | head -n 1
|
||||
|
||||
Attendu :
|
||||
|
||||
blue (8081) → https://staging.archicratie.../
|
||||
|
||||
green (8082) → https://archicratie.../
|
||||
|
||||
### 7.3 Edge routing (Host header + diag)
|
||||
|
||||
curl -sSI -H 'Host: staging.archicratie.trans-hands.synology.me' http://127.0.0.1:18080/ \
|
||||
| grep -iE 'HTTP/|location:|x-archi-router' | head -n 30
|
||||
|
||||
curl -sSI -H 'Host: staging.archicratie.trans-hands.synology.me' http://127.0.0.1:18080/_auth/whoami \
|
||||
| grep -iE 'HTTP/|location:|x-archi-router' | head -n 30
|
||||
|
||||
### 7.4 Smoke UI (manuel)
|
||||
|
||||
Home : lien “Essai-thèse — ArchiCraT-IA” → /archicrat-ia/
|
||||
|
||||
TOC global : liens /archicrat-ia/* (pas de préfixe /archicratie/archicrat-ia/*)
|
||||
|
||||
Reading-follow/TOC local : scroll ok
|
||||
|
||||
## 8) Rollback (si un seul test est mauvais)
|
||||
|
||||
Objectif : revenir immédiatement à l’état précédent.
|
||||
|
||||
### 8.1 Repointer current sur l’ancien backup
|
||||
|
||||
cd /volume2/docker/archicratie-web
|
||||
ls -la current.BAK.* | tail -n 5
|
||||
|
||||
# choisir le plus récent
|
||||
OLD="current.BAK.YYYY-MM-DD-HHMMSS"
|
||||
sudo rm -f current
|
||||
sudo ln -s "$(readlink -f "$OLD")" current 2>/dev/null || sudo ln -s "$(readlink "$OLD")" current
|
||||
|
||||
ls -la current
|
||||
readlink current
|
||||
|
||||
### 8.2 Rebuild + recreate
|
||||
|
||||
cd /volume2/docker/archicratie-web/current
|
||||
sudo env DOCKER_API_VERSION=1.43 docker compose build --no-cache web_blue web_green
|
||||
sudo env DOCKER_API_VERSION=1.43 docker compose up -d --force-recreate web_blue web_green
|
||||
|
||||
### 8.3 Re-tester la checklist (section 7)
|
||||
|
||||
Si rollback OK : investiguer en environnement isolé (staging upstream uniquement, ou release dans un autre current).
|
||||
|
||||
## 9) Notes opérationnelles
|
||||
|
||||
Ne jamais modifier dist/ “à la main” sur NAS.
|
||||
|
||||
Si un hotfix prod est indispensable : documenter et backporter via PR Gitea.
|
||||
|
||||
Le canonical dépend du build : PUBLIC_SITE doit être injecté (voir runbook ENV-PUBLIC_SITE).
|
||||
|
||||
## 10) CI Deploy (Gitea Actions) — Gate SKIP / HOTPATCH / FULL (merge-proof) + preuves
|
||||
|
||||
Cette section documente le comportement **canonique** du workflow :
|
||||
- `.gitea/workflows/deploy-staging-live.yml`
|
||||
|
||||
Objectif : **zéro surprise**.
|
||||
On ne veut plus “penser à force=1”.
|
||||
Le gate doit décider automatiquement, y compris sur des **merge commits**.
|
||||
|
||||
### 10.1 — Principe (ce que fait réellement le gate)
|
||||
|
||||
Le job `deploy` calcule les fichiers modifiés entre :
|
||||
- `BEFORE` = commit précédent (avant le push sur main)
|
||||
- `AFTER` = commit actuel (après le push / merge sur main)
|
||||
|
||||
Puis il classe le déploiement dans un mode :
|
||||
|
||||
- **MODE=full**
|
||||
- rebuild image + restart `archicratie-web-blue` (8081) + `archicratie-web-green` (8082)
|
||||
- warmup endpoints (para-index, annotations-index, pagefind.js)
|
||||
- vérification canonical staging + live
|
||||
|
||||
- **MODE=hotpatch**
|
||||
- rebuild d’un `annotations-index.json` consolidé depuis `src/annotations/**`
|
||||
- patch direct dans les conteneurs en cours d’exécution (blue+green)
|
||||
- copie des médias modifiés `public/media/**` vers `/usr/share/nginx/html/media/**`
|
||||
- smoke sur `/annotations-index.json` des deux ports
|
||||
|
||||
- **MODE=skip**
|
||||
- pas de déploiement (on évite le bruit)
|
||||
|
||||
⚠️ Important : le mode “hotpatch” **ne rebuild pas** Astro.
|
||||
Donc toute modification de contenu, routes, scripts, anchors, etc. doit déclencher **full**.
|
||||
|
||||
### 10.2 — Matrice de décision (règles officielles)
|
||||
|
||||
Le gate définit deux flags :
|
||||
- `HAS_FULL=1` si changement “build-impacting”
|
||||
- `HAS_HOTPATCH=1` si changement “annotations/media only”
|
||||
|
||||
Règle de priorité :
|
||||
1) Si `HAS_FULL=1` → **MODE=full**
|
||||
2) Sinon si `HAS_HOTPATCH=1` → **MODE=hotpatch**
|
||||
3) Sinon → **MODE=skip**
|
||||
|
||||
#### 10.2.1 — Changements qui déclenchent FULL (build-impacting)
|
||||
|
||||
Exemples typiques (non exhaustif, mais on couvre le cœur) :
|
||||
- `src/content/**` (contenu MD/MDX)
|
||||
- `src/pages/**` (routes Astro)
|
||||
- `src/anchors/**` (aliases d’ancres)
|
||||
- `scripts/**` (tooling postbuild : injection, index, tests)
|
||||
- `src/layouts/**`, `src/components/**`, `src/styles/**` (rendu et scripts inline)
|
||||
- `astro.config.mjs`, `package.json`, `package-lock.json`
|
||||
- `Dockerfile`, `docker-compose.yml`, `nginx.conf`
|
||||
- `.gitea/workflows/**` (changement infra CI/CD)
|
||||
|
||||
=> On veut **full** pour garantir cohérence et éviter “site partiellement mis à jour”.
|
||||
|
||||
#### 10.2.2 — Changements qui déclenchent HOTPATCH (sans rebuild)
|
||||
|
||||
Uniquement :
|
||||
- `src/annotations/**` (shards YAML)
|
||||
- `public/media/**` (assets média)
|
||||
|
||||
=> On veut hotpatch pour la rapidité et pour éviter un rebuild complet côté NAS (voir l’esquisse ci-dessous).
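Esquisse de la classification en bash (le fichier `changed-files.txt`, produit par le calcul de diff décrit en 10.3, est une hypothèse ; les chemins reprennent la matrice ci-dessus) :

```bash
# Classement des fichiers modifiés → flags, puis priorité FULL > HOTPATCH > SKIP
HAS_FULL=0; HAS_HOTPATCH=0
while IFS= read -r f; do
  case "$f" in
    src/annotations/*|public/media/*) HAS_HOTPATCH=1 ;;
    src/content/*|src/pages/*|src/anchors/*|scripts/*) HAS_FULL=1 ;;
    src/layouts/*|src/components/*|src/styles/*) HAS_FULL=1 ;;
    astro.config.mjs|package.json|package-lock.json) HAS_FULL=1 ;;
    Dockerfile|docker-compose.yml|nginx.conf|.gitea/workflows/*) HAS_FULL=1 ;;
  esac
done < changed-files.txt
if   [ "$HAS_FULL" = "1" ];     then MODE=full
elif [ "$HAS_HOTPATCH" = "1" ]; then MODE=hotpatch
else MODE=skip; fi
echo "Gate flags: HAS_FULL=$HAS_FULL HAS_HOTPATCH=$HAS_HOTPATCH -> MODE=$MODE"
```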
|
||||
|
||||
### 10.3 — “Merge-proof” : pourquoi on ne lit PAS seulement `git show $SHA`
|
||||
|
||||
Sur un merge commit, `git show --name-only $SHA` peut être trompeur selon le contexte.
|
||||
La méthode robuste est :
|
||||
- utiliser `event.json` (Gitea Actions) pour récupérer `before` et `after`
|
||||
- calculer `git diff --name-only BEFORE AFTER`
|
||||
|
||||
C’est ce qui rend le gate **merge-proof**.
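Esquisse du calcul (la variable `GITHUB_EVENT_PATH` et l’outil `jq` sont des hypothèses côté runner ; à adapter au workflow réel) :

```bash
# Lire before/after depuis l'event, puis diff par plage (et non git show sur un seul SHA)
BEFORE="$(jq -r '.before' "${GITHUB_EVENT_PATH:-event.json}")"
AFTER="$(jq -r '.after'  "${GITHUB_EVENT_PATH:-event.json}")"
echo "Gate ctx: BEFORE=$BEFORE AFTER=$AFTER"
git fetch origin --quiet
git diff --name-only "$BEFORE" "$AFTER" > changed-files.txt
```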
|
||||
|
||||
### 10.4 — Tests de preuve A/B (reproductibles)
|
||||
|
||||
Ces tests valident le gate sans ambiguïté.
|
||||
But : vérifier que le mode choisi est EXACTEMENT celui attendu.
|
||||
|
||||
#### Test A — toucher `src/content/...` (FULL auto)
|
||||
|
||||
1) Créer une branche test
|
||||
2) Modifier 1 fichier dans `src/content/` (ex : ajouter une ligne de commentaire non destructive)
|
||||
3) PR → merge dans `main`
|
||||
4) Vérifier dans `deploy-staging-live.yml` :
|
||||
|
||||
Attendus :
|
||||
- `Gate flags: HAS_FULL=1 HAS_HOTPATCH=0`
|
||||
- `✅ build-impacting change -> MODE=full (rebuild+restart)`
|
||||
- Les étapes FULL (blue puis green) s’exécutent réellement
|
||||
|
||||
#### Test B — toucher `src/annotations/...` uniquement (HOTPATCH auto)
|
||||
|
||||
1) Créer une branche test
|
||||
2) Modifier 1 fichier sous `src/annotations/**` (ex: un champ comment, ts, etc.)
|
||||
3) PR → merge dans `main`
|
||||
4) Vérifier dans `deploy-staging-live.yml` :
|
||||
|
||||
Attendus :
|
||||
- `Gate flags: HAS_FULL=0 HAS_HOTPATCH=1`
|
||||
- `✅ annotations/media change -> MODE=hotpatch`
|
||||
- Les étapes FULL sont “skip” (durée 0s)
|
||||
- L’étape HOTPATCH s’exécute réellement
|
||||
|
||||
### 10.5 — Preuve opérationnelle côté NAS (2 URLs + 2 commandes)
|
||||
|
||||
But : prouver que staging+live servent bien les endpoints essentiels (et que le déploiement n’a pas “fait semblant”).
|
||||
|
||||
#### 10.5.1 — Deux URLs à vérifier (staging et live)
|
||||
|
||||
- Staging (blue) : `http://127.0.0.1:8081/`
|
||||
- Live (green) : `http://127.0.0.1:8082/`
|
||||
|
||||
#### 10.5.2 — Deux commandes minimales (zéro débat)
|
||||
|
||||
en bash :
|
||||
curl -fsSI http://127.0.0.1:8081/ | head -n 1
|
||||
curl -fsSI http://127.0.0.1:8082/ | head -n 1
|
||||
|
||||
Attendu : HTTP/1.1 200 OK des deux côtés.
|
||||
|
||||
### 10.6 — Preuve “alias injection” (ancre ancienne → nouvelle) sur une page
|
||||
|
||||
Contexte : lorsqu’un paragraphe change (ex: ticket “Proposer” appliqué),
|
||||
l’ID de paragraphe peut changer, mais on doit préserver les liens anciens via :
|
||||
|
||||
src/anchors/anchor-aliases.json
|
||||
|
||||
injection build-time dans dist (span .para-alias)
|
||||
|
||||
#### 10.6.1 — Check rapide (staging + live)
|
||||
|
||||
Remplacer OLD/NEW par tes ids réels :
|
||||
|
||||
OLD="p-1-60c7ea48"
|
||||
NEW="p-1-a21087b0"
|
||||
|
||||
for P in 8081 8082; do
|
||||
echo "=== $P ==="
|
||||
HTML="$(curl -fsS "http://127.0.0.1:${P}/archicrat-ia/chapitre-3/" | tr -d '\r')"
|
||||
echo "OLD count: $(printf '%s' "$HTML" | grep -o "$OLD" | wc -l | tr -d ' ')"
|
||||
echo "NEW count: $(printf '%s' "$HTML" | grep -o "$NEW" | wc -l | tr -d ' ')"
|
||||
printf '%s\n' "$HTML" | grep -nE "$OLD|$NEW|class=\"para-alias\"" | head -n 40 || true
|
||||
done
|
||||
|
||||
Attendu :
|
||||
|
||||
présence d’un alias : <span id="$OLD" class="para-alias"...>
|
||||
|
||||
présence du nouveau paragraphe : <p id="$NEW">...
|
||||
|
||||
#### 10.6.2 — Check “lien ancien ne casse pas” (HTTP 200)
|
||||
|
||||
for P in 8081 8082; do
|
||||
curl -fsSI "http://127.0.0.1:${P}/archicrat-ia/chapitre-3/#${OLD}" | head -n 1
|
||||
done
|
||||
|
||||
Attendu : HTTP/1.1 200 OK et navigation fonctionnelle côté navigateur.
|
||||
|
||||
### 10.7 — Troubleshooting gate (symptômes typiques)
|
||||
Symptom 1 : job bloqué “Set up job” très longtemps
|
||||
|
||||
Causes fréquentes :
|
||||
|
||||
runner indisponible / capacity saturée
|
||||
|
||||
runner ne récupère pas les tâches (fetch_timeout trop court + réseau instable)
|
||||
|
||||
erreur dans “Gate — decide …” qui casse bash (et donne l’impression d’un hang)
|
||||
|
||||
Commandes NAS (diagnostic rapide) :
|
||||
|
||||
docker ps --format 'table {{.Names}}\t{{.Status}}\t{{.Image}}' | grep -E 'gitea-act-runner|registry|archicratie-web'
|
||||
docker logs --since 30m --tail 400 gitea-act-runner | tail -n 200
|
||||
Symptom 2 : conditional binary operator expected
|
||||
|
||||
Cause :
|
||||
|
||||
test bash du type [[ "$X" == "1" && "$Y" == "2" ]] mal formé
|
||||
|
||||
variable vide non quotée
|
||||
|
||||
usage d’un opérateur non supporté dans la shell effective
|
||||
|
||||
Fix :
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
toujours quoter : [[ "${VAR:-}" == "..." ]]
|
||||
|
||||
logguer BEFORE/AFTER/FORCE et s’assurer qu’ils ne sont pas vides
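Exemple de garde robuste (variables hypothétiques, à transposer dans l’étape Gate) :

```bash
# Quoter systématiquement et fournir un défaut pour éviter "conditional binary operator expected"
set -euo pipefail
echo "Gate ctx: BEFORE=${BEFORE:-} AFTER=${AFTER:-} FORCE=${FORCE:-0}"
if [[ "${HAS_FULL:-0}" == "1" || "${FORCE:-0}" == "1" ]]; then
  MODE=full
fi
```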
|
||||
|
||||
Symptom 3 : le gate liste “trop de fichiers” alors qu’on a changé 1 seul fichier
|
||||
|
||||
Cause :
|
||||
|
||||
comparaison faite sur le mauvais range (ex: git show sur merge, ou mauvais parent)
|
||||
Fix :
|
||||
|
||||
toujours utiliser git diff --name-only "$BEFORE" "$AFTER" (merge-proof)
|
||||
|
||||
confirmer dans le log : Gate ctx: BEFORE=... AFTER=...
|
||||
|
||||
147
docs/runbooks/EDGE-TRAEFIK.md
Normal file
@@ -0,0 +1,147 @@
|
||||
# RUNBOOK — Edge Traefik (routing + SSO Authelia)
|
||||
> Objectif : comprendre et diagnostiquer rapidement qui route quoi, et pourquoi staging/live peuvent diverger.
|
||||
|
||||
## 0) Portée
|
||||
Edge Traefik route plusieurs hosts vers des backends locaux (127.0.0.1:*), avec Auth via Authelia.
|
||||
|
||||
Répertoire :
|
||||
- `/volume2/docker/edge/config/dynamic/`
|
||||
|
||||
Port d’entrée edge :
|
||||
- `http://127.0.0.1:18080/` (entryPoint `web`)
|
||||
- Les hosts publics pointent vers cet edge.
|
||||
|
||||
## 1) Fichiers dynamiques (canon)
|
||||
### 00-smoke.yml
|
||||
- route `/__smoke` vers le service `smoke_svc` → `127.0.0.1:18081`
|
||||
|
||||
### 10-core.yml
|
||||
- définit les middlewares :
|
||||
- `sanitize-remote`
|
||||
- `authelia` (forwardAuth vers 9091)
|
||||
- `chain-auth` (chain sanitize-remote + authelia)
|
||||
|
||||
### 20-archicratie-backend.yml
|
||||
- définit service `archicratie_web` → `127.0.0.1:8082` (live upstream)
|
||||
|
||||
### 21-archicratie-staging.yml
|
||||
- route staging host vers `127.0.0.1:8081` (staging upstream)
|
||||
- applique middlewares `diag-staging@file` et `chain-auth@file`
|
||||
- IMPORTANT : `diag-staging@file` doit exister
|
||||
|
||||
### 22-archicratie-authinfo-staging.yml
|
||||
- route `/_auth/` sur staging vers `whoami@file`
|
||||
- applique `diag-staging-authinfo@file` + `chain-auth@file`
|
||||
- IMPORTANT : `diag-staging-authinfo@file` doit exister
|
||||
|
||||
### 90-overlay-staging-fix.yml (overlay de diagnostic + fallback)
|
||||
Rôle :
|
||||
- **fournir** les middlewares manquants (`diag-staging`, `diag-staging-authinfo`)
|
||||
- optionnel : fallback route si 21/22 sont cassés
|
||||
- injecter un header `X-Archi-Router` pour identifier le routeur utilisé
|
||||
|
||||
### 92-overlay-live-fix.yml
|
||||
- route live host `archicratie.trans-hands.synology.me` → `archicratie_web@file` (8082)
|
||||
- route `/_auth/whoami` → `whoami@file` (18081)
|
||||
|
||||
## 2) Diagnostiquer rapidement : quel routeur répond ?
|
||||
### 2.1 Test “host header” (sans UI)
|
||||
# en bash :
|
||||
|
||||
curl -sSI -H 'Host: staging.archicratie.trans-hands.synology.me' http://127.0.0.1:18080/ \
|
||||
| grep -iE 'HTTP/|location:|x-archi-router' | head -n 30
|
||||
|
||||
curl -sSI -H 'Host: staging.archicratie.trans-hands.synology.me' http://127.0.0.1:18080/_auth/whoami \
|
||||
| grep -iE 'HTTP/|location:|x-archi-router' | head -n 30
|
||||
|
||||
# Interprétation :
|
||||
|
||||
X-Archi-Router: staging@21 → routeur 21-archicratie-staging.yml OK
|
||||
|
||||
X-Archi-Router: staging-authinfo@22 → routeur authinfo OK
|
||||
|
||||
Si tu vois staging-fallback@90 → tu es tombé sur le fallback 90 (donc 21/22 potentiellement invalides)
|
||||
|
||||
### 2.2 Vérifier l’upstream direct derrière edge
|
||||
|
||||
curl -sSI http://127.0.0.1:8081/ | head -n 12
|
||||
curl -sSI http://127.0.0.1:8082/ | head -n 12
|
||||
|
||||
Si 8081 et 8082 servent des versions différentes : c’est “normal” en blue/green, mais il faut savoir laquelle est censée être staging/live.
|
||||
|
||||
## 3) Diagnostiquer les erreurs Traefik (fichier invalide / middleware manquant)
|
||||
### 3.1 Grep “level=error”
|
||||
|
||||
sudo docker logs edge-traefik --since 5m | grep -Ei 'level=error|middleware|router|service|yaml' | tail -n 80
|
||||
|
||||
# Cas typique :
|
||||
|
||||
middleware "diag-staging@file" does not exist
|
||||
→ 21-archicratie-staging.yml référence un middleware absent. Solution : le définir (souvent dans 90-overlay-staging-fix.yml).
|
||||
|
||||
## 4) Procédure safe de modification (jamais en aveugle)
|
||||
### 4.1 Backup
|
||||
|
||||
cd /volume2/docker/edge/config/dynamic
|
||||
TS="$(date +%F-%H%M%S)"
|
||||
sudo cp -a 90-overlay-staging-fix.yml "90-overlay-staging-fix.yml.bak.$TS"
|
||||
|
||||
### 4.2 Édition (ex : ajouter middlewares diag)
|
||||
|
||||
Faire une modif minimale
|
||||
|
||||
Ne pas casser les règles existantes (Host + PathPrefix)
|
||||
|
||||
Respecter les priorités (voir section 5)
|
||||
|
||||
### 4.3 Reload Traefik
|
||||
|
||||
sudo docker restart edge-traefik
|
||||
|
||||
### 4.4 Tests immédiats
|
||||
|
||||
curl -sSI -H 'Host: staging.archicratie.trans-hands.synology.me' http://127.0.0.1:18080/ \
|
||||
| grep -iE 'HTTP/|location:|x-archi-router'
|
||||
|
||||
curl -sSI -H 'Host: staging.archicratie.trans-hands.synology.me' http://127.0.0.1:18080/_auth/whoami \
|
||||
| grep -iE 'HTTP/|location:|x-archi-router'
|
||||
|
||||
## 5) Priorités Traefik (le point subtil)
|
||||
|
||||
Traefik choisit le routeur selon :
|
||||
|
||||
la correspondance de règle
|
||||
|
||||
la priority (plus grand gagne)
|
||||
|
||||
en cas d’égalité, l’ordre interne (à éviter)
|
||||
|
||||
### 5.1 Canon pour staging
|
||||
|
||||
21-archicratie-staging.yml : priority 10
|
||||
|
||||
22-archicratie-authinfo-staging.yml : priority 10000
|
||||
|
||||
90-overlay-staging-fix.yml :
|
||||
|
||||
fallback host : priority faible (ex: 5) pour ne PAS écraser 21
|
||||
|
||||
fallback whoami : priority < 10000 (ex: 9000) pour ne PAS écraser 22
|
||||
|
||||
=> On garde 90 comme filet de sécurité / diag, pas comme “source”.
|
||||
|
||||
## 6) Rollback (si un changement edge casse staging/live)
|
||||
|
||||
cd /volume2/docker/edge/config/dynamic
|
||||
# choisir le bon backup
|
||||
sudo mv -f 90-overlay-staging-fix.yml "90-overlay-staging-fix.yml.BAD.$(date +%F-%H%M%S)"
|
||||
sudo cp -a 90-overlay-staging-fix.yml.bak.YYYY-MM-DD-HHMMSS 90-overlay-staging-fix.yml
|
||||
sudo docker restart edge-traefik
|
||||
|
||||
Puis re-tests section 2.
|
||||
|
||||
## 7) Remarques
|
||||
|
||||
Les 302 Authelia sont normaux si non authentifié.
|
||||
|
||||
Un 404 “Not Found” depuis edge alors que 8081 répond : souvent routeur manquant / invalidé / middleware absent.
|
||||
114
docs/runbooks/ENV-PUBLIC_SITE.md
Normal file
@@ -0,0 +1,114 @@
|
||||
# RUNBOOK — PUBLIC_SITE (canonical + sitemap) “anti localhost en prod”
|
||||
> Objectif : ne plus jamais voir `rel="canonical" href="http://localhost:4321/"` en staging/live.
|
||||
|
||||
## 0) Pourquoi c’est critique
|
||||
Astro génère :
|
||||
- `<link rel="canonical" href="...">`
|
||||
- `sitemap-index.xml`
|
||||
|
||||
Ces valeurs dépendent de `site` dans `astro.config.mjs`.
|
||||
|
||||
Si `site` vaut `http://localhost:4321` au moment du build Docker, **la prod sortira des canonical faux** :
|
||||
- SEO / partage / cohérence de navigation impactés
|
||||
- confusion staging/live
|
||||
|
||||
## 1) Règle canonique
|
||||
- `astro.config.mjs` :
|
||||
# en js :
|
||||
|
||||
site: process.env.PUBLIC_SITE ?? "http://localhost:4321"
|
||||
|
||||
# Donc :
|
||||
|
||||
En DEV local : pas besoin de PUBLIC_SITE (fallback ok)
|
||||
|
||||
En build “déploiement” : on DOIT fournir PUBLIC_SITE
|
||||
|
||||
## 2) Exigence “antifragile”
|
||||
### 2.1 Dockerfile (build stage)
|
||||
|
||||
On injecte PUBLIC_SITE au build et on peut le rendre obligatoire :
|
||||
|
||||
ARG PUBLIC_SITE
|
||||
|
||||
ARG REQUIRE_PUBLIC_SITE=0
|
||||
|
||||
ENV PUBLIC_SITE=$PUBLIC_SITE
|
||||
|
||||
# garde-fou :
|
||||
|
||||
RUN if [ "$REQUIRE_PUBLIC_SITE" = "1" ] && [ -z "$PUBLIC_SITE" ]; then \
|
||||
echo "ERROR: PUBLIC_SITE is required (REQUIRE_PUBLIC_SITE=1)"; exit 1; \
|
||||
fi
|
||||
|
||||
=> Si quelqu’un oublie l’URL en prod, le build casse au lieu de produire une release mauvaise.
|
||||
|
||||
## 3) docker-compose : blue/staging vs green/live
|
||||
|
||||
Objectif : injecter deux valeurs différentes, sans bricolage.
|
||||
|
||||
### 3.1 .env (NAS)
|
||||
|
||||
Exemple canonique :
|
||||
|
||||
PUBLIC_SITE_BLUE=https://staging.archicratie.trans-hands.synology.me
|
||||
PUBLIC_SITE_GREEN=https://archicratie.trans-hands.synology.me
|
||||
|
||||
### 3.2 docker-compose.yml
|
||||
|
||||
web_blue :
|
||||
|
||||
REQUIRE_PUBLIC_SITE: "1"
|
||||
|
||||
PUBLIC_SITE: ${PUBLIC_SITE_BLUE}
|
||||
|
||||
web_green :
|
||||
|
||||
REQUIRE_PUBLIC_SITE: "1"
|
||||
|
||||
PUBLIC_SITE: ${PUBLIC_SITE_GREEN}
|
||||
|
||||
## 4) Tests (obligatoires après build)
|
||||
### 4.1 Vérifier l’injection dans compose
|
||||
|
||||
sudo env DOCKER_API_VERSION=1.43 docker compose config \
|
||||
| grep -nE 'PUBLIC_SITE|REQUIRE_PUBLIC_SITE|web_blue:|web_green:' | sed -n '1,200p'
|
||||
|
||||
### 4.2 Vérifier canonical (upstream direct)
|
||||
|
||||
curl -sS http://127.0.0.1:8081/ | grep -oE 'rel="canonical" href="[^"]+"' | head -n 1
|
||||
curl -sS http://127.0.0.1:8082/ | grep -oE 'rel="canonical" href="[^"]+"' | head -n 1
|
||||
|
||||
# Attendu :
|
||||
|
||||
blue : https://staging.../
|
||||
|
||||
green : https://archicratie.../
|
||||
|
||||
## 5) Procédure de correction (si canonical est faux)
|
||||
### 5.1 Vérifier astro.config.mjs dans la release courante
|
||||
|
||||
cd /volume2/docker/archicratie-web/current
|
||||
grep -nE 'site:\s*process\.env\.PUBLIC_SITE' astro.config.mjs
|
||||
|
||||
### 5.2 Vérifier que Dockerfile exporte PUBLIC_SITE
|
||||
|
||||
grep -nE 'ARG PUBLIC_SITE|ENV PUBLIC_SITE|REQUIRE_PUBLIC_SITE' Dockerfile
|
||||
|
||||
### 5.3 Vérifier .env et compose
|
||||
|
||||
grep -nE 'PUBLIC_SITE_BLUE|PUBLIC_SITE_GREEN' .env
|
||||
grep -nE 'PUBLIC_SITE|REQUIRE_PUBLIC_SITE' docker-compose.yml
|
||||
|
||||
### 5.4 Rebuild + recreate
|
||||
|
||||
sudo env DOCKER_API_VERSION=1.43 docker compose build --no-cache web_blue web_green
|
||||
sudo env DOCKER_API_VERSION=1.43 docker compose up -d --force-recreate web_blue web_green
|
||||
|
||||
Puis tests section 4.
|
||||
|
||||
## 6) Notes
|
||||
|
||||
Cette mécanique doit être backportée dans Gitea (source canonique), sinon ça re-cassera au prochain pack.
|
||||
|
||||
En DEV local, conserver le fallback http://localhost:4321 est utile et normal.
|
||||
package-lock.json (generated) — 1330 changed lines
package.json — 25 changed lines

@@ -4,35 +4,34 @@
|
||||
"version": "0.0.1",
|
||||
"private": true,
|
||||
"scripts": {
|
||||
"dev": "astro dev",
|
||||
"dev": "node scripts/write-dev-whoami.mjs && astro dev",
|
||||
"preview": "astro preview",
|
||||
"astro": "astro",
|
||||
|
||||
"clean": "rm -rf dist",
|
||||
"build": "astro build",
|
||||
"build:clean": "npm run clean && npm run build",
|
||||
|
||||
"postbuild": "node scripts/inject-anchor-aliases.mjs && node scripts/dedupe-ids-dist.mjs && npx pagefind --site dist",
|
||||
|
||||
"build:search": "pagefind --site dist",
|
||||
"postbuild": "node scripts/inject-anchor-aliases.mjs && node scripts/dedupe-ids-dist.mjs && node scripts/build-para-index.mjs && node scripts/build-annotations-index.mjs && node scripts/purge-dist-dev-whoami.mjs && npm run build:search",
|
||||
"import": "node scripts/import-docx.mjs",
|
||||
"apply:ticket": "node scripts/apply-ticket.mjs",
|
||||
|
||||
"audit:dist": "node scripts/audit-dist.mjs",
|
||||
|
||||
"audit:glossary": "node scripts/audit-glossary-navigation.mjs",
|
||||
"build:para-index": "node scripts/build-para-index.mjs",
|
||||
"build:annotations-index": "node scripts/build-annotations-index.mjs",
|
||||
"test:aliases": "node scripts/check-anchor-aliases.mjs",
|
||||
"test:anchors": "node scripts/check-anchors.mjs",
|
||||
"test:anchors:update": "node scripts/check-anchors.mjs --update",
|
||||
|
||||
"test": "npm run test:aliases && npm run build:clean && npm run audit:dist && node scripts/verify-anchor-aliases-in-dist.mjs && npm run test:anchors && node scripts/check-inline-js.mjs",
|
||||
|
||||
"test:annotations": "node scripts/check-annotations.mjs",
|
||||
"test:annotations:media": "node scripts/check-annotations-media.mjs",
|
||||
"test": "npm run test:aliases && npm run build:clean && npm run audit:dist && node scripts/verify-anchor-aliases-in-dist.mjs && npm run test:anchors && npm run test:annotations && npm run test:annotations:media && node scripts/check-inline-js.mjs",
|
||||
"ci": "CI=1 npm test"
|
||||
},
|
||||
"dependencies": {
|
||||
"@astrojs/mdx": "^4.3.13",
|
||||
"astro": "^5.16.11"
|
||||
"@astrojs/mdx": "^5.0.0",
|
||||
"astro": "^6.0.2"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@astrojs/sitemap": "^3.7.0",
|
||||
"@astrojs/sitemap": "^3.7.1",
|
||||
"mammoth": "^1.11.0",
|
||||
"pagefind": "^1.4.0",
|
||||
"rehype-autolink-headings": "^7.1.0",
|
||||
|
||||
@@ -1 +1 @@
|
||||
{"name":"","short_name":"","icons":[{"src":"/android-chrome-192x192.png","sizes":"192x192","type":"image/png"},{"src":"/android-chrome-512x512.png","sizes":"512x512","type":"image/png"}],"theme_color":"#ffffff","background_color":"#ffffff","display":"standalone"}
|
||||
{"name":"","short_name":"","icons":[{"src":"/android-chrome-192x192.png","sizes":"192x192","type":"image/png"},{"src":"/android-chrome-512x512.png","sizes":"512x512","type":"image/png"}],"theme_color":"#ffffff","background_color":"#ffffff","display":"standalone","orientation":"any"}
|
||||
scripts/apply-annotation-ticket.mjs (new file) — 899 lines

@@ -0,0 +1,899 @@
|
||||
#!/usr/bin/env node
// scripts/apply-annotation-ticket.mjs
//
// Applies a Gitea ticket "type/media | type/reference | type/comment" to:
//
// ✅ src/annotations/<oeuvre>/<chapitre>/<paraId>.yml (sharded per paragraph)
// ✅ public/media/<oeuvre>/<chapitre>/<paraId>/<file>
//
// Backward compatibility: reads (if present) the legacy monolith:
// src/annotations/<oeuvre>/<chapitre>.yml
// and deep-merges it NON-destructively into the shard when a new ticket is applied,
// so the migration can happen progressively without losing data.
//
// Robust, idempotent, non-destructive.
// DRY RUN with --dry-run
// Options: --dry-run --no-download --verify --strict --commit --close
//
// Required env:
// FORGE_API = Gitea API base (LAN), e.g. http://192.168.1.20:3000
// FORGE_TOKEN = Gitea PAT (repo + issues)
//
// Optional env:
// GITEA_OWNER / GITEA_REPO (otherwise auto-detected from the git remote)
// ANNO_DIR (default: src/annotations)
// PUBLIC_DIR (default: public)
// MEDIA_ROOT (default URL: /media)
//
// Expected ticket body:
// Chemin: /archicrat-ia/chapitre-4/
// Ancre: #p-0-xxxxxxxx
// Type: type/media | type/reference | type/comment
//
// Exit codes:
// 0 ok
// 1 fatal error
// 2 refusal (strict/verify/usage)
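// Illustrative ticket body (hypothetical values; the keys match what pickLine()
// and parseReferenceBlock() below expect):
//
//   Chemin: /archicrat-ia/chapitre-4/
//   Ancre: #p-0-1a2b3c4d
//   Type: type/reference
//
//   Référence (à compléter):
//   - URL: https://example.org/source
//   - Label: Example source
//   - Kind: article
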
import fs from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import process from "node:process";
|
||||
import { spawnSync } from "node:child_process";
|
||||
import YAML from "yaml";
|
||||
|
||||
/* ---------------------------------- usage --------------------------------- */
|
||||
|
||||
function usage(exitCode = 0) {
|
||||
console.log(`
|
||||
apply-annotation-ticket — applique un ticket SidePanel (media/ref/comment) vers src/annotations/ (shard par paragraphe)
|
||||
|
||||
Usage:
|
||||
node scripts/apply-annotation-ticket.mjs <issue_number> [--dry-run] [--no-download] [--verify] [--strict] [--commit] [--close]
|
||||
|
||||
Flags:
|
||||
--dry-run : n'écrit rien (affiche un aperçu)
|
||||
--no-download : n'essaie pas de télécharger les pièces jointes (media)
|
||||
--verify : vérifie que (page, ancre) existent (dist/para-index.json si dispo, sinon baseline)
|
||||
--strict : refuse si URL ref invalide (http/https) OU caption media vide OU verify impossible
|
||||
--commit : git add + git commit (commit dans la branche courante)
|
||||
--close : ferme le ticket (nécessite --commit)
|
||||
|
||||
Env requis:
|
||||
FORGE_API = base API Gitea (LAN) ex: http://192.168.1.20:3000
|
||||
FORGE_TOKEN = PAT Gitea (repo + issues)
|
||||
|
||||
Env optionnel:
|
||||
GITEA_OWNER / GITEA_REPO (sinon auto-détecté via git remote)
|
||||
ANNO_DIR (défaut: src/annotations)
|
||||
PUBLIC_DIR (défaut: public)
|
||||
MEDIA_ROOT (défaut URL: /media)
|
||||
|
||||
Exit codes:
|
||||
0 ok
|
||||
1 erreur fatale
|
||||
2 refus (strict/verify/close sans commit / incohérence)
|
||||
`);
|
||||
process.exit(exitCode);
|
||||
}
|
||||
|
||||
/* ---------------------------------- args ---------------------------------- */
|
||||
|
||||
const argv = process.argv.slice(2);
|
||||
if (argv.length === 0 || argv.includes("--help") || argv.includes("-h")) usage(0);
|
||||
|
||||
const issueNum = Number(argv[0]);
|
||||
if (!Number.isFinite(issueNum) || issueNum <= 0) {
|
||||
console.error("❌ Numéro de ticket invalide.");
|
||||
usage(2);
|
||||
}
|
||||
|
||||
const DRY_RUN = argv.includes("--dry-run");
|
||||
const NO_DOWNLOAD = argv.includes("--no-download");
|
||||
const DO_VERIFY = argv.includes("--verify");
|
||||
const STRICT = argv.includes("--strict");
|
||||
const DO_COMMIT = argv.includes("--commit");
|
||||
const DO_CLOSE = argv.includes("--close");
|
||||
|
||||
if (DO_CLOSE && !DO_COMMIT) {
|
||||
console.error("❌ --close nécessite --commit.");
|
||||
process.exit(2);
|
||||
}
|
||||
|
||||
if (typeof fetch !== "function") {
|
||||
console.error("❌ fetch() indisponible. Utilise Node 18+.");
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
/* --------------------------------- config --------------------------------- */
|
||||
|
||||
const CWD = process.cwd();
|
||||
const ANNO_DIR = path.join(CWD, process.env.ANNO_DIR || "src/annotations"); // ANNO_DIR, if set, is the full relative path (default: src/annotations)
|
||||
const PUBLIC_DIR = path.join(CWD, process.env.PUBLIC_DIR || "public");
|
||||
const MEDIA_URL_ROOT = String(process.env.MEDIA_ROOT || "/media").replace(/\/+$/, "");
|
||||
|
||||
/* --------------------------------- helpers -------------------------------- */
|
||||
|
||||
function getEnv(name, fallback = "") {
|
||||
return (process.env[name] ?? fallback).trim();
|
||||
}
|
||||
|
||||
function run(cmd, args, opts = {}) {
|
||||
const r = spawnSync(cmd, args, { stdio: "inherit", ...opts });
|
||||
if (r.error) throw r.error;
|
||||
if (r.status !== 0) throw new Error(`Command failed: ${cmd} ${args.join(" ")}`);
|
||||
}
|
||||
|
||||
function runQuiet(cmd, args, opts = {}) {
|
||||
const r = spawnSync(cmd, args, { encoding: "utf8", stdio: "pipe", ...opts });
|
||||
if (r.error) throw r.error;
|
||||
if (r.status !== 0) {
|
||||
const out = (r.stdout || "") + (r.stderr || "");
|
||||
throw new Error(`Command failed: ${cmd} ${args.join(" ")}\n${out}`);
|
||||
}
|
||||
return r.stdout || "";
|
||||
}
|
||||
|
||||
async function exists(p) {
|
||||
try {
|
||||
await fs.access(p);
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
function inferOwnerRepoFromGit() {
|
||||
const r = spawnSync("git", ["remote", "get-url", "origin"], { encoding: "utf-8" });
|
||||
if (r.status !== 0) return null;
|
||||
const u = (r.stdout || "").trim();
|
||||
const m = u.match(/[:/](?<owner>[^/]+)\/(?<repo>[^/]+?)(?:\.git)?$/);
|
||||
if (!m?.groups) return null;
|
||||
return { owner: m.groups.owner, repo: m.groups.repo };
|
||||
}
|
||||
|
||||
function gitHasStagedChanges() {
|
||||
const r = spawnSync("git", ["diff", "--cached", "--quiet"]);
|
||||
return r.status === 1;
|
||||
}
|
||||
|
||||
function escapeRegExp(s) {
|
||||
return String(s).replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
|
||||
}
|
||||
|
||||
function pickLine(body, key) {
|
||||
const re = new RegExp(`^\\s*${escapeRegExp(key)}\\s*:\\s*([^\\n\\r]+)`, "mi");
|
||||
const m = String(body || "").match(re);
|
||||
return m ? m[1].trim() : "";
|
||||
}
|
||||
|
||||
function pickSection(body, markers) {
|
||||
const text = String(body || "").replace(/\r\n/g, "\n");
|
||||
const idx = markers
|
||||
.map((m) => ({ m, i: text.toLowerCase().indexOf(m.toLowerCase()) }))
|
||||
.filter((x) => x.i >= 0)
|
||||
.sort((a, b) => a.i - b.i)[0];
|
||||
if (!idx) return "";
|
||||
|
||||
const start = idx.i + idx.m.length;
|
||||
const tail = text.slice(start);
|
||||
|
||||
const stops = ["\n## ", "\n---", "\nJustification", "\nProposition", "\nSources"];
|
||||
let end = tail.length;
|
||||
for (const s of stops) {
|
||||
const j = tail.toLowerCase().indexOf(s.toLowerCase());
|
||||
if (j >= 0 && j < end) end = j;
|
||||
}
|
||||
return tail.slice(0, end).trim();
|
||||
}
|
||||
|
||||
function normalizeChemin(chemin) {
|
||||
let c = String(chemin || "").trim();
|
||||
if (!c) return "";
|
||||
if (!c.startsWith("/")) c = "/" + c;
|
||||
if (!c.endsWith("/")) c = c + "/";
|
||||
c = c.replace(/\/{2,}/g, "/");
|
||||
return c;
|
||||
}
|
||||
|
||||
function normalizePageKeyFromChemin(chemin) {
|
||||
// ex: /archicrat-ia/chapitre-4/ => archicrat-ia/chapitre-4
|
||||
return normalizeChemin(chemin).replace(/^\/+|\/+$/g, "");
|
||||
}
|
||||
|
||||
function normalizeAnchorId(s) {
|
||||
let a = String(s || "").trim();
|
||||
if (a.startsWith("#")) a = a.slice(1);
|
||||
return a;
|
||||
}
|
||||
|
||||
function assert(cond, msg, code = 1) {
|
||||
if (!cond) {
|
||||
const e = new Error(msg);
|
||||
e.__exitCode = code;
|
||||
throw e;
|
||||
}
|
||||
}
|
||||
|
||||
function isPlainObject(x) {
|
||||
return !!x && typeof x === "object" && !Array.isArray(x);
|
||||
}
|
||||
|
||||
function paraIndexFromId(id) {
|
||||
const m = String(id).match(/^p-(\d+)-/i);
|
||||
return m ? Number(m[1]) : Number.NaN;
|
||||
}
|
||||
|
||||
function isHttpUrl(u) {
|
||||
try {
|
||||
const x = new URL(String(u));
|
||||
return x.protocol === "http:" || x.protocol === "https:";
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
function stableSortByTs(arr) {
|
||||
if (!Array.isArray(arr)) return;
|
||||
arr.sort((a, b) => {
|
||||
const ta = Date.parse(a?.ts || "") || 0;
|
||||
const tb = Date.parse(b?.ts || "") || 0;
|
||||
if (ta !== tb) return ta - tb;
|
||||
return JSON.stringify(a).localeCompare(JSON.stringify(b));
|
||||
});
|
||||
}
|
||||
|
||||
function normPage(s) {
|
||||
let x = String(s || "").trim();
|
||||
if (!x) return "";
|
||||
// retire origin si on a une URL complète
|
||||
x = x.replace(/^https?:\/\/[^/]+/i, "");
|
||||
// enlève query/hash
|
||||
x = x.split("#")[0].split("?")[0];
|
||||
// enlève index.html
|
||||
x = x.replace(/index\.html$/i, "");
|
||||
// enlève slashs de bord
|
||||
x = x.replace(/^\/+/, "").replace(/\/+$/, "");
|
||||
return x;
|
||||
}
|
||||
|
||||
/* ------------------------------ para-index (verify + order) ------------------------------ */
|
||||
|
||||
async function loadParaOrderFromDist(pageKey) {
|
||||
const distIdx = path.join(CWD, "dist", "para-index.json");
|
||||
if (!(await exists(distIdx))) return null;
|
||||
|
||||
let j;
|
||||
try {
|
||||
j = JSON.parse(await fs.readFile(distIdx, "utf8"));
|
||||
} catch {
|
||||
return null;
|
||||
}
|
||||
|
||||
const want = normPage(pageKey);
|
||||
|
||||
// Support A) { items:[{id,page,...}, ...] } (ou variantes)
|
||||
const items = Array.isArray(j?.items)
|
||||
? j.items
|
||||
: Array.isArray(j?.index?.items)
|
||||
? j.index.items
|
||||
: null;
|
||||
|
||||
if (items) {
|
||||
const ids = [];
|
||||
for (const it of items) {
|
||||
// page peut être dans plein de clés différentes
|
||||
const pageCand = normPage(
|
||||
it?.page ??
|
||||
it?.pageKey ??
|
||||
it?.path ??
|
||||
it?.route ??
|
||||
it?.href ??
|
||||
it?.url ??
|
||||
""
|
||||
);
|
||||
|
||||
// id peut être dans plein de clés différentes
|
||||
let id = String(it?.id ?? it?.paraId ?? it?.anchorId ?? it?.anchor ?? "");
|
||||
if (id.startsWith("#")) id = id.slice(1);
|
||||
|
||||
if (pageCand === want && id) ids.push(id);
|
||||
}
|
||||
if (ids.length) return ids;
|
||||
}
|
||||
|
||||
// Support B) { byId: { "p-...": { page:"...", ... }, ... } }
|
||||
if (j?.byId && typeof j.byId === "object") {
|
||||
const ids = Object.keys(j.byId)
|
||||
.filter((id) => {
|
||||
const meta = j.byId[id] || {};
|
||||
const pageCand = normPage(meta.page ?? meta.pageKey ?? meta.path ?? meta.route ?? meta.url ?? "");
|
||||
return pageCand === want;
|
||||
});
|
||||
|
||||
if (ids.length) {
|
||||
ids.sort((a, b) => {
|
||||
const ia = paraIndexFromId(a);
|
||||
const ib = paraIndexFromId(b);
|
||||
if (Number.isFinite(ia) && Number.isFinite(ib) && ia !== ib) return ia - ib;
|
||||
return String(a).localeCompare(String(b));
|
||||
});
|
||||
return ids;
|
||||
}
|
||||
}
|
||||
|
||||
// Support C) { pages: { "archicrat-ia/chapitre-4": { ids:[...] } } } (ou variantes)
|
||||
if (j?.pages && typeof j.pages === "object") {
|
||||
// essaie de trouver la bonne clé même si elle est /.../ ou .../index.html
|
||||
const keys = Object.keys(j.pages);
|
||||
const hit = keys.find((k) => normPage(k) === want);
|
||||
if (hit) {
|
||||
const pg = j.pages[hit];
|
||||
if (Array.isArray(pg?.ids)) return pg.ids.map(String);
|
||||
if (Array.isArray(pg?.paras)) return pg.paras.map(String);
|
||||
}
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
async function tryVerifyAnchor(pageKey, anchorId) {
|
||||
// 1) dist/para-index.json : order complet si possible
|
||||
const order = await loadParaOrderFromDist(pageKey);
|
||||
if (order) return order.includes(anchorId);
|
||||
|
||||
// 1bis) dist/para-index.json : fallback “best effort” => recherche brute (IDs quasi uniques)
|
||||
const distIdx = path.join(CWD, "dist", "para-index.json");
|
||||
if (await exists(distIdx)) {
|
||||
try {
|
||||
const raw = await fs.readFile(distIdx, "utf8");
|
||||
if (raw.includes(`"${anchorId}"`) || raw.includes(`"#${anchorId}"`)) {
|
||||
return true;
|
||||
}
|
||||
} catch {
|
||||
// ignore
|
||||
}
|
||||
}
|
||||
|
||||
// 2) tests/anchors-baseline.json (fallback)
|
||||
const base = path.join(CWD, "tests", "anchors-baseline.json");
|
||||
if (await exists(base)) {
|
||||
try {
|
||||
const j = JSON.parse(await fs.readFile(base, "utf8"));
|
||||
const candidates = [];
|
||||
if (j?.pages && typeof j.pages === "object") {
|
||||
for (const [k, v] of Object.entries(j.pages)) {
|
||||
if (!Array.isArray(v)) continue;
|
||||
if (normPage(k).includes(normPage(pageKey))) candidates.push(...v);
|
||||
}
|
||||
}
|
||||
if (Array.isArray(j?.entries)) {
|
||||
for (const it of j.entries) {
|
||||
const p = String(it?.page || "");
|
||||
const ids = it?.ids;
|
||||
if (Array.isArray(ids) && normPage(p).includes(normPage(pageKey))) candidates.push(...ids);
|
||||
}
|
||||
}
|
||||
if (candidates.length) return candidates.some((x) => String(x) === anchorId);
|
||||
} catch {
|
||||
// ignore
|
||||
}
|
||||
}
|
||||
|
||||
return null; // cannot verify
|
||||
}
|
||||
|
||||
/* ----------------------------- deep merge helpers (non destructive) ----------------------------- */
|
||||
|
||||
function keyMedia(x) {
|
||||
return String(x?.src || "");
|
||||
}
|
||||
function keyRef(x) {
|
||||
return `${x?.url || ""}||${x?.label || ""}||${x?.kind || ""}||${x?.citation || ""}`;
|
||||
}
|
||||
function keyComment(x) {
|
||||
return String(x?.text || "").trim();
|
||||
}
|
||||
|
||||
function uniqUnion(dstArr, srcArr, keyFn) {
|
||||
const out = Array.isArray(dstArr) ? [...dstArr] : [];
|
||||
const seen = new Set(out.map((x) => keyFn(x)));
|
||||
for (const it of (Array.isArray(srcArr) ? srcArr : [])) {
|
||||
const k = keyFn(it);
|
||||
if (!k) continue;
|
||||
if (!seen.has(k)) {
|
||||
seen.add(k);
|
||||
out.push(it);
|
||||
}
|
||||
}
|
||||
return out;
|
||||
}
|
||||
|
||||
function deepMergeEntry(dst, src) {
|
||||
if (!isPlainObject(dst) || !isPlainObject(src)) return;
|
||||
|
||||
for (const [k, v] of Object.entries(src)) {
|
||||
if (k === "media" && Array.isArray(v)) {
|
||||
dst.media = uniqUnion(dst.media, v, keyMedia);
|
||||
continue;
|
||||
}
|
||||
if (k === "refs" && Array.isArray(v)) {
|
||||
dst.refs = uniqUnion(dst.refs, v, keyRef);
|
||||
continue;
|
||||
}
|
||||
if (k === "comments_editorial" && Array.isArray(v)) {
|
||||
dst.comments_editorial = uniqUnion(dst.comments_editorial, v, keyComment);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (isPlainObject(v)) {
|
||||
if (!isPlainObject(dst[k])) dst[k] = {};
|
||||
deepMergeEntry(dst[k], v);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (Array.isArray(v)) {
|
||||
const cur = Array.isArray(dst[k]) ? dst[k] : [];
|
||||
const seen = new Set(cur.map((x) => JSON.stringify(x)));
|
||||
const out = [...cur];
|
||||
for (const it of v) {
|
||||
const s = JSON.stringify(it);
|
||||
if (!seen.has(s)) {
|
||||
seen.add(s);
|
||||
out.push(it);
|
||||
}
|
||||
}
|
||||
dst[k] = out;
|
||||
continue;
|
||||
}
|
||||
|
||||
// scalar: set only if missing/empty
|
||||
if (!(k in dst) || dst[k] == null || dst[k] === "") {
|
||||
dst[k] = v;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/* ----------------------------- annotations I/O ----------------------------- */
|
||||
|
||||
async function loadAnnoDocYaml(fileAbs, pageKey) {
|
||||
if (!(await exists(fileAbs))) {
|
||||
return { schema: 1, page: pageKey, paras: {} };
|
||||
}
|
||||
|
||||
const raw = await fs.readFile(fileAbs, "utf8");
|
||||
let doc;
|
||||
try {
|
||||
doc = YAML.parse(raw);
|
||||
} catch (e) {
|
||||
throw new Error(`${path.relative(CWD, fileAbs)}: parse failed: ${String(e?.message ?? e)}`);
|
||||
}
|
||||
|
||||
assert(isPlainObject(doc), `${path.relative(CWD, fileAbs)}: doc must be an object`, 2);
|
||||
assert(doc.schema === 1, `${path.relative(CWD, fileAbs)}: schema must be 1`, 2);
|
||||
assert(isPlainObject(doc.paras), `${path.relative(CWD, fileAbs)}: missing object key "paras"`, 2);
|
||||
|
||||
if (doc.page != null) {
|
||||
const got = String(doc.page).replace(/^\/+/, "").replace(/\/+$/, "");
|
||||
assert(got === pageKey, `${path.relative(CWD, fileAbs)}: page mismatch (page="${doc.page}" vs path="${pageKey}")`, 2);
|
||||
} else {
|
||||
doc.page = pageKey;
|
||||
}
|
||||
|
||||
return doc;
|
||||
}
|
||||
|
||||
function sortParasObject(paras, order) {
|
||||
const keys = Object.keys(paras || {});
|
||||
const idx = new Map();
|
||||
if (Array.isArray(order)) order.forEach((id, i) => idx.set(String(id), i));
|
||||
|
||||
keys.sort((a, b) => {
|
||||
const ha = idx.has(a);
|
||||
const hb = idx.has(b);
|
||||
if (ha && hb) return idx.get(a) - idx.get(b);
|
||||
if (ha && !hb) return -1;
|
||||
if (!ha && hb) return 1;
|
||||
|
||||
const ia = paraIndexFromId(a);
|
||||
const ib = paraIndexFromId(b);
|
||||
if (Number.isFinite(ia) && Number.isFinite(ib) && ia !== ib) return ia - ib;
|
||||
return String(a).localeCompare(String(b));
|
||||
});
|
||||
|
||||
const out = {};
|
||||
for (const k of keys) out[k] = paras[k];
|
||||
return out;
|
||||
}
|
||||
|
||||
async function saveAnnoDocYaml(fileAbs, doc, order = null) {
|
||||
await fs.mkdir(path.dirname(fileAbs), { recursive: true });
|
||||
|
||||
doc.paras = sortParasObject(doc.paras, order);
|
||||
|
||||
for (const e of Object.values(doc.paras || {})) {
|
||||
if (!isPlainObject(e)) continue;
|
||||
stableSortByTs(e.media);
|
||||
stableSortByTs(e.refs);
|
||||
stableSortByTs(e.comments_editorial);
|
||||
}
|
||||
|
||||
const out = YAML.stringify(doc);
|
||||
await fs.writeFile(fileAbs, out, "utf8");
|
||||
}
|
||||
|
||||
/* ------------------------------ gitea helpers ------------------------------ */
|
||||
|
||||
function apiBaseNorm(forgeApiBase) {
|
||||
return forgeApiBase.replace(/\/+$/, "");
|
||||
}
|
||||
|
||||
async function giteaGET(url, token) {
|
||||
const res = await fetch(url, {
|
||||
headers: {
|
||||
Authorization: `token ${token}`,
|
||||
Accept: "application/json",
|
||||
"User-Agent": "archicratie-apply-annotation/1.0",
|
||||
},
|
||||
});
|
||||
if (!res.ok) {
|
||||
const t = await res.text().catch(() => "");
|
||||
throw new Error(`HTTP ${res.status} GET ${url}\n${t}`);
|
||||
}
|
||||
return await res.json();
|
||||
}
|
||||
|
||||
async function fetchIssue({ forgeApiBase, owner, repo, token, issueNum }) {
|
||||
const url = `${apiBaseNorm(forgeApiBase)}/api/v1/repos/${owner}/${repo}/issues/${issueNum}`;
|
||||
return await giteaGET(url, token);
|
||||
}
|
||||
|
||||
async function fetchIssueAssets({ forgeApiBase, owner, repo, token, issueNum }) {
|
||||
// Gitea: /issues/{index}/assets
|
||||
const url = `${apiBaseNorm(forgeApiBase)}/api/v1/repos/${owner}/${repo}/issues/${issueNum}/assets`;
|
||||
try {
|
||||
const json = await giteaGET(url, token);
|
||||
return Array.isArray(json) ? json : [];
|
||||
} catch {
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
async function postIssueComment({ forgeApiBase, owner, repo, token, issueNum, comment }) {
|
||||
const url = `${apiBaseNorm(forgeApiBase)}/api/v1/repos/${owner}/${repo}/issues/${issueNum}/comments`;
|
||||
const res = await fetch(url, {
|
||||
method: "POST",
|
||||
headers: {
|
||||
Authorization: `token ${token}`,
|
||||
Accept: "application/json",
|
||||
"Content-Type": "application/json",
|
||||
"User-Agent": "archicratie-apply-annotation/1.0",
|
||||
},
|
||||
body: JSON.stringify({ body: comment }),
|
||||
});
|
||||
if (!res.ok) {
|
||||
const t = await res.text().catch(() => "");
|
||||
throw new Error(`HTTP ${res.status} POST comment ${url}\n${t}`);
|
||||
}
|
||||
}
|
||||
|
||||
async function closeIssue({ forgeApiBase, owner, repo, token, issueNum, comment }) {
|
||||
if (comment) await postIssueComment({ forgeApiBase, owner, repo, token, issueNum, comment });
|
||||
|
||||
const url = `${apiBaseNorm(forgeApiBase)}/api/v1/repos/${owner}/${repo}/issues/${issueNum}`;
|
||||
const res = await fetch(url, {
|
||||
method: "PATCH",
|
||||
headers: {
|
||||
Authorization: `token ${token}`,
|
||||
Accept: "application/json",
|
||||
"Content-Type": "application/json",
|
||||
"User-Agent": "archicratie-apply-annotation/1.0",
|
||||
},
|
||||
body: JSON.stringify({ state: "closed" }),
|
||||
});
|
||||
if (!res.ok) {
|
||||
const t = await res.text().catch(() => "");
|
||||
throw new Error(`HTTP ${res.status} closing issue: ${url}\n${t}`);
|
||||
}
|
||||
}
|
||||
|
||||
/* ------------------------------ media helpers ------------------------------ */
|
||||
|
||||
function inferMediaTypeFromFilename(name) {
|
||||
const n = String(name || "").toLowerCase();
|
||||
if (/\.(png|jpe?g|webp|gif|svg)$/.test(n)) return "image";
|
||||
if (/\.(mp4|webm|mov|m4v)$/.test(n)) return "video";
|
||||
if (/\.(mp3|wav|ogg|m4a)$/.test(n)) return "audio";
|
||||
return "link";
|
||||
}
|
||||
|
||||
function sanitizeFilename(name) {
|
||||
return String(name || "file")
|
||||
.replace(/[\/\\]/g, "_")
|
||||
.replace(/[^\w.\-]+/g, "_")
|
||||
.replace(/_+/g, "_")
|
||||
.slice(0, 180);
|
||||
}
|
||||
|
||||
async function downloadToFile(url, token, destAbs) {
|
||||
const res = await fetch(url, {
|
||||
headers: {
|
||||
Authorization: `token ${token}`,
|
||||
"User-Agent": "archicratie-apply-annotation/1.0",
|
||||
},
|
||||
redirect: "follow",
|
||||
});
|
||||
if (!res.ok) {
|
||||
const t = await res.text().catch(() => "");
|
||||
throw new Error(`download failed HTTP ${res.status}: ${url}\n${t}`);
|
||||
}
|
||||
const buf = Buffer.from(await res.arrayBuffer());
|
||||
await fs.mkdir(path.dirname(destAbs), { recursive: true });
|
||||
await fs.writeFile(destAbs, buf);
|
||||
return buf.length;
|
||||
}
|
||||
|
||||
/* ------------------------------ type parsers ------------------------------ */
|
||||
|
||||
function parseReferenceBlock(body) {
|
||||
const block =
|
||||
pickSection(body, ["Référence (à compléter):", "Reference (à compléter):"]) ||
|
||||
pickSection(body, ["Référence:", "Reference:"]);
|
||||
|
||||
const lines = String(block || "").split(/\r?\n/).map((l) => l.trim());
|
||||
const get = (k) => {
|
||||
const re = new RegExp(`^[-*]\\s*${escapeRegExp(k)}\\s*:\\s*(.*)$`, "i");
|
||||
const m = lines.map((l) => l.match(re)).find(Boolean);
|
||||
return (m?.[1] ?? "").trim();
|
||||
};
|
||||
|
||||
return {
|
||||
url: get("URL") || "",
|
||||
label: get("Label") || "",
|
||||
kind: get("Kind") || "",
|
||||
citation: get("Citation") || get("Passage") || get("Extrait") || "",
|
||||
rawBlock: block || "",
|
||||
};
|
||||
}
|
||||
|
||||
/* ----------------------------------- main ---------------------------------- */
|
||||
|
||||
async function main() {
|
||||
const token = getEnv("FORGE_TOKEN");
|
||||
assert(token, "❌ FORGE_TOKEN manquant.", 2);
|
||||
|
||||
const forgeApiBase = getEnv("FORGE_API") || getEnv("FORGE_BASE");
|
||||
assert(forgeApiBase, "❌ FORGE_API (ou FORGE_BASE) manquant.", 2);
|
||||
|
||||
const inferred = inferOwnerRepoFromGit() || {};
|
||||
const owner = getEnv("GITEA_OWNER", inferred.owner || "");
|
||||
const repo = getEnv("GITEA_REPO", inferred.repo || "");
|
||||
assert(owner && repo, "❌ Impossible de déterminer owner/repo. Fix: export GITEA_OWNER=... GITEA_REPO=...", 2);
|
||||
|
||||
console.log(`🔎 Fetch ticket #${issueNum} from ${owner}/${repo} …`);
|
||||
const issue = await fetchIssue({ forgeApiBase, owner, repo, token, issueNum });
|
||||
|
||||
if (issue?.pull_request) {
|
||||
console.error(`❌ #${issueNum} est une Pull Request, pas un ticket annotations.`);
|
||||
process.exit(2);
|
||||
}
|
||||
|
||||
const body = String(issue.body || "").replace(/\r\n/g, "\n");
|
||||
const title = String(issue.title || "");
|
||||
|
||||
const type = pickLine(body, "Type").toLowerCase();
|
||||
const chemin = normalizeChemin(pickLine(body, "Chemin"));
|
||||
const ancre = normalizeAnchorId(pickLine(body, "Ancre"));
|
||||
|
||||
assert(chemin, "Ticket: Chemin manquant.", 2);
|
||||
assert(ancre && /^p-\d+-/i.test(ancre), `Ticket: Ancre invalide ("${ancre}")`, 2);
|
||||
assert(type, "Ticket: Type manquant.", 2);
|
||||
|
||||
const pageKey = normalizePageKeyFromChemin(chemin);
|
||||
assert(pageKey, "Ticket: impossible de dériver pageKey.", 2);
|
||||
|
||||
const paraOrder = DO_VERIFY ? await loadParaOrderFromDist(pageKey) : null;
|
||||
|
||||
if (DO_VERIFY) {
|
||||
const ok = await tryVerifyAnchor(pageKey, ancre);
|
||||
if (ok === false) {
|
||||
throw Object.assign(new Error(`Ticket verify: ancre introuvable pour page "${pageKey}" => ${ancre}`), { __exitCode: 2 });
|
||||
}
|
||||
if (ok === null) {
|
||||
if (STRICT) {
|
||||
throw Object.assign(
|
||||
new Error(`Ticket verify (strict): impossible de vérifier (pas de dist/para-index.json ou baseline)`),
|
||||
{ __exitCode: 2 }
|
||||
);
|
||||
}
|
||||
console.warn("⚠️ verify: impossible de vérifier (pas de dist/para-index.json ou baseline) — on continue.");
|
||||
}
|
||||
}
|
||||
|
||||
// ✅ shard path: src/annotations/<pageKey>/<paraId>.yml
|
||||
const shardAbs = path.join(ANNO_DIR, ...pageKey.split("/"), `${ancre}.yml`);
|
||||
const shardRel = path.relative(CWD, shardAbs).replace(/\\/g, "/");
|
||||
|
||||
// legacy monolith: src/annotations/<pageKey>.yml (read-only, for migration)
|
||||
const legacyAbs = path.join(ANNO_DIR, `${pageKey}.yml`);
|
||||
|
||||
console.log("✅ Parsed:", { type, chemin, ancre: `#${ancre}`, pageKey, annoFile: shardRel });
|
||||
|
||||
// load shard doc
|
||||
const doc = await loadAnnoDocYaml(shardAbs, pageKey);
|
||||
if (!isPlainObject(doc.paras[ancre])) doc.paras[ancre] = {};
|
||||
const entry = doc.paras[ancre];
|
||||
|
||||
// merge legacy entry into shard in-memory (non destructive) to keep compat + enable progressive migration
|
||||
if (await exists(legacyAbs)) {
|
||||
try {
|
||||
const legacy = await loadAnnoDocYaml(legacyAbs, pageKey);
|
||||
const legacyEntry = legacy?.paras?.[ancre];
|
||||
if (isPlainObject(legacyEntry)) {
|
||||
deepMergeEntry(entry, legacyEntry);
|
||||
}
|
||||
} catch {
|
||||
// ignore legacy parse issues; shard still applies new data
|
||||
}
|
||||
}
|
||||
|
||||
const touchedFiles = [];
|
||||
const notes = [];
|
||||
let changed = false;
|
||||
const nowIso = new Date().toISOString();
|
||||
|
||||
if (type === "type/comment") {
|
||||
const comment = pickSection(body, ["Commentaire:", "Comment:", "Commentaires:"]) || "";
|
||||
const text = comment.trim();
|
||||
assert(text.length >= 3, "Ticket comment: bloc 'Commentaire:' introuvable ou trop court.", 2);
|
||||
|
||||
if (!Array.isArray(entry.comments_editorial)) entry.comments_editorial = [];
|
||||
const item = { text, status: "new", ts: nowIso, fromIssue: issueNum };
|
||||
|
||||
const before = entry.comments_editorial.length;
|
||||
entry.comments_editorial = uniqUnion(entry.comments_editorial, [item], keyComment);
|
||||
if (entry.comments_editorial.length !== before) {
|
||||
changed = true;
|
||||
notes.push(`+ comment added (len=${text.length})`);
|
||||
} else {
|
||||
notes.push(`~ comment already present (dedup)`);
|
||||
}
|
||||
stableSortByTs(entry.comments_editorial);
|
||||
}
|
||||
|
||||
else if (type === "type/reference") {
|
||||
const ref = parseReferenceBlock(body);
|
||||
assert(ref.url || ref.label, "Ticket reference: renseigne au moins - URL: ou - Label: dans le ticket.", 2);
|
||||
|
||||
if (STRICT && ref.url && !isHttpUrl(ref.url)) {
|
||||
throw Object.assign(new Error(`Ticket reference (strict): URL invalide (http/https requis): "${ref.url}"`), { __exitCode: 2 });
|
||||
}
|
||||
|
||||
if (!Array.isArray(entry.refs)) entry.refs = [];
|
||||
const item = {
|
||||
url: ref.url || "",
|
||||
label: ref.label || (ref.url ? ref.url : "Référence"),
|
||||
kind: ref.kind || "",
|
||||
ts: nowIso,
|
||||
fromIssue: issueNum,
|
||||
};
|
||||
if (ref.citation) item.citation = ref.citation;
|
||||
|
||||
const before = entry.refs.length;
|
||||
entry.refs = uniqUnion(entry.refs, [item], keyRef);
|
||||
if (entry.refs.length !== before) {
|
||||
changed = true;
|
||||
notes.push(`+ reference added (${item.url ? "url" : "label"})`);
|
||||
} else {
|
||||
notes.push(`~ reference already present (dedup)`);
|
||||
}
|
||||
stableSortByTs(entry.refs);
|
||||
}
|
||||
|
||||
else if (type === "type/media") {
|
||||
if (!Array.isArray(entry.media)) entry.media = [];
|
||||
|
||||
const caption = (title || "").trim();
|
||||
if (STRICT && !caption) {
|
||||
throw Object.assign(new Error("Ticket media (strict): caption vide (titre de ticket requis)."), { __exitCode: 2 });
|
||||
}
|
||||
const captionFinal = caption || ".";
|
||||
|
||||
const atts = NO_DOWNLOAD ? [] : await fetchIssueAssets({ forgeApiBase, owner, repo, token, issueNum });
|
||||
if (!atts.length) notes.push("! no assets found (nothing to download).");
|
||||
|
||||
for (const a of atts) {
|
||||
const name = sanitizeFilename(a?.name || `asset-${a?.id || "x"}`);
|
||||
const dl = a?.browser_download_url || a?.download_url || "";
|
||||
if (!dl) { notes.push(`! asset missing download url: ${name}`); continue; }
|
||||
|
||||
const mediaDirAbs = path.join(PUBLIC_DIR, "media", ...pageKey.split("/"), ancre);
|
||||
const destAbs = path.join(mediaDirAbs, name);
|
||||
const urlPath = `${MEDIA_URL_ROOT}/${pageKey}/${ancre}/${name}`.replace(/\/{2,}/g, "/");
|
||||
|
||||
if (await exists(destAbs)) {
|
||||
notes.push(`~ media already exists: ${urlPath}`);
|
||||
} else if (!DRY_RUN) {
|
||||
const bytes = await downloadToFile(dl, token, destAbs);
|
||||
notes.push(`+ downloaded ${name} (${bytes} bytes) -> ${urlPath}`);
|
||||
touchedFiles.push(path.relative(CWD, destAbs).replace(/\\/g, "/"));
|
||||
changed = true;
|
||||
} else {
|
||||
notes.push(`(dry) would download ${name} -> ${urlPath}`);
|
||||
changed = true;
|
||||
}
|
||||
|
||||
const item = {
|
||||
type: inferMediaTypeFromFilename(name),
|
||||
src: urlPath,
|
||||
caption: captionFinal,
|
||||
credit: "",
|
||||
ts: nowIso,
|
||||
fromIssue: issueNum,
|
||||
};
|
||||
|
||||
const before = entry.media.length;
|
||||
entry.media = uniqUnion(entry.media, [item], keyMedia);
|
||||
if (entry.media.length !== before) changed = true;
|
||||
}
|
||||
|
||||
stableSortByTs(entry.media);
|
||||
}
|
||||
|
||||
else {
|
||||
throw Object.assign(new Error(`Type non supporté: "${type}"`), { __exitCode: 2 });
|
||||
}
|
||||
|
||||
if (!changed) {
|
||||
console.log("ℹ️ No changes to apply.");
|
||||
for (const n of notes) console.log(" ", n);
|
||||
return;
|
||||
}
|
||||
|
||||
if (DRY_RUN) {
|
||||
console.log("\n--- DRY RUN (no write) ---");
|
||||
console.log(`Would update: ${shardRel}`);
|
||||
for (const n of notes) console.log(" ", n);
|
||||
console.log("\nExcerpt (resulting entry):");
|
||||
console.log(YAML.stringify({ [ancre]: doc.paras[ancre] }).trimEnd());
|
||||
console.log("\n✅ Dry-run terminé.");
|
||||
return;
|
||||
}
|
||||
|
||||
await saveAnnoDocYaml(shardAbs, doc, paraOrder);
|
||||
touchedFiles.unshift(shardRel);
|
||||
|
||||
console.log(`✅ Updated: ${shardRel}`);
|
||||
for (const n of notes) console.log(" ", n);
|
||||
|
||||
if (DO_COMMIT) {
|
||||
run("git", ["add", ...touchedFiles], { cwd: CWD });
|
||||
|
||||
if (!gitHasStagedChanges()) {
|
||||
console.log("ℹ️ Nothing to commit (aucun changement staged).");
|
||||
return;
|
||||
}
|
||||
|
||||
const msg = `anno: apply ticket #${issueNum} (${pageKey}#${ancre} ${type})`;
|
||||
run("git", ["commit", "-m", msg], { cwd: CWD });
|
||||
|
||||
const sha = runQuiet("git", ["rev-parse", "--short", "HEAD"], { cwd: CWD }).trim();
|
||||
console.log(`✅ Committed: ${msg} (${sha})`);
|
||||
|
||||
if (DO_CLOSE) {
|
||||
const comment = `✅ Appliqué par apply-annotation-ticket.\nCommit: ${sha}`;
|
||||
await closeIssue({ forgeApiBase, owner, repo, token, issueNum, comment });
|
||||
console.log(`✅ Ticket #${issueNum} fermé.`);
|
||||
}
|
||||
} else {
|
||||
console.log("\nNext (manuel) :");
|
||||
console.log(` git diff -- ${touchedFiles[0]}`);
|
||||
console.log(` git add ${touchedFiles.join(" ")}`);
|
||||
console.log(` git commit -m "anno: apply ticket #${issueNum} (${pageKey}#${ancre} ${type})"`);
|
||||
}
|
||||
}
|
||||
|
||||
main().catch((e) => {
|
||||
const code = e?.__exitCode || 1;
|
||||
console.error("💥", e?.message || e);
|
||||
process.exit(code);
|
||||
});
|
||||
@@ -9,8 +9,9 @@ import { spawnSync } from "node:child_process";
|
||||
*
|
||||
* Conçu pour:
|
||||
* - prendre un ticket [Correction]/[Fact-check] (issue) avec Chemin + Ancre + Proposition
|
||||
* - retrouver le bon paragraphe dans le .mdx
|
||||
* - retrouver le bon paragraphe dans le .mdx/.md
|
||||
* - remplacer proprement
|
||||
* - ne JAMAIS toucher au frontmatter
|
||||
* - optionnel: écrire un alias d’ancre old->new (build-time) dans src/anchors/anchor-aliases.json
|
||||
* - optionnel: committer automatiquement
|
||||
* - optionnel: fermer le ticket (après commit)
|
||||
@@ -39,7 +40,7 @@ Env (recommandé):
|
||||
|
||||
Notes:
|
||||
- Si dist/<chemin>/index.html est absent, le script lance "npm run build" sauf si --no-build.
|
||||
- Sauvegarde automatique: <fichier>.bak.issue-<N> (uniquement si on écrit)
|
||||
- Sauvegarde automatique: .tmp/apply-ticket/<fichier>.bak.issue-<N> (uniquement si on écrit)
|
||||
- Avec --alias : le script rebuild pour identifier le NOUVEL id, puis écrit l'alias old->new.
|
||||
- Refuse automatiquement les Pull Requests (PR) : ce ne sont pas des tickets éditoriaux.
|
||||
`);
|
||||
@@ -89,6 +90,7 @@ const CWD = process.cwd();
|
||||
const CONTENT_ROOT = path.join(CWD, "src", "content");
|
||||
const DIST_ROOT = path.join(CWD, "dist");
|
||||
const ALIASES_FILE = path.join(CWD, "src", "anchors", "anchor-aliases.json");
|
||||
const BACKUP_ROOT = path.join(CWD, ".tmp", "apply-ticket");
|
||||
|
||||
/* -------------------------- utils texte / matching -------------------------- */
|
||||
|
||||
@@ -136,31 +138,26 @@ function scoreText(candidate, targetText) {
|
||||
let hit = 0;
|
||||
for (const w of tgtSet) if (blkSet.has(w)) hit++;
|
||||
|
||||
// Bonus si un long préfixe ressemble
|
||||
const tgtNorm = normalizeText(stripMd(targetText));
|
||||
const blkNorm = normalizeText(stripMd(candidate));
|
||||
const prefix = tgtNorm.slice(0, Math.min(180, tgtNorm.length));
|
||||
const prefixBonus = prefix && blkNorm.includes(prefix) ? 1000 : 0;
|
||||
|
||||
// Ratio bonus (0..100)
|
||||
const ratio = hit / Math.max(1, tgtSet.size);
|
||||
const ratioBonus = Math.round(ratio * 100);
|
||||
|
||||
return prefixBonus + hit + ratioBonus;
|
||||
}
|
||||
|
||||
function bestBlockMatchIndex(blocks, targetText) {
|
||||
let best = { i: -1, score: -1 };
|
||||
for (let i = 0; i < blocks.length; i++) {
|
||||
const sc = scoreText(blocks[i], targetText);
|
||||
if (sc > best.score) best = { i, score: sc };
|
||||
}
|
||||
return best;
|
||||
}
|
||||
|
||||
function splitParagraphBlocks(mdxText) {
|
||||
const raw = String(mdxText ?? "").replace(/\r\n/g, "\n");
|
||||
return raw.split(/\n{2,}/);
|
||||
function rankedBlockMatches(blocks, targetText, limit = 5) {
|
||||
return blocks
|
||||
.map((b, i) => ({
|
||||
i,
|
||||
score: scoreText(b, targetText),
|
||||
excerpt: stripMd(b).slice(0, 140),
|
||||
}))
|
||||
.sort((a, b) => b.score - a.score)
|
||||
.slice(0, limit);
|
||||
}
|
||||
|
||||
function isLikelyExcerpt(s) {
|
||||
@@ -172,6 +169,89 @@ function isLikelyExcerpt(s) {
|
||||
return false;
|
||||
}
|
||||
|
||||
/* --------------------------- frontmatter / structure ------------------------ */
|
||||
|
||||
function normalizeNewlines(s) {
|
||||
return String(s ?? "").replace(/^\uFEFF/, "").replace(/\r\n/g, "\n");
|
||||
}
|
||||
|
||||
function splitMdxFrontmatter(src) {
|
||||
const text = normalizeNewlines(src);
|
||||
const m = text.match(/^---\n[\s\S]*?\n---\n?/);
|
||||
|
||||
if (!m) {
|
||||
return {
|
||||
hasFrontmatter: false,
|
||||
frontmatter: "",
|
||||
body: text,
|
||||
};
|
||||
}
|
||||
|
||||
const frontmatter = m[0];
|
||||
const body = text.slice(frontmatter.length);
|
||||
|
||||
return {
|
||||
hasFrontmatter: true,
|
||||
frontmatter,
|
||||
body,
|
||||
};
|
||||
}
|
||||
|
||||
function joinMdxFrontmatter(frontmatter, body) {
|
||||
if (!frontmatter) return String(body ?? "");
|
||||
return String(frontmatter) + String(body ?? "");
|
||||
}
|
||||
|
||||
function assertFrontmatterIntegrity({ hadFrontmatter, originalFrontmatter, finalText, filePath }) {
|
||||
if (!hadFrontmatter) return;
|
||||
|
||||
const text = normalizeNewlines(finalText);
|
||||
|
||||
if (!text.startsWith("---\n")) {
|
||||
throw new Error(`Frontmatter perdu pendant la mise à jour de ${filePath}`);
|
||||
}
|
||||
|
||||
if (!text.startsWith(originalFrontmatter)) {
|
||||
throw new Error(`Frontmatter altéré pendant la mise à jour de ${filePath}`);
|
||||
}
|
||||
}
|
||||
|
||||
function splitParagraphBlocksPreserve(bodyText) {
|
||||
const text = normalizeNewlines(bodyText);
|
||||
|
||||
if (!text) {
|
||||
return { blocks: [], separators: [] };
|
||||
}
|
||||
|
||||
const blocks = [];
|
||||
const separators = [];
|
||||
|
||||
const re = /(\n{2,})/g;
|
||||
let last = 0;
|
||||
let m;
|
||||
|
||||
while ((m = re.exec(text))) {
|
||||
blocks.push(text.slice(last, m.index));
|
||||
separators.push(m[1]);
|
||||
last = m.index + m[1].length;
|
||||
}
|
||||
|
||||
blocks.push(text.slice(last));
|
||||
|
||||
return { blocks, separators };
|
||||
}
|
||||
|
||||
function joinParagraphBlocksPreserve(blocks, separators) {
|
||||
if (!Array.isArray(blocks) || blocks.length === 0) return "";
|
||||
|
||||
let out = "";
|
||||
for (let i = 0; i < blocks.length; i++) {
|
||||
out += blocks[i];
|
||||
if (i < separators.length) out += separators[i];
|
||||
}
|
||||
return out;
|
||||
}
|
||||
|
||||
/* ------------------------------ utils système ------------------------------ */
|
||||
|
||||
function run(cmd, args, opts = {}) {
|
||||
@@ -251,7 +331,9 @@ function pickSection(body, markers) {
|
||||
.map((m) => ({ m, i: text.toLowerCase().indexOf(m.toLowerCase()) }))
|
||||
.filter((x) => x.i >= 0)
|
||||
.sort((a, b) => a.i - b.i)[0];
|
||||
|
||||
if (!idx) return "";
|
||||
|
||||
const start = idx.i + idx.m.length;
|
||||
const tail = text.slice(start);
|
||||
|
||||
@@ -266,11 +348,13 @@ function pickSection(body, markers) {
|
||||
"\n## Proposition",
|
||||
"\n## Problème",
|
||||
];
|
||||
|
||||
let end = tail.length;
|
||||
for (const s of stops) {
|
||||
const j = tail.toLowerCase().indexOf(s.toLowerCase());
|
||||
if (j >= 0 && j < end) end = j;
|
||||
}
|
||||
|
||||
return tail.slice(0, end).trim();
|
||||
}
|
||||
|
||||
@@ -298,8 +382,6 @@ function extractAnchorIdAnywhere(text) {
|
||||
|
||||
function extractCheminFromAnyUrl(text) {
|
||||
const s = String(text || "");
|
||||
// Exemple: http://localhost:4321/archicratie/prologue/#p-3-xxxx
|
||||
// ou: /archicratie/prologue/#p-3-xxxx
|
||||
const m = s.match(/(\/[a-z0-9\-]+\/[a-z0-9\-\/]+\/)#p-\d+-[0-9a-f]{8}/i);
|
||||
return m ? m[1] : "";
|
||||
}
|
||||
@@ -400,7 +482,7 @@ async function fetchIssue({ forgeApiBase, owner, repo, token, issueNum }) {
|
||||
headers: {
|
||||
Authorization: `token ${token}`,
|
||||
Accept: "application/json",
|
||||
"User-Agent": "archicratie-apply-ticket/2.0",
|
||||
"User-Agent": "archicratie-apply-ticket/2.1",
|
||||
},
|
||||
});
|
||||
if (!res.ok) {
|
||||
@@ -416,7 +498,7 @@ async function closeIssue({ forgeApiBase, owner, repo, token, issueNum, comment
|
||||
Authorization: `token ${token}`,
|
||||
Accept: "application/json",
|
||||
"Content-Type": "application/json",
|
||||
"User-Agent": "archicratie-apply-ticket/2.0",
|
||||
"User-Agent": "archicratie-apply-ticket/2.1",
|
||||
};
|
||||
|
||||
if (comment) {
|
||||
@@ -425,7 +507,11 @@ async function closeIssue({ forgeApiBase, owner, repo, token, issueNum, comment
|
||||
}
|
||||
|
||||
const url = `${base}/api/v1/repos/${owner}/${repo}/issues/${issueNum}`;
|
||||
const res = await fetch(url, { method: "PATCH", headers, body: JSON.stringify({ state: "closed" }) });
|
||||
const res = await fetch(url, {
|
||||
method: "PATCH",
|
||||
headers,
|
||||
body: JSON.stringify({ state: "closed" }),
|
||||
});
|
||||
|
||||
if (!res.ok) {
|
||||
const t = await res.text().catch(() => "");
|
||||
@@ -529,10 +615,9 @@ async function main() {
|
||||
console.log(`🔎 Fetch ticket #${issueNum} from ${owner}/${repo} …`);
|
||||
const issue = await fetchIssue({ forgeApiBase, owner, repo, token, issueNum });
|
||||
|
||||
// Guard PR (Pull Request = "Demande d'ajout" = pas un ticket éditorial)
|
||||
if (issue?.pull_request) {
|
||||
console.error(`❌ #${issueNum} est une Pull Request (demande d’ajout), pas un ticket éditorial.`);
|
||||
console.error(`➡️ Ouvre un ticket [Correction]/[Fact-check] depuis le site (Proposer), puis relance apply-ticket sur ce numéro.`);
|
||||
console.error("➡️ Ouvre un ticket [Correction]/[Fact-check] depuis le site (Proposer), puis relance apply-ticket sur ce numéro.");
|
||||
process.exit(2);
|
||||
}
|
||||
|
||||
@@ -553,7 +638,6 @@ async function main() {
|
||||
ancre = (ancre || "").trim();
|
||||
if (ancre.startsWith("#")) ancre = ancre.slice(1);
|
||||
|
||||
// fallback si ticket mal formé
|
||||
if (!ancre) ancre = extractAnchorIdAnywhere(title) || extractAnchorIdAnywhere(body);
|
||||
|
||||
chemin = normalizeChemin(chemin);
|
||||
@@ -592,7 +676,6 @@ async function main() {
|
||||
const distHtmlPath = path.join(DIST_ROOT, chemin.replace(/^\/+|\/+$/g, ""), "index.html");
|
||||
await ensureBuildIfNeeded(distHtmlPath);
|
||||
|
||||
// Texte cible: préférence au texte complet (ticket), sinon dist si extrait probable
|
||||
let targetText = texteActuel;
|
||||
let distText = "";
|
||||
|
||||
@@ -609,21 +692,24 @@ async function main() {
|
||||
throw new Error("Impossible de reconstruire le texte du paragraphe (ni texte actuel, ni dist html).");
|
||||
}
|
||||
|
||||
const original = await fs.readFile(contentFile, "utf-8");
|
||||
const blocks = splitParagraphBlocks(original);
|
||||
const originalRaw = await fs.readFile(contentFile, "utf-8");
|
||||
const { hasFrontmatter, frontmatter, body: originalBody } = splitMdxFrontmatter(originalRaw);
|
||||
|
||||
const best = bestBlockMatchIndex(blocks, targetText);
|
||||
const split = splitParagraphBlocksPreserve(originalBody);
|
||||
const blocks = split.blocks;
|
||||
const separators = split.separators;
|
||||
|
||||
if (!blocks.length) {
|
||||
throw new Error(`Aucun bloc éditorial exploitable dans ${path.relative(CWD, contentFile)}`);
|
||||
}
|
||||
|
||||
const ranked = rankedBlockMatches(blocks, targetText, 5);
|
||||
const best = ranked[0] || { i: -1, score: -1, excerpt: "" };
|
||||
const runnerUp = ranked[1] || null;
|
||||
|
||||
// seuil de sécurité
|
||||
if (best.i < 0 || best.score < 40) {
|
||||
console.error("❌ Match trop faible: je refuse de remplacer automatiquement.");
|
||||
console.error(`➡️ Score=${best.score}. Recommandation: ticket avec 'Texte actuel (copie exacte du paragraphe)'.`);
|
||||
|
||||
const ranked = blocks
|
||||
.map((b, i) => ({ i, score: scoreText(b, targetText), excerpt: stripMd(b).slice(0, 140) }))
|
||||
.sort((a, b) => b.score - a.score)
|
||||
.slice(0, 5);
|
||||
|
||||
console.error("Top candidates:");
|
||||
for (const r of ranked) {
|
||||
console.error(` #${r.i + 1} score=${r.score} ${r.excerpt}${r.excerpt.length >= 140 ? "…" : ""}`);
|
||||
@@ -631,12 +717,34 @@ async function main() {
|
||||
process.exit(2);
|
||||
}
|
||||
|
||||
if (runnerUp) {
|
||||
const ambiguityGap = best.score - runnerUp.score;
|
||||
if (ambiguityGap < 15) {
|
||||
console.error("❌ Match ambigu: le meilleur candidat est trop proche du second.");
|
||||
console.error(`➡️ best=${best.score} / second=${runnerUp.score} / gap=${ambiguityGap}`);
|
||||
console.error("Top candidates:");
|
||||
for (const r of ranked) {
|
||||
console.error(` #${r.i + 1} score=${r.score} ${r.excerpt}${r.excerpt.length >= 140 ? "…" : ""}`);
|
||||
}
|
||||
process.exit(2);
|
||||
}
|
||||
}
|
||||
|
||||
const beforeBlock = blocks[best.i];
|
||||
const afterBlock = proposition.trim();
|
||||
|
||||
const nextBlocks = blocks.slice();
|
||||
nextBlocks[best.i] = afterBlock;
|
||||
const updated = nextBlocks.join("\n\n");
|
||||
|
||||
const updatedBody = joinParagraphBlocksPreserve(nextBlocks, separators);
|
||||
const updatedRaw = joinMdxFrontmatter(frontmatter, updatedBody);
|
||||
|
||||
assertFrontmatterIntegrity({
|
||||
hadFrontmatter: hasFrontmatter,
|
||||
originalFrontmatter: frontmatter,
|
||||
finalText: updatedRaw,
|
||||
filePath: path.relative(CWD, contentFile),
|
||||
});
|
||||
|
||||
console.log(`🧩 Matched block #${best.i + 1}/${blocks.length} score=${best.score}`);
|
||||
|
||||
@@ -650,13 +758,15 @@ async function main() {
|
||||
return;
|
||||
}
|
||||
|
||||
// backup uniquement si on écrit
|
||||
const bakPath = `${contentFile}.bak.issue-${issueNum}`;
|
||||
const relContentFile = path.relative(CWD, contentFile);
|
||||
const bakPath = path.join(BACKUP_ROOT, `${relContentFile}.bak.issue-${issueNum}`);
|
||||
await fs.mkdir(path.dirname(bakPath), { recursive: true });
|
||||
|
||||
if (!(await fileExists(bakPath))) {
|
||||
await fs.writeFile(bakPath, original, "utf-8");
|
||||
await fs.writeFile(bakPath, originalRaw, "utf-8");
|
||||
}
|
||||
|
||||
await fs.writeFile(contentFile, updated, "utf-8");
|
||||
await fs.writeFile(contentFile, updatedRaw, "utf-8");
|
||||
console.log("✅ Applied.");
|
||||
|
||||
let aliasChanged = false;
|
||||
@@ -677,13 +787,13 @@ async function main() {
|
||||
|
||||
if (aliasChanged) {
|
||||
console.log(`✅ Alias ajouté: ${chemin} ${ancre} -> ${newId}`);
|
||||
// MàJ dist sans rebuild complet (inject seulement)
|
||||
run("node", ["scripts/inject-anchor-aliases.mjs"], { cwd: CWD });
|
||||
} else {
|
||||
console.log(`ℹ️ Alias déjà présent ou inutile (${ancre} -> ${newId}).`);
|
||||
}
|
||||
|
||||
// garde-fous rapides
|
||||
run("node", ["scripts/check-anchor-aliases.mjs"], { cwd: CWD });
|
||||
run("node", ["scripts/verify-anchor-aliases-in-dist.mjs"], { cwd: CWD });
|
||||
run("npm", ["run", "test:anchors"], { cwd: CWD });
|
||||
run("node", ["scripts/check-inline-js.mjs"], { cwd: CWD });
|
||||
}
|
||||
@@ -713,7 +823,6 @@ async function main() {
|
||||
return;
|
||||
}
|
||||
|
||||
// mode manuel
|
||||
console.log("Next (manuel) :");
|
||||
console.log(` git diff -- ${path.relative(CWD, contentFile)}`);
|
||||
console.log(
|
||||
@@ -730,4 +839,4 @@ async function main() {
|
||||
main().catch((e) => {
|
||||
console.error("💥", e?.message || e);
|
||||
process.exit(1);
|
||||
});
|
||||
});
|
||||
scripts/audit-docx-source.py (new executable file) — 72 lines

@@ -0,0 +1,72 @@
|
||||
#!/usr/bin/env python3
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
import sys
|
||||
import unicodedata
|
||||
import xml.etree.ElementTree as ET
|
||||
from zipfile import ZipFile
|
||||
|
||||
NS = {"w": "http://schemas.openxmlformats.org/wordprocessingml/2006/main"}
|
||||
|
||||
FORBIDDEN = [
|
||||
"coviabilité",
|
||||
"sacroinstitutionnelle",
|
||||
"technologistique",
|
||||
"scripturonormative",
|
||||
"textesrepères",
|
||||
"ellemême",
|
||||
"opérateur de d’archicration",
|
||||
"systèmes plusieurs statuts",
|
||||
"celle-ci se donne à voir",
|
||||
"Pour autant il serait",
|
||||
"Telles peuvent être le cas de",
|
||||
"la co-viabilité devient ,",
|
||||
]
|
||||
|
||||
|
||||
def norm(s: str) -> str:
|
||||
return unicodedata.normalize("NFC", s or "")
|
||||
|
||||
|
||||
def main() -> int:
|
||||
parser = argparse.ArgumentParser(description="Audit simple d’un DOCX source officiel.")
|
||||
parser.add_argument("docx", help="Chemin du fichier .docx")
|
||||
args = parser.parse_args()
|
||||
|
||||
try:
|
||||
with ZipFile(args.docx) as zf:
|
||||
data = zf.read("word/document.xml")
|
||||
except FileNotFoundError:
|
||||
print(f"ECHEC: fichier introuvable: {args.docx}", file=sys.stderr)
|
||||
return 2
|
||||
except KeyError:
|
||||
print("ECHEC: word/document.xml introuvable dans le DOCX.", file=sys.stderr)
|
||||
return 2
|
||||
except Exception as e:
|
||||
print(f"ECHEC: impossible d’ouvrir le DOCX: {e}", file=sys.stderr)
|
||||
return 2
|
||||
|
||||
root = ET.fromstring(data)
|
||||
found = False
|
||||
|
||||
for i, p in enumerate(root.findall(".//w:p", NS), start=1):
|
||||
txt = "".join(t.text or "" for t in p.findall(".//w:t", NS))
|
||||
txt_n = norm(txt)
|
||||
hits = [needle for needle in FORBIDDEN if needle in txt_n]
|
||||
if hits:
|
||||
found = True
|
||||
print(f"\n[paragraphe {i}]")
|
||||
print("Hits :", ", ".join(hits))
|
||||
print(txt_n)
|
||||
|
||||
if found:
|
||||
print("\nECHEC: formes interdites encore présentes dans le DOCX.")
|
||||
return 1
|
||||
|
||||
print("OK: aucune forme interdite trouvée dans le DOCX.")
|
||||
return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
raise SystemExit(main())
|
||||
scripts/audit-glossary-navigation.mjs (new file) — 232 lines

@@ -0,0 +1,232 @@
|
||||
import fs from "fs";
|
||||
import path from "path";
|
||||
import yaml from "js-yaml";
|
||||
|
||||
const ROOT = "src/content/glossaire";
|
||||
const DEFAULTS_FILE = "src/lib/glossary-navigation-defaults.ts";
|
||||
const HUB_LIMIT = 5;
|
||||
const EFFECTIVE_TOP_LIMIT = 10;
|
||||
const PATH_KEYS = ["understand", "deepen", "compare", "apply"];
|
||||
|
||||
const defaultsRaw = fs.readFileSync(DEFAULTS_FILE, "utf-8");
|
||||
|
||||
const defaultFamilies = new Set(
|
||||
[...defaultsRaw.matchAll(/^\s{4}"?([a-z0-9-]+)"?\s*:/gm)].map((m) => m[1]),
|
||||
);
|
||||
|
||||
const defaultPathKeysByFamily = new Map();
|
||||
const defaultTargetsByFamily = new Map();
|
||||
|
||||
for (const match of defaultsRaw.matchAll(
|
||||
/^\s{4}"?([a-z0-9-]+)"?\s*:\s*\{([\s\S]*?)^\s{4}\},/gm,
|
||||
)) {
|
||||
const family = match[1];
|
||||
const body = match[2];
|
||||
const keys = new Set();
|
||||
const targetsByKey = new Map();
|
||||
|
||||
for (const key of PATH_KEYS) {
|
||||
const pathMatch = body.match(new RegExp(`\\b${key}\\s*:\\s*\\[([^\\]]*)\\]`));
|
||||
const targets = pathMatch
|
||||
? pathMatch[1]
|
||||
.split(",")
|
||||
.map((x) => x.trim().replace(/^["']|["']$/g, ""))
|
||||
.filter(Boolean)
|
||||
: [];
|
||||
|
||||
if (targets.length > 0) keys.add(key);
|
||||
targetsByKey.set(key, targets);
|
||||
}
|
||||
|
||||
defaultPathKeysByFamily.set(family, keys);
|
||||
defaultTargetsByFamily.set(family, targetsByKey);
|
||||
}
|
||||
|
||||
const files = fs.readdirSync(ROOT).filter((f) => f.endsWith(".md"));
|
||||
const slugs = new Set(files.map((f) => f.replace(".md", "")));
|
||||
|
||||
const entries = [];
|
||||
|
||||
for (const file of files) {
|
||||
const full = path.join(ROOT, file);
|
||||
const raw = fs.readFileSync(full, "utf-8");
|
||||
const slug = file.replace(".md", "");
|
||||
|
||||
if (!raw.startsWith("---")) {
|
||||
entries.push({ slug, data: {}, noFrontmatter: true });
|
||||
continue;
|
||||
}
|
||||
|
||||
const frontmatter = raw.split("---", 3)[1];
|
||||
const data = yaml.load(frontmatter) || {};
|
||||
entries.push({ slug, data, noFrontmatter: false });
|
||||
}
|
||||
|
||||
const missingNavigation = [];
|
||||
const missingReason = [];
|
||||
const weakPaths = [];
|
||||
const selfLoops = [];
|
||||
const deadPrimaryNext = [];
|
||||
const directCycles = [];
|
||||
const edges = {};
|
||||
const incoming = {};
|
||||
const families = new Set();
|
||||
|
||||
const effectiveOutgoing = new Map();
|
||||
const effectiveIncoming = new Map();
|
||||
|
||||
function addEffectiveEdge(from, to) {
|
||||
if (!from || !to || from === to || !slugs.has(to)) return;
|
||||
|
||||
if (!effectiveOutgoing.has(from)) effectiveOutgoing.set(from, new Set());
|
||||
if (!effectiveIncoming.has(to)) effectiveIncoming.set(to, new Set());
|
||||
|
||||
effectiveOutgoing.get(from).add(to);
|
||||
effectiveIncoming.get(to).add(from);
|
||||
}
|
||||
|
||||
for (const { slug, data, noFrontmatter } of entries) {
|
||||
if (noFrontmatter) continue;
|
||||
|
||||
if (data.family) families.add(data.family);
|
||||
|
||||
const nav = data.navigation;
|
||||
|
||||
if (!nav) {
|
||||
missingNavigation.push(slug);
|
||||
continue;
|
||||
}
|
||||
|
||||
const next = nav.primaryNext;
|
||||
|
||||
if (next) {
|
||||
edges[slug] = next;
|
||||
incoming[next] = (incoming[next] || 0) + 1;
|
||||
addEffectiveEdge(slug, next);
|
||||
|
||||
if (next === slug) selfLoops.push(slug);
|
||||
if (!slugs.has(next)) deadPrimaryNext.push(`${slug} → ${next}`);
|
||||
|
||||
if (!nav.primaryReason) missingReason.push(slug);
|
||||
}
|
||||
|
||||
const explicitPaths = nav.paths || {};
|
||||
const familyDefaults = defaultPathKeysByFamily.get(data.family) || new Set();
|
||||
const familyDefaultTargets = defaultTargetsByFamily.get(data.family) || new Map();
|
||||
|
||||
const pathCount = PATH_KEYS.filter((key) => {
|
||||
const explicit = Array.isArray(explicitPaths[key]) && explicitPaths[key].length > 0;
|
||||
const fromDefault = familyDefaults.has(key);
|
||||
return explicit || fromDefault;
|
||||
}).length;
|
||||
|
||||
if (pathCount < 2) weakPaths.push(slug);
|
||||
|
||||
for (const key of PATH_KEYS) {
|
||||
const explicitTargets = Array.isArray(explicitPaths[key]) ? explicitPaths[key] : [];
|
||||
const defaultTargets = familyDefaultTargets.get(key) || [];
|
||||
|
||||
for (const target of [...explicitTargets, ...defaultTargets]) {
|
||||
addEffectiveEdge(slug, target);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const seenPairs = new Set();
|
||||
|
||||
for (const [a, b] of Object.entries(edges)) {
|
||||
if (edges[b] === a) {
|
||||
const pair = [a, b].sort().join(" <-> ");
|
||||
if (!seenPairs.has(pair)) {
|
||||
seenPairs.add(pair);
|
||||
directCycles.push(`${a} <-> ${b}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const missingDefaults = [...families].filter((f) => !defaultFamilies.has(f));
|
||||
|
||||
const bigHubs = Object.entries(incoming)
|
||||
.filter(([, count]) => count > HUB_LIMIT)
|
||||
.sort((a, b) => b[1] - a[1]);
|
||||
|
||||
console.log("\n🔍 Glossary navigation audit");
|
||||
|
||||
if (missingNavigation.length > 0) {
|
||||
console.log("\n❌ Missing navigation:");
|
||||
missingNavigation.forEach((s) => console.log(" -", s));
|
||||
}
|
||||
|
||||
console.log("\n🔍 Direct cycles:");
|
||||
if (directCycles.length) directCycles.forEach((c) => console.log(" -", c));
|
||||
else console.log(" (none)");
|
||||
|
||||
console.log("\n📊 Top hubs:");
|
||||
Object.entries(incoming)
|
||||
.sort((a, b) => b[1] - a[1])
|
||||
.slice(0, 10)
|
||||
.forEach(([slug, n]) => {
|
||||
if (n > HUB_LIMIT) console.log(`⚠️ ${slug}: ${n}`);
|
||||
else console.log(` ${slug}: ${n}`);
|
||||
});
|
||||
|
||||
console.log("\n🔗 Checking dead primaryNext:");
|
||||
if (deadPrimaryNext.length) deadPrimaryNext.forEach((x) => console.log("❌", x));
|
||||
else console.log(" (none)");
|
||||
|
||||
if (missingDefaults.length) {
|
||||
console.log("\n❌ Families without defaults:");
|
||||
missingDefaults.forEach((f) => console.log(" -", f));
|
||||
}
|
||||
|
||||
if (bigHubs.length) {
|
||||
console.log(`\n⚠️ Hubs above limit (${HUB_LIMIT}):`);
|
||||
bigHubs.forEach(([slug, n]) => console.log(` - ${slug}: ${n}`));
|
||||
}
|
||||
|
||||
if (missingReason.length) {
|
||||
console.log("\n⚠️ Missing primaryReason:");
|
||||
missingReason.forEach((s) => console.log(" -", s));
|
||||
}
|
||||
|
||||
if (weakPaths.length) {
|
||||
console.log("\n⚠️ Weak path coverage (<2):");
|
||||
weakPaths.forEach((s) => console.log(" -", s));
|
||||
}
|
||||
|
||||
if (selfLoops.length) {
|
||||
console.log("\n❌ Self-referencing primaryNext:");
|
||||
selfLoops.forEach((s) => console.log(" -", s));
|
||||
}
|
||||
|
||||
console.log(`\n📊 Effective convergence top ${EFFECTIVE_TOP_LIMIT}:`);
|
||||
[...effectiveIncoming.entries()]
|
||||
.map(([slug, sources]) => [slug, sources.size])
|
||||
.sort((a, b) => b[1] - a[1])
|
||||
.slice(0, EFFECTIVE_TOP_LIMIT)
|
||||
.forEach(([slug, n]) => {
|
||||
console.log(` ${n} ${slug}`);
|
||||
});
|
||||
|
||||
console.log(`\n📊 Effective branching top ${EFFECTIVE_TOP_LIMIT}:`);
|
||||
[...effectiveOutgoing.entries()]
|
||||
.map(([slug, targets]) => [slug, targets.size])
|
||||
.sort((a, b) => b[1] - a[1])
|
||||
.slice(0, EFFECTIVE_TOP_LIMIT)
|
||||
.forEach(([slug, n]) => {
|
||||
console.log(` ${n} ${slug}`);
|
||||
});
|
||||
|
||||
const hardFailures =
|
||||
missingNavigation.length +
|
||||
directCycles.length +
|
||||
deadPrimaryNext.length +
|
||||
missingDefaults.length +
|
||||
selfLoops.length;
|
||||
|
||||
if (hardFailures > 0) {
|
||||
console.log(`\n❌ Audit failed: ${hardFailures} hard issue(s)`);
|
||||
process.exitCode = 1;
|
||||
} else {
|
||||
console.log("\n✅ Audit done");
|
||||
}
|
||||
246
scripts/build-annotations-index.mjs
Normal file
@@ -0,0 +1,246 @@
|
||||
#!/usr/bin/env node
|
||||
// scripts/build-annotations-index.mjs
|
||||
// Builds dist/annotations-index.json from src/annotations/**/*.yml
// Supports:
// - monolith: src/annotations/<pageKey>.yml
// - shard   : src/annotations/<pageKey>/<paraId>.yml (paraId = p-<n>-...)
// Invariants:
// - doc.schema === 1
// - doc.page (if present) == pageKey inferred from the path
// - shard: doc.paras must contain EXACTLY the paraId key (otherwise fail)
//
// Non-destructive deep-merge (media/refs/comments deduplicated), stable sort.
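//
// Illustrative sketch of the expected layout (the page key, paragraph id and
// ref below are hypothetical, not taken from the repo):
//
//   src/annotations/archicrat-ia/chapitre-1/p-3-ab12cd34.yml
//     schema: 1
//     page: archicrat-ia/chapitre-1
//     paras:
//       p-3-ab12cd34:
//         refs:
//           - { url: "https://example.org", label: "Example", kind: "web" }
//
// After merging, dist/annotations-index.json should expose it under
//   pages["archicrat-ia/chapitre-1"].paras["p-3-ab12cd34"].refs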
|
||||
|
||||
import fs from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import YAML from "yaml";
|
||||
|
||||
const ROOT = process.cwd();
|
||||
const ANNO_ROOT = path.join(ROOT, "src", "annotations");
|
||||
const DIST_DIR = path.join(ROOT, "dist");
|
||||
const OUT = path.join(DIST_DIR, "annotations-index.json");
|
||||
|
||||
function assert(cond, msg) {
|
||||
if (!cond) throw new Error(msg);
|
||||
}
|
||||
|
||||
function isObj(x) {
|
||||
return !!x && typeof x === "object" && !Array.isArray(x);
|
||||
}
|
||||
function isArr(x) {
|
||||
return Array.isArray(x);
|
||||
}
|
||||
|
||||
function normPath(s) {
|
||||
return String(s || "")
|
||||
.replace(/\\/g, "/")
|
||||
.replace(/^\/+|\/+$/g, "");
|
||||
}
|
||||
|
||||
function paraNum(pid) {
|
||||
const m = String(pid).match(/^p-(\d+)-/i);
|
||||
return m ? Number(m[1]) : Number.POSITIVE_INFINITY;
|
||||
}
|
||||
|
||||
function stableSortByTs(arr) {
|
||||
if (!Array.isArray(arr)) return;
|
||||
arr.sort((a, b) => {
|
||||
const ta = Date.parse(a?.ts || "") || 0;
|
||||
const tb = Date.parse(b?.ts || "") || 0;
|
||||
if (ta !== tb) return ta - tb;
|
||||
return JSON.stringify(a).localeCompare(JSON.stringify(b));
|
||||
});
|
||||
}
|
||||
|
||||
function keyMedia(x) { return String(x?.src || ""); }
|
||||
function keyRef(x) {
|
||||
return `${x?.url || ""}||${x?.label || ""}||${x?.kind || ""}||${x?.citation || ""}`;
|
||||
}
|
||||
function keyComment(x) { return String(x?.text || "").trim(); }
|
||||
|
||||
function uniqUnion(dst, src, keyFn) {
|
||||
const out = isArr(dst) ? [...dst] : [];
|
||||
const seen = new Set(out.map((x) => keyFn(x)));
|
||||
for (const it of (isArr(src) ? src : [])) {
|
||||
const k = keyFn(it);
|
||||
if (!k) continue;
|
||||
if (!seen.has(k)) {
|
||||
seen.add(k);
|
||||
out.push(it);
|
||||
}
|
||||
}
|
||||
return out;
|
||||
}
|
||||
|
||||
function deepMergeEntry(dst, src) {
|
||||
if (!isObj(dst) || !isObj(src)) return;
|
||||
|
||||
for (const [k, v] of Object.entries(src)) {
|
||||
if (k === "media" && isArr(v)) { dst.media = uniqUnion(dst.media, v, keyMedia); continue; }
|
||||
if (k === "refs" && isArr(v)) { dst.refs = uniqUnion(dst.refs, v, keyRef); continue; }
|
||||
if (k === "comments_editorial" && isArr(v)) { dst.comments_editorial = uniqUnion(dst.comments_editorial, v, keyComment); continue; }
|
||||
|
||||
if (isObj(v)) {
|
||||
if (!isObj(dst[k])) dst[k] = {};
|
||||
deepMergeEntry(dst[k], v);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (isArr(v)) {
|
||||
const cur = isArr(dst[k]) ? dst[k] : [];
|
||||
const seen = new Set(cur.map((x) => JSON.stringify(x)));
|
||||
const out = [...cur];
|
||||
for (const it of v) {
|
||||
const s = JSON.stringify(it);
|
||||
if (!seen.has(s)) { seen.add(s); out.push(it); }
|
||||
}
|
||||
dst[k] = out;
|
||||
continue;
|
||||
}
|
||||
|
||||
// scalar: set only if missing/empty
|
||||
if (!(k in dst) || dst[k] == null || dst[k] === "") dst[k] = v;
|
||||
}
|
||||
}
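// Sketch of the merge behaviour on hypothetical values:
//   deepMergeEntry(
//     { media: [{ src: "/media/a.png" }], note: "keep me" },
//     { media: [{ src: "/media/a.png" }, { src: "/media/b.png" }], note: "ignored" }
//   )
//   -> media deduplicated on src => a.png, b.png
//   -> scalar "note" left untouched because it is already set on the destination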
|
||||
|
||||
async function walk(dir) {
|
||||
const out = [];
|
||||
const ents = await fs.readdir(dir, { withFileTypes: true });
|
||||
for (const e of ents) {
|
||||
const p = path.join(dir, e.name);
|
||||
if (e.isDirectory()) out.push(...await walk(p));
|
||||
else if (e.isFile() && /\.ya?ml$/i.test(e.name)) out.push(p);
|
||||
}
|
||||
return out;
|
||||
}
|
||||
|
||||
function inferExpectedFromRel(relNoExt) {
|
||||
const parts = relNoExt.split("/").filter(Boolean);
|
||||
const last = parts.at(-1) || "";
|
||||
const isShard = parts.length > 1 && /^p-\d+-/i.test(last); // ✅ durcissement
|
||||
const pageKey = isShard ? parts.slice(0, -1).join("/") : relNoExt;
|
||||
const paraId = isShard ? last : null;
|
||||
return { isShard, pageKey, paraId };
|
||||
}
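// Examples on hypothetical paths:
//   "archicratie/chapitre-2"              -> { isShard: false, pageKey: "archicratie/chapitre-2", paraId: null }
//   "archicratie/chapitre-2/p-4-1a2b3c4d" -> { isShard: true,  pageKey: "archicratie/chapitre-2", paraId: "p-4-1a2b3c4d" }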
|
||||
|
||||
function validateAndNormalizeDoc(doc, relFile, expectedPageKey, expectedParaId) {
|
||||
assert(isObj(doc), `${relFile}: doc must be an object`);
|
||||
assert(doc.schema === 1, `${relFile}: schema must be 1`);
|
||||
assert(isObj(doc.paras), `${relFile}: missing object key "paras"`);
|
||||
|
||||
const gotPage = doc.page != null ? normPath(doc.page) : "";
|
||||
const expPage = normPath(expectedPageKey);
|
||||
|
||||
if (gotPage) {
|
||||
assert(
|
||||
gotPage === expPage,
|
||||
`${relFile}: page mismatch (page="${doc.page}" vs path="${expectedPageKey}")`
|
||||
);
|
||||
} else {
|
||||
doc.page = expPage;
|
||||
}
|
||||
|
||||
if (expectedParaId) {
|
||||
const keys = Object.keys(doc.paras || {}).map(String);
|
||||
assert(
|
||||
keys.includes(expectedParaId),
|
||||
`${relFile}: shard mismatch: must contain paras["${expectedParaId}"]`
|
||||
);
|
||||
assert(
|
||||
keys.length === 1 && keys[0] === expectedParaId,
|
||||
`${relFile}: shard invariant violated: shard file must contain ONLY paras["${expectedParaId}"] (got: ${keys.join(", ")})`
|
||||
);
|
||||
}
|
||||
|
||||
return doc;
|
||||
}
|
||||
|
||||
async function main() {
|
||||
const pages = {};
|
||||
const errors = [];
|
||||
|
||||
await fs.mkdir(DIST_DIR, { recursive: true });
|
||||
|
||||
const files = await walk(ANNO_ROOT);
|
||||
|
||||
for (const fp of files) {
|
||||
const rel = normPath(path.relative(ANNO_ROOT, fp));
|
||||
const relNoExt = rel.replace(/\.ya?ml$/i, "");
|
||||
const { isShard, pageKey, paraId } = inferExpectedFromRel(relNoExt);
|
||||
|
||||
try {
|
||||
const raw = await fs.readFile(fp, "utf8");
|
||||
const doc = YAML.parse(raw) || {};
|
||||
|
||||
if (!isObj(doc) || doc.schema !== 1) continue;
|
||||
|
||||
validateAndNormalizeDoc(
|
||||
doc,
|
||||
`src/annotations/${rel}`,
|
||||
pageKey,
|
||||
isShard ? paraId : null
|
||||
);
|
||||
|
||||
const pg = (pages[pageKey] ??= { paras: {} });
|
||||
|
||||
if (isShard) {
|
||||
const entry = doc.paras[paraId];
|
||||
if (!isObj(pg.paras[paraId])) pg.paras[paraId] = {};
|
||||
if (isObj(entry)) deepMergeEntry(pg.paras[paraId], entry);
|
||||
|
||||
stableSortByTs(pg.paras[paraId].media);
|
||||
stableSortByTs(pg.paras[paraId].refs);
|
||||
stableSortByTs(pg.paras[paraId].comments_editorial);
|
||||
} else {
|
||||
for (const [pid, entry] of Object.entries(doc.paras || {})) {
|
||||
const p = String(pid);
|
||||
if (!isObj(pg.paras[p])) pg.paras[p] = {};
|
||||
if (isObj(entry)) deepMergeEntry(pg.paras[p], entry);
|
||||
|
||||
stableSortByTs(pg.paras[p].media);
|
||||
stableSortByTs(pg.paras[p].refs);
|
||||
stableSortByTs(pg.paras[p].comments_editorial);
|
||||
}
|
||||
}
|
||||
} catch (e) {
|
||||
errors.push({ file: `src/annotations/${rel}`, error: String(e?.message || e) });
|
||||
}
|
||||
}
|
||||
|
||||
for (const [pageKey, pg] of Object.entries(pages)) {
|
||||
const keys = Object.keys(pg.paras || {});
|
||||
keys.sort((a, b) => {
|
||||
const ia = paraNum(a);
|
||||
const ib = paraNum(b);
|
||||
if (Number.isFinite(ia) && Number.isFinite(ib) && ia !== ib) return ia - ib;
|
||||
return String(a).localeCompare(String(b));
|
||||
});
|
||||
const next = {};
|
||||
for (const k of keys) next[k] = pg.paras[k];
|
||||
pg.paras = next;
|
||||
}
|
||||
|
||||
const out = {
|
||||
schema: 1,
|
||||
generatedAt: new Date().toISOString(),
|
||||
pages,
|
||||
stats: {
|
||||
pages: Object.keys(pages).length,
|
||||
paras: Object.values(pages).reduce((n, p) => n + Object.keys(p.paras || {}).length, 0),
|
||||
errors: errors.length,
|
||||
},
|
||||
errors,
|
||||
};
|
||||
|
||||
if (errors.length) {
|
||||
throw new Error(`${errors[0].file}: ${errors[0].error}`);
|
||||
}
|
||||
|
||||
await fs.writeFile(OUT, JSON.stringify(out), "utf8");
|
||||
console.log(`✅ annotations-index: pages=${out.stats.pages} paras=${out.stats.paras} -> dist/annotations-index.json`);
|
||||
}
|
||||
|
||||
main().catch((e) => {
|
||||
console.error(`FAIL: build-annotations-index crashed: ${e?.stack || e?.message || e}`);
|
||||
process.exit(1);
|
||||
});
|
||||
@@ -14,6 +14,9 @@ const DIST_DIR = getArg("--dist", "dist");
|
||||
const BASELINE = getArg("--baseline", path.join("tests", "anchors-baseline.json"));
|
||||
const UPDATE = args.has("--update");
|
||||
|
||||
const ACCEPT_GLOSSARY_RESETS =
|
||||
process.env.ACCEPT_GLOSSARY_ANCHOR_RESETS === "1";
|
||||
|
||||
// e.g. 0.2 => 20%
|
||||
const THRESHOLD = Number(getArg("--threshold", process.env.ANCHORS_THRESHOLD ?? "0.2"));
|
||||
const MIN_PREV = Number(getArg("--min-prev", process.env.ANCHORS_MIN_PREV ?? "10"));
|
||||
@@ -74,7 +77,42 @@ function loadAllowMissing() {
|
||||
return new Set(arr.map(String));
|
||||
}
|
||||
|
||||
function loadAnchorChurnAllowlist() {
|
||||
const p = path.resolve("config/anchor-churn-allowlist.json");
|
||||
if (!fssync.existsSync(p)) return { acceptedResets: {}, acceptedPrefixes: {} };
|
||||
const raw = fssync.readFileSync(p, "utf8").trim();
|
||||
if (!raw) return { acceptedResets: {}, acceptedPrefixes: {} };
|
||||
const data = JSON.parse(raw);
|
||||
if (!data || typeof data !== "object" || Array.isArray(data)) {
|
||||
throw new Error("anchor-churn-allowlist.json must be an object");
|
||||
}
|
||||
|
||||
const acceptedResets = data.accepted_resets || {};
|
||||
if (!acceptedResets || typeof acceptedResets !== "object" || Array.isArray(acceptedResets)) {
|
||||
throw new Error("anchor-churn-allowlist.json: accepted_resets must be an object");
|
||||
}
|
||||
|
||||
const acceptedPrefixes = data.accepted_prefixes || {};
|
||||
if (!acceptedPrefixes || typeof acceptedPrefixes !== "object" || Array.isArray(acceptedPrefixes)) {
|
||||
throw new Error("anchor-churn-allowlist.json: accepted_prefixes must be an object");
|
||||
}
|
||||
|
||||
return { acceptedResets, acceptedPrefixes };
|
||||
}
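// Illustrative config/anchor-churn-allowlist.json (page keys and reasons are hypothetical):
// {
//   "accepted_resets":   { "glossaire/archicration": "page rewritten, anchors regenerated" },
//   "accepted_prefixes": { "glossaire/": "glossary migration, ids reset" }
// }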
|
||||
|
||||
function acceptedResetReasonForPage(page) {
|
||||
if (ACCEPTED_RESETS[page]) return ACCEPTED_RESETS[page];
|
||||
|
||||
for (const [prefix, reason] of Object.entries(ACCEPTED_PREFIXES)) {
|
||||
if (page.startsWith(prefix)) return reason;
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
const ALLOW_MISSING = loadAllowMissing();
|
||||
const { acceptedResets: ACCEPTED_RESETS, acceptedPrefixes: ACCEPTED_PREFIXES } =
|
||||
loadAnchorChurnAllowlist();
|
||||
|
||||
async function buildSnapshot() {
|
||||
const absDist = path.resolve(DIST_DIR);
|
||||
@@ -139,6 +177,7 @@ function diffPage(prevIds, curIds) {
|
||||
|
||||
let failed = false;
|
||||
let changedPages = 0;
|
||||
let acceptedPages = 0;
|
||||
|
||||
for (const p of pages) {
|
||||
const prevIds = base[p] || null;
|
||||
@@ -172,6 +211,7 @@ function diffPage(prevIds, curIds) {
|
||||
const prevN = prevIds.length || 1;
|
||||
const churn = (added.length + removed.length) / prevN;
|
||||
const removedRatio = removed.length / prevN;
|
||||
const acceptedReason = acceptedResetReasonForPage(p);
|
||||
|
||||
console.log(
|
||||
`~ ${p} prev=${prevIds.length} now=${curIds.length}` +
|
||||
@@ -182,11 +222,23 @@ function diffPage(prevIds, curIds) {
|
||||
console.log(` removed: ${removed.slice(0, 20).join(", ")}${removed.length > 20 ? " …" : ""}`);
|
||||
}
|
||||
|
||||
if (prevIds.length >= MIN_PREV && churn > THRESHOLD) failed = true;
|
||||
if (prevIds.length >= MIN_PREV && removedRatio > THRESHOLD) failed = true;
|
||||
const exceeds =
|
||||
(prevIds.length >= MIN_PREV && churn > THRESHOLD) ||
|
||||
(prevIds.length >= MIN_PREV && removedRatio > THRESHOLD);
|
||||
|
||||
if (exceeds && acceptedReason) {
|
||||
acceptedPages += 1;
|
||||
console.log(` ✅ accepted reset: ${acceptedReason}`);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (exceeds) failed = true;
|
||||
}
|
||||
|
||||
console.log(`\nSummary: pages compared=${pages.length}, pages changed=${changedPages}`);
|
||||
console.log(
|
||||
`\nSummary: pages compared=${pages.length}, pages changed=${changedPages}, accepted resets=${acceptedPages}`
|
||||
);
|
||||
|
||||
if (failed) {
|
||||
console.error(`FAIL: anchor churn above threshold (threshold=${pct(THRESHOLD)} minPrev=${MIN_PREV})`);
|
||||
process.exit(1);
|
||||
|
||||
104
scripts/check-annotations-media.mjs
Normal file
@@ -0,0 +1,104 @@
|
||||
import fs from "node:fs/promises";
|
||||
import path from "node:path";
|
||||
import YAML from "yaml";
|
||||
|
||||
const CWD = process.cwd();
|
||||
const ANNO_DIR = path.join(CWD, "src", "annotations");
|
||||
const PUBLIC_DIR = path.join(CWD, "public");
|
||||
|
||||
async function exists(p) {
|
||||
try { await fs.access(p); return true; } catch { return false; }
|
||||
}
|
||||
|
||||
async function walk(dir) {
|
||||
const out = [];
|
||||
const ents = await fs.readdir(dir, { withFileTypes: true });
|
||||
for (const e of ents) {
|
||||
const p = path.join(dir, e.name);
|
||||
if (e.isDirectory()) out.push(...(await walk(p)));
|
||||
else out.push(p);
|
||||
}
|
||||
return out;
|
||||
}
|
||||
|
||||
function parseDoc(raw, fileAbs) {
|
||||
if (/\.json$/i.test(fileAbs)) return JSON.parse(raw);
|
||||
return YAML.parse(raw);
|
||||
}
|
||||
|
||||
function isPlainObject(x) {
|
||||
return !!x && typeof x === "object" && !Array.isArray(x);
|
||||
}
|
||||
|
||||
function toPublicPathFromUrl(urlPath) {
|
||||
// "/media/..." -> "public/media/..."
|
||||
const clean = String(urlPath || "").split("?")[0].split("#")[0];
|
||||
if (!clean.startsWith("/media/")) return null;
|
||||
return path.join(PUBLIC_DIR, clean.replace(/^\/+/, ""));
|
||||
}
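// e.g. (hypothetical asset): "/media/figures/schema.png?v=2" -> "<cwd>/public/media/figures/schema.png"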
|
||||
|
||||
async function main() {
|
||||
if (!(await exists(ANNO_DIR))) {
|
||||
console.log("✅ annotations-media: aucun src/annotations — rien à vérifier.");
|
||||
process.exit(0);
|
||||
}
|
||||
|
||||
const files = (await walk(ANNO_DIR)).filter((p) => /\.(ya?ml|json)$/i.test(p));
|
||||
let checked = 0;
|
||||
let missing = 0;
|
||||
const notes = [];
|
||||
|
||||
// Optimisation: avoid re-checking the same media file over and over
|
||||
const seenMedia = new Set(); // src string
|
||||
|
||||
for (const f of files) {
|
||||
const rel = path.relative(CWD, f).replace(/\\/g, "/");
|
||||
const raw = await fs.readFile(f, "utf8");
|
||||
|
||||
let doc;
|
||||
try { doc = parseDoc(raw, f); }
|
||||
catch (e) {
|
||||
missing++;
|
||||
notes.push(`- PARSE FAIL: ${rel} (${String(e?.message ?? e)})`);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!isPlainObject(doc) || doc.schema !== 1 || !isPlainObject(doc.paras)) continue;
|
||||
|
||||
for (const [paraId, entry] of Object.entries(doc.paras)) {
|
||||
const media = entry?.media;
|
||||
if (!Array.isArray(media)) continue;
|
||||
|
||||
for (const m of media) {
|
||||
const src = String(m?.src || "");
|
||||
if (!src.startsWith("/media/")) continue; // external URLs are fine, as are other future conventions
|
||||
|
||||
// dedupe
|
||||
if (seenMedia.has(src)) continue;
|
||||
seenMedia.add(src);
|
||||
|
||||
checked++;
|
||||
const p = toPublicPathFromUrl(src);
|
||||
if (!p) continue;
|
||||
|
||||
if (!(await exists(p))) {
|
||||
missing++;
|
||||
notes.push(`- MISSING MEDIA: ${src} (from ${rel} para ${paraId})`);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (missing > 0) {
|
||||
console.error(`FAIL: annotations media missing (checked=${checked} missing=${missing})`);
|
||||
for (const n of notes) console.error(n);
|
||||
process.exit(1);
|
||||
}
|
||||
|
||||
console.log(`✅ annotations-media OK: checked=${checked}`);
|
||||
}
|
||||
|
||||
main().catch((e) => {
|
||||
console.error("FAIL: check-annotations-media crashed:", e);
|
||||
process.exit(1);
|
||||
});
|
||||
@@ -27,11 +27,6 @@ function escRe(s) {
|
||||
return String(s).replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
|
||||
}
|
||||
|
||||
function inferPageKeyFromFile(fileAbs) {
|
||||
const rel = path.relative(ANNO_DIR, fileAbs).replace(/\\/g, "/");
|
||||
return rel.replace(/\.(ya?ml|json)$/i, "");
|
||||
}
|
||||
|
||||
function normalizePageKey(s) {
|
||||
return String(s || "").replace(/^\/+/, "").replace(/\/+$/, "");
|
||||
}
|
||||
@@ -40,6 +35,31 @@ function isPlainObject(x) {
|
||||
return !!x && typeof x === "object" && !Array.isArray(x);
|
||||
}
|
||||
|
||||
function isParaId(s) {
|
||||
return /^p-\d+-/i.test(String(s || ""));
|
||||
}
|
||||
|
||||
/**
|
||||
 * Supports:
 * - monolith: src/annotations/<pageKey>.yml -> pageKey = rel without extension
 * - shard   : src/annotations/<pageKey>/<paraId>.yml -> pageKey = dirname(rel), paraId = basename
 *
 * A file is treated as a shard only when it sits in a subdirectory (guards against pathological cases).
|
||||
*/
|
||||
function inferFromFile(fileAbs) {
|
||||
const rel = path.relative(ANNO_DIR, fileAbs).replace(/\\/g, "/");
|
||||
const relNoExt = rel.replace(/\.(ya?ml|json)$/i, "");
|
||||
const parts = relNoExt.split("/").filter(Boolean);
|
||||
const base = parts[parts.length - 1] || "";
|
||||
const dirParts = parts.slice(0, -1);
|
||||
|
||||
const isShard = dirParts.length > 0 && isParaId(base);
|
||||
const pageKey = isShard ? dirParts.join("/") : relNoExt;
|
||||
const paraId = isShard ? base : "";
|
||||
|
||||
return { pageKey: normalizePageKey(pageKey), paraId };
|
||||
}
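// Examples on hypothetical files:
//   src/annotations/archicratie/chapitre-2.yml              -> { pageKey: "archicratie/chapitre-2", paraId: "" }
//   src/annotations/archicratie/chapitre-2/p-4-1a2b3c4d.yml -> { pageKey: "archicratie/chapitre-2", paraId: "p-4-1a2b3c4d" }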
|
||||
|
||||
async function loadAliases() {
|
||||
if (!(await exists(ALIASES_PATH))) return {};
|
||||
try {
|
||||
@@ -60,10 +80,12 @@ function getAlias(aliases, pageKey, oldId) {
|
||||
// supports:
|
||||
// 1) { "<pageKey>": { "<old>": "<new>" } }
|
||||
// 2) { "<old>": "<new>" }
|
||||
const a1 = aliases?.[pageKey]?.[oldId];
|
||||
if (a1) return a1;
|
||||
const k1 = String(pageKey || "");
|
||||
const k2 = k1 ? ("/" + k1.replace(/^\/+|\/+$/g, "") + "/") : "";
|
||||
const a1 = (aliases?.[k1]?.[oldId]) || (k2 ? aliases?.[k2]?.[oldId] : "");
|
||||
if (a1) return String(a1);
|
||||
const a2 = aliases?.[oldId];
|
||||
if (a2) return a2;
|
||||
if (a2) return String(a2);
|
||||
return "";
|
||||
}
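// Illustrative aliases file covering both accepted forms (ids are hypothetical):
// {
//   "archicratie/chapitre-2":   { "p-7-deadbeef": "p-7-0badc0de" },
//   "/archicratie/chapitre-2/": { "p-8-deadbeef": "p-8-0badc0de" },
//   "p-9-deadbeef": "p-9-0badc0de"
// }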
|
||||
|
||||
@@ -81,7 +103,11 @@ async function main() {
|
||||
const aliases = await loadAliases();
|
||||
const files = (await walk(ANNO_DIR)).filter((p) => /\.(ya?ml|json)$/i.test(p));
|
||||
|
||||
let pages = 0;
|
||||
// perf: cache HTML per page (shards = many files for a single page)
|
||||
const htmlCache = new Map(); // pageKey -> html
|
||||
const missingDistPage = new Set(); // pageKey
|
||||
|
||||
let pagesSeen = new Set();
|
||||
let checked = 0;
|
||||
let failures = 0;
|
||||
const notes = [];
|
||||
@@ -105,7 +131,7 @@ async function main() {
|
||||
continue;
|
||||
}
|
||||
|
||||
const pageKey = normalizePageKey(inferPageKeyFromFile(f));
|
||||
const { pageKey, paraId: shardParaId } = inferFromFile(f);
|
||||
|
||||
if (doc.page != null && normalizePageKey(doc.page) !== pageKey) {
|
||||
failures++;
|
||||
@@ -119,20 +145,44 @@ async function main() {
|
||||
continue;
|
||||
}
|
||||
|
||||
// strong shard invariant: must contain paras[paraId]
|
||||
if (shardParaId) {
|
||||
if (!Object.prototype.hasOwnProperty.call(doc.paras, shardParaId)) {
|
||||
failures++;
|
||||
notes.push(`- SHARD MISMATCH: ${rel} (expected paras["${shardParaId}"] present)`);
|
||||
continue;
|
||||
}
|
||||
// extra keys -> warning only (non-destructive)
|
||||
const keys = Object.keys(doc.paras);
|
||||
if (!(keys.length === 1 && keys[0] === shardParaId)) {
|
||||
notes.push(`- WARN shard has extra paras: ${rel} (expected only "${shardParaId}", got ${keys.join(", ")})`);
|
||||
}
|
||||
}
|
||||
|
||||
pagesSeen.add(pageKey);
|
||||
|
||||
const distFile = path.join(DIST_DIR, pageKey, "index.html");
|
||||
if (!(await exists(distFile))) {
|
||||
failures++;
|
||||
notes.push(`- MISSING PAGE: dist/${pageKey}/index.html (from ${rel})`);
|
||||
if (!missingDistPage.has(pageKey)) {
|
||||
missingDistPage.add(pageKey);
|
||||
failures++;
|
||||
notes.push(`- MISSING PAGE: dist/${pageKey}/index.html (from ${rel})`);
|
||||
} else {
|
||||
notes.push(`- WARN missing page already reported: dist/${pageKey}/index.html (from ${rel})`);
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
pages++;
|
||||
const html = await fs.readFile(distFile, "utf8");
|
||||
let html = htmlCache.get(pageKey);
|
||||
if (!html) {
|
||||
html = await fs.readFile(distFile, "utf8");
|
||||
htmlCache.set(pageKey, html);
|
||||
}
|
||||
|
||||
for (const paraId of Object.keys(doc.paras)) {
|
||||
checked++;
|
||||
|
||||
if (!/^p-\d+-/i.test(paraId)) {
|
||||
if (!isParaId(paraId)) {
|
||||
failures++;
|
||||
notes.push(`- INVALID ID: ${rel} (${paraId})`);
|
||||
continue;
|
||||
@@ -156,6 +206,7 @@ async function main() {
|
||||
}
|
||||
|
||||
const warns = notes.filter((x) => x.startsWith("- WARN"));
|
||||
const pages = pagesSeen.size;
|
||||
|
||||
if (failures > 0) {
|
||||
console.error(`FAIL: annotations invalid (pages=${pages} checked=${checked} failures=${failures})`);
|
||||
@@ -170,4 +221,4 @@ async function main() {
|
||||
main().catch((e) => {
|
||||
console.error("FAIL: annotations check crashed:", e);
|
||||
process.exit(1);
|
||||
});
|
||||
});
|
||||
241
scripts/convert_docx_to_mdx.py
Executable file
@@ -0,0 +1,241 @@
|
||||
#!/usr/bin/env python3
|
||||
import argparse
|
||||
import os
|
||||
import re
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
try:
|
||||
import yaml
|
||||
except ImportError:
|
||||
print("Erreur : PyYAML n'est pas installé. Lance : pip3 install pyyaml")
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
EDITION = "archicrat-ia"
|
||||
STATUS = "essai_these"
|
||||
VERSION = "0.1.0"
|
||||
|
||||
|
||||
ORDER_MAP = {
|
||||
"prologue": 10,
|
||||
"chapitre-1": 20,
|
||||
"chapitre-2": 30,
|
||||
"chapitre-3": 40,
|
||||
"chapitre-4": 50,
|
||||
"chapitre-5": 60,
|
||||
"conclusion": 70,
|
||||
}
|
||||
|
||||
|
||||
TITLE_MAP = {
|
||||
"prologue": "Prologue — Fondation, finalité sociopolitique et historique",
|
||||
"chapitre-1": "Chapitre 1 — Fondements épistémologiques et modélisation",
|
||||
"chapitre-2": "Chapitre 2 — Archéogenèse des régimes de co-viabilité",
|
||||
"chapitre-3": "Chapitre 3 — Philosophies du pouvoir et archicration",
|
||||
"chapitre-4": "Chapitre 4 — Histoire archicratique des révolutions industrielles",
|
||||
"chapitre-5": "Chapitre 5 — Tensions, co-viabilités et régulations",
|
||||
"conclusion": "Conclusion — ArchiCraT-IA",
|
||||
}
|
||||
|
||||
|
||||
def slugify_name(path: Path) -> str:
|
||||
stem = path.stem.lower().strip()
|
||||
|
||||
replacements = {
|
||||
" ": "-",
|
||||
"_": "-",
|
||||
"—": "-",
|
||||
"–": "-",
|
||||
"é": "e",
|
||||
"è": "e",
|
||||
"ê": "e",
|
||||
"ë": "e",
|
||||
"à": "a",
|
||||
"â": "a",
|
||||
"ä": "a",
|
||||
"î": "i",
|
||||
"ï": "i",
|
||||
"ô": "o",
|
||||
"ö": "o",
|
||||
"ù": "u",
|
||||
"û": "u",
|
||||
"ü": "u",
|
||||
"ç": "c",
|
||||
"'": "",
|
||||
"’": "",
|
||||
}
|
||||
|
||||
for old, new in replacements.items():
|
||||
stem = stem.replace(old, new)
|
||||
|
||||
stem = re.sub(r"-+", "-", stem).strip("-")
|
||||
|
||||
# specific normalisations
stem = stem.replace("chapitre-1-fondements-epistemologiques-et-modelisation-archicratie-version-officielle-revise", "chapitre-1")
|
||||
|
||||
if "prologue" in stem:
|
||||
return "prologue"
|
||||
if "chapitre-1" in stem:
|
||||
return "chapitre-1"
|
||||
if "chapitre-2" in stem:
|
||||
return "chapitre-2"
|
||||
if "chapitre-3" in stem:
|
||||
return "chapitre-3"
|
||||
if "chapitre-4" in stem:
|
||||
return "chapitre-4"
|
||||
if "chapitre-5" in stem:
|
||||
return "chapitre-5"
|
||||
if "conclusion" in stem:
|
||||
return "conclusion"
|
||||
|
||||
return stem
|
||||
|
||||
|
||||
def extract_title_from_markdown(md_text: str) -> str | None:
|
||||
for line in md_text.splitlines():
|
||||
line = line.strip()
|
||||
if not line:
|
||||
continue
|
||||
if line.startswith("# "):
|
||||
return line[2:].strip()
|
||||
return None
|
||||
|
||||
|
||||
def remove_first_h1(md_text: str) -> str:
|
||||
lines = md_text.splitlines()
|
||||
out = []
|
||||
removed = False
|
||||
|
||||
for line in lines:
|
||||
if not removed and line.strip().startswith("# "):
|
||||
removed = True
|
||||
continue
|
||||
out.append(line)
|
||||
|
||||
text = "\n".join(out).lstrip()
|
||||
return text
|
||||
|
||||
|
||||
def clean_markdown(md_text: str) -> str:
|
||||
text = md_text.replace("\r\n", "\n").replace("\r", "\n")
|
||||
|
||||
# collapse runs of blank lines
|
||||
text = re.sub(r"\n{3,}", "\n\n", text)
|
||||
|
||||
# remove any leftover Pandoc internal-link artifacts ([](#...))
|
||||
text = re.sub(r"\[\]\(#.*?\)", "", text)
|
||||
|
||||
# strip trailing whitespace at end of lines
|
||||
text = re.sub(r"[ \t]+$", "", text, flags=re.MULTILINE)
|
||||
|
||||
return text.strip() + "\n"
|
||||
|
||||
|
||||
def compute_level(slug: str) -> int:
|
||||
if slug == "prologue":
|
||||
return 1
|
||||
if slug.startswith("chapitre-"):
|
||||
return 1
|
||||
if slug == "conclusion":
|
||||
return 1
|
||||
return 1
|
||||
|
||||
|
||||
def convert_one_file(input_docx: Path, output_dir: Path, source_root: Path):
|
||||
slug = slugify_name(input_docx)
|
||||
output_mdx = output_dir / f"{slug}.mdx"
|
||||
|
||||
cmd = [
|
||||
"pandoc",
|
||||
str(input_docx),
|
||||
"-f",
|
||||
"docx",
|
||||
"-t",
|
||||
"gfm+smart",
|
||||
]
|
||||
|
||||
result = subprocess.run(cmd, check=True, capture_output=True, text=True)
|
||||
md_text = result.stdout
|
||||
|
||||
detected_title = extract_title_from_markdown(md_text)
|
||||
md_body = remove_first_h1(md_text)
|
||||
md_body = clean_markdown(md_body)
|
||||
|
||||
title = TITLE_MAP.get(slug) or detected_title or input_docx.stem
|
||||
order = ORDER_MAP.get(slug, 999)
|
||||
level = compute_level(slug)
|
||||
|
||||
relative_source = input_docx
|
||||
try:
|
||||
relative_source = input_docx.relative_to(source_root)
|
||||
except ValueError:
|
||||
relative_source = input_docx.name
|
||||
|
||||
frontmatter = {
|
||||
"title": title,
|
||||
"edition": EDITION,
|
||||
"status": STATUS,
|
||||
"level": level,
|
||||
"version": VERSION,
|
||||
"concepts": [],
|
||||
"links": [],
|
||||
"order": order,
|
||||
"summary": "",
|
||||
"source": {
|
||||
"kind": "docx",
|
||||
"path": str(relative_source),
|
||||
},
|
||||
}
|
||||
|
||||
yaml_block = yaml.safe_dump(
|
||||
frontmatter,
|
||||
allow_unicode=True,
|
||||
sort_keys=False,
|
||||
default_flow_style=False,
|
||||
).strip()
|
||||
|
||||
final_text = f"---\n{yaml_block}\n---\n{md_body if md_body.startswith(chr(10)) else chr(10) + md_body}"
|
||||
output_mdx.write_text(final_text, encoding="utf-8")
|
||||
print(f"✅ {input_docx.name} -> {output_mdx.name}")
|
||||
|
||||
|
||||
def main():
|
||||
parser = argparse.ArgumentParser(description="Convertit un dossier DOCX en MDX avec frontmatter.")
|
||||
parser.add_argument("input_dir", help="Dossier source contenant les DOCX")
|
||||
parser.add_argument("output_dir", help="Dossier de sortie pour les MDX")
|
||||
args = parser.parse_args()
|
||||
|
||||
input_dir = Path(args.input_dir).expanduser().resolve()
|
||||
output_dir = Path(args.output_dir).expanduser().resolve()
|
||||
|
||||
if not shutil.which("pandoc"):
|
||||
print("Erreur : pandoc n'est pas installé. Lance : brew install pandoc")
|
||||
sys.exit(1)
|
||||
|
||||
if not input_dir.exists() or not input_dir.is_dir():
|
||||
print(f"Erreur : dossier source introuvable : {input_dir}")
|
||||
sys.exit(1)
|
||||
|
||||
output_dir.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
docx_files = sorted(input_dir.glob("*.docx"))
|
||||
if not docx_files:
|
||||
print(f"Aucun DOCX trouvé dans : {input_dir}")
|
||||
sys.exit(1)
|
||||
|
||||
for docx_file in docx_files:
|
||||
convert_one_file(docx_file, output_dir, input_dir)
|
||||
|
||||
print()
|
||||
print("Conversion DOCX -> MDX terminée.")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
304
scripts/convert_mdx_to_docx.py
Normal file
@@ -0,0 +1,304 @@
|
||||
#!/usr/bin/env python3
|
||||
import argparse
|
||||
import re
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
import tempfile
|
||||
from pathlib import Path
|
||||
import zipfile
|
||||
|
||||
try:
|
||||
import yaml
|
||||
except ImportError:
|
||||
print("Erreur : PyYAML n'est pas installé. Lance : pip3 install pyyaml")
|
||||
sys.exit(1)
|
||||
|
||||
try:
|
||||
from docx import Document
|
||||
except ImportError:
|
||||
print("Erreur : python-docx n'est pas installé. Lance : pip3 install python-docx")
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
def split_frontmatter(text: str):
|
||||
if not text.startswith("---\n"):
|
||||
return {}, text
|
||||
|
||||
match = re.match(r"^---\n(.*?)\n---\n(.*)$", text, flags=re.DOTALL)
|
||||
if not match:
|
||||
return {}, text
|
||||
|
||||
yaml_block = match.group(1)
|
||||
body = match.group(2)
|
||||
|
||||
try:
|
||||
metadata = yaml.safe_load(yaml_block) or {}
|
||||
except Exception as e:
|
||||
print(f"Avertissement : frontmatter YAML illisible : {e}")
|
||||
metadata = {}
|
||||
|
||||
return metadata, body
|
||||
|
||||
|
||||
def strip_mdx_artifacts(text: str):
|
||||
# imports / exports MDX
|
||||
text = re.sub(r"^\s*(import|export)\s+.+?$", "", text, flags=re.MULTILINE)
|
||||
|
||||
# self-closing components: <Component />
|
||||
text = re.sub(r"<[A-Z][A-Za-z0-9._-]*\b[^>]*\/>", "", text)
|
||||
|
||||
# block components: <Component ...>...</Component>
|
||||
text = re.sub(
|
||||
r"<([A-Z][A-Za-z0-9._-]*)\b[^>]*>.*?</\1>",
|
||||
"",
|
||||
text,
|
||||
flags=re.DOTALL,
|
||||
)
|
||||
|
||||
# leftover lone braces on their own line
|
||||
text = re.sub(r"^\s*{\s*}\s*$", "", text, flags=re.MULTILINE)
|
||||
|
||||
# collapse multiple blank lines
|
||||
text = re.sub(r"\n{3,}", "\n\n", text)
|
||||
|
||||
return text.strip() + "\n"
|
||||
|
||||
|
||||
def inject_h1_from_title(metadata: dict, body: str):
|
||||
title = metadata.get("title", "")
|
||||
if not title:
|
||||
return body
|
||||
|
||||
if re.match(r"^\s*#\s+", body):
|
||||
return body
|
||||
|
||||
return f"# {title}\n\n{body.lstrip()}"
|
||||
|
||||
|
||||
def find_style_by_candidates(doc, candidates):
|
||||
# First, look up by visible style name
|
||||
for style in doc.styles:
|
||||
for candidate in candidates:
|
||||
if style.name == candidate:
|
||||
return style
|
||||
|
||||
# Then by internal Word style_id
|
||||
for style in doc.styles:
|
||||
style_id = getattr(style, "style_id", "")
|
||||
if style_id in {"BodyText", "Heading1", "Heading2", "Heading3", "Heading4"}:
|
||||
for candidate in candidates:
|
||||
if candidate in {"Body Text", "Corps de texte"} and style_id == "BodyText":
|
||||
return style
|
||||
if candidate in {"Heading 1", "Titre 1"} and style_id == "Heading1":
|
||||
return style
|
||||
if candidate in {"Heading 2", "Titre 2"} and style_id == "Heading2":
|
||||
return style
|
||||
if candidate in {"Heading 3", "Titre 3"} and style_id == "Heading3":
|
||||
return style
|
||||
if candidate in {"Heading 4", "Titre 4"} and style_id == "Heading4":
|
||||
return style
|
||||
return None
|
||||
|
||||
def strip_leading_paragraph_numbers(text: str):
|
||||
"""
|
||||
Strips leading paragraph numbers such as:
2. Texte...
11. Texte...
101. Texte...
without touching Markdown headings (#, ##, ###).
|
||||
"""
|
||||
fixed_lines = []
|
||||
|
||||
for line in text.splitlines():
|
||||
stripped = line.lstrip()
|
||||
|
||||
# Never touch Markdown headings
|
||||
if stripped.startswith("#"):
|
||||
fixed_lines.append(line)
|
||||
continue
|
||||
|
||||
# Strip a paragraph number at the start of the line
|
||||
line = re.sub(r"^\s*\d+\.\s+", "", line)
|
||||
fixed_lines.append(line)
|
||||
|
||||
return "\n".join(fixed_lines) + "\n"
|
||||
|
||||
def normalize_non_heading_paragraphs(docx_path: Path):
|
||||
"""
|
||||
Forces every non-heading paragraph to Body Text / Corps de texte.
Heading 1-4 paragraphs are left untouched.
|
||||
"""
|
||||
doc = Document(str(docx_path))
|
||||
|
||||
body_style = find_style_by_candidates(doc, ["Body Text", "Corps de texte"])
|
||||
if body_style is None:
|
||||
print(f"Avertissement : style 'Body Text / Corps de texte' introuvable dans {docx_path.name}")
|
||||
return
|
||||
|
||||
heading_names = {
|
||||
"Heading 1", "Heading 2", "Heading 3", "Heading 4",
|
||||
"Titre 1", "Titre 2", "Titre 3", "Titre 4",
|
||||
}
|
||||
heading_ids = {"Heading1", "Heading2", "Heading3", "Heading4"}
|
||||
|
||||
changed = 0
|
||||
|
||||
for para in doc.paragraphs:
|
||||
text = para.text.strip()
|
||||
if not text:
|
||||
continue
|
||||
|
||||
current_style = para.style
|
||||
current_name = current_style.name if current_style else ""
|
||||
current_id = getattr(current_style, "style_id", "") if current_style else ""
|
||||
|
||||
if current_name in heading_names or current_id in heading_ids:
|
||||
continue
|
||||
|
||||
# Everything else is switched to Body Text
|
||||
para.style = body_style
|
||||
changed += 1
|
||||
|
||||
doc.save(str(docx_path))
|
||||
print(f" ↳ normalisation styles : {changed} paragraphe(s) mis en 'Body Text / Corps de texte'")
|
||||
|
||||
def remove_word_bookmarks(docx_path: Path):
|
||||
"""
|
||||
Removes Word bookmarks from the DOCX.
They are what shows up as grey brackets in LibreOffice/Word
when bookmark display is enabled.
|
||||
"""
|
||||
with tempfile.TemporaryDirectory() as tmpdir:
|
||||
tmpdir = Path(tmpdir)
|
||||
|
||||
# Unzip the docx
|
||||
with zipfile.ZipFile(docx_path, "r") as zin:
|
||||
zin.extractall(tmpdir)
|
||||
|
||||
xml_targets = [
|
||||
tmpdir / "word" / "document.xml",
|
||||
tmpdir / "word" / "footnotes.xml",
|
||||
tmpdir / "word" / "endnotes.xml",
|
||||
tmpdir / "word" / "comments.xml",
|
||||
]
|
||||
|
||||
removed = 0
|
||||
|
||||
for xml_file in xml_targets:
|
||||
if not xml_file.exists():
|
||||
continue
|
||||
|
||||
text = xml_file.read_text(encoding="utf-8")
|
||||
|
||||
# remove <w:bookmarkStart .../> and <w:bookmarkEnd .../>
|
||||
text, c1 = re.subn(r"<w:bookmarkStart\b[^>]*/>", "", text)
|
||||
text, c2 = re.subn(r"<w:bookmarkEnd\b[^>]*/>", "", text)
|
||||
|
||||
removed += c1 + c2
|
||||
xml_file.write_text(text, encoding="utf-8")
|
||||
|
||||
# Re-zip the archive
|
||||
tmp_output = docx_path.with_suffix(".cleaned.docx")
|
||||
with zipfile.ZipFile(tmp_output, "w", zipfile.ZIP_DEFLATED) as zout:
|
||||
for file in tmpdir.rglob("*"):
|
||||
if file.is_file():
|
||||
zout.write(file, file.relative_to(tmpdir))
|
||||
|
||||
tmp_output.replace(docx_path)
|
||||
print(f" ↳ suppression signets : {removed} balise(s) supprimée(s)")
|
||||
|
||||
def convert_one_file(input_path: Path, output_path: Path, reference_doc: Path | None):
|
||||
raw = input_path.read_text(encoding="utf-8")
|
||||
metadata, body = split_frontmatter(raw)
|
||||
body = strip_mdx_artifacts(body)
|
||||
body = strip_leading_paragraph_numbers(body)
|
||||
body = inject_h1_from_title(metadata, body)
|
||||
|
||||
with tempfile.NamedTemporaryFile("w", suffix=".md", delete=False, encoding="utf-8") as tmp:
|
||||
tmp.write(body)
|
||||
tmp_md = Path(tmp.name)
|
||||
|
||||
cmd = [
|
||||
"pandoc",
|
||||
str(tmp_md),
|
||||
"-f",
|
||||
"markdown",
|
||||
"-o",
|
||||
str(output_path),
|
||||
]
|
||||
|
||||
if reference_doc:
|
||||
cmd.extend(["--reference-doc", str(reference_doc)])
|
||||
|
||||
try:
|
||||
subprocess.run(cmd, check=True)
|
||||
finally:
|
||||
try:
|
||||
tmp_md.unlink()
|
||||
except FileNotFoundError:
|
||||
pass
|
||||
|
||||
normalize_non_heading_paragraphs(output_path)
|
||||
remove_word_bookmarks(output_path)
|
||||
|
||||
def main():
|
||||
parser = argparse.ArgumentParser(
|
||||
description="Convertit des fichiers MDX en DOCX en conservant H1/H2/H3/H4 et en forçant le corps en Body Text."
|
||||
)
|
||||
parser.add_argument("input_dir", help="Dossier contenant les .mdx")
|
||||
parser.add_argument(
|
||||
"--output-dir",
|
||||
default=str(Path.home() / "Desktop" / "archicrat-ia-docx"),
|
||||
help="Dossier de sortie DOCX"
|
||||
)
|
||||
parser.add_argument(
|
||||
"--reference-doc",
|
||||
default=None,
|
||||
help="DOCX modèle Word à utiliser comme reference-doc"
|
||||
)
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
input_dir = Path(args.input_dir)
|
||||
output_dir = Path(args.output_dir)
|
||||
reference_doc = Path(args.reference_doc) if args.reference_doc else None
|
||||
|
||||
if not shutil.which("pandoc"):
|
||||
print("Erreur : pandoc n'est pas installé. Installe-le avec : brew install pandoc")
|
||||
sys.exit(1)
|
||||
|
||||
if not input_dir.exists() or not input_dir.is_dir():
|
||||
print(f"Erreur : dossier introuvable : {input_dir}")
|
||||
sys.exit(1)
|
||||
|
||||
if reference_doc and not reference_doc.exists():
|
||||
print(f"Erreur : reference-doc introuvable : {reference_doc}")
|
||||
sys.exit(1)
|
||||
|
||||
output_dir.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
mdx_files = sorted(input_dir.glob("*.mdx"))
|
||||
if not mdx_files:
|
||||
print(f"Aucun fichier .mdx trouvé dans : {input_dir}")
|
||||
sys.exit(1)
|
||||
|
||||
print(f"Conversion de {len(mdx_files)} fichier(s)...")
|
||||
print(f"Entrée : {input_dir}")
|
||||
print(f"Sortie : {output_dir}")
|
||||
if reference_doc:
|
||||
print(f"Modèle : {reference_doc}")
|
||||
print()
|
||||
|
||||
for mdx_file in mdx_files:
|
||||
docx_name = mdx_file.with_suffix(".docx").name
|
||||
out_file = output_dir / docx_name
|
||||
print(f"→ {mdx_file.name} -> {docx_name}")
|
||||
convert_one_file(mdx_file, out_file, reference_doc)
|
||||
|
||||
print()
|
||||
print("✅ Conversion terminée.")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
132
scripts/fix-docx-source.py
Executable file
@@ -0,0 +1,132 @@
|
||||
#!/usr/bin/env python3
|
||||
from __future__ import annotations
|
||||
|
||||
import argparse
|
||||
import shutil
import sys
|
||||
import tempfile
|
||||
import unicodedata
|
||||
import xml.etree.ElementTree as ET
|
||||
from pathlib import Path
|
||||
from zipfile import ZIP_DEFLATED, ZipFile
|
||||
|
||||
W_NS = "http://schemas.openxmlformats.org/wordprocessingml/2006/main"
|
||||
XML_NS = "http://www.w3.org/XML/1998/namespace"
|
||||
NS = {"w": W_NS}
|
||||
|
||||
ET.register_namespace("w", W_NS)
|
||||
|
||||
|
||||
REPLACEMENTS = {
|
||||
"coviabilité": "co-viabilité",
|
||||
"sacroinstitutionnelle": "sacro-institutionnelle",
|
||||
"technologistique": "techno-logistique",
|
||||
"scripturonormative": "scripturo-normative",
|
||||
"textesrepères": "textes-repères",
|
||||
"ellemême": "elle-même",
|
||||
"opérateur de d’archicration": "opérateur d’archicration",
|
||||
"systèmes plusieurs statuts": "systèmes à plusieurs statuts",
|
||||
"celle-ci se donne à voir": "Celle-ci se donne à voir",
|
||||
"Pour autant il serait": "Pour autant, il serait",
|
||||
"Telles peuvent être le cas de": "Tels peuvent être les cas de",
|
||||
}
|
||||
|
||||
# deliberately NOT auto-corrected: "la co-viabilité devient ,"
# that case requires a human editorial decision.
|
||||
|
||||
|
||||
def qn(tag: str) -> str:
|
||||
prefix, local = tag.split(":")
|
||||
if prefix != "w":
|
||||
raise ValueError(tag)
|
||||
return f"{{{W_NS}}}{local}"
|
||||
|
||||
|
||||
def norm(s: str) -> str:
|
||||
return unicodedata.normalize("NFC", s or "")
|
||||
|
||||
|
||||
def paragraph_text(p: ET.Element) -> str:
|
||||
return "".join(t.text or "" for t in p.findall(".//w:t", NS))
|
||||
|
||||
|
||||
def replaced_text(s: str) -> str:
|
||||
out = norm(s)
|
||||
for bad, good in REPLACEMENTS.items():
|
||||
out = out.replace(bad, good)
|
||||
return out
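# Sketch on a made-up snippet:
#   replaced_text("la coviabilité ellemême") -> "la co-viabilité elle-même"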
|
||||
|
||||
|
||||
def rewrite_paragraph_text(p: ET.Element, new_text: str) -> None:
|
||||
ppr = p.find("w:pPr", NS)
|
||||
|
||||
for child in list(p):
|
||||
if ppr is not None and child is ppr:
|
||||
continue
|
||||
p.remove(child)
|
||||
|
||||
r = ET.Element(qn("w:r"))
|
||||
t = ET.SubElement(r, qn("w:t"))
|
||||
t.set(f"{{{XML_NS}}}space", "preserve")
|
||||
t.text = new_text
|
||||
p.append(r)
|
||||
|
||||
|
||||
def process_document_xml(xml_path: Path) -> int:
|
||||
tree = ET.parse(xml_path)
|
||||
root = tree.getroot()
|
||||
|
||||
changed = 0
|
||||
|
||||
for p in root.findall(".//w:p", NS):
|
||||
old = paragraph_text(p)
|
||||
new = replaced_text(old)
|
||||
if new != old:
|
||||
rewrite_paragraph_text(p, new)
|
||||
changed += 1
|
||||
|
||||
tree.write(xml_path, encoding="utf-8", xml_declaration=True)
|
||||
return changed
|
||||
|
||||
|
||||
def repack_docx(tmpdir: Path, out_docx: Path) -> None:
|
||||
tmp_out = out_docx.with_suffix(out_docx.suffix + ".tmp")
|
||||
with ZipFile(tmp_out, "w", ZIP_DEFLATED) as zf:
|
||||
for p in sorted(tmpdir.rglob("*")):
|
||||
if p.is_file():
|
||||
zf.write(p, p.relative_to(tmpdir))
|
||||
shutil.move(tmp_out, out_docx)
|
||||
|
||||
|
||||
def main() -> int:
|
||||
parser = argparse.ArgumentParser(description="Répare mécaniquement certaines scories DOCX.")
|
||||
parser.add_argument("docx", help="Chemin du DOCX")
|
||||
parser.add_argument("--in-place", action="store_true", help="Réécrit le DOCX en place")
|
||||
args = parser.parse_args()
|
||||
|
||||
src = Path(args.docx)
|
||||
if not src.exists():
|
||||
print(f"ECHEC: fichier introuvable: {src}", file=sys.stderr)
|
||||
return 2
|
||||
|
||||
out = src if args.in_place else src.with_name(src.stem + ".fixed.docx")
|
||||
|
||||
with tempfile.TemporaryDirectory(prefix="docx-fix-") as td:
|
||||
td_path = Path(td)
|
||||
with ZipFile(src) as zf:
|
||||
zf.extractall(td_path)
|
||||
|
||||
document_xml = td_path / "word" / "document.xml"
|
||||
if not document_xml.exists():
|
||||
print("ECHEC: word/document.xml absent.", file=sys.stderr)
|
||||
return 2
|
||||
|
||||
changed = process_document_xml(document_xml)
|
||||
repack_docx(td_path, out)
|
||||
|
||||
print(f"OK: DOCX réparé par réécriture paragraphe/XML. Paragraphes modifiés: {changed}")
|
||||
return 0
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
raise SystemExit(main())
|
||||
@@ -114,7 +114,6 @@ async function runMammoth(docxPath, assetsOutDirWebRoot) {
|
||||
);
|
||||
|
||||
let html = result.value || "";
|
||||
|
||||
// Mammoth gives relative src="image-xx.png" ; we will prefix later
|
||||
return html;
|
||||
}
|
||||
@@ -182,17 +181,52 @@ async function exists(p) {
|
||||
try { await fs.access(p); return true; } catch { return false; }
|
||||
}
|
||||
|
||||
/**
|
||||
 * ✅ compat:
 * - legacy: collection="archicratie" + slug="archicrat-ia/chapitre-3"
 * - new   : collection="archicrat-ia" + slug="chapitre-3"
 *
 * Goal: always write to src/content/archicrat-ia/<slugWithoutPrefix>.mdx
|
||||
*/
|
||||
function normalizeDest(collection, slug) {
|
||||
let outCollection = String(collection || "").trim();
|
||||
let outSlug = String(slug || "").trim().replace(/^\/+|\/+$/g, "");
|
||||
|
||||
if (outCollection === "archicratie" && outSlug.startsWith("archicrat-ia/")) {
|
||||
outCollection = "archicrat-ia";
|
||||
outSlug = outSlug.replace(/^archicrat-ia\//, "");
|
||||
}
|
||||
|
||||
return { outCollection, outSlug };
|
||||
}
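// Examples (the first pair comes from the compat note above, the second is hypothetical):
//   normalizeDest("archicratie", "archicrat-ia/chapitre-3") -> { outCollection: "archicrat-ia", outSlug: "chapitre-3" }
//   normalizeDest("traite", "chapitre-1")                    -> { outCollection: "traite", outSlug: "chapitre-1" }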
|
||||
|
||||
async function main() {
|
||||
const args = parseArgs(process.argv);
|
||||
const manifestPath = path.resolve(args.manifest);
|
||||
|
||||
const items = await readManifest(manifestPath);
|
||||
const selected = args.all ? items : items.filter(it => args.only.includes(it.slug));
|
||||
const selected = args.all
|
||||
? items
|
||||
: items.filter((it) => {
|
||||
const rawSlug = String(it.slug || "").trim();
|
||||
const rawCollection = String(it.collection || "").trim();
|
||||
const qualified = `${rawCollection}/${rawSlug}`;
|
||||
return args.only.includes(rawSlug) || args.only.includes(qualified);
|
||||
});
|
||||
|
||||
if (!args.all && selected.length !== args.only.length) {
|
||||
const found = new Set(selected.map(s => s.slug));
|
||||
const missing = args.only.filter(s => !found.has(s));
|
||||
throw new Error(`Some --only slugs not found in manifest: ${missing.join(", ")}`);
|
||||
if (!args.all) {
|
||||
const found = new Set(
|
||||
selected.flatMap((s) => {
|
||||
const rawSlug = String(s.slug || "").trim();
|
||||
const rawCollection = String(s.collection || "").trim();
|
||||
return [rawSlug, `${rawCollection}/${rawSlug}`];
|
||||
})
|
||||
);
|
||||
|
||||
const missing = args.only.filter((s) => !found.has(s));
|
||||
if (missing.length > 0) {
|
||||
throw new Error(`Some --only slugs not found in manifest: ${missing.join(", ")}`);
|
||||
}
|
||||
}
|
||||
|
||||
const pandocOk = havePandoc();
|
||||
@@ -203,11 +237,14 @@ async function main() {
|
||||
|
||||
for (const it of selected) {
|
||||
const docxPath = path.resolve(it.source);
|
||||
const outFile = path.resolve("src/content", it.collection, `${it.slug}.mdx`);
|
||||
|
||||
const { outCollection, outSlug } = normalizeDest(it.collection, it.slug);
|
||||
|
||||
const outFile = path.resolve("src/content", outCollection, `${outSlug}.mdx`);
|
||||
const outDir = path.dirname(outFile);
|
||||
|
||||
const assetsPublicDir = path.posix.join("/imported", it.collection, it.slug);
|
||||
const assetsDiskDir = path.resolve("public", "imported", it.collection, it.slug);
|
||||
const assetsPublicDir = path.posix.join("/imported", outCollection, outSlug);
|
||||
const assetsDiskDir = path.resolve("public", "imported", outCollection, outSlug);
|
||||
|
||||
if (!(await exists(docxPath))) {
|
||||
throw new Error(`Missing source docx: ${docxPath}`);
|
||||
@@ -241,18 +278,35 @@ async function main() {
|
||||
html = rewriteLocalImageLinks(html, assetsPublicDir);
|
||||
body = html.trim() ? html : "<p>(Import vide)</p>";
|
||||
}
|
||||
|
||||
|
||||
const defaultVersion = process.env.PUBLIC_RELEASE || "0.1.0";
|
||||
|
||||
// ✅ IMPORTANT: archicrat-ia shares edition/status with archicratie (no frontmatter migration)
|
||||
const schemaDefaultsByCollection = {
|
||||
archicratie: { edition: "archicratie", status: "modele_sociopolitique", level: 1 },
|
||||
ia: { edition: "ia", status: "cas_pratique", level: 1 },
|
||||
traite: { edition: "traite", status: "ontodynamique", level: 1 },
|
||||
glossaire: { edition: "glossaire", status: "lexique", level: 1 },
|
||||
atlas: { edition: "atlas", status: "atlas", level: 1 },
|
||||
archicratie: { edition: "archicratie", status: "modele_sociopolitique", level: 1 },
|
||||
"archicrat-ia": { edition: "archicrat-ia", status: "essai_these", level: 1 },
|
||||
"cas-ia": { edition: "cas-ia", status: "application", level: 1 },
|
||||
traite: { edition: "traite", status: "ontodynamique", level: 1 },
|
||||
glossaire: { edition: "glossaire", status: "lexique", level: 1 },
|
||||
atlas: { edition: "atlas", status: "atlas", level: 1 },
|
||||
};
|
||||
|
||||
const defaults = schemaDefaultsByCollection[it.collection] || { edition: it.collection, status: "draft", level: 1 };
|
||||
// Legacy compat:
// manifest collection="archicratie" + slug="archicrat-ia/..."
// => we still write to src/content/archicrat-ia/...
// => but we keep the historical edition/status of type archicratie/modele_sociopolitique
|
||||
const defaultsKey =
|
||||
String(it.collection || "").trim() === "archicratie" &&
|
||||
String(it.slug || "").trim().startsWith("archicrat-ia/")
|
||||
? "archicratie"
|
||||
: outCollection;
|
||||
|
||||
const defaults =
|
||||
schemaDefaultsByCollection[defaultsKey] || {
|
||||
edition: defaultsKey,
|
||||
status: "draft",
|
||||
level: 1,
|
||||
};
|
||||
|
||||
const fm = [
|
||||
"---",
|
||||
@@ -282,4 +336,4 @@ async function main() {
|
||||
main().catch((e) => {
|
||||
console.error("\nERROR:", e?.message || e);
|
||||
process.exit(1);
|
||||
});
|
||||
});
|
||||
@@ -14,6 +14,24 @@ const STRICT = argv.includes("--strict") || process.env.CI === "1" || process.en
|
||||
function escRe(s) {
|
||||
return String(s).replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
|
||||
}
|
||||
|
||||
async function exists(p) {
|
||||
try {
|
||||
await fs.access(p);
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
function normalizeRoute(route) {
|
||||
let r = String(route || "").trim();
|
||||
if (!r.startsWith("/")) r = "/" + r;
|
||||
if (!r.endsWith("/")) r = r + "/";
|
||||
r = r.replace(/\/{2,}/g, "/");
|
||||
return r;
|
||||
}
|
||||
|
||||
function countIdAttr(html, id) {
|
||||
const re = new RegExp(`\\bid=(["'])${escRe(id)}\\1`, "gi");
|
||||
let c = 0;
|
||||
@@ -22,7 +40,6 @@ function countIdAttr(html, id) {
|
||||
}
|
||||
|
||||
function findStartTagWithId(html, id) {
|
||||
// first element carrying id="..."
|
||||
const re = new RegExp(
|
||||
`<([a-zA-Z0-9:-]+)\\b[^>]*\\bid=(["'])${escRe(id)}\\2[^>]*>`,
|
||||
"i"
|
||||
@@ -36,34 +53,10 @@ function isInjectedAliasSpan(html, id) {
|
||||
const found = findStartTagWithId(html, id);
|
||||
if (!found) return false;
|
||||
if (found.tagName !== "span") return false;
|
||||
// class="... para-alias ..."
|
||||
return /\bclass=(["'])(?:(?!\1).)*\bpara-alias\b(?:(?!\1).)*\1/i.test(found.tag);
|
||||
}
|
||||
|
||||
function normalizeRoute(route) {
|
||||
let r = String(route || "").trim();
|
||||
if (!r.startsWith("/")) r = "/" + r;
|
||||
if (!r.endsWith("/")) r = r + "/";
|
||||
r = r.replace(/\/{2,}/g, "/");
|
||||
return r;
|
||||
}
|
||||
|
||||
async function exists(p) {
|
||||
try {
|
||||
await fs.access(p);
|
||||
return true;
|
||||
} catch {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
function hasId(html, id) {
|
||||
const re = new RegExp(`\\bid=(["'])${escRe(id)}\\1`, "i");
|
||||
return re.test(html);
|
||||
}
|
||||
|
||||
function injectBeforeId(html, newId, injectHtml) {
|
||||
// inserts just before the tag carrying id="newId"
|
||||
const re = new RegExp(
|
||||
`(<[^>]+\\bid=(["'])${escRe(newId)}\\2[^>]*>)`,
|
||||
"i"
|
||||
@@ -82,6 +75,7 @@ async function main() {
|
||||
}
|
||||
|
||||
const raw = await fs.readFile(ALIASES_PATH, "utf-8");
|
||||
|
||||
/** @type {Record<string, Record<string,string>>} */
|
||||
let aliases;
|
||||
try {
|
||||
@@ -89,6 +83,7 @@ async function main() {
|
||||
} catch (e) {
|
||||
throw new Error(`JSON invalide: ${ALIASES_PATH} (${e?.message || e})`);
|
||||
}
|
||||
|
||||
if (!aliases || typeof aliases !== "object" || Array.isArray(aliases)) {
|
||||
throw new Error(`Format invalide: attendu { route: { oldId: newId } } dans ${ALIASES_PATH}`);
|
||||
}
|
||||
@@ -114,10 +109,10 @@ async function main() {
|
||||
console.log(msg);
|
||||
warnCount++;
|
||||
}
|
||||
|
||||
|
||||
if (entries.length === 0) continue;
|
||||
|
||||
const rel = route.replace(/^\/+|\/+$/g, ""); // sans slash
|
||||
const rel = route.replace(/^\/+|\/+$/g, "");
|
||||
const htmlPath = path.join(DIST_ROOT, rel, "index.html");
|
||||
|
||||
if (!(await exists(htmlPath))) {
|
||||
@@ -135,24 +130,8 @@ async function main() {
|
||||
if (!oldId || !newId) continue;
|
||||
|
||||
const oldCount = countIdAttr(html, oldId);
|
||||
if (oldCount > 0) {
|
||||
// ✅ already injected (idempotent)
|
||||
if (isInjectedAliasSpan(html, oldId)) continue;
|
||||
|
||||
// ⛔️ oldId already exists "for real" (e.g. <p id="oldId">)
|
||||
// => useless / inverted / obsolete alias
|
||||
const found = findStartTagWithId(html, oldId);
|
||||
const where = found ? `<${found.tagName} … id="${oldId}" …>` : `id="${oldId}"`;
|
||||
const msg =
|
||||
`⚠️ alias inutile/inversé: oldId déjà présent dans la page (${where}). ` +
|
||||
`Supprime l'alias ${oldId} -> ${newId} (ou corrige le sens) pour route=${route}`;
|
||||
if (STRICT) throw new Error(msg);
|
||||
console.log(msg);
|
||||
warnCount++;
|
||||
continue;
|
||||
}
|
||||
|
||||
// right after computing oldCount
|
||||
// ✅ already injected => idempotent
|
||||
if (oldCount > 0 && isInjectedAliasSpan(html, oldId)) {
|
||||
if (STRICT && oldCount !== 1) {
|
||||
throw new Error(`oldId dupliqué (${oldCount}) alors qu'il est censé être unique: ${route} id=${oldId}`);
|
||||
@@ -160,18 +139,23 @@ async function main() {
|
||||
continue;
|
||||
}
|
||||
|
||||
// before injecting, after hasId(newId)
|
||||
const newCount = countIdAttr(html, newId);
|
||||
if (newCount !== 1) {
|
||||
const msg = `⚠️ newId non-unique (${newCount}) : ${route} new=${newId} (injection ambiguë)`;
|
||||
// ⛔️ oldId already exists "for real" => useless/inverted alias
|
||||
if (oldCount > 0) {
|
||||
const found = findStartTagWithId(html, oldId);
|
||||
const where = found ? `<${found.tagName} … id="${oldId}" …>` : `id="${oldId}"`;
|
||||
const msg =
|
||||
`⚠️ alias inutile/inversé: oldId déjà présent (${where}). ` +
|
||||
`Supprime ${oldId} -> ${newId} (ou corrige le sens) pour route=${route}`;
|
||||
if (STRICT) throw new Error(msg);
|
||||
console.log(msg);
|
||||
warnCount++;
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!hasId(html, newId)) {
|
||||
const msg = `⚠️ newId introuvable: ${route} old=${oldId} -> new=${newId}`;
|
||||
// newId must exist exactly ONCE (otherwise the injection is ambiguous)
|
||||
const newCount = countIdAttr(html, newId);
|
||||
if (newCount !== 1) {
|
||||
const msg = `⚠️ newId non-unique (${newCount}) : ${route} new=${newId} (injection ambiguë)`;
|
||||
if (STRICT) throw new Error(msg);
|
||||
console.log(msg);
|
||||
warnCount++;
|
||||
|
||||
scripts/pick-proposer-issue.mjs — new file (241 lines)
@@ -0,0 +1,241 @@
#!/usr/bin/env node
import process from "node:process";

function getEnv(name, fallback = "") {
  return String(process.env[name] ?? fallback).trim();
}

function sh(value) {
  return JSON.stringify(String(value ?? ""));
}

function escapeRegExp(s) {
  return String(s).replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
}

function pickLine(body, key) {
  const re = new RegExp(`^\\s*${escapeRegExp(key)}\\s*:\\s*([^\\n\\r]+)`, "mi");
  const m = String(body || "").match(re);
  return m ? m[1].trim() : "";
}

function pickHeadingValue(body, headingKey) {
  const re = new RegExp(
    `^##\\s*${escapeRegExp(headingKey)}[^\\n]*\\n([\\s\\S]*?)(?=\\n##\\s|\\n\\s*$)`,
    "mi"
  );
  const m = String(body || "").match(re);
  if (!m) return "";
  const lines = m[1].split(/\r?\n/).map((l) => l.trim());
  for (const l of lines) {
    if (!l) continue;
    if (l.startsWith("<!--")) continue;
    return l.replace(/^\/?/, "/").trim();
  }
  return "";
}

function normalizeChemin(chemin) {
  let c = String(chemin || "").trim();
  if (!c) return "";
  if (!c.startsWith("/")) c = "/" + c;
  if (!c.endsWith("/")) c += "/";
  return c;
}

function extractCheminFromAnyUrl(text) {
  const s = String(text || "");
  const m = s.match(/(\/[a-z0-9\-]+\/[a-z0-9\-\/]+\/)#p-\d+-[0-9a-f]{8}/i);
  return m ? m[1] : "";
}

function inferType(issue) {
  const title = String(issue?.title || "");
  const body = String(issue?.body || "").replace(/\r\n/g, "\n");
  const fromBody = String(pickLine(body, "Type") || "").trim().toLowerCase();
  if (fromBody) return fromBody;

  if (title.startsWith("[Correction]")) return "type/correction";
  if (title.startsWith("[Fact-check]") || title.startsWith("[Vérification]")) return "type/fact-check";
  return "";
}

function inferChemin(issue) {
  const title = String(issue?.title || "");
  const body = String(issue?.body || "").replace(/\r\n/g, "\n");

  return normalizeChemin(
    pickLine(body, "Chemin") ||
    pickHeadingValue(body, "Chemin") ||
    extractCheminFromAnyUrl(body) ||
    extractCheminFromAnyUrl(title)
  );
}

function labelsOf(issue) {
  return Array.isArray(issue?.labels)
    ? issue.labels.map((l) => String(l?.name || "")).filter(Boolean)
    : [];
}

function issueNumber(issue) {
  return Number(issue?.number || issue?.index || 0);
}

function parseMeta(issue) {
  const labels = labelsOf(issue);
  const type = inferType(issue);
  const chemin = inferChemin(issue);
  const number = issueNumber(issue);

  const hasApproved = labels.includes("state/approved");
  const hasRejected = labels.includes("state/rejected");
  const isProposer = type === "type/correction" || type === "type/fact-check";
  const isOpen = String(issue?.state || "open") === "open";
  const isPR = Boolean(issue?.pull_request);

  const eligible =
    number > 0 &&
    isOpen &&
    !isPR &&
    hasApproved &&
    !hasRejected &&
    isProposer &&
    Boolean(chemin);

  return {
    issue,
    number,
    type,
    chemin,
    labels,
    hasApproved,
    hasRejected,
    eligible,
  };
}

async function fetchJson(url, token) {
  const res = await fetch(url, {
    headers: {
      Authorization: `token ${token}`,
      Accept: "application/json",
      "User-Agent": "archicratie-pick-proposer-issue/1.0",
    },
  });
  if (!res.ok) {
    const t = await res.text().catch(() => "");
    throw new Error(`HTTP ${res.status} ${url}\n${t}`);
  }
  return await res.json();
}

async function fetchIssue(apiBase, owner, repo, token, n) {
  const url = `${apiBase}/api/v1/repos/${owner}/${repo}/issues/${n}`;
  return await fetchJson(url, token);
}

async function listOpenIssues(apiBase, owner, repo, token) {
  const out = [];
  let page = 1;
  const limit = 100;

  while (true) {
    const url = `${apiBase}/api/v1/repos/${owner}/${repo}/issues?state=open&page=${page}&limit=${limit}`;
    const batch = await fetchJson(url, token);
    if (!Array.isArray(batch) || batch.length === 0) break;
    out.push(...batch);
    if (batch.length < limit) break;
    page += 1;
  }

  return out;
}

function emitNone(reason) {
  process.stdout.write(
    [
      `TARGET_FOUND="0"`,
      `TARGET_REASON=${sh(reason)}`,
      `TARGET_PRIMARY_ISSUE=""`,
      `TARGET_ISSUES=""`,
      `TARGET_COUNT="0"`,
      `TARGET_CHEMIN=""`,
    ].join("\n") + "\n"
  );
}

async function main() {
  const token = getEnv("FORGE_TOKEN");
  const owner = getEnv("GITEA_OWNER");
  const repo = getEnv("GITEA_REPO");
  const apiBase = (getEnv("FORGE_API") || getEnv("FORGE_BASE")).replace(/\/+$/, "");
  const explicit = Number(process.argv[2] || 0);

  if (!token) throw new Error("Missing FORGE_TOKEN");
  if (!owner || !repo) throw new Error("Missing GITEA_OWNER / GITEA_REPO");
  if (!apiBase) throw new Error("Missing FORGE_API / FORGE_BASE");

  let metas = [];

  if (explicit > 0) {
    const issue = await fetchIssue(apiBase, owner, repo, token, explicit);
    const meta = parseMeta(issue);

    if (!meta.eligible) {
      emitNone(
        !meta.hasApproved
          ? "explicit_issue_not_approved"
          : meta.hasRejected
          ? "explicit_issue_rejected"
          : !meta.type
          ? "explicit_issue_missing_type"
          : !meta.chemin
          ? "explicit_issue_missing_chemin"
          : "explicit_issue_not_eligible"
      );
      return;
    }

    const openIssues = await listOpenIssues(apiBase, owner, repo, token);
    metas = openIssues.map(parseMeta).filter((m) => m.eligible && m.chemin === meta.chemin);
  } else {
    const openIssues = await listOpenIssues(apiBase, owner, repo, token);
    metas = openIssues.map(parseMeta).filter((m) => m.eligible);

    if (metas.length === 0) {
      emitNone("no_open_approved_proposer_issue");
      return;
    }

    metas.sort((a, b) => a.number - b.number);
    const first = metas[0];
    metas = metas.filter((m) => m.chemin === first.chemin);
  }

  metas.sort((a, b) => a.number - b.number);

  if (metas.length === 0) {
    emitNone("no_batch_for_path");
    return;
  }

  const primary = metas[0];
  const issues = metas.map((m) => String(m.number));

  process.stdout.write(
    [
      `TARGET_FOUND="1"`,
      `TARGET_REASON="ok"`,
      `TARGET_PRIMARY_ISSUE=${sh(primary.number)}`,
      `TARGET_ISSUES=${sh(issues.join(" "))}`,
      `TARGET_COUNT=${sh(issues.length)}`,
      `TARGET_CHEMIN=${sh(primary.chemin)}`,
    ].join("\n") + "\n"
  );
}

main().catch((e) => {
  console.error("💥 pick-proposer-issue:", e?.message || e);
  process.exit(1);
});
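A minimal consumption sketch for a CI step (assumptions: a POSIX shell, and the FORGE_TOKEN / GITEA_OWNER / GITEA_REPO / FORGE_API variables that the script itself checks are already exported). Every line the script prints is a KEY="value" pair quoted through JSON.stringify, so it can be eval'd directly:

# Sketch only — not part of the repo; the variable names mirror the emit calls above.
out="$(node scripts/pick-proposer-issue.mjs)"      # or: node scripts/pick-proposer-issue.mjs 123
eval "$out"
if [ "$TARGET_FOUND" = "1" ]; then
  echo "Batch for ${TARGET_CHEMIN}: issues ${TARGET_ISSUES} (primary #${TARGET_PRIMARY_ISSUE}, count ${TARGET_COUNT})"
else
  echo "Nothing to pick: ${TARGET_REASON}"
fi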
scripts/purge-dist-dev-whoami.mjs — new file (31 lines)
@@ -0,0 +1,31 @@
// scripts/purge-dist-dev-whoami.mjs
import fs from "node:fs/promises";
import path from "node:path";

const CWD = process.cwd();
const targetDir = path.join(CWD, "dist", "_auth", "whoami");
const targetIndex = path.join(CWD, "dist", "_auth", "whoami", "index.html");

// Idempotent purge (force=true => no error if the path is already gone)
async function rmSafe(p) {
  try {
    await fs.rm(p, { recursive: true, force: true });
    return true;
  } catch {
    return false;
  }
}

async function main() {
  const removedIndex = await rmSafe(targetIndex);
  const removedDir = await rmSafe(targetDir);

  // Optional: if dist/_auth ends up empty we leave it as is (no need to touch it)
  const any = removedIndex || removedDir;
  console.log(`✅ purge-dist-dev-whoami: ${any ? "purged" : "nothing to purge"}`);
}

main().catch((e) => {
  console.error("❌ purge-dist-dev-whoami failed:", e);
  process.exit(1);
});
scripts/refresh-chapter2.sh — new executable file (29 lines)
@@ -0,0 +1,29 @@
#!/usr/bin/env bash
set -euo pipefail

DOCX="sources/docx/archicrat-ia/Chapitre_2–Archeogenese_des_regimes_de_co-viabilite-version_officielle.docx"
MANIFEST="sources/manifest.yml"
ONLY="archicrat-ia/chapitre-2"

echo "== Audit source avant fix =="
if ! python3 scripts/audit-docx-source.py "$DOCX"; then
  echo
  echo "== Fix source =="
  python3 scripts/fix-docx-source.py --in-place "$DOCX"

  echo
  echo "== Audit source après fix =="
  python3 scripts/audit-docx-source.py "$DOCX"
fi

echo
echo "== Réimport =="
node scripts/import-docx.mjs --manifest "$MANIFEST" --only "$ONLY" --force

echo
echo "== Build =="
npm run build

echo
echo "== Tests =="
npm test
scripts/switch-archicratie.sh — new executable file (131 lines)
@@ -0,0 +1,131 @@
#!/usr/bin/env bash
set -euo pipefail

# switch-archicratie.sh — SAFE switch LIVE + STAGING (with timestamped backups)
#
# Usage (NAS, recommended):
#   sudo bash -c 'LIVE_PORT=8081 /volume2/docker/archicratie-web/current/scripts/switch-archicratie.sh'
#   sudo bash -c 'LIVE_PORT=8082 /volume2/docker/archicratie-web/current/scripts/switch-archicratie.sh'
#
# Usage (local R&D test, without the NAS):
#   D=/tmp/dynamic-test LIVE_PORT=8081 bash scripts/switch-archicratie.sh --dry-run
#   D=/tmp/dynamic-test LIVE_PORT=8081 bash scripts/switch-archicratie.sh

usage() {
  cat <<'EOF'
SAFE switch LIVE + STAGING (avec backups horodatés).

Variables / options :
  LIVE_PORT=8081|8082                      (obligatoire) port LIVE cible
  D=/volume2/docker/edge/config/dynamic    (optionnel) dossier des yml Traefik dynamiques
  --dry-run                                n'écrit rien, affiche seulement ce qui serait fait
  -h, --help                               aide

Exemples :
  sudo bash -c 'LIVE_PORT=8082 /volume2/docker/archicratie-web/current/scripts/switch-archicratie.sh'
  D=/tmp/dynamic-test LIVE_PORT=8081 bash scripts/switch-archicratie.sh --dry-run
EOF
}

DRY_RUN=0
for arg in "${@:-}"; do
  case "$arg" in
    --dry-run) DRY_RUN=1 ;;
    -h|--help) usage; exit 0 ;;
    *) ;;
  esac
done

D="${D:-/volume2/docker/edge/config/dynamic}"
F_LIVE="$D/20-archicratie-backend.yml"
F_STAG="$D/21-archicratie-staging.yml"

LIVE_PORT="${LIVE_PORT:-}"
if [[ "$LIVE_PORT" != "8081" && "$LIVE_PORT" != "8082" ]]; then
  echo "❌ LIVE_PORT doit valoir 8081 ou 8082."
  usage
  exit 1
fi

if [[ ! -f "$F_LIVE" || ! -f "$F_STAG" ]]; then
  echo "❌ Fichiers manquants :"
  echo "   $F_LIVE"
  echo "   $F_STAG"
  echo "   (Astuce R&D locale : mets D=/tmp/dynamic-test et crée 20/21 dedans.)"
  exit 1
fi

OTHER_PORT="8081"
[[ "$LIVE_PORT" == "8081" ]] && OTHER_PORT="8082"

show_urls() {
  local f="$1"
  echo "— $f"
  grep -nE '^\s*-\s*url:\s*".*"' "$f" || true
}

# Guard rail: we expect at least one "url:" line in each file
grep -qE '^\s*-\s*url:\s*"' "$F_LIVE" || { echo "❌ Format inattendu dans $F_LIVE (pas de - url: \")"; exit 1; }
grep -qE '^\s*-\s*url:\s*"' "$F_STAG" || { echo "❌ Format inattendu dans $F_STAG (pas de - url: \")"; exit 1; }

echo "Avant :"
show_urls "$F_LIVE"
show_urls "$F_STAG"
echo

echo "Plan : LIVE -> $LIVE_PORT ; STAGING -> $OTHER_PORT"
echo

if [[ "$DRY_RUN" == "1" ]]; then
  echo "DRY-RUN : aucune écriture."
  exit 0
fi

TS="$(date +%F-%H%M%S)"
cp -a "$F_LIVE" "$F_LIVE.bak.$TS"
cp -a "$F_STAG" "$F_STAG.bak.$TS"

# Portable in-place sed (macOS vs Linux/DSM)
sed_inplace() {
  local expr="$1" file="$2"
  if [[ "$(uname -s)" == "Darwin" ]]; then
    sed -i '' -e "$expr" "$file"
  else
    sed -i -e "$expr" "$file"
  fi
}

# Targeted replacement ONLY on the line:  - url: "http://127.0.0.1:808X"
sed_inplace \
  "s#^\([[:space:]]*-[[:space:]]*url:[[:space:]]*\"http://127\\.0\\.0\\.1:\\)808[12]\\(\"[[:space:]]*\)#\\1${LIVE_PORT}\\2#g" \
  "$F_LIVE"

sed_inplace \
  "s#^\([[:space:]]*-[[:space:]]*url:[[:space:]]*\"http://127\\.0\\.0\\.1:\\)808[12]\\(\"[[:space:]]*\)#\\1${OTHER_PORT}\\2#g" \
  "$F_STAG"

# Post-check: confirm both files now contain the expected ports
grep -qE "http://127\.0\.0\.1:${LIVE_PORT}\"" "$F_LIVE" || {
  echo "❌ Post-check FAIL : $F_LIVE ne contient pas http://127.0.0.1:${LIVE_PORT}"
  echo "➡️ rollback backups : $F_LIVE.bak.$TS / $F_STAG.bak.$TS"
  exit 1
}
grep -qE "http://127\.0\.0\.1:${OTHER_PORT}\"" "$F_STAG" || {
  echo "❌ Post-check FAIL : $F_STAG ne contient pas http://127.0.0.1:${OTHER_PORT}"
  echo "➡️ rollback backups : $F_LIVE.bak.$TS / $F_STAG.bak.$TS"
  exit 1
}

echo "✅ OK. Backups :"
echo " - $F_LIVE.bak.$TS"
echo " - $F_STAG.bak.$TS"
echo
echo "Après :"
show_urls "$F_LIVE"
show_urls "$F_STAG"
echo
echo "Smoke tests :"
echo "  curl -sS -I http://127.0.0.1:${LIVE_PORT}/ | head -n 12"
echo "  curl -sS -I http://127.0.0.1:${OTHER_PORT}/ | head -n 12"
echo "  curl -sS -I -H 'Host: archicratie.trans-hands.synology.me' http://127.0.0.1:18080/ | head -n 20"
echo "  curl -sS -I -H 'Host: staging.archicratie.trans-hands.synology.me' http://127.0.0.1:18080/ | head -n 20"
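The guard rails above only assume that each dynamic file contains a line of the form - url: "http://127.0.0.1:808X", so the local R&D setup hinted at in the usage notes can be faked with two throwaway files. A minimal sketch (the file names match what the script expects; the single url line stands in for the real Traefik YAML, which is not shown here):

# Sketch only — builds /tmp/dynamic-test, then dry-runs and runs the switch.
mkdir -p /tmp/dynamic-test
printf '    - url: "http://127.0.0.1:8081"\n' > /tmp/dynamic-test/20-archicratie-backend.yml
printf '    - url: "http://127.0.0.1:8082"\n' > /tmp/dynamic-test/21-archicratie-staging.yml
D=/tmp/dynamic-test LIVE_PORT=8082 bash scripts/switch-archicratie.sh --dry-run
D=/tmp/dynamic-test LIVE_PORT=8082 bash scripts/switch-archicratie.sh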
@@ -205,7 +205,7 @@ for (const [route, mapping] of Object.entries(data)) {
        newId,
        htmlPath,
        msg:
          `oldId present but is NOT an injected alias span (<span class="para-alias">).</n` +
          `oldId present but is NOT an injected alias span (<span class="para-alias">).\n` +
          `Saw: ${seen}`,
      });
      continue;

scripts/write-dev-whoami.mjs — new file (26 lines)
@@ -0,0 +1,26 @@
import fs from "node:fs/promises";
import path from "node:path";

const OUT = path.join(process.cwd(), "public", "_auth", "whoami");

const groupsRaw = process.env.PUBLIC_WHOAMI_GROUPS ?? "editors";
const user = process.env.PUBLIC_WHOAMI_USER ?? "dev";
const name = process.env.PUBLIC_WHOAMI_NAME ?? "Dev Local";
const email = process.env.PUBLIC_WHOAMI_EMAIL ?? "area.technik@proton.me";

const groups = groupsRaw
  .split(/[;,]/)
  .map((s) => s.trim())
  .filter(Boolean)
  .join(",");

const body =
  `Remote-User: ${user}\n` +
  `Remote-Name: ${name}\n` +
  `Remote-Email: ${email}\n` +
  `Remote-Groups: ${groups}\n`;

await fs.mkdir(path.dirname(OUT), { recursive: true });
await fs.writeFile(OUT, body, "utf8");

console.log(`✅ dev whoami written: ${path.relative(process.cwd(), OUT)} (${groups})`);
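The script writes a plain-text headers file for the local auth stub. A quick sketch of running it and the file it should produce (the values are the defaults above unless the PUBLIC_WHOAMI_* variables are set):

# Sketch only — generate and inspect the dev whoami file.
PUBLIC_WHOAMI_GROUPS="editors;reviewers" node scripts/write-dev-whoami.mjs
cat public/_auth/whoami
# Expected shape:
#   Remote-User: dev
#   Remote-Name: Dev Local
#   Remote-Email: area.technik@proton.me
#   Remote-Groups: editors,reviewers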
scripts/write-ops-health.mjs — new file (20 lines)
@@ -0,0 +1,20 @@
import fs from "node:fs";
import path from "node:path";

const root = process.cwd();
const outDir = path.join(root, "public", "__ops");
const outFile = path.join(outDir, "health.json");

const payload = {
  service: "archicratie-site",
  env: process.env.PUBLIC_OPS_ENV || "unknown",
  upstream: process.env.PUBLIC_OPS_UPSTREAM || "unknown",
  buildSha: process.env.PUBLIC_BUILD_SHA || "unknown",
  builtAt: process.env.PUBLIC_BUILD_TIME || new Date().toISOString(),
};

fs.mkdirSync(outDir, { recursive: true });
fs.writeFileSync(outFile, `${JSON.stringify(payload, null, 2)}\n`, "utf8");

console.log(`✅ ops health written: ${outFile}`);
console.log(payload);
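Same idea for the ops health endpoint: the JSON fields come straight from the payload object above, and the values below assume the PUBLIC_* variables are exported by the build (builtAt falls back to the current ISO timestamp):

# Sketch only — generate and inspect public/__ops/health.json.
PUBLIC_OPS_ENV=staging PUBLIC_OPS_UPSTREAM=8082 PUBLIC_BUILD_SHA=abc1234 node scripts/write-ops-health.mjs
cat public/__ops/health.json
# Expected shape:
#   { "service": "archicratie-site", "env": "staging", "upstream": "8082",
#     "buildSha": "abc1234", "builtAt": "2025-01-01T00:00:00.000Z" }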
sources/docx/commencer/document-de-presentation.docx — new binary file (BIN)
@@ -1,161 +1,123 @@
version: 1

docs:
  # =========================
  # Entry document
  # =========================
  - source: sources/docx/commencer/document-de-presentation.docx
    collection: commencer
    slug: document-de-presentation
    title: "Document de présentation"
    order: 0

  # =========================
  # Archicratie — essay-thesis "ArchiCraT-IA"
  # =========================
  - source: sources/docx/archicrat-ia/Prologue—Archicratie-fondation_et_finalite_sociopolitique_et_historique-version_officielle.docx
    collection: archicratie
    slug: archicrat-ia/prologue
    title: "Prologue — Fondation et finalité sociopolitique et historique"
    collection: archicrat-ia
    slug: prologue
    title: "Prologue — Fondation, finalité sociopolitique et historique"
    order: 10

  - source: sources/docx/archicrat-ia/Chapitre_1—Fondements_epistemologiques_et_modelisation_Archicratie-version_officielle.docx
    collection: archicratie
    slug: archicrat-ia/chapitre-1
    collection: archicrat-ia
    slug: chapitre-1
    title: "Chapitre 1 — Fondements épistémologiques et modélisation"
    order: 20

  - source: sources/docx/archicrat-ia/Chapitre_2–Archeogenese_des_regimes_de_co-viabilite-version_officielle.docx
    collection: archicratie
    slug: archicrat-ia/chapitre-2
    collection: archicrat-ia
    slug: chapitre-2
    title: "Chapitre 2 — Archéogenèse des régimes de co-viabilité"
    order: 30

  - source: sources/docx/archicrat-ia/Chapitre_3—Philosophies_du_pouvoir_et_Archicration-pour_une_topologie_differenciee_des_regimes_regulateurs-version_officielle.docx
    collection: archicratie
    slug: archicrat-ia/chapitre-3
    collection: archicrat-ia
    slug: chapitre-3
    title: "Chapitre 3 — Philosophies du pouvoir et archicration"
    order: 40

  - source: sources/docx/archicrat-ia/Chapitre_4—Vers_une_histoire_archicratique_des_revolutions_industrielles-version_officielle.docx
    collection: archicratie
    slug: archicrat-ia/chapitre-4
    collection: archicrat-ia
    slug: chapitre-4
    title: "Chapitre 4 — Histoire archicratique des révolutions industrielles"
    order: 50

  - source: sources/docx/archicrat-ia/Chapitre_5—Problematiques_des_tensions_des_co-viabilites_et_des_regulations_archicratiques-version_officielle.docx
    collection: archicratie
    slug: archicrat-ia/chapitre-5
    collection: archicrat-ia
    slug: chapitre-5
    title: "Chapitre 5 — Tensions, co-viabilités et régulations"
    order: 60

  - source: sources/docx/archicrat-ia/Conclusion-Archicrat-IA-version_officielle.docx
    collection: archicratie
    slug: archicrat-ia/conclusion
    collection: archicrat-ia
    slug: conclusion
    title: "Conclusion — ArchiCraT-IA"
    order: 70

  # =========================
  # AI — case study (1 page = 1 chapter)
  # NOTE: the monolithic "Cas_IA-... .docx" file is NOT included in the manifest.
  # Case study — Governance of AI systems
  # =========================
  - source: sources/docx/cas-ia/Cas_IA-Archicratie_et_gouvernance_des_systemes_IA-Introduction_generale—Mettre_en_scene_un_systeme_IA.docx
    collection: ia
    slug: cas-pratique/introduction
    title: "Cas pratique — Introduction générale : Mettre en scène un système IA"
  - source: sources/docx/cas-ia/Cas_Pratique-Archicratie_et_gouvernance_des_systemes_IA-Introduction.docx
    collection: cas-ia
    slug: introduction
    title: "Introduction générale — Mettre un système d’IA en scène"
    order: 110

  - source: sources/docx/cas-ia/Cas_IA-Archicratie_et_gouvernance_des_systemes_IA-Chapitre_I—Epreuve_de_detectabilite.docx
    collection: ia
    slug: cas-pratique/chapitre-1
    title: "Cas pratique — Chapitre I : Épreuve de détectabilité"
  - source: sources/docx/cas-ia/Cas_Pratique-Archicratie_et_gouvernance_des_systemes_IA-Chapitre_1_Epreuve_de_detectabilite.docx
    collection: cas-ia
    slug: chapitre-1
    title: "Chapitre I — Épreuve de détectabilité"
    order: 120

  - source: sources/docx/cas-ia/Cas_IA-Archicratie_et_gouvernance_des_systemes_IA-Chapitre_II—Epreuve_topologique.docx
    collection: ia
    slug: cas-pratique/chapitre-2
    title: "Cas pratique — Chapitre II : Épreuve topologique"
  - source: sources/docx/cas-ia/Cas_Pratique-Archicratie_et_gouvernance_des_systemes_IA-Chapitre_2_Epreuve_Topologique.docx
    collection: cas-ia
    slug: chapitre-2
    title: "Chapitre II — Épreuve topologique"
    order: 130

  - source: sources/docx/cas-ia/Cas_IA-Archicratie_et_gouvernance_des_systemes_IA-Chapitre_III—Epreuve_archeogenetique.docx
    collection: ia
    slug: cas-pratique/chapitre-3
    title: "Cas pratique — Chapitre III : Épreuve archéogénétique"
  - source: sources/docx/cas-ia/Cas_Pratique-Archicratie_et_gouvernance_des_systemes_IA-Chapitre_3_Epreuve_archeogenetique.docx
    collection: cas-ia
    slug: chapitre-3
    title: "Chapitre III — Épreuve archéogénétique"
    order: 140

  - source: sources/docx/cas-ia/Cas_IA-Archicratie_et_gouvernance_des_systemes_IA-Chapitre_IV—Epreuve_morphologique.docx
    collection: ia
    slug: cas-pratique/chapitre-4
    title: "Cas pratique — Chapitre IV : Épreuve morphologique"
  - source: sources/docx/cas-ia/Cas_Pratique-Archicratie_et_gouvernance_des_systemes_IA-Chapitre_4_Epreuve_Morphologique.docx
    collection: cas-ia
    slug: chapitre-4
    title: "Chapitre IV — Épreuve morphologique"
    order: 150

  - source: sources/docx/cas-ia/Cas_IA-Archicratie_et_gouvernance_des_systemes_IA-Chapitre_V—Epreuve_historique.docx
    collection: ia
    slug: cas-pratique/chapitre-5
    title: "Cas pratique — Chapitre V : Épreuve historique"
  - source: sources/docx/cas-ia/Cas_Pratique-Archicratie_et_gouvernance_des_systemes_IA-Chapitre_5_Epreuve_Historique.docx
    collection: cas-ia
    slug: chapitre-5
    title: "Chapitre V — Épreuve historique"
    order: 160

  - source: sources/docx/cas-ia/Cas_IA-Archicratie_et_gouvernance_des_systemes_IA-Chapitre_VI—Epreuve_de_co-viabilite.docx
    collection: ia
    slug: cas-pratique/chapitre-6
    title: "Cas pratique — Chapitre VI : Épreuve de co-viabilité"
  - source: sources/docx/cas-ia/Cas_Pratique-Archicratie_et_gouvernance_des_systemes_IA-Chapitre_6_Epreuve_de_Co-viabilite.docx
    collection: cas-ia
    slug: chapitre-6
    title: "Chapitre VI — Épreuve de co-viabilité"
    order: 170

  - source: sources/docx/cas-ia/Cas_IA-Archicratie_et_gouvernance_des_systemes_IA-Chapitre_VII—Gestes_archicratiques_concrets_pour_un_systeme_IA.docx
    collection: ia
    slug: cas-pratique/chapitre-7
    title: "Cas pratique — Chapitre VII : Gestes archicratiques concrets"
  - source: sources/docx/cas-ia/Cas_Pratique-Archicratie_et_gouvernance_des_systemes_IA-Chapitre_7_Gestes_archicratiques_concrets_pour_un_systeme_IA.docx
    collection: cas-ia
    slug: chapitre-7
    title: "Chapitre VII — Gestes archicratiques concrets pour un système d’IA"
    order: 180

  - source: sources/docx/cas-ia/Cas_IA-Archicratie_et_gouvernance_des_systemes_IA-Conclusion.docx
    collection: ia
    slug: cas-pratique/conclusion
    title: "Cas pratique — Conclusion"
  - source: sources/docx/cas-ia/Cas_Pratique-Archicratie_et_gouvernance_des_systemes_IA-Conclusion.docx
    collection: cas-ia
    slug: conclusion
    title: "Conclusion"
    order: 190

  - source: sources/docx/cas-ia/Cas_IA-Archicratie_et_gouvernance_des_systemes_IA-Annexe—Glossaire_archicratique_pour_audit_des_systemes_IA.docx
    collection: ia
    slug: cas-pratique/annexe-glossaire-audit
    title: "Cas pratique — Annexe : Glossaire archicratique pour audit des systèmes IA"
  - source: sources/docx/cas-ia/Cas_Pratique-Archicratie_et_gouvernance_des_systemes_IA-Annexe_Glossaire_Archicratique_Cas_IA.docx
    collection: cas-ia
    slug: annexe-glossaire-audit
    title: "Annexe — Glossaire archicratique pour l’audit des systèmes d’IA"
    order: 195

  # =========================
  # Treatise — generative ontodynamics (1 page = 1 chapter)
  # NOTE: the monolithic "Traite-...-version_officielle.docx" file is NOT included in the manifest.
  # =========================
  - source: sources/docx/traite/Traite-Ontodynamique_Generative-Fondements_Archicratie-Introduction-version_officielle.docx
    collection: traite
    slug: ontodynamique/introduction
    title: "Traité — Introduction"
    order: 210

  - source: sources/docx/traite/Traite-Ontodynamique_Generative-Fondements_Archicratie-Chapitre_1—Le_flux_ontogenetique-version_officielle.docx
    collection: traite
    slug: ontodynamique/chapitre-1
    title: "Traité — Chapitre 1 : Le flux ontogénétique"
    order: 220

  - source: sources/docx/traite/Traite-Ontodynamique_Generative-Fondements_Archicratie-Chapitre_2—economie_du_reel-version_officielle.docx
    collection: traite
    slug: ontodynamique/chapitre-2
    title: "Traité — Chapitre 2 : Économie du réel"
    order: 230

  - source: sources/docx/traite/Traite-Ontodynamique_Generative-Fondements_Archicratie-Chapitre_3—Le_reel_comme_systeme_regulateur-version_officielle.docx
    collection: traite
    slug: ontodynamique/chapitre-3
    title: "Traité — Chapitre 3 : Le réel comme système régulateur"
    order: 240

  - source: sources/docx/traite/Traite-Ontodynamique_Generative-Fondements_Archicratie-Chapitre_4—Arcalite-structures_formes_invariants-version_officielle.docx
    collection: traite
    slug: ontodynamique/chapitre-4
    title: "Traité — Chapitre 4 : Arcalité — structures, formes, invariants"
    order: 250

  - source: sources/docx/traite/Traite-Ontodynamique_Generative-Fondements_Archicratie-Chapitre_5-Cratialite-forces_flux_gradients-version_officielle.docx
    collection: traite
    slug: ontodynamique/chapitre-5
    title: "Traité — Chapitre 5 : Cratialité — forces, flux, gradients"
    order: 260

  - source: sources/docx/traite/Traite-Ontodynamique_Generative-Fondements_Archicratie-Chapitre_6—Archicration-version_officielle.docx
    collection: traite
    slug: ontodynamique/chapitre-6
    title: "Traité — Chapitre 6 : Archicration"
    order: 270

  # =========================
  # Glossary / Lexicon
  # =========================
@@ -169,4 +131,4 @@ docs:
    collection: glossaire
    slug: mini-glossaire-verbes
    title: "Mini-glossaire des verbes de la scène archicratique"
    order: 910
    order: 910
@@ -1,2 +1 @@
{}

src/annotations/.gitkeep — new file (0 lines)
@@ -1,59 +0,0 @@
schema: 1

# optional (if present, it must match the file path)
page: archicrat-ia/prologue

paras:
  p-0-d7974f88:
    refs:
      - label: "Happycratie — (Cabanas & Illouz) via Cairn"
        url: "https://shs.cairn.info/revue-ethnologie-francaise-2019-4-page-813?lang=fr"
        kind: "article"
      - label: "Techno-féodalisme — Variations (OpenEdition)"
        url: "https://journals.openedition.org/variations/2290"
        kind: "article"

    authors:
      - "Eva Illouz"
      - "Yanis Varoufakis"

    quotes:
      - text: "Dans Happycratie, Edgar Cabanas et Eva Illouz..."
        source: "Happycratie, p.1"
      - text: "En eux-mêmes, les actifs ne sont ni féodaux ni capitalistes..."
        source: "Entretien Morozov/Varoufakis — techno-féodalisme"

    media:
      - type: "image"
        src: "/public/media/archicrat-ia/prologue/p-0-d7974f88/schema-1.svg"
        caption: "Tableau explicatif"
        credit: "ChatGPT"
      - type: "image"
        src: "/public/media/archicrat-ia/prologue/p-0-d7974f88/schema-2.svg"
        caption: "Diagramme d’évolution"
        credit: "Yanis Varoufakis"

    comments_editorial:
      - text: "TODO: nuancer / préciser — commentaire éditorial versionné (pas public)."
        status: "draft"

  p-1-2ef25f29:
    refs:
      - label: "Kafka et le pouvoir — Bernard Lahire (Cairn)"
        url: "https://shs.cairn.info/franz-kafka--9782707159410-page-475?lang=fr"
        kind: "book"

    authors:
      - "Bernard Lahire"

    quotes:
      - text: "Si l’on voulait chercher quelque chose comme une vision du monde chez Kafka..."
        source: "Bernard Lahire, Franz Kafka, p.475+"

    media:
      - type: "video"
        src: "/public/media/archicrat-ia/prologue/p-1-2ef25f29/bien_commun.mp4"
        caption: "Entretien avec Bernard Lahire"
        credit: "Cairn.info"

    comments_editorial: []
@@ -1,51 +1,80 @@
|
||||
---
|
||||
import { getCollection } from "astro:content";
|
||||
|
||||
const { currentSlug } = Astro.props;
|
||||
const {
|
||||
currentSlug,
|
||||
collection = "archicrat-ia",
|
||||
basePath = "/archicrat-ia",
|
||||
label = "Table des matières"
|
||||
} = Astro.props;
|
||||
|
||||
const entries = (await getCollection("archicratie"))
|
||||
.filter((e) => e.slug.startsWith("archicrat-ia/"))
|
||||
.sort((a, b) => (a.data.order ?? 0) - (b.data.order ?? 0));
|
||||
const slugOf = (entry) => String(entry.id).replace(/\.(md|mdx)$/i, "");
|
||||
const hrefOf = (entry) => `${basePath}/${slugOf(entry)}/`;
|
||||
|
||||
// ✅ On route l’Essai-thèse sur /archicrat-ia/<slug-sans-prefix>/
|
||||
// (Astro trailingSlash = always → on garde le "/" final)
|
||||
const strip = (s) => String(s || "").replace(/^archicrat-ia\//, "");
|
||||
const href = (slug) => `/archicrat-ia/${strip(slug)}/`;
|
||||
const collator = new Intl.Collator("fr", { sensitivity: "base", numeric: true });
|
||||
|
||||
const entries = [...await getCollection(collection)].sort((a, b) => {
|
||||
const ao = Number(a.data.order ?? 9999);
|
||||
const bo = Number(b.data.order ?? 9999);
|
||||
if (ao !== bo) return ao - bo;
|
||||
|
||||
const at = String(a.data.title ?? a.data.term ?? slugOf(a));
|
||||
const bt = String(b.data.title ?? b.data.term ?? slugOf(b));
|
||||
return collator.compare(at, bt);
|
||||
});
|
||||
|
||||
const tocId = `toc-global-${collection}-${String(basePath).replace(/[^\w-]+/g, "-")}`;
|
||||
---
|
||||
|
||||
<nav class="toc-global" aria-label="Table des matières — ArchiCraT-IA">
|
||||
<div class="toc-global__head">
|
||||
<div class="toc-global__title">Table des matières</div>
|
||||
</div>
|
||||
<nav
|
||||
class="toc-global"
|
||||
data-mobile-default="closed"
|
||||
aria-label={label}
|
||||
data-toc-global
|
||||
data-toc-key={`global:${collection}:${basePath}`}
|
||||
>
|
||||
<button
|
||||
class="toc-global__head toc-global__toggle"
|
||||
type="button"
|
||||
aria-expanded="false"
|
||||
aria-controls={tocId}
|
||||
>
|
||||
<span class="toc-global__title">{label}</span>
|
||||
<span class="toc-global__chevron" aria-hidden="true">▾</span>
|
||||
</button>
|
||||
|
||||
<ol class="toc-global__list">
|
||||
{entries.map((e) => {
|
||||
const active = e.slug === currentSlug;
|
||||
return (
|
||||
<li class={`toc-item ${active ? "is-active" : ""}`}>
|
||||
<a class="toc-link" href={href(e.slug)} aria-current={active ? "page" : undefined}>
|
||||
<span class="toc-link__row">
|
||||
{active ? (
|
||||
<span class="toc-active-indicator" aria-hidden="true">👉</span>
|
||||
) : (
|
||||
<span class="toc-active-spacer" aria-hidden="true"></span>
|
||||
)}
|
||||
<div class="toc-global__body-clip" id={tocId} hidden>
|
||||
<div class="toc-global__body">
|
||||
<ol class="toc-global__list">
|
||||
{entries.map((e) => {
|
||||
const slug = slugOf(e);
|
||||
const active = slug === currentSlug;
|
||||
|
||||
<span class="toc-link__title">{e.data.title}</span>
|
||||
return (
|
||||
<li class={`toc-item ${active ? "is-active" : ""}`}>
|
||||
<a class="toc-link" href={hrefOf(e)} aria-current={active ? "page" : undefined}>
|
||||
<span class="toc-link__row">
|
||||
<span class={`toc-active-mark ${active ? "is-on" : ""}`} aria-hidden="true">
|
||||
<span class="toc-active-mark__dot"></span>
|
||||
</span>
|
||||
|
||||
{active && (
|
||||
<span class="toc-badge" aria-label="Chapitre en cours">
|
||||
En cours
|
||||
<span class="toc-link__title">{e.data.title}</span>
|
||||
|
||||
{active && (
|
||||
<span class="toc-badge" aria-label="Chapitre en cours">
|
||||
En cours
|
||||
</span>
|
||||
)}
|
||||
</span>
|
||||
)}
|
||||
</span>
|
||||
|
||||
{active && <span class="toc-underline" aria-hidden="true"></span>}
|
||||
</a>
|
||||
</li>
|
||||
);
|
||||
})}
|
||||
</ol>
|
||||
{active && <span class="toc-underline" aria-hidden="true"></span>}
|
||||
</a>
|
||||
</li>
|
||||
);
|
||||
})}
|
||||
</ol>
|
||||
</div>
|
||||
</div>
|
||||
</nav>
|
||||
|
||||
<style>
|
||||
@@ -56,7 +85,22 @@ const href = (slug) => `/archicrat-ia/${strip(slug)}/`;
|
||||
background: rgba(127,127,127,0.06);
|
||||
}
|
||||
|
||||
.toc-global__toggle{
|
||||
width: 100%;
|
||||
appearance: none;
|
||||
border: 0;
|
||||
background: transparent;
|
||||
color: inherit;
|
||||
text-align: left;
|
||||
padding: 0;
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
.toc-global__head{
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: space-between;
|
||||
gap: 10px;
|
||||
margin-bottom: 10px;
|
||||
padding-bottom: 10px;
|
||||
border-bottom: 1px dashed rgba(127,127,127,0.25);
|
||||
@@ -69,11 +113,36 @@ const href = (slug) => `/archicrat-ia/${strip(slug)}/`;
|
||||
opacity: .88;
|
||||
}
|
||||
|
||||
.toc-global__chevron{
|
||||
font-size: 12px;
|
||||
opacity: .7;
|
||||
transition: transform 180ms ease;
|
||||
}
|
||||
|
||||
.toc-global__body-clip{
|
||||
display: grid;
|
||||
grid-template-rows: 1fr;
|
||||
transition:
|
||||
grid-template-rows 220ms ease,
|
||||
opacity 160ms ease,
|
||||
margin-top 220ms ease;
|
||||
}
|
||||
|
||||
.toc-global__body{
|
||||
min-height: 0;
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
.toc-global__list{
|
||||
list-style: none;
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
max-height: 44vh;
|
||||
overflow: auto;
|
||||
padding-right: 8px;
|
||||
scrollbar-gutter: stable;
|
||||
}
|
||||
|
||||
.toc-global__list li::marker{ content: ""; }
|
||||
|
||||
.toc-item{ margin: 6px 0; }
|
||||
@@ -99,13 +168,33 @@ const href = (slug) => `/archicrat-ia/${strip(slug)}/`;
|
||||
align-items: center;
|
||||
}
|
||||
|
||||
.toc-active-indicator{
|
||||
font-size: 14px;
|
||||
line-height: 1;
|
||||
.toc-active-mark{
|
||||
width: 14px;
|
||||
height: 14px;
|
||||
display: inline-grid;
|
||||
place-items: center;
|
||||
border-radius: 999px;
|
||||
border: 1px solid transparent;
|
||||
opacity: .55;
|
||||
}
|
||||
|
||||
.toc-active-spacer{
|
||||
width: 14px;
|
||||
.toc-active-mark__dot{
|
||||
width: 5px;
|
||||
height: 5px;
|
||||
border-radius: 999px;
|
||||
background: currentColor;
|
||||
opacity: .65;
|
||||
}
|
||||
|
||||
.toc-active-mark.is-on{
|
||||
border-color: rgba(127,127,127,0.34);
|
||||
opacity: 1;
|
||||
}
|
||||
|
||||
.toc-active-mark.is-on .toc-active-mark__dot{
|
||||
width: 6px;
|
||||
height: 6px;
|
||||
opacity: 1;
|
||||
}
|
||||
|
||||
.toc-link__title{
|
||||
@@ -143,11 +232,70 @@ const href = (slug) => `/archicrat-ia/${strip(slug)}/`;
|
||||
border-radius: 999px;
|
||||
}
|
||||
|
||||
.toc-global__list{
|
||||
max-height: 44vh;
|
||||
overflow: auto;
|
||||
padding-right: 8px;
|
||||
scrollbar-gutter: stable;
|
||||
@media (max-width: 980px){
|
||||
.toc-global{
|
||||
padding: 10px 12px;
|
||||
border-radius: 14px;
|
||||
}
|
||||
|
||||
.toc-global__head{
|
||||
margin-bottom: 0;
|
||||
padding-bottom: 0;
|
||||
border-bottom: 0;
|
||||
min-height: 28px;
|
||||
}
|
||||
|
||||
.toc-global__title{
|
||||
font-size: 13px;
|
||||
}
|
||||
|
||||
.toc-global__body-clip{
|
||||
margin-top: 10px;
|
||||
}
|
||||
|
||||
.toc-global.is-collapsed .toc-global__body-clip{
|
||||
grid-template-rows: 0fr;
|
||||
opacity: 0;
|
||||
margin-top: 0;
|
||||
}
|
||||
|
||||
.toc-global__body{
|
||||
min-height: 0;
|
||||
overflow: hidden;
|
||||
transition: opacity 180ms ease;
|
||||
}
|
||||
|
||||
.toc-global.is-collapsed .toc-global__body{
|
||||
opacity: 0;
|
||||
}
|
||||
|
||||
.toc-global.is-collapsed .toc-global__chevron{
|
||||
transform: rotate(-90deg);
|
||||
}
|
||||
|
||||
.toc-link{
|
||||
padding: 7px 9px;
|
||||
border-radius: 12px;
|
||||
}
|
||||
|
||||
.toc-link__title{
|
||||
font-size: 12.5px;
|
||||
line-height: 1.22;
|
||||
}
|
||||
|
||||
.toc-badge{
|
||||
font-size: 10px;
|
||||
padding: 2px 7px;
|
||||
}
|
||||
|
||||
.toc-global__list{
|
||||
max-height: min(42vh, 360px);
|
||||
padding-right: 4px;
|
||||
}
|
||||
|
||||
.toc-global__body-clip[hidden]{
|
||||
display: none !important;
|
||||
}
|
||||
}
|
||||
|
||||
@media (prefers-color-scheme: dark){
|
||||
@@ -155,12 +303,95 @@ const href = (slug) => `/archicrat-ia/${strip(slug)}/`;
|
||||
.toc-link:hover{ background: rgba(255,255,255,0.06); }
|
||||
.toc-item.is-active .toc-link{ background: rgba(255,255,255,0.06); }
|
||||
.toc-badge{ background: rgba(255,255,255,0.06); }
|
||||
.toc-active-mark.is-on{ border-color: rgba(255,255,255,0.22); }
|
||||
}
|
||||
</style>
|
||||
|
||||
<script is:inline>
|
||||
(() => {
|
||||
const active = document.querySelector(".toc-global .toc-item.is-active");
|
||||
if (active) active.scrollIntoView({ block: "nearest" });
|
||||
function init() {
|
||||
document.querySelectorAll("[data-toc-global]").forEach((nav) => {
|
||||
if (nav.dataset.tocReady === "1") return;
|
||||
nav.dataset.tocReady = "1";
|
||||
|
||||
const toggle = nav.querySelector(".toc-global__toggle");
|
||||
const bodyClip = nav.querySelector(".toc-global__body-clip");
|
||||
const active = nav.querySelector(".toc-item.is-active");
|
||||
const mq = window.matchMedia("(max-width: 980px)");
|
||||
const key = `archicratie:${nav.dataset.tocKey || "toc-global"}`;
|
||||
|
||||
if (!toggle || !bodyClip) return;
|
||||
|
||||
const read = () => {
|
||||
try {
|
||||
const v = localStorage.getItem(key);
|
||||
if (v === "open") return true;
|
||||
if (v === "closed") return false;
|
||||
} catch {}
|
||||
return null;
|
||||
};
|
||||
|
||||
const write = (open) => {
|
||||
try { localStorage.setItem(key, open ? "open" : "closed"); } catch {}
|
||||
};
|
||||
|
||||
const setOpen = (open, { persist = true } = {}) => {
|
||||
const isMobile = mq.matches;
|
||||
const effectiveOpen = isMobile ? open : true;
|
||||
|
||||
nav.classList.toggle("is-collapsed", isMobile && !effectiveOpen);
|
||||
toggle.setAttribute("aria-expanded", effectiveOpen ? "true" : "false");
|
||||
|
||||
if (bodyClip) {
|
||||
bodyClip.hidden = isMobile && !effectiveOpen;
|
||||
}
|
||||
|
||||
if (persist && isMobile) write(effectiveOpen);
|
||||
};
|
||||
|
||||
const initState = () => {
|
||||
if (!mq.matches) {
|
||||
setOpen(true, { persist: false });
|
||||
if (active) active.scrollIntoView({ block: "nearest" });
|
||||
return;
|
||||
}
|
||||
|
||||
const stored = read();
|
||||
const open = stored == null ? false : stored;
|
||||
setOpen(open, { persist: false });
|
||||
|
||||
if (open && active) active.scrollIntoView({ block: "nearest" });
|
||||
};
|
||||
|
||||
toggle.addEventListener("click", () => {
|
||||
const open = toggle.getAttribute("aria-expanded") !== "true";
|
||||
setOpen(open);
|
||||
if (open && active) active.scrollIntoView({ block: "nearest" });
|
||||
|
||||
if (open) {
|
||||
window.dispatchEvent(new CustomEvent("archicratie:tocGlobalOpen"));
|
||||
}
|
||||
});
|
||||
|
||||
window.addEventListener("archicratie:tocLocalOpen", () => {
|
||||
if (!mq.matches) return;
|
||||
setOpen(false);
|
||||
});
|
||||
|
||||
if (mq.addEventListener) {
|
||||
mq.addEventListener("change", initState);
|
||||
} else if (mq.addListener) {
|
||||
mq.addListener(initState);
|
||||
}
|
||||
|
||||
initState();
|
||||
});
|
||||
}
|
||||
|
||||
if (document.readyState === "loading") {
|
||||
window.addEventListener("DOMContentLoaded", init, { once: true });
|
||||
} else {
|
||||
init();
|
||||
}
|
||||
})();
|
||||
</script>
|
||||
</script>
|
||||
529
src/components/GlossaryAside.astro
Normal file
@@ -0,0 +1,529 @@
|
||||
---
|
||||
import {
|
||||
getGlossaryEntryAsideData,
|
||||
getGlossaryPortalLinks,
|
||||
hrefOfGlossaryEntry,
|
||||
slugOfGlossaryEntry,
|
||||
} from "../lib/glossary";
|
||||
|
||||
const {
|
||||
currentEntry,
|
||||
allEntries = [],
|
||||
} = Astro.props;
|
||||
|
||||
const currentSlug = slugOfGlossaryEntry(currentEntry);
|
||||
|
||||
const {
|
||||
displayFamily,
|
||||
displayDomain,
|
||||
displayLevel,
|
||||
showNoyau,
|
||||
showSameFamily,
|
||||
fondamentaux,
|
||||
sameFamilyTitle,
|
||||
sameFamilyEntries,
|
||||
relationSections,
|
||||
contextualTheory,
|
||||
} = getGlossaryEntryAsideData(currentEntry, allEntries);
|
||||
|
||||
const portalLinks = getGlossaryPortalLinks();
|
||||
---
|
||||
|
||||
<nav class="glossary-aside" aria-label="Navigation du glossaire">
|
||||
<div class="glossary-aside__block glossary-aside__block--intro">
|
||||
<a class="glossary-aside__back" href="/glossaire/">← Retour au glossaire</a>
|
||||
<div class="glossary-aside__title">Glossaire archicratique</div>
|
||||
|
||||
<div class="glossary-aside__pills" aria-label="Repères de lecture">
|
||||
<span class="glossary-aside__pill glossary-aside__pill--family">
|
||||
{displayFamily}
|
||||
</span>
|
||||
|
||||
{displayDomain && (
|
||||
<span class="glossary-aside__pill">{displayDomain}</span>
|
||||
)}
|
||||
|
||||
{displayLevel && (
|
||||
<span class="glossary-aside__pill">{displayLevel}</span>
|
||||
)}
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<details class="glossary-aside__block glossary-aside__disclosure">
|
||||
<summary class="glossary-aside__summary">
|
||||
<span class="glossary-aside__heading">Portails</span>
|
||||
<span class="glossary-aside__chevron" aria-hidden="true">▾</span>
|
||||
</summary>
|
||||
|
||||
<div class="glossary-aside__panel">
|
||||
<ul class="glossary-aside__list">
|
||||
{portalLinks.map((item) => (
|
||||
<li><a href={item.href}>{item.label}</a></li>
|
||||
))}
|
||||
</ul>
|
||||
</div>
|
||||
</details>
|
||||
|
||||
{showNoyau && (
|
||||
<details class="glossary-aside__block glossary-aside__disclosure">
|
||||
<summary class="glossary-aside__summary">
|
||||
<span class="glossary-aside__heading">Noyau archicratique</span>
|
||||
<span class="glossary-aside__chevron" aria-hidden="true">▾</span>
|
||||
</summary>
|
||||
|
||||
<div class="glossary-aside__panel">
|
||||
<ul class="glossary-aside__list">
|
||||
{fondamentaux.map((entry) => {
|
||||
const active = slugOfGlossaryEntry(entry) === currentSlug;
|
||||
return (
|
||||
<li>
|
||||
<a
|
||||
href={hrefOfGlossaryEntry(entry)}
|
||||
aria-current={active ? "page" : undefined}
|
||||
class={active ? "is-active" : undefined}
|
||||
>
|
||||
{entry.data.term}
|
||||
</a>
|
||||
</li>
|
||||
);
|
||||
})}
|
||||
</ul>
|
||||
</div>
|
||||
</details>
|
||||
)}
|
||||
|
||||
{showSameFamily && (
|
||||
<details class="glossary-aside__block glossary-aside__disclosure">
|
||||
<summary class="glossary-aside__summary">
|
||||
<span class="glossary-aside__heading">{sameFamilyTitle}</span>
|
||||
<span class="glossary-aside__chevron" aria-hidden="true">▾</span>
|
||||
</summary>
|
||||
|
||||
<div class="glossary-aside__panel">
|
||||
<ul class="glossary-aside__list">
|
||||
{sameFamilyEntries.map((entry) => {
|
||||
const active = slugOfGlossaryEntry(entry) === currentSlug;
|
||||
return (
|
||||
<li>
|
||||
<a
|
||||
href={hrefOfGlossaryEntry(entry)}
|
||||
aria-current={active ? "page" : undefined}
|
||||
class={active ? "is-active" : undefined}
|
||||
>
|
||||
{entry.data.term}
|
||||
</a>
|
||||
</li>
|
||||
);
|
||||
})}
|
||||
</ul>
|
||||
</div>
|
||||
</details>
|
||||
)}
|
||||
|
||||
{relationSections.length > 0 && (
|
||||
<details class="glossary-aside__block glossary-aside__disclosure">
|
||||
<summary class="glossary-aside__summary">
|
||||
<span class="glossary-aside__heading">Autour de cette fiche</span>
|
||||
<span class="glossary-aside__chevron" aria-hidden="true">▾</span>
|
||||
</summary>
|
||||
|
||||
<div class="glossary-aside__panel">
|
||||
{relationSections.map((section) => (
|
||||
<>
|
||||
<h3 class="glossary-aside__subheading">{section.title}</h3>
|
||||
<ul class="glossary-aside__list">
|
||||
{section.items.map((entry) => (
|
||||
<li><a href={hrefOfGlossaryEntry(entry)}>{entry.data.term}</a></li>
|
||||
))}
|
||||
</ul>
|
||||
</>
|
||||
))}
|
||||
</div>
|
||||
</details>
|
||||
)}
|
||||
|
||||
{contextualTheory.length > 0 && (
|
||||
<details class="glossary-aside__block glossary-aside__disclosure">
|
||||
<summary class="glossary-aside__summary">
|
||||
<span class="glossary-aside__heading">Paysage théorique</span>
|
||||
<span class="glossary-aside__chevron" aria-hidden="true">▾</span>
|
||||
</summary>
|
||||
|
||||
<div class="glossary-aside__panel">
|
||||
<ul class="glossary-aside__list">
|
||||
{contextualTheory.map((entry) => (
|
||||
<li><a href={hrefOfGlossaryEntry(entry)}>{entry.data.term}</a></li>
|
||||
))}
|
||||
</ul>
|
||||
</div>
|
||||
</details>
|
||||
)}
|
||||
</nav>
|
||||
|
||||
<style>
|
||||
.glossary-aside{
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 14px;
|
||||
min-width: 0;
|
||||
}
|
||||
|
||||
.glossary-aside__block{
|
||||
border: 1px solid rgba(127,127,127,0.22);
|
||||
border-radius: 16px;
|
||||
padding: 14px;
|
||||
background: rgba(127,127,127,0.05);
|
||||
min-width: 0;
|
||||
}
|
||||
|
||||
.glossary-aside__block--intro{
|
||||
padding-top: 13px;
|
||||
padding-bottom: 13px;
|
||||
}
|
||||
|
||||
.glossary-aside__back{
|
||||
display: inline-block;
|
||||
margin-bottom: 10px;
|
||||
font-size: 14px;
|
||||
font-weight: 700;
|
||||
line-height: 1.35;
|
||||
text-decoration: none;
|
||||
}
|
||||
|
||||
.glossary-aside__title{
|
||||
font-size: 18px;
|
||||
font-weight: 850;
|
||||
letter-spacing: .1px;
|
||||
line-height: 1.22;
|
||||
}
|
||||
|
||||
.glossary-aside__pills{
|
||||
display: flex;
|
||||
flex-wrap: wrap;
|
||||
gap: 7px;
|
||||
margin-top: 10px;
|
||||
}
|
||||
|
||||
.glossary-aside__pill{
|
||||
display: inline-flex;
|
||||
align-items: center;
|
||||
padding: 5px 10px;
|
||||
border: 1px solid rgba(127,127,127,0.24);
|
||||
border-radius: 999px;
|
||||
background: rgba(127,127,127,0.04);
|
||||
font-size: 13px;
|
||||
line-height: 1.35;
|
||||
opacity: .92;
|
||||
min-width: 0;
|
||||
}
|
||||
|
||||
.glossary-aside__pill--family{
|
||||
border-color: rgba(127,127,127,0.38);
|
||||
font-weight: 800;
|
||||
}
|
||||
|
||||
.glossary-aside__disclosure{
|
||||
padding: 0;
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
.glossary-aside__summary{
|
||||
list-style: none;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: space-between;
|
||||
gap: 12px;
|
||||
padding: 14px;
|
||||
cursor: pointer;
|
||||
user-select: none;
|
||||
}
|
||||
|
||||
.glossary-aside__summary::-webkit-details-marker{
|
||||
display: none;
|
||||
}
|
||||
|
||||
.glossary-aside__summary:hover{
|
||||
background: rgba(127,127,127,0.035);
|
||||
}
|
||||
|
||||
.glossary-aside__heading{
|
||||
margin: 0;
|
||||
font-size: 16px;
|
||||
font-weight: 850;
|
||||
line-height: 1.28;
|
||||
opacity: .97;
|
||||
}
|
||||
|
||||
.glossary-aside__chevron{
|
||||
flex: 0 0 auto;
|
||||
font-size: 14px;
|
||||
line-height: 1;
|
||||
opacity: .72;
|
||||
transform: rotate(0deg);
|
||||
transition: transform 160ms ease, opacity 160ms ease;
|
||||
}
|
||||
|
||||
.glossary-aside__disclosure[open] .glossary-aside__chevron{
|
||||
transform: rotate(180deg);
|
||||
opacity: .96;
|
||||
}
|
||||
|
||||
.glossary-aside__panel{
|
||||
padding: 0 14px 14px;
|
||||
}
|
||||
|
||||
.glossary-aside__subheading{
|
||||
margin: 13px 0 8px;
|
||||
font-size: 12.5px;
|
||||
font-weight: 800;
|
||||
line-height: 1.35;
|
||||
opacity: .82;
|
||||
text-transform: uppercase;
|
||||
letter-spacing: .04em;
|
||||
}
|
||||
|
||||
.glossary-aside__list{
|
||||
list-style: none;
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
.glossary-aside__list li{
|
||||
margin: 7px 0;
|
||||
}
|
||||
|
||||
.glossary-aside__list a{
|
||||
text-decoration: none;
|
||||
font-size: 14px;
|
||||
line-height: 1.4;
|
||||
word-break: break-word;
|
||||
}
|
||||
|
||||
.glossary-aside__list a.is-active{
|
||||
font-weight: 800;
|
||||
}
|
||||
|
||||
@media (max-width: 860px){
|
||||
.glossary-aside{
|
||||
gap: 10px;
|
||||
}
|
||||
|
||||
.glossary-aside__block{
|
||||
border-radius: 14px;
|
||||
}
|
||||
|
||||
.glossary-aside__block--intro{
|
||||
padding: 12px;
|
||||
}
|
||||
|
||||
.glossary-aside__back{
|
||||
margin-bottom: 8px;
|
||||
font-size: 13px;
|
||||
line-height: 1.28;
|
||||
}
|
||||
|
||||
.glossary-aside__title{
|
||||
font-size: 19px;
|
||||
line-height: 1.18;
|
||||
}
|
||||
|
||||
.glossary-aside__pills{
|
||||
gap: 6px;
|
||||
margin-top: 8px;
|
||||
}
|
||||
|
||||
.glossary-aside__pill{
|
||||
padding: 4px 9px;
|
||||
font-size: 12px;
|
||||
line-height: 1.26;
|
||||
}
|
||||
|
||||
.glossary-aside__summary{
|
||||
padding: 12px;
|
||||
}
|
||||
|
||||
.glossary-aside__heading{
|
||||
font-size: 17px;
|
||||
line-height: 1.2;
|
||||
}
|
||||
|
||||
.glossary-aside__panel{
|
||||
padding: 0 12px 12px;
|
||||
}
|
||||
|
||||
.glossary-aside__subheading{
|
||||
margin: 10px 0 6px;
|
||||
font-size: 11.5px;
|
||||
line-height: 1.26;
|
||||
}
|
||||
|
||||
.glossary-aside__list li{
|
||||
margin: 5px 0;
|
||||
}
|
||||
|
||||
.glossary-aside__list a{
|
||||
font-size: 14px;
|
||||
line-height: 1.34;
|
||||
}
|
||||
|
||||
.glossary-aside__disclosure:not([open]) .glossary-aside__panel{
|
||||
display: none;
|
||||
}
|
||||
}
|
||||
|
||||
@media (max-width: 860px){
|
||||
.glossary-aside__disclosure{
|
||||
background: rgba(127,127,127,0.045);
|
||||
}
|
||||
|
||||
.glossary-aside__disclosure[open] .glossary-aside__summary{
|
||||
border-bottom: 1px solid rgba(127,127,127,0.12);
|
||||
}
|
||||
}
|
||||
|
||||
@media (orientation: landscape) and (max-width: 920px) and (max-height: 520px){
|
||||
.glossary-aside{
|
||||
gap: 8px;
|
||||
}
|
||||
|
||||
.glossary-aside__block{
|
||||
border-radius: 12px;
|
||||
}
|
||||
|
||||
.glossary-aside__block--intro{
|
||||
padding: 10px 11px;
|
||||
}
|
||||
|
||||
.glossary-aside__back{
|
||||
margin-bottom: 6px;
|
||||
font-size: 12px;
|
||||
line-height: 1.2;
|
||||
}
|
||||
|
||||
.glossary-aside__title{
|
||||
font-size: 16px;
|
||||
line-height: 1.14;
|
||||
}
|
||||
|
||||
.glossary-aside__pills{
|
||||
gap: 5px;
|
||||
margin-top: 7px;
|
||||
}
|
||||
|
||||
.glossary-aside__pill{
|
||||
padding: 3px 8px;
|
||||
font-size: 11px;
|
||||
line-height: 1.2;
|
||||
}
|
||||
|
||||
.glossary-aside__summary{
|
||||
padding: 10px 11px;
|
||||
}
|
||||
|
||||
.glossary-aside__heading{
|
||||
font-size: 15px;
|
||||
line-height: 1.16;
|
||||
}
|
||||
|
||||
.glossary-aside__panel{
|
||||
padding: 0 11px 10px;
|
||||
}
|
||||
|
||||
.glossary-aside__subheading{
|
||||
margin: 8px 0 5px;
|
||||
font-size: 11px;
|
||||
line-height: 1.18;
|
||||
}
|
||||
|
||||
.glossary-aside__list li{
|
||||
margin: 4px 0;
|
||||
}
|
||||
|
||||
.glossary-aside__list a{
|
||||
font-size: 13px;
|
||||
line-height: 1.28;
|
||||
}
|
||||
}
|
||||
|
||||
@media (orientation: portrait) and (max-width: 1024px) and (pointer: coarse){
|
||||
.glossary-aside{
|
||||
gap: 10px;
|
||||
}
|
||||
|
||||
.glossary-aside__disclosure{
|
||||
background: rgba(127,127,127,0.045);
|
||||
}
|
||||
|
||||
.glossary-aside__disclosure:not([open]) .glossary-aside__panel{
|
||||
display: none;
|
||||
}
|
||||
|
||||
.glossary-aside__summary{
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
.glossary-aside__chevron{
|
||||
display: inline;
|
||||
}
|
||||
}
|
||||
|
||||
@media (min-width: 861px) and (hover: hover) and (pointer: fine){
|
||||
.glossary-aside__summary{
|
||||
cursor: default;
|
||||
}
|
||||
|
||||
.glossary-aside__chevron{
|
||||
display: none;
|
||||
}
|
||||
}
|
||||
|
||||
@media (prefers-color-scheme: dark){
|
||||
.glossary-aside__block,
|
||||
.glossary-aside__pill{
|
||||
background: rgba(255,255,255,0.04);
|
||||
}
|
||||
|
||||
.glossary-aside__summary:hover{
|
||||
background: rgba(255,255,255,0.03);
|
||||
}
|
||||
}
|
||||
</style>
|
||||
|
||||
<script is:inline>
|
||||
(() => {
|
||||
const syncMobileDisclosure = () => {
|
||||
const mobile = window.matchMedia(
|
||||
"(max-width: 860px), ((orientation: portrait) and (max-width: 1024px) and (pointer: coarse))"
|
||||
).matches;
|
||||
const smallLandscape = window.matchMedia(
|
||||
"(orientation: landscape) and (max-width: 920px) and (max-height: 520px)"
|
||||
).matches;
|
||||
|
||||
const compact = mobile || smallLandscape;
|
||||
|
||||
document
|
||||
.querySelectorAll(".glossary-aside__disclosure")
|
||||
.forEach((el, index) => {
|
||||
if (!(el instanceof HTMLDetailsElement)) return;
|
||||
|
||||
if (compact) {
|
||||
if (!el.dataset.mobileInit) {
|
||||
el.open = false;
|
||||
el.dataset.mobileInit = "true";
|
||||
}
|
||||
} else {
|
||||
el.open = true;
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
if (document.readyState === "loading") {
|
||||
document.addEventListener("DOMContentLoaded", syncMobileDisclosure, { once: true });
|
||||
} else {
|
||||
syncMobileDisclosure();
|
||||
}
|
||||
|
||||
window.addEventListener("resize", syncMobileDisclosure);
|
||||
window.addEventListener("pageshow", syncMobileDisclosure);
|
||||
})();
|
||||
</script>
|
||||
110
src/components/GlossaryCardGrid.astro
Normal file
@@ -0,0 +1,110 @@
|
||||
---
|
||||
import { hrefOfGlossaryEntry, type GlossaryEntry } from "../lib/glossary";
|
||||
|
||||
export interface Props {
|
||||
entries?: GlossaryEntry[];
|
||||
wide?: boolean;
|
||||
}
|
||||
|
||||
const {
|
||||
entries = [],
|
||||
wide = false,
|
||||
} = Astro.props;
|
||||
---
|
||||
|
||||
<div class="glossary-cards">
|
||||
{entries.map((entry) => (
|
||||
<a
|
||||
class:list={[
|
||||
"glossary-card",
|
||||
wide && "glossary-card--wide",
|
||||
]}
|
||||
href={hrefOfGlossaryEntry(entry)}
|
||||
>
|
||||
<strong>{entry.data.term}</strong>
|
||||
<span>{entry.data.definitionShort}</span>
|
||||
</a>
|
||||
))}
|
||||
</div>
|
||||
|
||||
<style>
|
||||
.glossary-cards{
|
||||
display: grid;
|
||||
grid-template-columns: repeat(auto-fit, minmax(220px, 1fr));
|
||||
gap: 12px;
|
||||
margin-top: 12px;
|
||||
}
|
||||
|
||||
.glossary-card{
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 7px;
|
||||
padding: 13px 14px;
|
||||
border: 1px solid var(--glossary-border);
|
||||
border-radius: 16px;
|
||||
background: var(--glossary-bg-soft);
|
||||
text-decoration: none;
|
||||
transition: transform 120ms ease, background 120ms ease, border-color 120ms ease;
|
||||
}
|
||||
|
||||
.glossary-card:hover{
|
||||
transform: translateY(-1px);
|
||||
background: var(--glossary-bg-soft-strong);
|
||||
border-color: rgba(0,217,255,0.16);
|
||||
text-decoration: none;
|
||||
}
|
||||
|
||||
.glossary-card--wide{
|
||||
grid-column: 1 / -1;
|
||||
}
|
||||
|
||||
.glossary-card strong{
|
||||
color: var(--glossary-accent);
|
||||
font-size: 1.02rem;
|
||||
line-height: 1.24;
|
||||
}
|
||||
|
||||
.glossary-card span{
|
||||
color: inherit;
|
||||
font-size: .98rem;
|
||||
line-height: 1.46;
|
||||
opacity: .94;
|
||||
}
|
||||
|
||||
@media (max-width: 760px){
|
||||
.glossary-cards{
|
||||
grid-template-columns: 1fr;
|
||||
gap: 10px;
|
||||
margin-top: 10px;
|
||||
}
|
||||
|
||||
.glossary-card{
|
||||
gap: 6px;
|
||||
padding: 12px 12px;
|
||||
border-radius: 14px;
|
||||
}
|
||||
|
||||
.glossary-card strong{
|
||||
font-size: .98rem;
|
||||
}
|
||||
|
||||
.glossary-card span{
|
||||
font-size: .94rem;
|
||||
line-height: 1.42;
|
||||
}
|
||||
|
||||
.glossary-card--wide{
|
||||
grid-column: auto;
|
||||
}
|
||||
}
|
||||
|
||||
@media (prefers-color-scheme: dark){
|
||||
.glossary-card{
|
||||
background: rgba(255,255,255,0.04);
|
||||
}
|
||||
|
||||
.glossary-card:hover{
|
||||
background: rgba(255,255,255,0.07);
|
||||
}
|
||||
}
|
||||
</style>
|
||||
26
src/components/GlossaryEntryBody.astro
Normal file
@@ -0,0 +1,26 @@
|
||||
<div class="glossary-entry-body">
|
||||
<slot />
|
||||
</div>
|
||||
|
||||
<style>
|
||||
.glossary-entry-body{
|
||||
margin-bottom: 16px;
|
||||
}
|
||||
|
||||
.glossary-entry-body > :last-child{
|
||||
margin-bottom: 0;
|
||||
}
|
||||
|
||||
@media (max-width: 760px){
|
||||
.glossary-entry-body{
|
||||
margin-bottom: 12px;
|
||||
}
|
||||
}
|
||||
|
||||
:global(.glossary-entry-body h2),
|
||||
:global(.glossary-entry-body h3),
|
||||
:global(.glossary-relations h2),
|
||||
:global(.glossary-relations h3){
|
||||
scroll-margin-top: calc(var(--sticky-offset-px, 96px) + 18px);
|
||||
}
|
||||
</style>
|
||||
264
src/components/GlossaryEntryHero.astro
Normal file
@@ -0,0 +1,264 @@
|
||||
---
|
||||
interface Props {
|
||||
term: string;
|
||||
definitionShort: string;
|
||||
displayFamily: string;
|
||||
displayDomain?: string;
|
||||
displayLevel?: string;
|
||||
mobilizedAuthors?: string[];
|
||||
comparisonTraditions?: string[];
|
||||
}
|
||||
|
||||
const {
|
||||
term,
|
||||
definitionShort,
|
||||
displayFamily,
|
||||
displayDomain = "",
|
||||
displayLevel = "",
|
||||
mobilizedAuthors = [],
|
||||
comparisonTraditions = [],
|
||||
} = Astro.props;
|
||||
|
||||
const hasScholarlyMeta =
|
||||
mobilizedAuthors.length > 0 ||
|
||||
comparisonTraditions.length > 0;
|
||||
---
|
||||
|
||||
<header class="glossary-entry-head" data-ge-hero>
|
||||
<div class="glossary-entry-head__title">
|
||||
<h1>{term}</h1>
|
||||
</div>
|
||||
|
||||
<div class="glossary-entry-summary">
|
||||
<p class="glossary-entry-dek">
|
||||
<em>{definitionShort}</em>
|
||||
</p>
|
||||
|
||||
<div class="glossary-entry-signals" aria-label="Repères de lecture">
|
||||
<span class="glossary-pill glossary-pill--family">
|
||||
<strong>Famille :</strong> {displayFamily}
|
||||
</span>
|
||||
|
||||
{displayDomain && (
|
||||
<span class="glossary-pill">
|
||||
<strong>Domaine :</strong> {displayDomain}
|
||||
</span>
|
||||
)}
|
||||
|
||||
{displayLevel && (
|
||||
<span class="glossary-pill">
|
||||
<strong>Niveau :</strong> {displayLevel}
|
||||
</span>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{hasScholarlyMeta && (
|
||||
<div class="glossary-entry-meta">
|
||||
{mobilizedAuthors.length > 0 && (
|
||||
<p>
|
||||
<strong>Auteurs mobilisés :</strong> {mobilizedAuthors.join(" / ")}
|
||||
</p>
|
||||
)}
|
||||
|
||||
{comparisonTraditions.length > 0 && (
|
||||
<p>
|
||||
<strong>Traditions de comparaison :</strong> {comparisonTraditions.join(" / ")}
|
||||
</p>
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
</div>
|
||||
</header>
|
||||
|
||||
<style>
|
||||
.glossary-entry-head{
|
||||
position: sticky;
|
||||
top: var(--sticky-header-h, 0px);
|
||||
z-index: 11;
|
||||
margin: 0 0 22px;
|
||||
border: 1px solid rgba(127,127,127,0.18);
|
||||
border-radius: 24px;
|
||||
background:
|
||||
linear-gradient(180deg, rgba(0,0,0,0.60), rgba(0,0,0,0.92)),
|
||||
radial-gradient(900px 240px at 20% 0%, rgba(0,217,255,0.08), transparent 60%);
|
||||
backdrop-filter: blur(10px);
|
||||
-webkit-backdrop-filter: blur(10px);
|
||||
overflow: hidden;
|
||||
transition:
|
||||
border-radius 180ms ease,
|
||||
box-shadow 180ms ease,
|
||||
border-color 180ms ease;
|
||||
}
|
||||
|
||||
.glossary-entry-head__title{
|
||||
padding:
|
||||
var(--entry-hero-pad-top, 18px)
|
||||
var(--entry-hero-pad-x, 18px)
|
||||
calc(var(--entry-hero-pad-top, 18px) - 2px);
|
||||
transition: padding 180ms ease;
|
||||
}
|
||||
|
||||
.glossary-entry-head h1{
|
||||
margin: 0;
|
||||
font-size: var(--entry-hero-h1-size, clamp(2.2rem, 4vw, 3.15rem));
|
||||
line-height: 1.02;
|
||||
letter-spacing: -.04em;
|
||||
font-weight: 850;
|
||||
transition: font-size 180ms ease;
|
||||
}
|
||||
|
||||
.glossary-entry-summary{
|
||||
display: grid;
|
||||
gap: var(--entry-hero-gap, 14px);
|
||||
padding:
|
||||
calc(var(--entry-hero-pad-bottom, 18px) - 2px)
|
||||
var(--entry-hero-pad-x, 18px)
|
||||
var(--entry-hero-pad-bottom, 18px);
|
||||
border-top: 1px solid rgba(127,127,127,0.14);
|
||||
background: rgba(255,255,255,0.02);
|
||||
transition: gap 180ms ease, padding 180ms ease;
|
||||
}
|
||||
|
||||
.glossary-entry-dek{
|
||||
margin: 0;
|
||||
max-width: var(--entry-hero-dek-maxw, 76ch);
|
||||
font-size: var(--entry-hero-dek-size, 1.04rem);
|
||||
line-height: var(--entry-hero-dek-lh, 1.55);
|
||||
opacity: .94;
|
||||
transition:
|
||||
max-width 180ms ease,
|
||||
font-size 180ms ease,
|
||||
line-height 180ms ease;
|
||||
|
||||
display: -webkit-box;
|
||||
-webkit-box-orient: vertical;
|
||||
-webkit-line-clamp: 4;
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
.glossary-entry-signals{
|
||||
display: flex;
|
||||
flex-wrap: wrap;
|
||||
gap: 7px;
|
||||
margin: 0;
|
||||
transition: gap 180ms ease;
|
||||
}
|
||||
|
||||
.glossary-pill{
|
||||
display: inline-flex;
|
||||
align-items: center;
|
||||
gap: 5px;
|
||||
padding: 5px 9px;
|
||||
border: 1px solid rgba(127,127,127,0.24);
|
||||
border-radius: 999px;
|
||||
background: rgba(127,127,127,0.05);
|
||||
font-size: 12.5px;
|
||||
line-height: 1.28;
|
||||
transition:
|
||||
padding 180ms ease,
|
||||
font-size 180ms ease,
|
||||
background 120ms ease,
|
||||
border-color 120ms ease;
|
||||
}
|
||||
|
||||
.glossary-pill--family{
|
||||
border-color: rgba(127,127,127,0.36);
|
||||
font-weight: 700;
|
||||
}
|
||||
|
||||
.glossary-entry-meta{
|
||||
margin: 0;
|
||||
padding: 10px 12px;
|
||||
border: 1px solid rgba(127,127,127,0.18);
|
||||
border-radius: 12px;
|
||||
background: rgba(127,127,127,0.04);
|
||||
max-height: var(--entry-hero-meta-max-h, 12rem);
|
||||
opacity: var(--entry-hero-meta-opacity, 1);
|
||||
overflow: hidden;
|
||||
transition:
|
||||
max-height 180ms ease,
|
||||
opacity 140ms ease,
|
||||
padding 180ms ease,
|
||||
border-color 180ms ease;
|
||||
}
|
||||
|
||||
.glossary-entry-meta p{
|
||||
margin: 0;
|
||||
font-size: 13.5px;
|
||||
line-height: 1.45;
|
||||
}
|
||||
|
||||
.glossary-entry-meta p + p{
|
||||
margin-top: 6px;
|
||||
}
|
||||
|
||||
@media (max-width: 860px){
|
||||
.glossary-entry-head{
|
||||
position: static;
|
||||
border-radius: 18px;
|
||||
margin-bottom: 16px;
|
||||
}
|
||||
|
||||
.glossary-entry-head__title{
|
||||
padding: 12px 12px 10px;
|
||||
}
|
||||
|
||||
.glossary-entry-summary{
|
||||
gap: 9px;
|
||||
padding: 10px 12px 12px;
|
||||
}
|
||||
|
||||
.glossary-entry-dek{
|
||||
max-width: none;
|
||||
-webkit-line-clamp: 3;
|
||||
}
|
||||
|
||||
.glossary-entry-signals{
|
||||
gap: 6px;
|
||||
}
|
||||
|
||||
.glossary-pill{
|
||||
font-size: 12px;
|
||||
padding: 4px 8px;
|
||||
}
|
||||
}
|
||||
|
||||
@media (max-width: 520px){
|
||||
.glossary-entry-head{
|
||||
border-radius: 16px;
|
||||
margin-bottom: 14px;
|
||||
}
|
||||
|
||||
.glossary-entry-head__title{
|
||||
padding: 10px 10px 9px;
|
||||
}
|
||||
|
||||
.glossary-entry-summary{
|
||||
gap: 9px;
|
||||
padding: 9px 10px 11px;
|
||||
}
|
||||
|
||||
.glossary-entry-dek{
|
||||
display: block;
|
||||
max-width: none;
|
||||
overflow: visible;
|
||||
-webkit-line-clamp: unset;
|
||||
-webkit-box-orient: unset;
|
||||
}
|
||||
|
||||
.glossary-pill{
|
||||
font-size: 11.5px;
|
||||
padding: 3px 7px;
|
||||
}
|
||||
}
|
||||
|
||||
@media (prefers-color-scheme: dark){
|
||||
.glossary-entry-meta{
|
||||
background: rgba(255,255,255,0.03);
|
||||
}
|
||||
|
||||
.glossary-pill{
|
||||
background: rgba(255,255,255,0.04);
|
||||
}
|
||||
}
|
||||
</style>
|
||||
31
src/components/GlossaryEntryLegacyNote.astro
Normal file
@@ -0,0 +1,31 @@
|
||||
---
|
||||
interface Props {
|
||||
canonicalHref: string;
|
||||
term: string;
|
||||
}
|
||||
|
||||
const { canonicalHref, term } = Astro.props;
|
||||
---
|
||||
|
||||
<p class="glossary-legacy-note">
|
||||
Cette entrée a été renommée. L’intitulé canonique est :
|
||||
<a href={canonicalHref}>{term}</a>.
|
||||
</p>
|
||||
|
||||
<style>
|
||||
.glossary-legacy-note{
|
||||
padding: 10px 12px;
|
||||
border: 1px solid rgba(127,127,127,0.22);
|
||||
border-radius: 12px;
|
||||
background: rgba(127,127,127,0.05);
|
||||
font-size: 14px;
|
||||
line-height: 1.45;
|
||||
margin-bottom: 18px;
|
||||
}
|
||||
|
||||
@media (prefers-color-scheme: dark){
|
||||
.glossary-legacy-note{
|
||||
background: rgba(255,255,255,0.04);
|
||||
}
|
||||
}
|
||||
</style>
|
||||
291
src/components/GlossaryEntryStickySync.astro
Normal file
@@ -0,0 +1,291 @@
|
||||
<script is:inline>
|
||||
(() => {
|
||||
const boot = () => {
|
||||
const body = document.body;
|
||||
const root = document.documentElement;
|
||||
const hero = document.querySelector("[data-ge-hero]");
|
||||
const follow = document.getElementById("reading-follow");
|
||||
const mqMobile = window.matchMedia("(max-width: 860px)");
|
||||
const mqSmallLandscape = window.matchMedia(
|
||||
"(orientation: landscape) and (max-width: 920px) and (max-height: 520px)"
|
||||
);
|
||||
|
||||
if (!body || !root || !hero || !follow) return;
|
||||
|
||||
const BODY_CLASS = "is-glossary-entry-page";
|
||||
const FOLLOW_ON_CLASS = "glossary-entry-follow-on";
|
||||
|
||||
let lastHeight = -1;
|
||||
let lastFollowOn = null;
|
||||
let raf = 0;
|
||||
|
||||
body.classList.add(BODY_CLASS);
|
||||
|
||||
const isCompactViewport = () =>
|
||||
mqMobile.matches || mqSmallLandscape.matches;
|
||||
|
||||
const heroHeight = () => {
|
||||
const rect = hero.getBoundingClientRect();
|
||||
return Math.max(0, Math.round(rect.height || 0));
|
||||
};
|
||||
|
||||
const neutralizeGlobalFollowIfCompact = () => {
|
||||
if (!isCompactViewport()) {
|
||||
follow.style.display = "";
|
||||
return;
|
||||
}
|
||||
|
||||
follow.classList.remove("is-on");
|
||||
follow.setAttribute("aria-hidden", "true");
|
||||
follow.style.display = "none";
|
||||
root.style.setProperty("--followbar-h", "0px");
|
||||
};
|
||||
|
||||
const computeFollowOn = () =>
|
||||
!isCompactViewport() &&
|
||||
follow.classList.contains("is-on") &&
|
||||
follow.style.display !== "none" &&
|
||||
follow.getAttribute("aria-hidden") !== "true";
|
||||
|
||||
const syncFollowState = () => {
|
||||
const on = computeFollowOn();
|
||||
|
||||
if (on) {
|
||||
if (lastFollowOn === true) return;
|
||||
lastFollowOn = true;
|
||||
body.classList.add(FOLLOW_ON_CLASS);
|
||||
return;
|
||||
}
|
||||
|
||||
if (lastFollowOn === false) return;
|
||||
lastFollowOn = false;
|
||||
body.classList.remove(FOLLOW_ON_CLASS);
|
||||
};
|
||||
|
||||
const stripLocalSticky = () => {
|
||||
document
|
||||
.querySelectorAll(
|
||||
".glossary-entry-body h2, .glossary-entry-body h3, .glossary-relations h2, .glossary-relations h3"
|
||||
)
|
||||
.forEach((el) => {
|
||||
el.classList.remove("is-sticky");
|
||||
el.removeAttribute("data-sticky-active");
|
||||
});
|
||||
};
|
||||
|
||||
const applyLocalStickyHeight = () => {
|
||||
const h = isCompactViewport() ? 0 : heroHeight();
|
||||
if (h === lastHeight) return;
|
||||
lastHeight = h;
|
||||
|
||||
if (typeof window.__archiSetLocalStickyHeight === "function") {
|
||||
window.__archiSetLocalStickyHeight(h);
|
||||
} else {
|
||||
root.style.setProperty("--glossary-local-sticky-h", `${h}px`);
|
||||
}
|
||||
};
|
||||
|
||||
const syncAll = () => {
|
||||
neutralizeGlobalFollowIfCompact();
|
||||
stripLocalSticky();
|
||||
syncFollowState();
|
||||
applyLocalStickyHeight();
|
||||
};
|
||||
|
||||
const schedule = () => {
|
||||
if (raf) return;
|
||||
raf = requestAnimationFrame(() => {
|
||||
raf = 0;
|
||||
syncAll();
|
||||
});
|
||||
};
|
||||
|
||||
const followObserver = new MutationObserver(schedule);
|
||||
followObserver.observe(follow, {
|
||||
attributes: true,
|
||||
attributeFilter: ["class", "style", "aria-hidden"],
|
||||
subtree: false,
|
||||
});
|
||||
|
||||
const heroResizeObserver =
|
||||
typeof ResizeObserver !== "undefined"
|
||||
? new ResizeObserver(schedule)
|
||||
: null;
|
||||
|
||||
heroResizeObserver?.observe(hero);
|
||||
|
||||
window.addEventListener("resize", schedule);
|
||||
window.addEventListener("pageshow", schedule);
|
||||
|
||||
if (document.fonts?.ready) {
|
||||
document.fonts.ready.then(schedule).catch(() => {});
|
||||
}
|
||||
|
||||
if (mqMobile.addEventListener) {
|
||||
mqMobile.addEventListener("change", schedule);
|
||||
} else if (mqMobile.addListener) {
|
||||
mqMobile.addListener(schedule);
|
||||
}
|
||||
|
||||
if (mqSmallLandscape.addEventListener) {
|
||||
mqSmallLandscape.addEventListener("change", schedule);
|
||||
} else if (mqSmallLandscape.addListener) {
|
||||
mqSmallLandscape.addListener(schedule);
|
||||
}
|
||||
|
||||
schedule();
|
||||
};
|
||||
|
||||
if (document.readyState === "loading") {
|
||||
document.addEventListener("DOMContentLoaded", boot, { once: true });
|
||||
} else {
|
||||
boot();
|
||||
}
|
||||
})();
|
||||
</script>
|
||||
|
||||
<style>
|
||||
:global(body.is-glossary-entry-page #reading-follow){
|
||||
z-index: 10;
|
||||
}
|
||||
|
||||
:global(body.is-glossary-entry-page.glossary-entry-follow-on .glossary-entry-head h1){
|
||||
letter-spacing: -.03em;
|
||||
}
|
||||
|
||||
:global(body.is-glossary-entry-page.glossary-entry-follow-on .glossary-entry-summary){
|
||||
gap: 8px;
|
||||
padding-top: 10px;
|
||||
padding-bottom: 8px;
|
||||
border-top-color: rgba(127,127,127,0.10);
|
||||
}
|
||||
|
||||
:global(body.is-glossary-entry-page.glossary-entry-follow-on .glossary-entry-dek){
|
||||
display: block;
|
||||
-webkit-line-clamp: unset;
|
||||
overflow: visible;
|
||||
}
|
||||
|
||||
:global(body.is-glossary-entry-page.glossary-entry-follow-on .glossary-entry-signals){
|
||||
gap: 5px;
|
||||
}
|
||||
|
||||
:global(body.is-glossary-entry-page.glossary-entry-follow-on .glossary-pill){
|
||||
gap: 4px;
|
||||
padding: 3px 7px;
|
||||
font-size: 11px;
|
||||
}
|
||||
|
||||
:global(body.is-glossary-entry-page.glossary-entry-follow-on .glossary-entry-meta){
|
||||
padding: 0;
|
||||
border-color: transparent;
|
||||
max-height: 0;
|
||||
opacity: 0;
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
:global(body.is-glossary-entry-page.glossary-entry-follow-on #reading-follow){
|
||||
transform: none;
|
||||
}
|
||||
|
||||
:global(body.is-glossary-entry-page.glossary-entry-follow-on #reading-follow .reading-follow__inner){
|
||||
margin-top: 0;
|
||||
border-top-left-radius: 0;
|
||||
border-top-right-radius: 0;
|
||||
}
|
||||
|
||||
:global(body.is-glossary-entry-page .glossary-entry-body h2.is-sticky),
|
||||
:global(body.is-glossary-entry-page .glossary-entry-body h2[data-sticky-active="true"]),
|
||||
:global(body.is-glossary-entry-page .glossary-entry-body h3.is-sticky),
|
||||
:global(body.is-glossary-entry-page .glossary-entry-body h3[data-sticky-active="true"]),
|
||||
:global(body.is-glossary-entry-page .glossary-relations h2.is-sticky),
|
||||
:global(body.is-glossary-entry-page .glossary-relations h2[data-sticky-active="true"]),
|
||||
:global(body.is-glossary-entry-page .glossary-relations h3.is-sticky),
|
||||
:global(body.is-glossary-entry-page .glossary-relations h3[data-sticky-active="true"]){
|
||||
position: static !important;
|
||||
top: auto !important;
|
||||
z-index: auto !important;
|
||||
padding: 0 !important;
|
||||
border: 0 !important;
|
||||
background: transparent !important;
|
||||
box-shadow: none !important;
|
||||
backdrop-filter: none !important;
|
||||
-webkit-backdrop-filter: none !important;
|
||||
}
|
||||
|
||||
@media (max-width: 860px){
|
||||
:global(body.is-glossary-entry-page #reading-follow),
|
||||
:global(body.is-glossary-entry-page #reading-follow .reading-follow__inner){
|
||||
display: none !important;
|
||||
opacity: 0 !important;
|
||||
pointer-events: none !important;
|
||||
visibility: hidden !important;
|
||||
}
|
||||
|
||||
:global(body.is-glossary-entry-page){
|
||||
--followbar-h: 0px !important;
|
||||
--sticky-offset-px: calc(var(--sticky-header-h, 0px) + var(--page-gap, 12px)) !important;
|
||||
}
|
||||
|
||||
:global(body.is-glossary-entry-page.glossary-entry-follow-on .glossary-entry-head){
|
||||
margin-bottom: 18px;
|
||||
border-radius: 20px;
|
||||
box-shadow: none;
|
||||
}
|
||||
|
||||
:global(body.is-glossary-entry-page.glossary-entry-follow-on .glossary-entry-summary){
|
||||
gap: 6px;
|
||||
padding-top: 8px;
|
||||
padding-bottom: 8px;
|
||||
}
|
||||
|
||||
:global(body.is-glossary-entry-page.glossary-entry-follow-on .glossary-entry-dek){
|
||||
display: block;
|
||||
}
|
||||
|
||||
:global(body.is-glossary-entry-page.glossary-entry-follow-on .glossary-entry-signals){
|
||||
gap: 5px;
|
||||
}
|
||||
|
||||
:global(body.is-glossary-entry-page.glossary-entry-follow-on .glossary-pill){
|
||||
padding: 3px 6px;
|
||||
font-size: 10.5px;
|
||||
}
|
||||
}
|
||||
|
||||
@media (orientation: landscape) and (max-width: 920px) and (max-height: 520px){
|
||||
:global(body.is-glossary-entry-page #reading-follow),
|
||||
:global(body.is-glossary-entry-page #reading-follow .reading-follow__inner){
|
||||
display: none !important;
|
||||
opacity: 0 !important;
|
||||
pointer-events: none !important;
|
||||
visibility: hidden !important;
|
||||
}
|
||||
|
||||
:global(body.is-glossary-entry-page){
|
||||
--followbar-h: 0px !important;
|
||||
--sticky-offset-px: calc(var(--sticky-header-h, 0px) + var(--page-gap, 12px)) !important;
|
||||
}
|
||||
|
||||
:global(body.is-glossary-entry-page.glossary-entry-follow-on .glossary-entry-head){
|
||||
margin-bottom: 14px;
|
||||
border-radius: 16px;
|
||||
box-shadow: none;
|
||||
}
|
||||
|
||||
:global(body.is-glossary-entry-page.glossary-entry-follow-on .glossary-entry-summary){
|
||||
gap: 5px;
|
||||
padding-top: 6px;
|
||||
padding-bottom: 6px;
|
||||
}
|
||||
|
||||
:global(body.is-glossary-entry-page.glossary-entry-follow-on .glossary-entry-dek){
|
||||
display: none;
|
||||
}
|
||||
|
||||
:global(body.is-glossary-entry-page.glossary-entry-follow-on .glossary-pill){
|
||||
padding: 2px 6px;
|
||||
font-size: 10px;
|
||||
}
|
||||
}
|
||||
</style>
|
||||
382
src/components/GlossaryHomeAside.astro
Normal file
@@ -0,0 +1,382 @@
|
||||
---
|
||||
import {
|
||||
getFondamentaux,
|
||||
getGlossaryHomeStats,
|
||||
getGlossaryPortalLinks,
|
||||
hrefOfGlossaryEntry,
|
||||
} from "../lib/glossary";
|
||||
|
||||
const {
|
||||
allEntries = [],
|
||||
} = Astro.props;
|
||||
|
||||
const fondamentaux = getFondamentaux(allEntries);
|
||||
const portalLinks = getGlossaryPortalLinks();
|
||||
|
||||
const {
|
||||
totalEntries,
|
||||
paradigmesCount,
|
||||
doctrinesCount,
|
||||
metaRegimesCount,
|
||||
} = getGlossaryHomeStats(allEntries);
|
||||
---
|
||||
|
||||
<nav class="glossary-home-aside" aria-label="Navigation du portail du glossaire">
|
||||
<div class="glossary-home-aside__block glossary-home-aside__block--intro">
|
||||
<div class="glossary-home-aside__title">Glossaire archicratique</div>
|
||||
<div class="glossary-home-aside__meta">
|
||||
portail de lecture · cartographie conceptuelle
|
||||
</div>
|
||||
|
||||
<div class="glossary-home-aside__pills" aria-label="Repères de navigation">
|
||||
<span class="glossary-home-aside__pill">{totalEntries} entrées</span>
|
||||
<span class="glossary-home-aside__pill">{metaRegimesCount} méta-régimes</span>
|
||||
<span class="glossary-home-aside__pill">
|
||||
{doctrinesCount} doctrine{doctrinesCount > 1 ? "s" : ""} · {paradigmesCount} paradigme{paradigmesCount > 1 ? "s" : ""}
|
||||
</span>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<details class="glossary-home-aside__block glossary-home-aside__disclosure" open>
|
||||
<summary class="glossary-home-aside__summary">
|
||||
<span class="glossary-home-aside__heading">Parcours du glossaire</span>
|
||||
<span class="glossary-home-aside__chevron" aria-hidden="true">▾</span>
|
||||
</summary>
|
||||
|
||||
<div class="glossary-home-aside__panel">
|
||||
<ul class="glossary-home-aside__list">
|
||||
{portalLinks.map((item) => (
|
||||
<li><a href={item.href}>{item.label}</a></li>
|
||||
))}
|
||||
</ul>
|
||||
</div>
|
||||
</details>
|
||||
|
||||
{fondamentaux.length > 0 && (
|
||||
<details class="glossary-home-aside__block glossary-home-aside__disclosure" open>
|
||||
<summary class="glossary-home-aside__summary">
|
||||
<span class="glossary-home-aside__heading">Noyau archicratique</span>
|
||||
<span class="glossary-home-aside__chevron" aria-hidden="true">▾</span>
|
||||
</summary>
|
||||
|
||||
<div class="glossary-home-aside__panel">
|
||||
<ul class="glossary-home-aside__list">
|
||||
{fondamentaux.map((entry) => (
|
||||
<li><a href={hrefOfGlossaryEntry(entry)}>{entry.data.term}</a></li>
|
||||
))}
|
||||
</ul>
|
||||
</div>
|
||||
</details>
|
||||
)}
|
||||
</nav>
|
||||
|
||||
<style>
|
||||
.glossary-home-aside{
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 14px;
|
||||
min-width: 0;
|
||||
}
|
||||
|
||||
.glossary-home-aside__block{
|
||||
border: 1px solid rgba(127,127,127,0.22);
|
||||
border-radius: 16px;
|
||||
padding: 14px;
|
||||
background: rgba(127,127,127,0.05);
|
||||
min-width: 0;
|
||||
}
|
||||
|
||||
.glossary-home-aside__block--intro{
|
||||
padding-top: 13px;
|
||||
padding-bottom: 13px;
|
||||
}
|
||||
|
||||
.glossary-home-aside__title{
|
||||
font-size: 18px;
|
||||
font-weight: 850;
|
||||
letter-spacing: .1px;
|
||||
line-height: 1.22;
|
||||
}
|
||||
|
||||
.glossary-home-aside__meta{
|
||||
margin-top: 8px;
|
||||
font-size: 13px;
|
||||
line-height: 1.4;
|
||||
opacity: .8;
|
||||
}
|
||||
|
||||
.glossary-home-aside__pills{
|
||||
display: flex;
|
||||
flex-wrap: wrap;
|
||||
gap: 7px;
|
||||
margin-top: 11px;
|
||||
}
|
||||
|
||||
.glossary-home-aside__pill{
|
||||
display: inline-flex;
|
||||
align-items: center;
|
||||
padding: 5px 10px;
|
||||
border: 1px solid rgba(127,127,127,0.24);
|
||||
border-radius: 999px;
|
||||
background: rgba(127,127,127,0.04);
|
||||
font-size: 13px;
|
||||
line-height: 1.35;
|
||||
opacity: .92;
|
||||
min-width: 0;
|
||||
}
|
||||
|
||||
.glossary-home-aside__disclosure{
|
||||
padding: 0;
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
.glossary-home-aside__summary{
|
||||
list-style: none;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: space-between;
|
||||
gap: 12px;
|
||||
padding: 14px;
|
||||
cursor: pointer;
|
||||
user-select: none;
|
||||
}
|
||||
|
||||
.glossary-home-aside__summary::-webkit-details-marker{
|
||||
display: none;
|
||||
}
|
||||
|
||||
.glossary-home-aside__summary:hover{
|
||||
background: rgba(127,127,127,0.035);
|
||||
}
|
||||
|
||||
.glossary-home-aside__heading{
|
||||
margin: 0;
|
||||
font-size: 16px;
|
||||
font-weight: 850;
|
||||
line-height: 1.28;
|
||||
opacity: .97;
|
||||
}
|
||||
|
||||
.glossary-home-aside__chevron{
|
||||
flex: 0 0 auto;
|
||||
font-size: 14px;
|
||||
line-height: 1;
|
||||
opacity: .72;
|
||||
transform: rotate(0deg);
|
||||
transition: transform 160ms ease, opacity 160ms ease;
|
||||
}
|
||||
|
||||
.glossary-home-aside__disclosure[open] .glossary-home-aside__chevron{
|
||||
transform: rotate(180deg);
|
||||
opacity: .96;
|
||||
}
|
||||
|
||||
.glossary-home-aside__panel{
|
||||
padding: 0 14px 14px;
|
||||
}
|
||||
|
||||
.glossary-home-aside__list{
|
||||
list-style: none;
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
.glossary-home-aside__list li{
|
||||
margin: 7px 0;
|
||||
}
|
||||
|
||||
.glossary-home-aside__list a{
|
||||
text-decoration: none;
|
||||
font-size: 14px;
|
||||
line-height: 1.42;
|
||||
word-break: break-word;
|
||||
}
|
||||
|
||||
@media (max-width: 860px){
|
||||
.glossary-home-aside{
|
||||
gap: 10px;
|
||||
}
|
||||
|
||||
.glossary-home-aside__block{
|
||||
border-radius: 14px;
|
||||
}
|
||||
|
||||
.glossary-home-aside__block--intro{
|
||||
padding: 12px;
|
||||
}
|
||||
|
||||
.glossary-home-aside__title{
|
||||
font-size: 19px;
|
||||
line-height: 1.18;
|
||||
}
|
||||
|
||||
.glossary-home-aside__meta{
|
||||
margin-top: 6px;
|
||||
font-size: 12px;
|
||||
line-height: 1.32;
|
||||
}
|
||||
|
||||
.glossary-home-aside__pills{
|
||||
gap: 6px;
|
||||
margin-top: 9px;
|
||||
}
|
||||
|
||||
.glossary-home-aside__pill{
|
||||
padding: 4px 9px;
|
||||
font-size: 12px;
|
||||
line-height: 1.28;
|
||||
}
|
||||
|
||||
.glossary-home-aside__summary{
|
||||
padding: 12px;
|
||||
}
|
||||
|
||||
.glossary-home-aside__heading{
|
||||
font-size: 17px;
|
||||
line-height: 1.2;
|
||||
}
|
||||
|
||||
.glossary-home-aside__panel{
|
||||
padding: 0 12px 12px;
|
||||
}
|
||||
|
||||
.glossary-home-aside__list li{
|
||||
margin: 5px 0;
|
||||
}
|
||||
|
||||
.glossary-home-aside__list a{
|
||||
font-size: 14px;
|
||||
line-height: 1.34;
|
||||
}
|
||||
|
||||
.glossary-home-aside__disclosure:not([open]) .glossary-home-aside__panel{
|
||||
display: none;
|
||||
}
|
||||
}
|
||||
|
||||
@media (max-width: 860px){
|
||||
.glossary-home-aside__disclosure{
|
||||
background: rgba(127,127,127,0.045);
|
||||
}
|
||||
|
||||
.glossary-home-aside__disclosure[open] .glossary-home-aside__summary{
|
||||
border-bottom: 1px solid rgba(127,127,127,0.12);
|
||||
}
|
||||
}
|
||||
|
||||
@media (orientation: landscape) and (max-width: 920px) and (max-height: 520px){
|
||||
.glossary-home-aside{
|
||||
gap: 8px;
|
||||
}
|
||||
|
||||
.glossary-home-aside__block{
|
||||
border-radius: 12px;
|
||||
}
|
||||
|
||||
.glossary-home-aside__block--intro{
|
||||
padding: 10px 11px;
|
||||
}
|
||||
|
||||
.glossary-home-aside__title{
|
||||
font-size: 16px;
|
||||
line-height: 1.14;
|
||||
}
|
||||
|
||||
.glossary-home-aside__meta{
|
||||
font-size: 11px;
|
||||
line-height: 1.26;
|
||||
margin-top: 5px;
|
||||
}
|
||||
|
||||
.glossary-home-aside__pills{
|
||||
gap: 5px;
|
||||
margin-top: 8px;
|
||||
}
|
||||
|
||||
.glossary-home-aside__pill{
|
||||
padding: 3px 8px;
|
||||
font-size: 11px;
|
||||
line-height: 1.2;
|
||||
}
|
||||
|
||||
.glossary-home-aside__summary{
|
||||
padding: 10px 11px;
|
||||
}
|
||||
|
||||
.glossary-home-aside__heading{
|
||||
font-size: 15px;
|
||||
line-height: 1.16;
|
||||
}
|
||||
|
||||
.glossary-home-aside__panel{
|
||||
padding: 0 11px 10px;
|
||||
}
|
||||
|
||||
.glossary-home-aside__list li{
|
||||
margin: 4px 0;
|
||||
}
|
||||
|
||||
.glossary-home-aside__list a{
|
||||
font-size: 13px;
|
||||
line-height: 1.28;
|
||||
}
|
||||
}
|
||||
|
||||
@media (min-width: 861px){
|
||||
.glossary-home-aside__summary{
|
||||
cursor: default;
|
||||
}
|
||||
|
||||
.glossary-home-aside__chevron{
|
||||
display: none;
|
||||
}
|
||||
}
|
||||
|
||||
@media (prefers-color-scheme: dark){
|
||||
.glossary-home-aside__block,
|
||||
.glossary-home-aside__pill{
|
||||
background: rgba(255,255,255,0.04);
|
||||
}
|
||||
|
||||
.glossary-home-aside__summary:hover{
|
||||
background: rgba(255,255,255,0.03);
|
||||
}
|
||||
}
|
||||
</style>
|
||||
|
||||
<script is:inline>
|
||||
(() => {
|
||||
const syncMobileDisclosure = () => {
|
||||
const mobile = window.matchMedia("(max-width: 860px)").matches;
|
||||
const smallLandscape = window.matchMedia(
|
||||
"(orientation: landscape) and (max-width: 920px) and (max-height: 520px)"
|
||||
).matches;
|
||||
|
||||
const compact = mobile || smallLandscape;
|
||||
|
||||
document
|
||||
.querySelectorAll(".glossary-home-aside__disclosure")
|
||||
.forEach((el, index) => {
|
||||
if (!(el instanceof HTMLDetailsElement)) return;
|
||||
|
||||
if (compact) {
|
||||
if (!el.dataset.mobileInit) {
|
||||
el.open = index === 0;
|
||||
el.dataset.mobileInit = "true";
|
||||
}
|
||||
} else {
|
||||
el.open = true;
|
||||
}
|
||||
});
|
||||
};
|
||||
|
||||
if (document.readyState === "loading") {
|
||||
document.addEventListener("DOMContentLoaded", syncMobileDisclosure, { once: true });
|
||||
} else {
|
||||
syncMobileDisclosure();
|
||||
}
|
||||
|
||||
window.addEventListener("resize", syncMobileDisclosure);
|
||||
window.addEventListener("pageshow", syncMobileDisclosure);
|
||||
})();
|
||||
</script>
|
||||
460
src/components/GlossaryHomeHero.astro
Normal file
@@ -0,0 +1,460 @@
|
||||
---
|
||||
export interface Props {
|
||||
kicker?: string;
|
||||
title?: string;
|
||||
intro?: string;
|
||||
}
|
||||
|
||||
const {
|
||||
kicker = "Référentiel terminologique",
|
||||
title = "Glossaire archicratique",
|
||||
intro = "Ce glossaire n’est pas seulement un index de définitions. Il constitue une porte d’entrée dans la pensée archicratique : une cartographie raisonnée des concepts fondamentaux, des scènes, des dynamiques et des méta-régimes à partir desquels une société peut être décrite comme organisation de tensions et recherche de co-viabilité.",
|
||||
} = Astro.props;
|
||||
---
|
||||
|
||||
<header class="glossary-hero" id="glossary-hero">
|
||||
<p class="glossary-kicker">{kicker}</p>
|
||||
<h1>{title}</h1>
|
||||
|
||||
<div class="glossary-hero__collapsible">
|
||||
<p
|
||||
class="glossary-intro"
|
||||
id="glossary-hero-intro"
|
||||
aria-hidden="false"
|
||||
>
|
||||
{intro}
|
||||
</p>
|
||||
|
||||
<button
|
||||
class="glossary-hero__toggle"
|
||||
id="glossary-hero-toggle"
|
||||
type="button"
|
||||
aria-controls="glossary-hero-intro"
|
||||
aria-expanded="false"
|
||||
hidden
|
||||
>
|
||||
lire la suite
|
||||
</button>
|
||||
</div>
|
||||
|
||||
<h2
|
||||
class="glossary-hero-follow"
|
||||
id="glossary-hero-follow"
|
||||
aria-hidden="true"
|
||||
></h2>
|
||||
</header>
|
||||
|
||||
<style>
|
||||
.glossary-hero{
|
||||
position: sticky;
|
||||
top: var(--glossary-sticky-top);
|
||||
z-index: 12;
|
||||
margin-bottom: 28px;
|
||||
padding: 14px 16px 18px;
|
||||
border: 1px solid rgba(127,127,127,0.18);
|
||||
border-radius: 28px;
|
||||
background:
|
||||
linear-gradient(180deg, rgba(0,0,0,0.60), rgba(0,0,0,0.90)),
|
||||
radial-gradient(900px 240px at 20% 0%, rgba(0,217,255,0.08), transparent 60%);
|
||||
transition:
|
||||
padding 220ms cubic-bezier(.22,.8,.22,1),
|
||||
border-radius 220ms cubic-bezier(.22,.8,.22,1),
|
||||
background 300ms cubic-bezier(.22,.8,.22,1),
|
||||
border-color 300ms cubic-bezier(.22,.8,.22,1),
|
||||
box-shadow 300ms cubic-bezier(.22,.8,.22,1);
|
||||
backdrop-filter: blur(10px);
|
||||
-webkit-backdrop-filter: blur(10px);
|
||||
display: grid;
|
||||
row-gap: 12px;
|
||||
min-width: 0;
|
||||
overflow: clip;
|
||||
}
|
||||
|
||||
.glossary-kicker{
|
||||
margin: 0;
|
||||
font-size: 12px;
|
||||
letter-spacing: .12em;
|
||||
text-transform: uppercase;
|
||||
opacity: .72;
|
||||
}
|
||||
|
||||
.glossary-hero h1{
|
||||
margin: 0;
|
||||
font-size: clamp(2.2rem, 4vw, 3.15rem);
|
||||
line-height: 1.02;
|
||||
letter-spacing: -.04em;
|
||||
font-weight: 850;
|
||||
transition:
|
||||
font-size 220ms cubic-bezier(.22,.8,.22,1),
|
||||
line-height 220ms cubic-bezier(.22,.8,.22,1);
|
||||
min-width: 0;
|
||||
}
|
||||
|
||||
.glossary-hero__collapsible{
|
||||
display: grid;
|
||||
row-gap: 6px;
|
||||
min-width: 0;
|
||||
}
|
||||
|
||||
.glossary-intro{
|
||||
margin: 0;
|
||||
max-width: 72ch;
|
||||
font-size: 1.05rem;
|
||||
line-height: 1.55;
|
||||
opacity: .94;
|
||||
min-width: 0;
|
||||
transition:
|
||||
font-size 220ms cubic-bezier(.22,.8,.22,1),
|
||||
line-height 220ms cubic-bezier(.22,.8,.22,1),
|
||||
max-height 220ms cubic-bezier(.22,.8,.22,1),
|
||||
opacity 180ms ease;
|
||||
}
|
||||
|
||||
:global(body[data-edition-key="glossaire"] .glossary-hero p#glossary-hero-intro){
|
||||
padding-right: 0;
|
||||
scroll-margin-top: 0;
|
||||
}
|
||||
|
||||
.glossary-hero__toggle{
|
||||
display: inline-flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
width: fit-content;
|
||||
min-height: 30px;
|
||||
padding: 3px 0;
|
||||
border: 0;
|
||||
border-radius: 0;
|
||||
background: transparent;
|
||||
color: inherit;
|
||||
font-size: 12px;
|
||||
line-height: 1.2;
|
||||
letter-spacing: .01em;
|
||||
opacity: .72;
|
||||
cursor: pointer;
|
||||
text-decoration: underline;
|
||||
text-decoration-thickness: 1px;
|
||||
text-underline-offset: 2px;
|
||||
transition:
|
||||
opacity 120ms ease,
|
||||
transform 120ms ease;
|
||||
}
|
||||
|
||||
.glossary-hero__toggle:hover{
|
||||
opacity: .92;
|
||||
transform: translateY(-1px);
|
||||
}
|
||||
|
||||
.glossary-hero__toggle:focus-visible{
|
||||
outline: 2px solid rgba(0,217,255,0.24);
|
||||
outline-offset: 4px;
|
||||
border-radius: 4px;
|
||||
}
|
||||
|
||||
.glossary-hero__toggle[hidden]{
|
||||
display: none !important;
|
||||
}
|
||||
|
||||
.glossary-hero-follow{
|
||||
margin: 2px 0 0;
|
||||
min-height: var(--glossary-follow-height);
|
||||
display: block;
|
||||
max-width: min(100%, 22ch);
|
||||
opacity: 0;
|
||||
transform: translateY(10px) scale(.985);
|
||||
filter: blur(6px);
|
||||
transition:
|
||||
opacity 220ms cubic-bezier(.22,1,.36,1),
|
||||
transform 320ms cubic-bezier(.22,1,.36,1),
|
||||
filter 320ms cubic-bezier(.22,1,.36,1);
|
||||
pointer-events: none;
|
||||
white-space: nowrap;
|
||||
overflow: hidden;
|
||||
text-overflow: ellipsis;
|
||||
will-change: opacity, transform, filter;
|
||||
min-width: 0;
|
||||
}
|
||||
|
||||
.glossary-hero-follow.is-visible{
|
||||
opacity: 1;
|
||||
transform: translateY(0) scale(1);
|
||||
filter: blur(0);
|
||||
}
|
||||
|
||||
:global(body.glossary-home-follow-on) .glossary-hero{
|
||||
padding: 12px 14px 14px;
|
||||
border-bottom-left-radius: 18px;
|
||||
border-bottom-right-radius: 18px;
|
||||
}
|
||||
|
||||
:global(body.glossary-home-follow-on) .glossary-hero h1{
|
||||
font-size: clamp(1.7rem, 3.2vw, 2.2rem);
|
||||
line-height: 1.02;
|
||||
}
|
||||
|
||||
:global(body.glossary-home-follow-on:not(.glossary-home-hero-expanded)) .glossary-intro{
|
||||
font-size: .94rem;
|
||||
line-height: 1.34;
|
||||
max-height: 2.7em;
|
||||
overflow: hidden;
|
||||
display: -webkit-box;
|
||||
-webkit-box-orient: vertical;
|
||||
-webkit-line-clamp: 2;
|
||||
}
|
||||
|
||||
:global(body.glossary-home-follow-on:not(.glossary-home-hero-expanded)) .glossary-hero__toggle{
|
||||
display: inline-flex;
|
||||
}
|
||||
|
||||
@media (max-width: 760px){
|
||||
.glossary-hero{
|
||||
top: calc(var(--glossary-sticky-top) - 2px);
|
||||
padding: 12px 14px 16px;
|
||||
border-radius: 22px;
|
||||
row-gap: 10px;
|
||||
}
|
||||
|
||||
.glossary-hero h1{
|
||||
font-size: clamp(1.9rem, 8vw, 2.45rem);
|
||||
line-height: 1.02;
|
||||
letter-spacing: -.03em;
|
||||
}
|
||||
|
||||
.glossary-hero__collapsible{
|
||||
row-gap: 7px;
|
||||
}
|
||||
|
||||
.glossary-intro{
|
||||
max-width: 100%;
|
||||
width: 100%;
|
||||
font-size: .98rem;
|
||||
line-height: 1.44;
|
||||
}
|
||||
|
||||
:global(body.glossary-home-follow-on) .glossary-hero{
|
||||
padding: 10px 13px 12px;
|
||||
border-radius: 18px;
|
||||
}
|
||||
|
||||
:global(body.glossary-home-follow-on) .glossary-hero h1{
|
||||
font-size: clamp(1.45rem, 6vw, 1.8rem);
|
||||
}
|
||||
|
||||
:global(body.glossary-home-follow-on:not(.glossary-home-hero-expanded)) .glossary-intro{
|
||||
max-width: 100%;
|
||||
width: 100%;
|
||||
font-size: .86rem;
|
||||
line-height: 1.24;
|
||||
max-height: 2.48em;
|
||||
-webkit-line-clamp: 2;
|
||||
opacity: .9;
|
||||
}
|
||||
|
||||
.glossary-hero__toggle{
|
||||
min-height: 28px;
|
||||
font-size: 11.5px;
|
||||
}
|
||||
|
||||
.glossary-hero-follow{
|
||||
max-width: min(100%, 24ch);
|
||||
}
|
||||
}
|
||||
|
||||
@media (max-width: 520px){
|
||||
.glossary-hero{
|
||||
padding: 11px 12px 14px;
|
||||
border-radius: 20px;
|
||||
}
|
||||
|
||||
.glossary-intro{
|
||||
max-width: 100%;
|
||||
width: 100%;
|
||||
font-size: .94rem;
|
||||
line-height: 1.4;
|
||||
}
|
||||
|
||||
:global(body.glossary-home-follow-on) .glossary-hero{
|
||||
padding: 9px 11px 11px;
|
||||
}
|
||||
|
||||
:global(body.glossary-home-follow-on:not(.glossary-home-hero-expanded)) .glossary-intro{
|
||||
max-width: 100%;
|
||||
width: 100%;
|
||||
font-size: .84rem;
|
||||
line-height: 1.22;
|
||||
}
|
||||
}
|
||||
|
||||
@media (orientation: landscape) and (max-width: 920px) and (max-height: 520px){
|
||||
.glossary-hero{
|
||||
padding: 10px 12px 12px;
|
||||
border-radius: 16px;
|
||||
row-gap: 8px;
|
||||
}
|
||||
|
||||
.glossary-kicker{
|
||||
font-size: 10px;
|
||||
letter-spacing: .1em;
|
||||
}
|
||||
|
||||
.glossary-hero h1{
|
||||
font-size: clamp(1.35rem, 4vw, 1.8rem);
|
||||
line-height: 1;
|
||||
}
|
||||
|
||||
.glossary-intro{
|
||||
font-size: .84rem;
|
||||
line-height: 1.24;
|
||||
}
|
||||
|
||||
:global(body.glossary-home-follow-on) .glossary-hero{
|
||||
padding: 9px 11px 10px;
|
||||
border-radius: 16px;
|
||||
}
|
||||
|
||||
:global(body.glossary-home-follow-on) .glossary-hero h1{
|
||||
font-size: clamp(1.1rem, 3vw, 1.35rem);
|
||||
}
|
||||
|
||||
:global(body.glossary-home-follow-on:not(.glossary-home-hero-expanded)) .glossary-intro{
|
||||
font-size: .8rem;
|
||||
line-height: 1.18;
|
||||
max-height: 2.36em;
|
||||
-webkit-line-clamp: 2;
|
||||
opacity: .88;
|
||||
}
|
||||
|
||||
.glossary-hero__toggle{
|
||||
min-height: 24px;
|
||||
font-size: 11px;
|
||||
}
|
||||
|
||||
.glossary-hero-follow{
|
||||
max-width: min(100%, 26ch);
|
||||
}
|
||||
}
|
||||
|
||||
@media (max-width: 860px){
|
||||
.glossary-hero{
|
||||
position: static !important;
|
||||
top: auto !important;
|
||||
z-index: auto !important;
|
||||
margin-bottom: 18px !important;
|
||||
}
|
||||
|
||||
.glossary-hero-follow{
|
||||
display: none !important;
|
||||
min-height: 0 !important;
|
||||
opacity: 0 !important;
|
||||
transform: none !important;
|
||||
filter: none !important;
|
||||
}
|
||||
}
|
||||
|
||||
@media (orientation: landscape) and (max-width: 920px) and (max-height: 520px){
|
||||
.glossary-hero{
|
||||
position: static !important;
|
||||
top: auto !important;
|
||||
z-index: auto !important;
|
||||
margin-bottom: 14px !important;
|
||||
}
|
||||
|
||||
.glossary-hero-follow{
|
||||
display: none !important;
|
||||
min-height: 0 !important;
|
||||
opacity: 0 !important;
|
||||
transform: none !important;
|
||||
filter: none !important;
|
||||
}
|
||||
}
|
||||
|
||||
/* Neutralisation mobile/tablette : le hero n'est plus sticky, donc aucun état condensé. */
|
||||
@media (max-width: 860px){
|
||||
.glossary-hero{
|
||||
position: static !important;
|
||||
top: auto !important;
|
||||
z-index: auto !important;
|
||||
margin-bottom: 18px !important;
|
||||
padding: 12px 14px 16px !important;
|
||||
border-radius: 22px !important;
|
||||
row-gap: 8px !important;
|
||||
}
|
||||
|
||||
.glossary-hero h1,
|
||||
:global(body.glossary-home-follow-on) .glossary-hero h1{
|
||||
font-size: clamp(2rem, 6.2vw, 2.75rem) !important;
|
||||
line-height: 1.04 !important;
|
||||
letter-spacing: -.035em !important;
|
||||
max-width: 100%;
|
||||
overflow-wrap: normal;
|
||||
word-break: normal;
|
||||
hyphens: none;
|
||||
text-wrap: balance;
|
||||
}
|
||||
|
||||
.glossary-intro,
|
||||
:global(body.glossary-home-follow-on:not(.glossary-home-hero-expanded)) .glossary-intro{
|
||||
max-width: 100% !important;
|
||||
width: 100% !important;
|
||||
max-height: none !important;
|
||||
overflow: visible !important;
|
||||
display: block !important;
|
||||
-webkit-line-clamp: unset !important;
|
||||
-webkit-box-orient: unset !important;
|
||||
font-size: .94rem !important;
|
||||
line-height: 1.4 !important;
|
||||
opacity: .94 !important;
|
||||
padding-right: 0 !important;
|
||||
scroll-margin-top: 0 !important;
|
||||
}
|
||||
|
||||
.glossary-hero__toggle,
|
||||
.glossary-hero-follow{
|
||||
display: none !important;
|
||||
}
|
||||
}
|
||||
|
||||
/* Mobile paysage compact : même logique, mais plus dense. */
|
||||
@media (orientation: landscape) and (max-width: 920px) and (max-height: 520px){
|
||||
.glossary-hero{
|
||||
padding: 8px 10px 9px !important;
|
||||
border-radius: 14px !important;
|
||||
row-gap: 5px !important;
|
||||
margin-bottom: 10px !important;
|
||||
}
|
||||
|
||||
.glossary-kicker{
|
||||
font-size: 9px !important;
|
||||
letter-spacing: .11em !important;
|
||||
}
|
||||
|
||||
.glossary-hero h1,
|
||||
:global(body.glossary-home-follow-on) .glossary-hero h1{
|
||||
font-size: clamp(1.55rem, 4.2vw, 1.9rem) !important;
|
||||
line-height: 1.03 !important;
|
||||
letter-spacing: -.025em !important;
|
||||
}
|
||||
|
||||
.glossary-intro,
|
||||
:global(body.glossary-home-follow-on:not(.glossary-home-hero-expanded)) .glossary-intro{
|
||||
font-size: .72rem !important;
|
||||
line-height: 1.18 !important;
|
||||
}
|
||||
}
|
||||
|
||||
/* Tablette large / iPad landscape : le follow reste lisible, jamais tronqué brutalement. */
|
||||
@media (min-width: 861px) and (max-width: 1240px){
|
||||
.glossary-hero h1{
|
||||
font-size: clamp(2.35rem, 4.2vw, 3.05rem) !important;
|
||||
line-height: 1.03 !important;
|
||||
}
|
||||
|
||||
.glossary-hero-follow{
|
||||
max-width: 100% !important;
|
||||
white-space: normal !important;
|
||||
overflow: visible !important;
|
||||
text-overflow: clip !important;
|
||||
font-size: clamp(1.55rem, 3.1vw, 2.05rem) !important;
|
||||
line-height: 1.08 !important;
|
||||
}
|
||||
}
|
||||
</style>
|
||||
133
src/components/GlossaryHomeSection.astro
Normal file
@@ -0,0 +1,133 @@
|
||||
---
|
||||
export interface Props {
|
||||
id?: string;
|
||||
title: string;
|
||||
intro?: string;
|
||||
followSection?: string;
|
||||
ctaHref?: string;
|
||||
ctaLabel?: string;
|
||||
}
|
||||
|
||||
const {
|
||||
id,
|
||||
title,
|
||||
intro,
|
||||
followSection,
|
||||
ctaHref,
|
||||
ctaLabel,
|
||||
} = Astro.props;
|
||||
|
||||
const resolvedFollowSection = (followSection || title || "").trim();
|
||||
const showCta = Boolean(ctaHref && ctaLabel);
|
||||
---
|
||||
|
||||
<section id={id} class="glossary-section">
|
||||
<div class="glossary-section__head">
|
||||
<div>
|
||||
<h2 data-follow-section={resolvedFollowSection}>{title}</h2>
|
||||
|
||||
{intro && (
|
||||
<p class="glossary-intro">{intro}</p>
|
||||
)}
|
||||
</div>
|
||||
|
||||
{showCta && (
|
||||
<a class="glossary-cta" href={ctaHref}>
|
||||
{ctaLabel}
|
||||
</a>
|
||||
)}
|
||||
</div>
|
||||
|
||||
<slot />
|
||||
</section>
|
||||
|
||||
<style>
|
||||
.glossary-section{
|
||||
margin-top: 34px;
|
||||
scroll-margin-top: calc(var(--glossary-sticky-top) + 150px);
|
||||
}
|
||||
|
||||
.glossary-section__head{
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
align-items: start;
|
||||
gap: 14px;
|
||||
flex-wrap: wrap;
|
||||
margin-bottom: 12px;
|
||||
}
|
||||
|
||||
.glossary-section h2{
|
||||
margin: 0;
|
||||
font-size: clamp(1.8rem, 3vw, 2.55rem);
|
||||
line-height: 1.06;
|
||||
letter-spacing: -.03em;
|
||||
font-weight: 800;
|
||||
}
|
||||
|
||||
.glossary-intro{
|
||||
margin: 0;
|
||||
max-width: 72ch;
|
||||
font-size: 1rem;
|
||||
line-height: 1.52;
|
||||
opacity: .94;
|
||||
}
|
||||
|
||||
.glossary-section__head .glossary-intro{
|
||||
margin-top: 8px;
|
||||
}
|
||||
|
||||
.glossary-cta{
|
||||
display: inline-flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
min-height: 38px;
|
||||
border: 1px solid var(--glossary-border-strong);
|
||||
border-radius: 999px;
|
||||
padding: 6px 13px;
|
||||
color: var(--glossary-accent);
|
||||
text-decoration: none;
|
||||
white-space: nowrap;
|
||||
transition: transform 120ms ease, background 120ms ease;
|
||||
}
|
||||
|
||||
.glossary-cta:hover{
|
||||
background: var(--glossary-bg-soft-strong);
|
||||
text-decoration: none;
|
||||
transform: translateY(-1px);
|
||||
}
|
||||
|
||||
@media (max-width: 760px){
|
||||
.glossary-section{
|
||||
margin-top: 24px;
|
||||
scroll-margin-top: calc(var(--glossary-sticky-top) + 110px);
|
||||
}
|
||||
|
||||
.glossary-section__head{
|
||||
flex-direction: column;
|
||||
align-items: stretch;
|
||||
gap: 10px;
|
||||
margin-bottom: 10px;
|
||||
}
|
||||
|
||||
.glossary-section h2{
|
||||
font-size: clamp(1.45rem, 6vw, 1.95rem);
|
||||
line-height: 1.05;
|
||||
}
|
||||
|
||||
.glossary-intro{
|
||||
font-size: .95rem;
|
||||
line-height: 1.42;
|
||||
}
|
||||
|
||||
.glossary-section__head .glossary-intro{
|
||||
margin-top: 6px;
|
||||
}
|
||||
|
||||
.glossary-cta{
|
||||
width: fit-content;
|
||||
min-height: 35px;
|
||||
padding: 5px 12px;
|
||||
font-size: .95rem;
|
||||
}
|
||||
}
|
||||
</style>
|
||||
286
src/components/GlossaryPortalAside.astro
Normal file
@@ -0,0 +1,286 @@
|
||||
---
|
||||
interface LinkItem {
|
||||
href: string;
|
||||
label: string;
|
||||
}
|
||||
|
||||
interface Props {
|
||||
ariaLabel: string;
|
||||
title: string;
|
||||
meta?: string;
|
||||
backHref?: string;
|
||||
backLabel?: string;
|
||||
pageItems?: LinkItem[];
|
||||
usefulLinks?: LinkItem[];
|
||||
}
|
||||
|
||||
const {
|
||||
ariaLabel,
|
||||
title,
|
||||
meta,
|
||||
backHref = "/glossaire/",
|
||||
backLabel = "← Retour au glossaire",
|
||||
pageItems = [],
|
||||
usefulLinks = [],
|
||||
} = Astro.props;
|
||||
---
|
||||
|
||||
<nav class="glossary-portal-aside" aria-label={ariaLabel}>
|
||||
<div class="glossary-portal-aside__block">
|
||||
<a class="glossary-portal-aside__back" href={backHref}>{backLabel}</a>
|
||||
<div class="glossary-portal-aside__title">{title}</div>
|
||||
{meta && <div class="glossary-portal-aside__meta">{meta}</div>}
|
||||
</div>
|
||||
|
||||
{pageItems.length > 0 && (
|
||||
<details class="glossary-portal-aside__block glossary-portal-aside__disclosure">
|
||||
<summary class="glossary-portal-aside__summary">
|
||||
<span class="glossary-portal-aside__heading">Dans cette page</span>
|
||||
<span class="glossary-portal-aside__chevron" aria-hidden="true">▾</span>
|
||||
</summary>
|
||||
|
||||
<div class="glossary-portal-aside__panel">
|
||||
<ul class="glossary-portal-aside__list">
|
||||
{pageItems.map((item) => (
|
||||
<li><a href={item.href}>{item.label}</a></li>
|
||||
))}
|
||||
</ul>
|
||||
</div>
|
||||
</details>
|
||||
)}
|
||||
|
||||
{usefulLinks.length > 0 && (
|
||||
<details class="glossary-portal-aside__block glossary-portal-aside__disclosure">
|
||||
<summary class="glossary-portal-aside__summary">
|
||||
<span class="glossary-portal-aside__heading">Renvois utiles</span>
|
||||
<span class="glossary-portal-aside__chevron" aria-hidden="true">▾</span>
|
||||
</summary>
|
||||
|
||||
<div class="glossary-portal-aside__panel">
|
||||
<ul class="glossary-portal-aside__list">
|
||||
{usefulLinks.map((item) => (
|
||||
<li><a href={item.href}>{item.label}</a></li>
|
||||
))}
|
||||
</ul>
|
||||
</div>
|
||||
</details>
|
||||
)}
|
||||
</nav>
|
||||
|
||||
<style>
|
||||
.glossary-portal-aside{
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
gap: 14px;
|
||||
}
|
||||
|
||||
.glossary-portal-aside__block{
|
||||
border: 1px solid rgba(127,127,127,0.22);
|
||||
border-radius: 16px;
|
||||
padding: 14px;
|
||||
background: rgba(127,127,127,0.05);
|
||||
}
|
||||
|
||||
.glossary-portal-aside__back{
|
||||
display: inline-block;
|
||||
margin-bottom: 10px;
|
||||
font-size: 14px;
|
||||
font-weight: 700;
|
||||
line-height: 1.35;
|
||||
text-decoration: none;
|
||||
}
|
||||
|
||||
.glossary-portal-aside__title{
|
||||
font-size: 16px;
|
||||
font-weight: 800;
|
||||
letter-spacing: .2px;
|
||||
line-height: 1.3;
|
||||
}
|
||||
|
||||
.glossary-portal-aside__meta{
|
||||
margin-top: 8px;
|
||||
font-size: 13px;
|
||||
line-height: 1.4;
|
||||
opacity: .8;
|
||||
}
|
||||
|
||||
.glossary-portal-aside__heading{
|
||||
margin: 0 0 11px;
|
||||
font-size: 14px;
|
||||
font-weight: 800;
|
||||
line-height: 1.35;
|
||||
opacity: .94;
|
||||
}
|
||||
|
||||
.glossary-portal-aside__list{
|
||||
list-style: none;
|
||||
margin: 0;
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
.glossary-portal-aside__list li{
|
||||
margin: 7px 0;
|
||||
}
|
||||
|
||||
.glossary-portal-aside__list a{
|
||||
text-decoration: none;
|
||||
font-size: 14px;
|
||||
line-height: 1.4;
|
||||
}
|
||||
|
||||
@media (max-width: 980px){
|
||||
.glossary-portal-aside{
|
||||
gap: 12px;
|
||||
}
|
||||
|
||||
.glossary-portal-aside__block{
|
||||
padding: 12px;
|
||||
border-radius: 14px;
|
||||
}
|
||||
}
|
||||
|
||||
@media (max-width: 760px){
|
||||
.glossary-portal-aside{
|
||||
gap: 10px;
|
||||
}
|
||||
|
||||
.glossary-portal-aside__block{
|
||||
padding: 11px 12px;
|
||||
border-radius: 14px;
|
||||
}
|
||||
|
||||
.glossary-portal-aside__back{
|
||||
margin-bottom: 8px;
|
||||
font-size: 13px;
|
||||
}
|
||||
|
||||
.glossary-portal-aside__title{
|
||||
font-size: 15px;
|
||||
line-height: 1.22;
|
||||
}
|
||||
|
||||
.glossary-portal-aside__meta{
|
||||
margin-top: 6px;
|
||||
font-size: 12px;
|
||||
line-height: 1.32;
|
||||
}
|
||||
|
||||
.glossary-portal-aside__heading{
|
||||
margin-bottom: 8px;
|
||||
font-size: 13px;
|
||||
line-height: 1.22;
|
||||
}
|
||||
|
||||
.glossary-portal-aside__list li{
|
||||
margin: 5px 0;
|
||||
}
|
||||
|
||||
.glossary-portal-aside__list a{
|
||||
font-size: 12.5px;
|
||||
line-height: 1.3;
|
||||
}
|
||||
}
|
||||
|
||||
@media (orientation: landscape) and (max-width: 920px) and (max-height: 520px){
|
||||
.glossary-portal-aside{
|
||||
gap: 8px;
|
||||
}
|
||||
|
||||
.glossary-portal-aside__block{
|
||||
padding: 9px 10px;
|
||||
border-radius: 12px;
|
||||
}
|
||||
|
||||
.glossary-portal-aside__back{
|
||||
margin-bottom: 6px;
|
||||
font-size: 12px;
|
||||
}
|
||||
|
||||
.glossary-portal-aside__title{
|
||||
font-size: 14px;
|
||||
line-height: 1.18;
|
||||
}
|
||||
|
||||
.glossary-portal-aside__meta{
|
||||
margin-top: 4px;
|
||||
font-size: 11px;
|
||||
line-height: 1.24;
|
||||
}
|
||||
|
||||
.glossary-portal-aside__heading{
|
||||
margin-bottom: 6px;
|
||||
font-size: 12px;
|
||||
line-height: 1.18;
|
||||
}
|
||||
|
||||
.glossary-portal-aside__list li{
|
||||
margin: 4px 0;
|
||||
}
|
||||
|
||||
.glossary-portal-aside__list a{
|
||||
font-size: 11.5px;
|
||||
line-height: 1.22;
|
||||
}
|
||||
}
|
||||
|
||||
@media (prefers-color-scheme: dark){
|
||||
.glossary-portal-aside__block{
|
||||
background: rgba(255,255,255,0.04);
|
||||
}
|
||||
}
|
||||
|
||||
.glossary-portal-aside__disclosure{
|
||||
padding: 0;
|
||||
overflow: hidden;
|
||||
}
|
||||
|
||||
.glossary-portal-aside__summary{
|
||||
list-style: none;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: space-between;
|
||||
gap: 12px;
|
||||
padding: 14px;
|
||||
cursor: pointer;
|
||||
user-select: none;
|
||||
}
|
||||
|
||||
.glossary-portal-aside__summary::-webkit-details-marker{
|
||||
display: none;
|
||||
}
|
||||
|
||||
.glossary-portal-aside__summary .glossary-portal-aside__heading{
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
.glossary-portal-aside__disclosure:not([open]) .glossary-portal-aside__panel{
|
||||
display: none;
|
||||
}
|
||||
|
||||
.glossary-portal-aside__chevron{
|
||||
flex: 0 0 auto;
|
||||
font-size: 14px;
|
||||
line-height: 1;
|
||||
opacity: .72;
|
||||
transition: transform 160ms ease, opacity 160ms ease;
|
||||
}
|
||||
|
||||
.glossary-portal-aside__disclosure[open] .glossary-portal-aside__chevron{
|
||||
transform: rotate(180deg);
|
||||
opacity: .96;
|
||||
}
|
||||
|
||||
.glossary-portal-aside__panel{
|
||||
padding: 0 14px 14px;
|
||||
}
|
||||
|
||||
@media (max-width: 980px){
|
||||
.glossary-portal-aside__summary{
|
||||
padding: 12px;
|
||||
}
|
||||
|
||||
.glossary-portal-aside__panel{
|
||||
padding: 0 12px 12px;
|
||||
}
|
||||
}
|
||||
</style>
|
||||