chore: add ops scripts + diagrams PNG renders
This commit is contained in:
Binary file not shown.
|
After Width: | Height: | Size: 221 KiB |
Binary file not shown.
|
After Width: | Height: | Size: 187 KiB |
BIN
docs/diagrams/out/archicratie-web-edition-git-ci-workflow-v1.png
Normal file
BIN
docs/diagrams/out/archicratie-web-edition-git-ci-workflow-v1.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 395 KiB |
BIN
docs/diagrams/out/archicratie-web-edition-global-verbatim-v2.png
Normal file
BIN
docs/diagrams/out/archicratie-web-edition-global-verbatim-v2.png
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 284 KiB |
Binary file not shown.
|
After Width: | Height: | Size: 304 KiB |
Binary file not shown.
|
After Width: | Height: | Size: 360 KiB |
159
scripts/build-annotations-index.mjs
Normal file
159
scripts/build-annotations-index.mjs
Normal file
@@ -0,0 +1,159 @@
|
|||||||
|
// scripts/build-annotations-index.mjs
|
||||||
|
import fs from "node:fs/promises";
|
||||||
|
import path from "node:path";
|
||||||
|
import YAML from "yaml";
|
||||||
|
|
||||||
|
// Parse CLI flags for the input directory and output file.
// Accepts "--in <dir>" / "--in=<dir>" and "--out <file>" / "--out=<file>";
// anything else is ignored. Returns the defaults when no flags are given.
function parseArgs(argv) {
  const result = {
    inDir: "src/annotations",
    outFile: "dist/annotations-index.json",
  };

  let i = 0;
  while (i < argv.length) {
    const token = argv[i];

    if (token === "--in" && argv[i + 1]) {
      i += 1;
      result.inDir = argv[i];
    } else if (token.startsWith("--in=")) {
      result.inDir = token.slice("--in=".length);
    }

    if (token === "--out" && argv[i + 1]) {
      i += 1;
      result.outFile = argv[i];
    } else if (token.startsWith("--out=")) {
      result.outFile = token.slice("--out=".length);
    }

    i += 1;
  }

  return result;
}
|
||||||
|
|
||||||
|
// True iff `p` is accessible on disk (any file type); never throws.
async function exists(p) {
  try {
    await fs.access(p);
    return true;
  } catch {
    return false;
  }
}
|
||||||
|
|
||||||
|
// Recursively list every file path below `dir`, depth-first in readdir order.
// Directories themselves are not included in the result.
async function walk(dir) {
  const files = [];
  const entries = await fs.readdir(dir, { withFileTypes: true });
  for (const entry of entries) {
    const full = path.join(dir, entry.name);
    if (entry.isDirectory()) {
      files.push(...(await walk(full)));
    } else {
      files.push(full);
    }
  }
  return files;
}
|
||||||
|
|
||||||
|
// Map an annotation file path to its page key:
// src/annotations/<page>.yml -> "<page>". Sub-directories are kept,
// backslashes are normalized to "/", and the .yml/.yaml/.json suffix dropped.
function inferPageKeyFromFile(inDirAbs, fileAbs) {
  const relative = path.relative(inDirAbs, fileAbs).replace(/\\/g, "/");
  const withoutExt = relative.replace(/\.(ya?ml|json)$/i, "");
  return withoutExt;
}
|
||||||
|
|
||||||
|
// Tiny invariant helper: throw an Error carrying `msg` when `cond` is falsy.
function assert(cond, msg) {
  if (cond) return;
  throw new Error(msg);
}
|
||||||
|
|
||||||
|
// True only for non-null, non-array object values ("plain-ish" objects).
function isPlainObject(x) {
  if (x === null || x === undefined) return false;
  return typeof x === "object" && !Array.isArray(x);
}
|
||||||
|
|
||||||
|
// Strip leading and trailing "/" runs from a page key; null/undefined -> "".
function normalizePageKey(s) {
  const text = String(s || "");
  return text.replace(/^\/+|\/+$/g, "");
}
|
||||||
|
|
||||||
|
// Validate one annotations document and return a prototype-free map of
// para-id -> normalized entry. Throws (via assert) with messages prefixed
// by `fileRel` on any structural problem.
function validateAndNormalizeDoc(doc, pageKey, fileRel) {
  assert(isPlainObject(doc), `${fileRel}: document must be an object`);
  assert(doc.schema === 1, `${fileRel}: schema must be 1`);

  // An explicit `page` key, when present, must agree with the file path.
  if (doc.page != null) {
    assert(
      normalizePageKey(doc.page) === pageKey,
      `${fileRel}: page mismatch (page="${doc.page}" vs path="${pageKey}")`
    );
  }
  assert(isPlainObject(doc.paras), `${fileRel}: missing object key "paras"`);

  // Optional per-entry keys that, when present, must be arrays.
  const arrayFields = ["refs", "authors", "quotes", "media", "comments_editorial"];
  const normalized = Object.create(null);

  for (const [paraId, entry] of Object.entries(doc.paras)) {
    assert(/^p-\d+-/i.test(paraId), `${fileRel}: invalid para id "${paraId}"`);

    // An entry may be empty (null/undefined), but never a non-object value.
    assert(entry == null || isPlainObject(entry), `${fileRel}: paras.${paraId} must be an object`);

    const copy = entry ? { ...entry } : {};

    // Non-destructive sanity checks: types are verified, values are untouched.
    for (const field of arrayFields) {
      if (copy[field] != null) {
        assert(Array.isArray(copy[field]), `${fileRel}: paras.${paraId}.${field} must be an array`);
      }
    }

    normalized[paraId] = copy;
  }

  return normalized;
}
|
||||||
|
|
||||||
|
// Load one annotations document from disk: JSON for *.json, YAML otherwise.
async function readDoc(fileAbs) {
  const raw = await fs.readFile(fileAbs, "utf8");
  const isJson = /\.json$/i.test(fileAbs);
  return isJson ? JSON.parse(raw) : YAML.parse(raw);
}
|
||||||
|
|
||||||
|
// Build the annotations index (one JSON file) from every YAML/JSON document
// found under the input directory. Exits 0 early (skip) when there is
// nothing to do; throws on any invalid document, which the top-level catch
// turns into exit code 1.
async function main() {
  const { inDir, outFile } = parseArgs(process.argv.slice(2));
  const CWD = process.cwd();

  // Resolve both paths against the working directory unless already absolute.
  const inDirAbs = path.isAbsolute(inDir) ? inDir : path.join(CWD, inDir);
  const outAbs = path.isAbsolute(outFile) ? outFile : path.join(CWD, outFile);

  // Antifragile: a missing input directory is a no-op, not an error.
  if (!(await exists(inDirAbs))) {
    console.log(`ℹ️ annotations-index: skip (input missing): ${inDir}`);
    process.exit(0);
  }

  const files = (await walk(inDirAbs)).filter((p) => /\.(ya?ml|json)$/i.test(p));
  if (!files.length) {
    console.log(`ℹ️ annotations-index: skip (no .yml/.yaml/.json found in): ${inDir}`);
    process.exit(0);
  }

  // Prototype-free map: page key -> { paras }.
  const pages = Object.create(null);
  let paraCount = 0;

  for (const f of files) {
    const fileRel = path.relative(CWD, f).replace(/\\/g, "/");
    const pageKey = normalizePageKey(inferPageKeyFromFile(inDirAbs, f));
    assert(pageKey, `${fileRel}: cannot infer page key`);

    let doc;
    try {
      doc = await readDoc(f);
    } catch (e) {
      // Re-wrap parse errors so the report names the offending file.
      throw new Error(`${fileRel}: parse failed: ${String(e?.message ?? e)}`);
    }

    const paras = validateAndNormalizeDoc(doc, pageKey, fileRel);

    // One file = one page (canonical layout); duplicates are a hard error.
    assert(!pages[pageKey], `${fileRel}: duplicate page "${pageKey}" (only one file per page)`);
    pages[pageKey] = { paras };
    paraCount += Object.keys(paras).length;
  }

  const out = {
    schema: 1,
    generatedAt: new Date().toISOString(),
    pages,
    stats: {
      pages: Object.keys(pages).length,
      paras: paraCount,
    },
  };

  // Write compact JSON (no pretty-printing) after ensuring the directory.
  await fs.mkdir(path.dirname(outAbs), { recursive: true });
  await fs.writeFile(outAbs, JSON.stringify(out), "utf8");

  console.log(`✅ annotations-index: pages=${out.stats.pages} paras=${out.stats.paras} -> ${path.relative(CWD, outAbs)}`);
}
|
||||||
|
|
||||||
|
// Entry point: any uncaught failure is reported and maps to exit code 1.
main().catch(handleFatal);

function handleFatal(err) {
  console.error("FAIL: build-annotations-index crashed:", err);
  process.exit(1);
}
|
||||||
97
scripts/check-annotations-media.mjs
Normal file
97
scripts/check-annotations-media.mjs
Normal file
@@ -0,0 +1,97 @@
|
|||||||
|
import fs from "node:fs/promises";
|
||||||
|
import path from "node:path";
|
||||||
|
import YAML from "yaml";
|
||||||
|
|
||||||
|
const CWD = process.cwd();
// Annotation sources scanned by this checker.
const ANNO_DIR = path.join(CWD, "src", "annotations");
// Static web root: "/media/..." URLs must resolve to files under here.
const PUBLIC_DIR = path.join(CWD, "public");
|
||||||
|
|
||||||
|
// Report whether `p` is accessible on disk; resolves false instead of rejecting.
async function exists(p) {
  return fs.access(p).then(
    () => true,
    () => false
  );
}
|
||||||
|
|
||||||
|
// Collect every file path below `dir`, depth-first in readdir order.
async function walk(dir) {
  const entries = await fs.readdir(dir, { withFileTypes: true });
  const collected = [];
  for (const entry of entries) {
    const abs = path.join(dir, entry.name);
    if (entry.isDirectory()) {
      const nested = await walk(abs);
      collected.push(...nested);
    } else {
      collected.push(abs);
    }
  }
  return collected;
}
|
||||||
|
|
||||||
|
// Parse a raw annotations document: JSON when the path ends in .json
// (case-insensitive), YAML otherwise.
function parseDoc(raw, fileAbs) {
  const looksJson = /\.json$/i.test(fileAbs);
  return looksJson ? JSON.parse(raw) : YAML.parse(raw);
}
|
||||||
|
|
||||||
|
// Plain-object check: truthy, typeof "object", and not an array.
function isPlainObject(x) {
  return Boolean(x) && typeof x === "object" && Array.isArray(x) === false;
}
|
||||||
|
|
||||||
|
// Map a site URL path to its on-disk location under public/:
// "/media/x.png" -> "<PUBLIC_DIR>/media/x.png"; anything else -> null.
// Query string and fragment are stripped before matching.
function toPublicPathFromUrl(urlPath) {
  const [withoutQuery] = String(urlPath || "").split("?");
  const [clean] = withoutQuery.split("#");
  if (clean.startsWith("/media/")) {
    return path.join(PUBLIC_DIR, clean.replace(/^\/+/, ""));
  }
  return null;
}
|
||||||
|
|
||||||
|
// Verify that every "/media/..." src referenced by annotation documents
// exists under public/. Unparseable annotation files also count against
// the check. Exits 1 with a per-item report when anything is missing,
// 0 otherwise.
async function main() {
  // No annotations directory at all -> nothing to check.
  if (!(await exists(ANNO_DIR))) {
    console.log("✅ annotations-media: aucun src/annotations — rien à vérifier.");
    process.exit(0);
  }

  const files = (await walk(ANNO_DIR)).filter((p) => /\.(ya?ml|json)$/i.test(p));
  let checked = 0;
  let missing = 0;
  const notes = [];

  for (const f of files) {
    const rel = path.relative(CWD, f).replace(/\\/g, "/");
    const raw = await fs.readFile(f, "utf8");

    let doc;
    try { doc = parseDoc(raw, f); }
    catch (e) {
      // A broken annotations file fails the check like a missing asset.
      missing++;
      notes.push(`- PARSE FAIL: ${rel} (${String(e?.message ?? e)})`);
      continue;
    }

    // Documents not matching the expected schema are silently skipped
    // (schema validation is the index builder's job, not this checker's).
    if (!isPlainObject(doc) || doc.schema !== 1 || !isPlainObject(doc.paras)) continue;

    for (const [paraId, entry] of Object.entries(doc.paras)) {
      const media = entry?.media;
      if (!Array.isArray(media)) continue;

      for (const m of media) {
        const src = String(m?.src || "");
        if (!src.startsWith("/media/")) continue; // external URLs are fine, as are other future conventions

        checked++;
        const p = toPublicPathFromUrl(src);
        if (!p) continue;

        if (!(await exists(p))) {
          missing++;
          notes.push(`- MISSING MEDIA: ${src} (from ${rel} para ${paraId})`);
        }
      }
    }
  }

  if (missing > 0) {
    console.error(`FAIL: annotations media missing (checked=${checked} missing=${missing})`);
    for (const n of notes) console.error(n);
    process.exit(1);
  }

  console.log(`✅ annotations-media OK: checked=${checked}`);
}
|
||||||
|
|
||||||
|
// Entry point: report any crash and exit non-zero.
main().catch(onFatal);

function onFatal(err) {
  console.error("FAIL: check-annotations-media crashed:", err);
  process.exit(1);
}
|
||||||
131
scripts/switch-archicratie.sh
Executable file
131
scripts/switch-archicratie.sh
Executable file
@@ -0,0 +1,131 @@
|
|||||||
|
#!/usr/bin/env bash
set -euo pipefail

# switch-archicratie.sh — SAFE switch of LIVE + STAGING (with timestamped backups)
#
# Usage (recommended on the NAS):
#   sudo bash -c 'LIVE_PORT=8081 /volume2/docker/archicratie-web/current/scripts/switch-archicratie.sh'
#   sudo bash -c 'LIVE_PORT=8082 /volume2/docker/archicratie-web/current/scripts/switch-archicratie.sh'
#
# Usage (local R&D testing, without the NAS):
#   D=/tmp/dynamic-test LIVE_PORT=8081 bash scripts/switch-archicratie.sh --dry-run
#   D=/tmp/dynamic-test LIVE_PORT=8081 bash scripts/switch-archicratie.sh
|
||||||
|
# Print the help text. The heredoc is user-facing output and is kept
# verbatim (in French); the quoted 'EOF' prevents any expansion inside it.
usage() {
  cat <<'EOF'
SAFE switch LIVE + STAGING (avec backups horodatés).

Variables / options :
  LIVE_PORT=8081|8082                        (obligatoire) port LIVE cible
  D=/volume2/docker/edge/config/dynamic      (optionnel) dossier des yml Traefik dynamiques
  --dry-run                                  n'écrit rien, affiche seulement ce qui serait fait
  -h, --help                                 aide

Exemples :
  sudo bash -c 'LIVE_PORT=8082 /volume2/docker/archicratie-web/current/scripts/switch-archicratie.sh'
  D=/tmp/dynamic-test LIVE_PORT=8081 bash scripts/switch-archicratie.sh --dry-run
EOF
}
|
||||||
|
|
||||||
|
# Parse CLI options; unrecognized arguments are deliberately ignored.
DRY_RUN=0
while [[ $# -gt 0 ]]; do
  case "$1" in
    --dry-run)
      DRY_RUN=1
      ;;
    -h|--help)
      usage
      exit 0
      ;;
  esac
  shift
done
|
||||||
|
|
||||||
|
# Traefik dynamic-config directory (override with D=... for local testing).
D="${D:-/volume2/docker/edge/config/dynamic}"
F_LIVE="$D/20-archicratie-backend.yml"
F_STAG="$D/21-archicratie-staging.yml"

# LIVE_PORT is mandatory and must be one of the two blue/green ports.
LIVE_PORT="${LIVE_PORT:-}"
if [[ "$LIVE_PORT" != "8081" && "$LIVE_PORT" != "8082" ]]; then
  echo "❌ LIVE_PORT doit valoir 8081 ou 8082."
  usage
  exit 1
fi

# Both dynamic files must already exist: this script only rewrites them,
# it never creates them.
if [[ ! -f "$F_LIVE" || ! -f "$F_STAG" ]]; then
  echo "❌ Fichiers manquants :"
  echo "  $F_LIVE"
  echo "  $F_STAG"
  echo "  (Astuce R&D locale : mets D=/tmp/dynamic-test et crée 20/21 dedans.)"
  exit 1
fi
|
||||||
|
|
||||||
|
# Derive the staging port: whichever of 8081/8082 LIVE does not use.
if [[ "$LIVE_PORT" == "8081" ]]; then
  OTHER_PORT="8082"
else
  OTHER_PORT="8081"
fi

# Print the backend `- url:` lines of one dynamic file (best effort).
show_urls() {
  local file="$1"
  echo "— $file"
  grep -nE '^\s*-\s*url:\s*".*"' "$file" || true
}
|
||||||
|
|
||||||
|
# Guard: each file must contain at least one `- url: "` line, otherwise the
# sed rewrite below would silently do nothing.
grep -qE '^\s*-\s*url:\s*"' "$F_LIVE" || { echo "❌ Format inattendu dans $F_LIVE (pas de - url: \")"; exit 1; }
grep -qE '^\s*-\s*url:\s*"' "$F_STAG" || { echo "❌ Format inattendu dans $F_STAG (pas de - url: \")"; exit 1; }

# Show the current state before touching anything.
echo "Avant :"
show_urls "$F_LIVE"
show_urls "$F_STAG"
echo

echo "Plan : LIVE -> $LIVE_PORT ; STAGING -> $OTHER_PORT"
echo

# --dry-run stops here: nothing has been written yet.
if [[ "$DRY_RUN" == "1" ]]; then
  echo "DRY-RUN : aucune écriture."
  exit 0
fi
|
||||||
|
|
||||||
|
# Timestamped backups of both files before any modification.
TS="$(date +%F-%H%M%S)"
cp -a "$F_LIVE" "$F_LIVE.bak.$TS"
cp -a "$F_STAG" "$F_STAG.bak.$TS"

# Portable in-place sed: BSD sed (macOS) requires an explicit empty backup
# suffix, GNU sed (Linux/DSM) takes -i alone.
sed_inplace() {
  local expr="$1" file="$2"
  case "$(uname -s)" in
    Darwin) sed -i '' -e "$expr" "$file" ;;
    *)      sed -i -e "$expr" "$file" ;;
  esac
}
|
||||||
|
|
||||||
|
# Targeted replacement: ONLY lines of the form `- url: "http://127.0.0.1:808X"`
# are touched; everything else in the yml files is left as-is.
sed_inplace \
  "s#^\([[:space:]]*-[[:space:]]*url:[[:space:]]*\"http://127\\.0\\.0\\.1:\\)808[12]\\(\"[[:space:]]*\)#\\1${LIVE_PORT}\\2#g" \
  "$F_LIVE"

sed_inplace \
  "s#^\([[:space:]]*-[[:space:]]*url:[[:space:]]*\"http://127\\.0\\.0\\.1:\\)808[12]\\(\"[[:space:]]*\)#\\1${OTHER_PORT}\\2#g" \
  "$F_STAG"

# Post-check: both files must now reference the expected ports; on failure,
# point the operator at the backups to restore.
grep -qE "http://127\.0\.0\.1:${LIVE_PORT}\"" "$F_LIVE" || {
  echo "❌ Post-check FAIL : $F_LIVE ne contient pas http://127.0.0.1:${LIVE_PORT}"
  echo "➡️ rollback backups : $F_LIVE.bak.$TS / $F_STAG.bak.$TS"
  exit 1
}
grep -qE "http://127\.0\.0\.1:${OTHER_PORT}\"" "$F_STAG" || {
  echo "❌ Post-check FAIL : $F_STAG ne contient pas http://127.0.0.1:${OTHER_PORT}"
  echo "➡️ rollback backups : $F_LIVE.bak.$TS / $F_STAG.bak.$TS"
  exit 1
}
|
||||||
|
|
||||||
|
# Success summary: backup locations, resulting url lines, and copy-paste
# smoke-test commands for the operator.
echo "✅ OK. Backups :"
echo "  - $F_LIVE.bak.$TS"
echo "  - $F_STAG.bak.$TS"
echo
echo "Après :"
show_urls "$F_LIVE"
show_urls "$F_STAG"
echo
echo "Smoke tests :"
echo "  curl -sS -I http://127.0.0.1:${LIVE_PORT}/ | head -n 12"
echo "  curl -sS -I http://127.0.0.1:${OTHER_PORT}/ | head -n 12"
echo "  curl -sS -I -H 'Host: archicratie.trans-hands.synology.me' http://127.0.0.1:18080/ | head -n 20"
echo "  curl -sS -I -H 'Host: staging.archicratie.trans-hands.synology.me' http://127.0.0.1:18080/ | head -n 20"
|
||||||
Reference in New Issue
Block a user