---
# NOTE(review): removed duplicated viewer residue ("613 lines", "24 KiB", "YAML")
# that preceded the workflow — it was pasted file metadata, not YAML content.
name: Deploy staging+live (annotations)

on:
  push:
    branches: [main]
  workflow_dispatch:
    inputs:
      force:
        description: "Force FULL deploy (rebuild+restart) even if gate would hotpatch-only (1=yes, 0=no)"
        required: false
        default: "0"

env:
  NODE_OPTIONS: --dns-result-order=ipv4first
  DOCKER_API_VERSION: "1.43"
  COMPOSE_VERSION: "2.29.7"
  ASTRO_TELEMETRY_DISABLED: "1"

defaults:
  run:
    shell: bash

# Serialize deploys; never cancel a running deploy mid-flight.
concurrency:
  group: deploy-staging-live-main
  cancel-in-progress: false
jobs:
  deploy:
    runs-on: nas-deploy
    # Tooling image is pinned by digest for reproducibility
    # (git/node/npm/docker/compose/python3+PyYAML are prebaked — see "Tools sanity").
    container:
      image: localhost:5000/archicratie/nas-deploy-node22@sha256:fefa8bb307005cebec07796661ab25528dc319c33a8f1e480e1d66f90cd5cff6
steps:
|
||
- name: Tools sanity
|
||
run: |
|
||
set -euo pipefail
|
||
git --version
|
||
node --version
|
||
npm --version
|
||
|
||
- name: Checkout (push or workflow_dispatch, no external actions)
|
||
env:
|
||
EVENT_JSON: /var/run/act/workflow/event.json
|
||
run: |
|
||
set -euo pipefail
|
||
test -f "$EVENT_JSON" || { echo "❌ Missing $EVENT_JSON"; exit 1; }
|
||
|
||
node --input-type=module <<'NODE'
|
||
import fs from "node:fs";
|
||
|
||
const ev = JSON.parse(fs.readFileSync(process.env.EVENT_JSON, "utf8"));
|
||
const repoObj = ev?.repository || {};
|
||
|
||
const cloneUrl =
|
||
repoObj?.clone_url ||
|
||
(repoObj?.html_url ? (repoObj.html_url.replace(/\/$/,"") + ".git") : "");
|
||
if (!cloneUrl) throw new Error("No repository clone_url/html_url in event.json");
|
||
|
||
const defaultBranch = repoObj?.default_branch || "main";
|
||
|
||
// Push-range (most reliable for change detection)
|
||
const before = String(ev?.before || "").trim();
|
||
const after =
|
||
(process.env.GITHUB_SHA && String(process.env.GITHUB_SHA).trim()) ||
|
||
String(ev?.after || ev?.sha || ev?.head_commit?.id || ev?.pull_request?.head?.sha || "").trim();
|
||
|
||
const shq = (s) => "'" + String(s).replace(/'/g, "'\\''") + "'";
|
||
|
||
fs.writeFileSync("/tmp/deploy.env", [
|
||
`REPO_URL=${shq(cloneUrl)}`,
|
||
`DEFAULT_BRANCH=${shq(defaultBranch)}`,
|
||
`BEFORE=${shq(before)}`,
|
||
`AFTER=${shq(after)}`
|
||
].join("\n") + "\n");
|
||
NODE
|
||
|
||
source /tmp/deploy.env
|
||
echo "Repo URL: $REPO_URL"
|
||
echo "Default branch: $DEFAULT_BRANCH"
|
||
echo "BEFORE: ${BEFORE:-<empty>}"
|
||
echo "AFTER: ${AFTER:-<empty>}"
|
||
|
||
rm -rf .git
|
||
git init -q
|
||
git remote add origin "$REPO_URL"
|
||
|
||
# Checkout AFTER (or default branch if missing)
|
||
if [[ -n "${AFTER:-}" ]]; then
|
||
git fetch --depth 50 origin "$AFTER"
|
||
git -c advice.detachedHead=false checkout -q FETCH_HEAD
|
||
else
|
||
git fetch --depth 50 origin "$DEFAULT_BRANCH"
|
||
git -c advice.detachedHead=false checkout -q "origin/$DEFAULT_BRANCH"
|
||
AFTER="$(git rev-parse HEAD)"
|
||
echo "AFTER='$AFTER'" >> /tmp/deploy.env
|
||
echo "Resolved AFTER: $AFTER"
|
||
fi
|
||
|
||
git log -1 --oneline
|
||
|
||
- name: Gate — decide SKIP vs HOTPATCH vs FULL rebuild
|
||
env:
|
||
INPUT_FORCE: ${{ inputs.force }}
|
||
EVENT_JSON: /var/run/act/workflow/event.json
|
||
run: |
|
||
set -euo pipefail
|
||
source /tmp/deploy.env
|
||
|
||
FORCE="${INPUT_FORCE:-0}"
|
||
|
||
# Lire before/after du push depuis event.json (merge-proof)
|
||
node --input-type=module <<'NODE'
|
||
import fs from "node:fs";
|
||
const ev = JSON.parse(fs.readFileSync(process.env.EVENT_JSON, "utf8"));
|
||
const before = ev?.before || "";
|
||
const after = ev?.after || ev?.sha || "";
|
||
const shq = (s) => "'" + String(s).replace(/'/g, "'\\''") + "'";
|
||
fs.writeFileSync("/tmp/gate.env", [
|
||
`EV_BEFORE=${shq(before)}`,
|
||
`EV_AFTER=${shq(after)}`
|
||
].join("\n") + "\n");
|
||
NODE
|
||
|
||
source /tmp/gate.env
|
||
|
||
BEFORE="${EV_BEFORE:-}"
|
||
AFTER="${EV_AFTER:-}"
|
||
if [[ -z "${AFTER:-}" ]]; then
|
||
AFTER="${SHA:-}"
|
||
fi
|
||
|
||
echo "Gate ctx: BEFORE=${BEFORE:-<empty>} AFTER=${AFTER:-<empty>} FORCE=${FORCE}"
|
||
|
||
# Produire une liste CHANGED fiable :
|
||
# - si BEFORE/AFTER valides -> git diff before..after
|
||
# - sinon fallback -> diff parent1..after ou show after
|
||
CHANGED=""
|
||
Z40="0000000000000000000000000000000000000000"
|
||
|
||
if [[ -n "${BEFORE:-}" && "${BEFORE}" != "${Z40}" ]] \
|
||
&& git cat-file -e "${BEFORE}^{commit}" 2>/dev/null \
|
||
&& git cat-file -e "${AFTER}^{commit}" 2>/dev/null; then
|
||
CHANGED="$(git diff --name-only "${BEFORE}" "${AFTER}" || true)"
|
||
else
|
||
P1="$(git rev-parse "${AFTER}^" 2>/dev/null || true)"
|
||
if [[ -n "${P1:-}" ]] && git cat-file -e "${P1}^{commit}" 2>/dev/null; then
|
||
CHANGED="$(git diff --name-only "${P1}" "${AFTER}" || true)"
|
||
else
|
||
CHANGED="$(git show --name-only --pretty="" "${AFTER}" | sed '/^$/d' || true)"
|
||
fi
|
||
fi
|
||
|
||
printf "%s\n" "${CHANGED}" > /tmp/changed.txt
|
||
|
||
echo "== changed files (first 200) =="
|
||
sed -n '1,200p' /tmp/changed.txt || true
|
||
|
||
# Flags
|
||
HAS_FULL=0
|
||
HAS_HOTPATCH=0
|
||
|
||
# HOTPATCH si annotations/media touchés
|
||
if grep -qE '^(src/annotations/|public/media/)' /tmp/changed.txt; then
|
||
HAS_HOTPATCH=1
|
||
fi
|
||
|
||
# FULL si build-impacting (robuste)
|
||
# 1) Tout src/ SAUF src/annotations/
|
||
if grep -qE '^src/' /tmp/changed.txt && grep -qEv '^src/annotations/' /tmp/changed.txt; then
|
||
HAS_FULL=1
|
||
fi
|
||
|
||
# 2) scripts/
|
||
if grep -qE '^scripts/' /tmp/changed.txt; then
|
||
HAS_FULL=1
|
||
fi
|
||
|
||
# 3) Tout public/ SAUF public/media/
|
||
if grep -qE '^public/' /tmp/changed.txt && grep -qEv '^public/media/' /tmp/changed.txt; then
|
||
HAS_FULL=1
|
||
fi
|
||
|
||
# 4) fichiers racine qui changent le build / l’image
|
||
if grep -qE '^(package\.json|package-lock\.json|astro\.config\.mjs|tsconfig\.json|\.npmrc|\.nvmrc|Dockerfile|docker-compose\.yml|nginx\.conf)$' /tmp/changed.txt; then
|
||
HAS_FULL=1
|
||
fi
|
||
|
||
echo "Gate flags: HAS_FULL=${HAS_FULL} HAS_HOTPATCH=${HAS_HOTPATCH}"
|
||
|
||
# Décision
|
||
if [[ "${FORCE}" == "1" ]]; then
|
||
GO=1
|
||
MODE="full"
|
||
echo "✅ force=1 -> MODE=full (rebuild+restart)"
|
||
elif [[ "${HAS_FULL}" == "1" ]]; then
|
||
GO=1
|
||
MODE="full"
|
||
echo "✅ build-impacting change -> MODE=full (rebuild+restart)"
|
||
elif [[ "${HAS_HOTPATCH}" == "1" ]]; then
|
||
GO=1
|
||
MODE="hotpatch"
|
||
echo "✅ annotations/media change -> MODE=hotpatch"
|
||
else
|
||
GO=0
|
||
MODE="skip"
|
||
echo "ℹ️ no relevant change -> skip deploy"
|
||
fi
|
||
|
||
echo "GO=${GO}" >> /tmp/deploy.env
|
||
echo "MODE='${MODE}'" >> /tmp/deploy.env
|
||
|
||
- name: Toolchain sanity + resolve COMPOSE_PROJECT_NAME
|
||
run: |
|
||
set -euo pipefail
|
||
source /tmp/deploy.env
|
||
[[ "${GO:-0}" == "1" ]] || { echo "ℹ️ skipped"; exit 0; }
|
||
|
||
# tools are prebaked in the image
|
||
git --version
|
||
docker version
|
||
docker compose version
|
||
python3 -c 'import yaml; print("PyYAML OK")'
|
||
|
||
# Reuse existing compose project name if containers already exist
|
||
PROJ="$(docker inspect archicratie-web-blue --format '{{ index .Config.Labels "com.docker.compose.project" }}' 2>/dev/null || true)"
|
||
if [[ -z "${PROJ:-}" ]]; then
|
||
PROJ="$(docker inspect archicratie-web-green --format '{{ index .Config.Labels "com.docker.compose.project" }}' 2>/dev/null || true)"
|
||
fi
|
||
if [[ -z "${PROJ:-}" ]]; then PROJ="archicratie-web"; fi
|
||
echo "COMPOSE_PROJECT_NAME='$PROJ'" >> /tmp/deploy.env
|
||
echo "✅ Using COMPOSE_PROJECT_NAME=$PROJ"
|
||
|
||
# Assert target containers exist (hotpatch needs them)
|
||
for c in archicratie-web-blue archicratie-web-green; do
|
||
docker inspect "$c" >/dev/null 2>&1 || { echo "❌ missing container $c"; exit 5; }
|
||
done
|
||
|
||
- name: Assert required vars (PUBLIC_GITEA_*) — only needed for MODE=full
|
||
env:
|
||
PUBLIC_GITEA_BASE: ${{ vars.PUBLIC_GITEA_BASE }}
|
||
PUBLIC_GITEA_OWNER: ${{ vars.PUBLIC_GITEA_OWNER }}
|
||
PUBLIC_GITEA_REPO: ${{ vars.PUBLIC_GITEA_REPO }}
|
||
run: |
|
||
set -euo pipefail
|
||
source /tmp/deploy.env
|
||
[[ "${GO:-0}" == "1" ]] || { echo "ℹ️ skipped"; exit 0; }
|
||
[[ "${MODE:-hotpatch}" == "full" ]] || { echo "ℹ️ hotpatch mode -> vars not required"; exit 0; }
|
||
|
||
test -n "${PUBLIC_GITEA_BASE:-}" || { echo "❌ missing repo var PUBLIC_GITEA_BASE"; exit 2; }
|
||
test -n "${PUBLIC_GITEA_OWNER:-}" || { echo "❌ missing repo var PUBLIC_GITEA_OWNER"; exit 2; }
|
||
test -n "${PUBLIC_GITEA_REPO:-}" || { echo "❌ missing repo var PUBLIC_GITEA_REPO"; exit 2; }
|
||
echo "✅ vars OK"
|
||
|
||
- name: Assert deploy files exist — only needed for MODE=full
|
||
run: |
|
||
set -euo pipefail
|
||
source /tmp/deploy.env
|
||
[[ "${GO:-0}" == "1" ]] || { echo "ℹ️ skipped"; exit 0; }
|
||
[[ "${MODE:-hotpatch}" == "full" ]] || { echo "ℹ️ hotpatch mode -> files not required"; exit 0; }
|
||
|
||
test -f docker-compose.yml
|
||
test -f Dockerfile
|
||
test -f nginx.conf
|
||
echo "✅ deploy files OK"
|
||
|
||
- name: FULL — Build + deploy staging (blue) then warmup+smoke
|
||
env:
|
||
PUBLIC_GITEA_BASE: ${{ vars.PUBLIC_GITEA_BASE }}
|
||
PUBLIC_GITEA_OWNER: ${{ vars.PUBLIC_GITEA_OWNER }}
|
||
PUBLIC_GITEA_REPO: ${{ vars.PUBLIC_GITEA_REPO }}
|
||
run: |
|
||
set -euo pipefail
|
||
source /tmp/deploy.env
|
||
[[ "${GO:-0}" == "1" ]] || { echo "ℹ️ skipped"; exit 0; }
|
||
[[ "${MODE:-hotpatch}" == "full" ]] || { echo "ℹ️ MODE=$MODE -> skip full rebuild"; exit 0; }
|
||
|
||
PROJ="${COMPOSE_PROJECT_NAME:-archicratie-web}"
|
||
|
||
wait_url() {
|
||
local url="$1"
|
||
local label="$2"
|
||
local tries="${3:-60}"
|
||
for i in $(seq 1 "$tries"); do
|
||
if curl -fsS --max-time 4 "$url" >/dev/null; then
|
||
echo "✅ $label OK ($url)"
|
||
return 0
|
||
fi
|
||
echo "… warmup $label ($i/$tries)"
|
||
sleep 1
|
||
done
|
||
echo "❌ timeout $label ($url)"
|
||
return 1
|
||
}
|
||
|
||
TS="$(date -u +%Y%m%d-%H%M%S)"
|
||
echo "TS='$TS'" >> /tmp/deploy.env
|
||
docker image tag archicratie-web:blue "archicratie-web:blue.BAK.${TS}" || true
|
||
docker image tag archicratie-web:green "archicratie-web:green.BAK.${TS}" || true
|
||
|
||
BUILD_TIME_RAW="$(TZ=Europe/Paris date '+%Y-%m-%dT%H:%M:%S%z')"
|
||
BUILD_TIME="${BUILD_TIME_RAW:0:${#BUILD_TIME_RAW}-2}:${BUILD_TIME_RAW:${#BUILD_TIME_RAW}-2}"
|
||
|
||
PUBLIC_OPS_ENV=staging \
|
||
PUBLIC_OPS_UPSTREAM=web_blue \
|
||
PUBLIC_BUILD_SHA="${AFTER}" \
|
||
PUBLIC_BUILD_TIME="${BUILD_TIME}" \
|
||
node scripts/write-ops-health.mjs
|
||
|
||
test -f public/__ops/health.json
|
||
echo "=== public/__ops/health.json (blue/staging) ==="
|
||
cat public/__ops/health.json
|
||
|
||
docker compose -p "$PROJ" -f docker-compose.yml build web_blue
|
||
docker rm -f archicratie-web-blue || true
|
||
docker compose -p "$PROJ" -f docker-compose.yml up -d --force-recreate --remove-orphans web_blue
|
||
|
||
# warmup endpoints
|
||
wait_url "http://127.0.0.1:8081/para-index.json" "blue para-index"
|
||
wait_url "http://127.0.0.1:8081/annotations-index.json" "blue annotations-index"
|
||
wait_url "http://127.0.0.1:8081/pagefind/pagefind.js" "blue pagefind.js"
|
||
|
||
wait_url "http://127.0.0.1:8081/__ops/health.json" "blue ops health"
|
||
|
||
curl -fsS --max-time 6 "http://127.0.0.1:8081/__ops/health.json" \
|
||
| python3 -c 'import sys, json; j=json.load(sys.stdin); print("env=", j.get("env")); print("upstream=", j.get("upstream")); print("buildSha=", j.get("buildSha")); print("builtAt=", j.get("builtAt"))'
|
||
|
||
CANON="$(curl -fsS --max-time 6 "http://127.0.0.1:8081/archicrat-ia/chapitre-1/" | grep -oE 'rel="canonical" href="[^"]+"' | head -n1 || true)"
|
||
echo "canonical(blue)=$CANON"
|
||
echo "$CANON" | grep -q 'https://staging\.archicratie\.trans-hands\.synology\.me/' || {
|
||
echo "❌ staging canonical mismatch"
|
||
docker logs --tail 120 archicratie-web-blue || true
|
||
exit 3
|
||
}
|
||
|
||
echo "✅ staging OK"
|
||
|
||
- name: FULL — Build + deploy live (green) then warmup+smoke + rollback if needed
|
||
env:
|
||
PUBLIC_GITEA_BASE: ${{ vars.PUBLIC_GITEA_BASE }}
|
||
PUBLIC_GITEA_OWNER: ${{ vars.PUBLIC_GITEA_OWNER }}
|
||
PUBLIC_GITEA_REPO: ${{ vars.PUBLIC_GITEA_REPO }}
|
||
run: |
|
||
set -euo pipefail
|
||
source /tmp/deploy.env
|
||
[[ "${GO:-0}" == "1" ]] || { echo "ℹ️ skipped"; exit 0; }
|
||
[[ "${MODE:-hotpatch}" == "full" ]] || { echo "ℹ️ MODE=$MODE -> skip full rebuild"; exit 0; }
|
||
|
||
PROJ="${COMPOSE_PROJECT_NAME:-archicratie-web}"
|
||
TS="${TS:-$(date -u +%Y%m%d-%H%M%S)}"
|
||
|
||
wait_url() {
|
||
local url="$1"
|
||
local label="$2"
|
||
local tries="${3:-60}"
|
||
for i in $(seq 1 "$tries"); do
|
||
if curl -fsS --max-time 4 "$url" >/dev/null; then
|
||
echo "✅ $label OK ($url)"
|
||
return 0
|
||
fi
|
||
echo "… warmup $label ($i/$tries)"
|
||
sleep 1
|
||
done
|
||
echo "❌ timeout $label ($url)"
|
||
return 1
|
||
}
|
||
|
||
rollback() {
|
||
echo "⚠️ rollback green -> previous image tag (best effort)"
|
||
docker image tag "archicratie-web:green.BAK.${TS}" archicratie-web:green || true
|
||
docker rm -f archicratie-web-green || true
|
||
docker compose -p "$PROJ" -f docker-compose.yml up -d --force-recreate --remove-orphans web_green || true
|
||
}
|
||
|
||
BUILD_TIME_RAW="$(TZ=Europe/Paris date '+%Y-%m-%dT%H:%M:%S%z')"
|
||
BUILD_TIME="${BUILD_TIME_RAW:0:${#BUILD_TIME_RAW}-2}:${BUILD_TIME_RAW:${#BUILD_TIME_RAW}-2}"
|
||
|
||
PUBLIC_OPS_ENV=prod \
|
||
PUBLIC_OPS_UPSTREAM=web_green \
|
||
PUBLIC_BUILD_SHA="${AFTER}" \
|
||
PUBLIC_BUILD_TIME="${BUILD_TIME}" \
|
||
node scripts/write-ops-health.mjs
|
||
|
||
test -f public/__ops/health.json
|
||
echo "=== public/__ops/health.json (green/prod) ==="
|
||
cat public/__ops/health.json
|
||
|
||
# build/restart green
|
||
if ! docker compose -p "$PROJ" -f docker-compose.yml build web_green; then
|
||
echo "❌ build green failed"; rollback; exit 4
|
||
fi
|
||
|
||
docker rm -f archicratie-web-green || true
|
||
docker compose -p "$PROJ" -f docker-compose.yml up -d --force-recreate --remove-orphans web_green
|
||
|
||
# warmup endpoints
|
||
if ! wait_url "http://127.0.0.1:8082/para-index.json" "green para-index"; then rollback; exit 4; fi
|
||
if ! wait_url "http://127.0.0.1:8082/annotations-index.json" "green annotations-index"; then rollback; exit 4; fi
|
||
if ! wait_url "http://127.0.0.1:8082/pagefind/pagefind.js" "green pagefind.js"; then rollback; exit 4; fi
|
||
|
||
if ! wait_url "http://127.0.0.1:8082/__ops/health.json" "green ops health"; then rollback; exit 4; fi
|
||
|
||
curl -fsS --max-time 6 "http://127.0.0.1:8082/__ops/health.json" \
|
||
| python3 -c 'import sys, json; j=json.load(sys.stdin); print("env=", j.get("env")); print("upstream=", j.get("upstream")); print("buildSha=", j.get("buildSha")); print("builtAt=", j.get("builtAt"))'
|
||
|
||
CANON="$(curl -fsS --max-time 6 "http://127.0.0.1:8082/archicrat-ia/chapitre-1/" | grep -oE 'rel="canonical" href="[^"]+"' | head -n1 || true)"
|
||
echo "canonical(green)=$CANON"
|
||
echo "$CANON" | grep -q 'https://archicratie\.trans-hands\.synology\.me/' || {
|
||
echo "❌ live canonical mismatch"
|
||
docker logs --tail 120 archicratie-web-green || true
|
||
rollback
|
||
exit 4
|
||
}
|
||
|
||
echo "✅ live OK"
|
||
|
||
- name: HOTPATCH — deep merge shards -> annotations-index + copy changed media into blue+green
|
||
run: |
|
||
set -euo pipefail
|
||
source /tmp/deploy.env
|
||
[[ "${GO:-0}" == "1" ]] || { echo "ℹ️ skipped"; exit 0; }
|
||
|
||
python3 - <<'PY'
|
||
import os, re, json, glob
|
||
import yaml
|
||
import datetime as dt
|
||
|
||
ROOT = os.getcwd()
|
||
ANNO_ROOT = os.path.join(ROOT, "src", "annotations")
|
||
|
||
def is_obj(x): return isinstance(x, dict)
|
||
def is_arr(x): return isinstance(x, list)
|
||
|
||
def iso_dt(x):
|
||
if isinstance(x, dt.datetime):
|
||
if x.tzinfo is None:
|
||
return x.isoformat()
|
||
return x.astimezone(dt.timezone.utc).isoformat().replace("+00:00","Z")
|
||
if isinstance(x, dt.date):
|
||
return x.isoformat()
|
||
return None
|
||
|
||
def normalize(x):
|
||
s = iso_dt(x)
|
||
if s is not None: return s
|
||
if isinstance(x, dict):
|
||
return {str(k): normalize(v) for k, v in x.items()}
|
||
if isinstance(x, list):
|
||
return [normalize(v) for v in x]
|
||
return x
|
||
|
||
def key_media(it): return str((it or {}).get("src",""))
|
||
def key_ref(it):
|
||
it = it or {}
|
||
return "||".join([str(it.get("url","")), str(it.get("label","")), str(it.get("kind","")), str(it.get("citation",""))])
|
||
def key_comment(it): return str((it or {}).get("text","")).strip()
|
||
|
||
def dedup_extend(dst_list, src_list, key_fn):
|
||
seen = set(); out = []
|
||
for x in (dst_list or []):
|
||
x = normalize(x); k = key_fn(x)
|
||
if k and k not in seen: seen.add(k); out.append(x)
|
||
for x in (src_list or []):
|
||
x = normalize(x); k = key_fn(x)
|
||
if k and k not in seen: seen.add(k); out.append(x)
|
||
return out
|
||
|
||
def deep_merge(dst, src):
|
||
src = normalize(src)
|
||
for k, v in (src or {}).items():
|
||
if k in ("media","refs","comments_editorial") and is_arr(v):
|
||
if k == "media": dst[k] = dedup_extend(dst.get(k, []), v, key_media)
|
||
elif k == "refs": dst[k] = dedup_extend(dst.get(k, []), v, key_ref)
|
||
else: dst[k] = dedup_extend(dst.get(k, []), v, key_comment)
|
||
continue
|
||
|
||
if is_obj(v):
|
||
if not is_obj(dst.get(k)): dst[k] = {}
|
||
deep_merge(dst[k], v)
|
||
continue
|
||
|
||
if is_arr(v):
|
||
cur = dst.get(k, [])
|
||
if not is_arr(cur): cur = []
|
||
seen = set(); out = []
|
||
for x in cur:
|
||
x = normalize(x)
|
||
s = json.dumps(x, sort_keys=True, ensure_ascii=False)
|
||
if s not in seen: seen.add(s); out.append(x)
|
||
for x in v:
|
||
x = normalize(x)
|
||
s = json.dumps(x, sort_keys=True, ensure_ascii=False)
|
||
if s not in seen: seen.add(s); out.append(x)
|
||
dst[k] = out
|
||
continue
|
||
|
||
v = normalize(v)
|
||
if k not in dst or dst.get(k) in (None, ""):
|
||
dst[k] = v
|
||
|
||
def para_num(pid):
|
||
m = re.match(r"^p-(\d+)-", str(pid))
|
||
return int(m.group(1)) if m else 10**9
|
||
|
||
def sort_lists(entry):
|
||
for k in ("media","refs","comments_editorial"):
|
||
arr = entry.get(k)
|
||
if not is_arr(arr): continue
|
||
def ts(x):
|
||
x = normalize(x)
|
||
try:
|
||
s = str((x or {}).get("ts",""))
|
||
return dt.datetime.fromisoformat(s.replace("Z","+00:00")).timestamp() if s else 0
|
||
except Exception:
|
||
return 0
|
||
arr = [normalize(x) for x in arr]
|
||
arr.sort(key=lambda x: (ts(x), json.dumps(x, sort_keys=True, ensure_ascii=False)))
|
||
entry[k] = arr
|
||
|
||
if not os.path.isdir(ANNO_ROOT):
|
||
raise SystemExit(f"Missing annotations root: {ANNO_ROOT}")
|
||
|
||
pages = {}
|
||
errors = []
|
||
|
||
files = sorted(glob.glob(os.path.join(ANNO_ROOT, "**", "*.yml"), recursive=True))
|
||
for fp in files:
|
||
try:
|
||
with open(fp, "r", encoding="utf-8") as f:
|
||
doc = yaml.safe_load(f) or {}
|
||
doc = normalize(doc)
|
||
if not isinstance(doc, dict) or doc.get("schema") != 1:
|
||
continue
|
||
|
||
page = str(doc.get("page","")).strip().strip("/")
|
||
paras = doc.get("paras") or {}
|
||
if not page or not isinstance(paras, dict):
|
||
continue
|
||
|
||
pg = pages.setdefault(page, {"paras": {}})
|
||
for pid, entry in paras.items():
|
||
pid = str(pid)
|
||
if pid not in pg["paras"] or not isinstance(pg["paras"].get(pid), dict):
|
||
pg["paras"][pid] = {}
|
||
if isinstance(entry, dict):
|
||
deep_merge(pg["paras"][pid], entry)
|
||
sort_lists(pg["paras"][pid])
|
||
|
||
except Exception as e:
|
||
errors.append({"file": os.path.relpath(fp, ROOT), "error": str(e)})
|
||
|
||
for page, obj in pages.items():
|
||
keys = list((obj.get("paras") or {}).keys())
|
||
keys.sort(key=lambda k: (para_num(k), k))
|
||
obj["paras"] = {k: obj["paras"][k] for k in keys}
|
||
|
||
out = {
|
||
"schema": 1,
|
||
"generatedAt": dt.datetime.utcnow().replace(tzinfo=dt.timezone.utc).isoformat().replace("+00:00","Z"),
|
||
"pages": pages,
|
||
"stats": {
|
||
"pages": len(pages),
|
||
"paras": sum(len(v.get("paras") or {}) for v in pages.values()),
|
||
"errors": len(errors),
|
||
},
|
||
"errors": errors,
|
||
}
|
||
|
||
with open("/tmp/annotations-index.json", "w", encoding="utf-8") as f:
|
||
json.dump(out, f, ensure_ascii=False)
|
||
|
||
print("OK: wrote /tmp/annotations-index.json pages=", out["stats"]["pages"], "paras=", out["stats"]["paras"], "errors=", out["stats"]["errors"])
|
||
PY
|
||
|
||
# patch JSON into running containers
|
||
for c in archicratie-web-blue archicratie-web-green; do
|
||
echo "== patch annotations-index.json into $c =="
|
||
docker cp /tmp/annotations-index.json "${c}:/usr/share/nginx/html/annotations-index.json"
|
||
done
|
||
|
||
# copy changed media files into containers (so new media appears without rebuild)
|
||
if [[ -s /tmp/changed.txt ]]; then
|
||
while IFS= read -r f; do
|
||
[[ -n "$f" ]] || continue
|
||
if [[ "$f" == public/media/* ]]; then
|
||
dest="/usr/share/nginx/html/${f#public/}" # => /usr/share/nginx/html/media/...
|
||
for c in archicratie-web-blue archicratie-web-green; do
|
||
echo "== copy media into $c: $f -> $dest =="
|
||
docker exec "$c" sh -lc "mkdir -p \"$(dirname "$dest")\""
|
||
docker cp "$f" "$c:$dest"
|
||
done
|
||
fi
|
||
done < /tmp/changed.txt
|
||
fi
|
||
|
||
# smoke after patch
|
||
for p in 8081 8082; do
|
||
echo "== smoke annotations-index on $p =="
|
||
curl -fsS --max-time 6 "http://127.0.0.1:${p}/annotations-index.json" \
|
||
| python3 -c 'import sys,json; j=json.load(sys.stdin); print("generatedAt:", j.get("generatedAt")); print("pages:", len(j.get("pages") or {})); print("paras:", j.get("stats",{}).get("paras"))'
|
||
done
|
||
|
||
echo "✅ hotpatch done"
|
||
|
||
- name: Debug on failure (containers status/logs)
|
||
if: ${{ failure() }}
|
||
run: |
|
||
set -euo pipefail
|
||
echo "== docker ps =="
|
||
docker ps --format 'table {{.Names}}\t{{.Status}}\t{{.Image}}' | sed -n '1,80p' || true
|
||
for c in archicratie-web-blue archicratie-web-green; do
|
||
echo "== logs $c (tail 200) =="
|
||
docker logs --tail 200 "$c" || true
|
||
done |