diff --git a/.gitea/workflows/deploy-staging-live.yml b/.gitea/workflows/deploy-staging-live.yml
index a75995d..f86f414 100644
--- a/.gitea/workflows/deploy-staging-live.yml
+++ b/.gitea/workflows/deploy-staging-live.yml
@@ -6,7 +6,7 @@ on:
   workflow_dispatch:
     inputs:
       force:
-        description: "Force deploy even if gate would skip (1=yes, 0=no)"
+        description: "Force FULL deploy (rebuild+restart) even if gate would hotpatch-only (1=yes, 0=no)"
         required: false
         default: "0"
 
@@ -14,6 +14,7 @@ env:
   NODE_OPTIONS: --dns-result-order=ipv4first
   DOCKER_API_VERSION: "1.43"
   COMPOSE_VERSION: "2.29.7"
+  ASTRO_TELEMETRY_DISABLED: "1"
 
 defaults:
   run:
@@ -92,7 +93,7 @@ jobs:
           git log -1 --oneline
 
 
-      - name: Gate — auto deploy only on annotations/media changes
+      - name: Gate — decide HOTPATCH vs FULL rebuild
         env:
           INPUT_FORCE: ${{ inputs.force }}
         run: |
@@ -100,21 +101,29 @@ jobs:
           set -euo pipefail
           source /tmp/deploy.env
           FORCE="${INPUT_FORCE:-0}"
+
+          # list touched files (also used later to copy media into containers)
+          # NOTE(review): `git show --name-only` lists no files for merge commits — confirm squash/FF-only history
+          CHANGED="$(git show --name-only --pretty="" "$SHA" | sed '/^$/d' || true)"
+          printf "%s\n" "$CHANGED" > /tmp/changed.txt
+
+          echo "== changed files =="
+          echo "$CHANGED" | sed -n '1,260p'
+
           if [[ "$FORCE" == "1" ]]; then
-            echo "✅ force=1 -> bypass gate -> deploy allowed"
-            echo "GO=1" >> /tmp/deploy.env
+            echo "GO=1" >> /tmp/deploy.env
+            echo "MODE='full'" >> /tmp/deploy.env
+            echo "✅ force=1 -> MODE=full (rebuild+restart)"
             exit 0
          fi
 
-          CHANGED="$(git show --name-only --pretty="" "$SHA" | sed '/^$/d' || true)"
-          echo "== changed files =="
-          echo "$CHANGED" | sed -n '1,240p'
-
+          # Auto mode: only annotations/media changes => hotpatch only
           if echo "$CHANGED" | grep -qE '^(src/annotations/|public/media/)'; then
             echo "GO=1" >> /tmp/deploy.env
-            echo "✅ deploy allowed (annotations/media change detected)"
+            echo "MODE='hotpatch'" >> /tmp/deploy.env
+            echo "✅ annotations/media change -> MODE=hotpatch"
           else
             echo "GO=0" >> /tmp/deploy.env
+            echo "MODE='skip'" >> /tmp/deploy.env
             echo "ℹ️ no annotations/media change -> skip deploy"
           fi
 
@@ -138,7 +147,7 @@ jobs:
           docker compose version
           python3 --version
 
-          # 🔥 KEY FIX: reuse existing compose project name if containers already exist
+          # Reuse existing compose project name if containers already exist
           PROJ="$(docker inspect archicratie-web-blue --format '{{ index .Config.Labels "com.docker.compose.project" }}' 2>/dev/null || true)"
           if [[ -z "${PROJ:-}" ]]; then
             PROJ="$(docker inspect archicratie-web-green --format '{{ index .Config.Labels "com.docker.compose.project" }}' 2>/dev/null || true)"
@@ -147,7 +156,12 @@ jobs:
           echo "COMPOSE_PROJECT_NAME='$PROJ'" >> /tmp/deploy.env
           echo "✅ Using COMPOSE_PROJECT_NAME=$PROJ"
 
-      - name: Assert required vars (PUBLIC_GITEA_*)
+          # Assert target containers exist (hotpatch needs them; NOTE(review): this also fails a first-ever FULL deploy on a fresh host — confirm intended)
+          for c in archicratie-web-blue archicratie-web-green; do
+            docker inspect "$c" >/dev/null 2>&1 || { echo "❌ missing container $c"; exit 5; }
+          done
+
+      - name: Assert required vars (PUBLIC_GITEA_*) — only needed for MODE=full
         env:
           PUBLIC_GITEA_BASE: ${{ vars.PUBLIC_GITEA_BASE }}
           PUBLIC_GITEA_OWNER: ${{ vars.PUBLIC_GITEA_OWNER }}
@@ -156,24 +170,26 @@ jobs:
           set -euo pipefail
           source /tmp/deploy.env
           [[ "${GO:-0}" == "1" ]] || { echo "ℹ️ skipped"; exit 0; }
+          [[ "${MODE:-hotpatch}" == "full" ]] || { echo "ℹ️ hotpatch mode -> vars not required"; exit 0; }
 
           test -n "${PUBLIC_GITEA_BASE:-}" || { echo "❌ missing repo var PUBLIC_GITEA_BASE"; exit 2; }
           test -n "${PUBLIC_GITEA_OWNER:-}" || { echo "❌ missing repo var PUBLIC_GITEA_OWNER"; exit 2; }
           test -n "${PUBLIC_GITEA_REPO:-}" || { echo "❌ missing repo var PUBLIC_GITEA_REPO"; exit 2; }
           echo "✅ vars OK"
 
-      - name: Assert deploy files exist
+      - name: Assert deploy files exist — only needed for MODE=full
         run: |
           set -euo pipefail
           source /tmp/deploy.env
           [[ "${GO:-0}" == "1" ]] || { echo "ℹ️ skipped"; exit 0; }
+          [[ "${MODE:-hotpatch}" == "full" ]] || { echo "ℹ️ hotpatch mode -> files not required"; exit 0; }
 
           test -f docker-compose.yml
           test -f Dockerfile
           test -f nginx.conf
           echo "✅ deploy files OK"
 
-      - name: Build + deploy staging (blue) then smoke
+      - name: FULL — Build + deploy staging (blue) then warmup+smoke
         env:
           PUBLIC_GITEA_BASE: ${{ vars.PUBLIC_GITEA_BASE }}
           PUBLIC_GITEA_OWNER: ${{ vars.PUBLIC_GITEA_OWNER }}
@@ -182,31 +198,51 @@ jobs:
           set -euo pipefail
           source /tmp/deploy.env
           [[ "${GO:-0}" == "1" ]] || { echo "ℹ️ skipped"; exit 0; }
+          [[ "${MODE:-hotpatch}" == "full" ]] || { echo "ℹ️ MODE=$MODE -> skip full rebuild"; exit 0; }
 
+          PROJ="${COMPOSE_PROJECT_NAME:-archicratie-web}"
+          wait_url() {
+            local url="$1"
+            local label="$2"
+            local tries="${3:-60}"
+            for i in $(seq 1 "$tries"); do
+              if curl -fsS --max-time 4 "$url" >/dev/null; then
+                echo "✅ $label OK ($url)"
+                return 0
+              fi
+              echo "… warmup $label ($i/$tries)"
+              sleep 1
+            done
+            echo "❌ timeout $label ($url)"
+            return 1
+          }
+
           TS="$(date -u +%Y%m%d-%H%M%S)"
           echo "TS='$TS'" >> /tmp/deploy.env
 
           docker image tag archicratie-web:blue "archicratie-web:blue.BAK.${TS}" || true
           docker image tag archicratie-web:green "archicratie-web:green.BAK.${TS}" || true
 
           docker compose -p "$PROJ" -f docker-compose.yml build web_blue
-          docker rm -f archicratie-web-blue || true
           docker compose -p "$PROJ" -f docker-compose.yml up -d --force-recreate --remove-orphans web_blue
 
-          curl -fsS "http://127.0.0.1:8081/para-index.json" >/dev/null
-          curl -fsS "http://127.0.0.1:8081/annotations-index.json" >/dev/null
-          curl -fsS "http://127.0.0.1:8081/pagefind/pagefind.js" >/dev/null
+          # warmup endpoints
+          wait_url "http://127.0.0.1:8081/para-index.json" "blue para-index"
+          wait_url "http://127.0.0.1:8081/annotations-index.json" "blue annotations-index"
+          wait_url "http://127.0.0.1:8081/pagefind/pagefind.js" "blue pagefind.js"
 
-          CANON="$(curl -fsS "http://127.0.0.1:8081/archicrat-ia/chapitre-1/" | grep -oE 'rel="canonical" href="[^"]+"' | head -n1 || true)"
+          CANON="$(curl -fsS --max-time 6 "http://127.0.0.1:8081/archicrat-ia/chapitre-1/" | grep -oE 'rel="canonical" href="[^"]+"' | head -n1 || true)"
           echo "canonical(blue)=$CANON"
 
           echo "$CANON" | grep -q 'https://staging\.archicratie\.trans-hands\.synology\.me/' || {
-            echo "❌ staging canonical mismatch"; exit 3;
+            echo "❌ staging canonical mismatch"
+            docker logs --tail 120 archicratie-web-blue || true
+            exit 3
           }
 
           echo "✅ staging OK"
 
-      - name: Build + deploy live (green) then smoke + rollback if needed
+      - name: FULL — Build + deploy live (green) then warmup+smoke + rollback if needed
         env:
           PUBLIC_GITEA_BASE: ${{ vars.PUBLIC_GITEA_BASE }}
           PUBLIC_GITEA_OWNER: ${{ vars.PUBLIC_GITEA_OWNER }}
@@ -215,9 +251,27 @@ jobs:
           set -euo pipefail
           source /tmp/deploy.env
           [[ "${GO:-0}" == "1" ]] || { echo "ℹ️ skipped"; exit 0; }
+          [[ "${MODE:-hotpatch}" == "full" ]] || { echo "ℹ️ MODE=$MODE -> skip full rebuild"; exit 0; }
 
+          PROJ="${COMPOSE_PROJECT_NAME:-archicratie-web}"
           TS="${TS:-$(date -u +%Y%m%d-%H%M%S)}"
 
+          wait_url() {
+            local url="$1"
+            local label="$2"
+            local tries="${3:-60}"
+            for i in $(seq 1 "$tries"); do
+              if curl -fsS --max-time 4 "$url" >/dev/null; then
+                echo "✅ $label OK ($url)"
+                return 0
+              fi
+              echo "… warmup $label ($i/$tries)"
+              sleep 1
+            done
+            echo "❌ timeout $label ($url)"
+            return 1
+          }
+
           rollback() {
             echo "⚠️ rollback green -> previous image tag (best effort)"
             docker image tag "archicratie-web:green.BAK.${TS}" archicratie-web:green || true
@@ -225,33 +279,38 @@ jobs:
             docker compose -p "$PROJ" -f docker-compose.yml up -d --force-recreate --remove-orphans web_green || true
           }
 
-          set +e
-          docker compose -p "$PROJ" -f docker-compose.yml build web_green
+          # build/restart green
+          if ! docker compose -p "$PROJ" -f docker-compose.yml build web_green; then
+            echo "❌ build green failed"; rollback; exit 4
+          fi
 
           docker rm -f archicratie-web-green || true
           docker compose -p "$PROJ" -f docker-compose.yml up -d --force-recreate --remove-orphans web_green
 
-          curl -fsS "http://127.0.0.1:8082/para-index.json" >/dev/null
-          curl -fsS "http://127.0.0.1:8082/annotations-index.json" >/dev/null
-          curl -fsS "http://127.0.0.1:8082/pagefind/pagefind.js" >/dev/null
+          # warmup endpoints
+          if ! wait_url "http://127.0.0.1:8082/para-index.json" "green para-index"; then rollback; exit 4; fi
+          if ! wait_url "http://127.0.0.1:8082/annotations-index.json" "green annotations-index"; then rollback; exit 4; fi
+          if ! wait_url "http://127.0.0.1:8082/pagefind/pagefind.js" "green pagefind.js"; then rollback; exit 4; fi
 
-          CANON="$(curl -fsS "http://127.0.0.1:8082/archicrat-ia/chapitre-1/" | grep -oE 'rel="canonical" href="[^"]+"' | head -n1 || true)"
+          CANON="$(curl -fsS --max-time 6 "http://127.0.0.1:8082/archicrat-ia/chapitre-1/" | grep -oE 'rel="canonical" href="[^"]+"' | head -n1 || true)"
           echo "canonical(green)=$CANON"
 
           echo "$CANON" | grep -q 'https://archicratie\.trans-hands\.synology\.me/' || {
-            echo "❌ live canonical mismatch"; rollback; exit 4;
+            echo "❌ live canonical mismatch"
+            docker logs --tail 120 archicratie-web-green || true
+            rollback
+            exit 4
           }
 
           echo "✅ live OK"
-          set -e
 
-      - name: Hotpatch annotations-index.json (deep merge shards) into blue+green
+      - name: HOTPATCH — deep merge shards -> annotations-index + copy changed media into blue+green
         run: |
           set -euo pipefail
           source /tmp/deploy.env
           [[ "${GO:-0}" == "1" ]] || { echo "ℹ️ skipped"; exit 0; }
 
           python3 - <<'PY'
-          import os, re, json, glob, datetime
+          import os, re, json, glob
          import yaml
          import datetime as dt
@@ -261,7 +320,6 @@ jobs:
           def is_obj(x): return isinstance(x, dict)
           def is_arr(x): return isinstance(x, list)
 
-          # --- KEY FIX: YAML timestamps -> datetime; JSON can't dump them
           def iso_dt(x):
               if isinstance(x, dt.datetime):
                   if x.tzinfo is None:
@@ -273,81 +331,58 @@ jobs:
 
           def normalize(x):
               s = iso_dt(x)
-              if s is not None:
-                  return s
+              if s is not None: return s
               if isinstance(x, dict):
                   return {str(k): normalize(v) for k, v in x.items()}
               if isinstance(x, list):
                   return [normalize(v) for v in x]
               return x
 
-          def key_media(it):
-              return str((it or {}).get("src",""))
-
+          def key_media(it): return str((it or {}).get("src",""))
           def key_ref(it):
               it = it or {}
-              return "||".join([
-                  str(it.get("url","")),
-                  str(it.get("label","")),
-                  str(it.get("kind","")),
-                  str(it.get("citation","")),
-              ])
-
-          def key_comment(it):
-              return str((it or {}).get("text","")).strip()
+              return "||".join([str(it.get("url","")), str(it.get("label","")), str(it.get("kind","")), str(it.get("citation",""))])
+          def key_comment(it): return str((it or {}).get("text","")).strip()
 
           def dedup_extend(dst_list, src_list, key_fn):
-              seen = set()
-              out = []
+              seen = set(); out = []
               for x in (dst_list or []):
-                  x = normalize(x)
-                  k = key_fn(x)
-                  if k and k not in seen:
-                      seen.add(k); out.append(x)
+                  x = normalize(x); k = key_fn(x)
+                  if k and k not in seen: seen.add(k); out.append(x)
               for x in (src_list or []):
-                  x = normalize(x)
-                  k = key_fn(x)
-                  if k and k not in seen:
-                      seen.add(k); out.append(x)
+                  x = normalize(x); k = key_fn(x)
+                  if k and k not in seen: seen.add(k); out.append(x)
               return out
 
           def deep_merge(dst, src):
               src = normalize(src)
               for k, v in (src or {}).items():
-                  if k in ("media", "refs", "comments_editorial") and is_arr(v):
-                      if k == "media":
-                          dst[k] = dedup_extend(dst.get(k, []), v, key_media)
-                      elif k == "refs":
-                          dst[k] = dedup_extend(dst.get(k, []), v, key_ref)
-                      else:
-                          dst[k] = dedup_extend(dst.get(k, []), v, key_comment)
+                  if k in ("media","refs","comments_editorial") and is_arr(v):
+                      if k == "media": dst[k] = dedup_extend(dst.get(k, []), v, key_media)
+                      elif k == "refs": dst[k] = dedup_extend(dst.get(k, []), v, key_ref)
+                      else: dst[k] = dedup_extend(dst.get(k, []), v, key_comment)
                       continue
 
                   if is_obj(v):
-                      if not is_obj(dst.get(k)):
-                          dst[k] = {} if not is_obj(dst.get(k)) else dst.get(k)
+                      if not is_obj(dst.get(k)): dst[k] = {}
                       deep_merge(dst[k], v)
                       continue
 
                   if is_arr(v):
                       cur = dst.get(k, [])
                       if not is_arr(cur): cur = []
-                      seen = set()
-                      out = []
+                      seen = set(); out = []
                       for x in cur:
                           x = normalize(x)
                           s = json.dumps(x, sort_keys=True, ensure_ascii=False)
-                          if s not in seen:
-                              seen.add(s); out.append(x)
+                          if s not in seen: seen.add(s); out.append(x)
                       for x in v:
                           x = normalize(x)
                           s = json.dumps(x, sort_keys=True, ensure_ascii=False)
-                          if s not in seen:
-                              seen.add(s); out.append(x)
+                          if s not in seen: seen.add(s); out.append(x)
                       dst[k] = out
                       continue
 
-                  # scalaires: set seulement si absent / vide
                   v = normalize(v)
                   if k not in dst or dst.get(k) in (None, ""):
                       dst[k] = v
@@ -360,7 +395,6 @@ jobs:
               for k in ("media","refs","comments_editorial"):
                   arr = entry.get(k)
                   if not is_arr(arr): continue
-
                   def ts(x):
                       x = normalize(x)
                       try:
@@ -368,24 +402,22 @@ jobs:
                           return dt.datetime.fromisoformat(s.replace("Z","+00:00")).timestamp() if s else 0
                       except Exception:
                           return 0
-
                   arr = [normalize(x) for x in arr]
                   arr.sort(key=lambda x: (ts(x), json.dumps(x, sort_keys=True, ensure_ascii=False)))
                   entry[k] = arr
 
-          pages = {}
-          errors = []
-
           if not os.path.isdir(ANNO_ROOT):
               raise SystemExit(f"Missing annotations root: {ANNO_ROOT}")
 
+          pages = {}
+          errors = []
+
           files = sorted(glob.glob(os.path.join(ANNO_ROOT, "**", "*.yml"), recursive=True))
 
           for fp in files:
               try:
                   with open(fp, "r", encoding="utf-8") as f:
                       doc = yaml.safe_load(f) or {}
                   doc = normalize(doc)
-
                   if not isinstance(doc, dict) or doc.get("schema") != 1:
                       continue
@@ -423,22 +455,49 @@ jobs:
               "errors": errors,
           }
 
-          out = normalize(out)
-
           with open("/tmp/annotations-index.json", "w", encoding="utf-8") as f:
               json.dump(out, f, ensure_ascii=False)
 
           print("OK: wrote /tmp/annotations-index.json pages=", out["stats"]["pages"], "paras=", out["stats"]["paras"], "errors=", out["stats"]["errors"])
           PY
 
+          # patch JSON into running containers
           for c in archicratie-web-blue archicratie-web-green; do
-            echo "== patch $c =="
+            echo "== patch annotations-index.json into $c =="
             docker cp /tmp/annotations-index.json "${c}:/usr/share/nginx/html/annotations-index.json"
           done
 
+          # copy changed media files into containers (so new media appears without rebuild);
+          # the -f guard skips files deleted in the commit (docker cp would abort under set -e)
+          if [[ -s /tmp/changed.txt ]]; then
+            while IFS= read -r f; do
+              [[ -n "$f" ]] || continue
+              if [[ "$f" == public/media/* && -f "$f" ]]; then
+                dest="/usr/share/nginx/html/${f#public/}"  # => /usr/share/nginx/html/media/...
+                for c in archicratie-web-blue archicratie-web-green; do
+                  echo "== copy media into $c: $f -> $dest =="
+                  docker exec "$c" sh -lc "mkdir -p \"$(dirname "$dest")\""
+                  docker cp "$f" "$c:$dest"
+                done
+              fi
+            done < /tmp/changed.txt
+          fi
+
+          # smoke after patch
           for p in 8081 8082; do
             echo "== smoke annotations-index on $p =="
-            curl -fsS "http://127.0.0.1:${p}/annotations-index.json" | python3 -c 'import sys,json; j=json.load(sys.stdin); print("generatedAt:", j.get("generatedAt")); print("pages:", len(j.get("pages") or {}))'
+            curl -fsS --max-time 6 "http://127.0.0.1:${p}/annotations-index.json" \
+              | python3 -c 'import sys,json; j=json.load(sys.stdin); print("generatedAt:", j.get("generatedAt")); print("pages:", len(j.get("pages") or {})); print("paras:", j.get("stats",{}).get("paras"))'
           done
 
-          echo "✅ hotpatch annotations-index done"
\ No newline at end of file
+          echo "✅ hotpatch done"
+
+      - name: Debug on failure (containers status/logs)
+        if: ${{ failure() }}
+        run: |
+          set -euo pipefail
+          echo "== docker ps =="
+          docker ps --format 'table {{.Names}}\t{{.Status}}\t{{.Image}}' | sed -n '1,80p' || true
+          for c in archicratie-web-blue archicratie-web-green; do
+            echo "== logs $c (tail 200) =="
+            docker logs --tail 200 "$c" || true
+          done
\ No newline at end of file