diff --git a/.gitea/workflows/deploy-staging-live.yml b/.gitea/workflows/deploy-staging-live.yml
index ce9eb16..a75995d 100644
--- a/.gitea/workflows/deploy-staging-live.yml
+++ b/.gitea/workflows/deploy-staging-live.yml
@@ -106,8 +106,7 @@ jobs:
             exit 0
           fi
 
-          # merge commit safe: -m => considère les parents (liste de fichiers plus fiable)
-          CHANGED="$(git show -m --name-only --pretty="" "$SHA" | sed '/^$/d' || true)"
+          CHANGED="$(git show --name-only --pretty="" "$SHA" | sed '/^$/d' || true)"
 
           echo "== changed files =="
           echo "$CHANGED" | sed -n '1,240p'
@@ -185,68 +184,24 @@ jobs:
           [[ "${GO:-0}" == "1" ]] || { echo "ℹ️ skipped"; exit 0; }
           PROJ="${COMPOSE_PROJECT_NAME:-archicratie-web}"
 
-          retry_url() {
-            local url="$1"; local tries="${2:-45}"; local delay="${3:-1}"
-            local i=1
-            while (( i <= tries )); do
-              if curl -fsS --max-time 5 --retry 2 --retry-delay 0 --retry-all-errors "$url" >/dev/null 2>&1; then
-                return 0
-              fi
-              echo "… wait [$i/$tries] $url"
-              sleep "$delay"
-              ((i++))
-            done
-            return 1
-          }
-
-          fetch_html() {
-            local url="$1"; local tries="${2:-45}"; local delay="${3:-1}"
-            local i=1
-            while (( i <= tries )); do
-              local html
-              html="$(curl -fsS --max-time 8 --retry 2 --retry-delay 0 --retry-all-errors "$url" 2>/dev/null || true)"
-              if [[ -n "$html" ]]; then
-                printf "%s" "$html"
-                return 0
-              fi
-              echo "… wait HTML [$i/$tries] $url"
-              sleep "$delay"
-              ((i++))
-            done
-            return 1
-          }
-
-          dump_container() {
-            local name="$1"
-            echo "== docker ps (filter=$name) =="
-            docker ps -a --filter "name=$name" --format 'table {{.Names}}\t{{.Status}}\t{{.Image}}' || true
-            echo "== docker logs (tail) $name =="
-            docker logs --tail 200 "$name" 2>/dev/null || true
-          }
-
           TS="$(date -u +%Y%m%d-%H%M%S)"
           echo "TS='$TS'" >> /tmp/deploy.env
           docker image tag archicratie-web:blue "archicratie-web:blue.BAK.${TS}" || true
           docker image tag archicratie-web:green "archicratie-web:green.BAK.${TS}" || true
 
-          docker compose -p "$PROJ" -f docker-compose.yml config >/dev/null
-
           docker compose -p "$PROJ" -f docker-compose.yml build web_blue
           docker rm -f archicratie-web-blue || true
           docker compose -p "$PROJ" -f docker-compose.yml up -d --force-recreate --remove-orphans web_blue
 
-          retry_url "http://127.0.0.1:8081/para-index.json" 60 1 || { dump_container archicratie-web-blue; exit 31; }
-          retry_url "http://127.0.0.1:8081/annotations-index.json" 60 1 || { dump_container archicratie-web-blue; exit 32; }
-          retry_url "http://127.0.0.1:8081/pagefind/pagefind.js" 60 1 || { dump_container archicratie-web-blue; exit 33; }
+          curl -fsS "http://127.0.0.1:8081/para-index.json" >/dev/null
+          curl -fsS "http://127.0.0.1:8081/annotations-index.json" >/dev/null
+          curl -fsS "http://127.0.0.1:8081/pagefind/pagefind.js" >/dev/null
 
-          HTML="$(fetch_html "http://127.0.0.1:8081/archicrat-ia/chapitre-1/" 60 1 || true)"
-          CANON="$(echo "$HTML" | grep -oE 'rel="canonical" href="[^"]+"' | head -n1 || true)"
+          CANON="$(curl -fsS "http://127.0.0.1:8081/archicrat-ia/chapitre-1/" | grep -oE 'rel="canonical" href="[^"]+"' | head -n1 || true)"
           echo "canonical(blue)=$CANON"
           echo "$CANON" | grep -q 'https://staging\.archicratie\.trans-hands\.synology\.me/' || {
-            dump_container archicratie-web-blue
-            echo "❌ staging canonical mismatch"
-            exit 34
+            echo "❌ staging canonical mismatch"; exit 3;
           }
 
           echo "✅ staging OK"
@@ -263,45 +218,6 @@ jobs:
           PROJ="${COMPOSE_PROJECT_NAME:-archicratie-web}"
           TS="${TS:-$(date -u +%Y%m%d-%H%M%S)}"
 
-          retry_url() {
-            local url="$1"; local tries="${2:-45}"; local delay="${3:-1}"
-            local i=1
-            while (( i <= tries )); do
-              if curl -fsS --max-time 5 --retry 2 --retry-delay 0 --retry-all-errors "$url" >/dev/null 2>&1; then
-                return 0
-              fi
-              echo "… wait [$i/$tries] $url"
-              sleep "$delay"
-              ((i++))
-            done
-            return 1
-          }
-
-          fetch_html() {
-            local url="$1"; local tries="${2:-45}"; local delay="${3:-1}"
-            local i=1
-            while (( i <= tries )); do
-              local html
-              html="$(curl -fsS --max-time 8 --retry 2 --retry-delay 0 --retry-all-errors "$url" 2>/dev/null || true)"
-              if [[ -n "$html" ]]; then
-                printf "%s" "$html"
-                return 0
-              fi
-              echo "… wait HTML [$i/$tries] $url"
-              sleep "$delay"
-              ((i++))
-            done
-            return 1
-          }
-
-          dump_container() {
-            local name="$1"
-            echo "== docker ps (filter=$name) =="
-            docker ps -a --filter "name=$name" --format 'table {{.Names}}\t{{.Status}}\t{{.Image}}' || true
-            echo "== docker logs (tail) $name =="
-            docker logs --tail 200 "$name" 2>/dev/null || true
-          }
-
           rollback() {
             echo "⚠️ rollback green -> previous image tag (best effort)"
             docker image tag "archicratie-web:green.BAK.${TS}" archicratie-web:green || true
@@ -309,36 +225,24 @@ jobs:
             docker compose -p "$PROJ" -f docker-compose.yml up -d --force-recreate --remove-orphans web_green || true
           }
 
-          docker compose -p "$PROJ" -f docker-compose.yml config >/dev/null
-
-          if ! docker compose -p "$PROJ" -f docker-compose.yml build web_green; then
-            dump_container archicratie-web-green
-            rollback
-            exit 40
-          fi
+          set +e
+          docker compose -p "$PROJ" -f docker-compose.yml build web_green
           docker rm -f archicratie-web-green || true
-          if ! docker compose -p "$PROJ" -f docker-compose.yml up -d --force-recreate --remove-orphans web_green; then
-            dump_container archicratie-web-green
-            rollback
-            exit 41
-          fi
+          docker compose -p "$PROJ" -f docker-compose.yml up -d --force-recreate --remove-orphans web_green
 
-          retry_url "http://127.0.0.1:8082/para-index.json" 90 1 || { dump_container archicratie-web-green; rollback; exit 42; }
-          retry_url "http://127.0.0.1:8082/annotations-index.json" 90 1 || { dump_container archicratie-web-green; rollback; exit 43; }
-          retry_url "http://127.0.0.1:8082/pagefind/pagefind.js" 90 1 || { dump_container archicratie-web-green; rollback; exit 44; }
+          curl -fsS "http://127.0.0.1:8082/para-index.json" >/dev/null
+          curl -fsS "http://127.0.0.1:8082/annotations-index.json" >/dev/null
+          curl -fsS "http://127.0.0.1:8082/pagefind/pagefind.js" >/dev/null
 
-          HTML="$(fetch_html "http://127.0.0.1:8082/archicrat-ia/chapitre-1/" 90 1 || true)"
-          CANON="$(echo "$HTML" | grep -oE 'rel="canonical" href="[^"]+"' | head -n1 || true)"
+          CANON="$(curl -fsS "http://127.0.0.1:8082/archicrat-ia/chapitre-1/" | grep -oE 'rel="canonical" href="[^"]+"' | head -n1 || true)"
           echo "canonical(green)=$CANON"
           echo "$CANON" | grep -q 'https://archicratie\.trans-hands\.synology\.me/' || {
-            dump_container archicratie-web-green
-            echo "❌ live canonical mismatch"
-            rollback
-            exit 45
+            echo "❌ live canonical mismatch"; rollback; exit 4;
           }
 
           echo "✅ live OK"
+          set -e
 
     - name: Hotpatch annotations-index.json (deep merge shards) into blue+green
       run: |
@@ -349,6 +253,7 @@ jobs:
           python3 - <<'PY'
           import os, re, json, glob, datetime
           import yaml
+          import datetime as dt
 
           ROOT = os.getcwd()
           ANNO_ROOT = os.path.join(ROOT, "src", "annotations")
@@ -356,6 +261,26 @@ jobs:
           def is_obj(x): return isinstance(x, dict)
           def is_arr(x): return isinstance(x, list)
 
+          # --- KEY FIX: YAML timestamps -> datetime; JSON can't dump them
+          def iso_dt(x):
+              if isinstance(x, dt.datetime):
+                  if x.tzinfo is None:
+                      return x.isoformat()
+                  return x.astimezone(dt.timezone.utc).isoformat().replace("+00:00","Z")
+              if isinstance(x, dt.date):
+                  return x.isoformat()
+              return None
+
+          def normalize(x):
+              s = iso_dt(x)
+              if s is not None:
+                  return s
+              if isinstance(x, dict):
+                  return {str(k): normalize(v) for k, v in x.items()}
+              if isinstance(x, list):
+                  return [normalize(v) for v in x]
+              return x
+
           def key_media(it):
               return str((it or {}).get("src",""))
@@ -375,16 +300,19 @@ jobs:
               seen = set()
               out = []
               for x in (dst_list or []):
+                  x = normalize(x)
                   k = key_fn(x)
                   if k and k not in seen:
                       seen.add(k); out.append(x)
               for x in (src_list or []):
+                  x = normalize(x)
                   k = key_fn(x)
                   if k and k not in seen:
                       seen.add(k); out.append(x)
               return out
 
           def deep_merge(dst, src):
+              src = normalize(src)
              for k, v in (src or {}).items():
                  if k in ("media", "refs", "comments_editorial") and is_arr(v):
                      if k == "media":
@@ -397,7 +325,7 @@ jobs:
 
                  if is_obj(v):
                      if not is_obj(dst.get(k)):
-                         dst[k] = dst.get(k) if is_obj(dst.get(k)) else {}
+                         dst[k] = {} if not is_obj(dst.get(k)) else dst.get(k)
                      deep_merge(dst[k], v)
                      continue
@@ -407,16 +335,20 @@ jobs:
                      seen = set()
                      out = []
                      for x in cur:
+                         x = normalize(x)
                          s = json.dumps(x, sort_keys=True, ensure_ascii=False)
                          if s not in seen:
                              seen.add(s); out.append(x)
                      for x in v:
+                         x = normalize(x)
                          s = json.dumps(x, sort_keys=True, ensure_ascii=False)
                          if s not in seen:
                              seen.add(s); out.append(x)
                      dst[k] = out
                      continue
 
+                 # scalaires: set seulement si absent / vide
+                 v = normalize(v)
                  if k not in dst or dst.get(k) in (None, ""):
                      dst[k] = v
@@ -428,11 +360,16 @@ jobs:
              for k in ("media","refs","comments_editorial"):
                  arr = entry.get(k)
                  if not is_arr(arr): continue
+
                  def ts(x):
+                     x = normalize(x)
                      try:
-                         return datetime.datetime.fromisoformat(str((x or {}).get("ts","")).replace("Z","+00:00")).timestamp()
+                         s = str((x or {}).get("ts",""))
+                         return dt.datetime.fromisoformat(s.replace("Z","+00:00")).timestamp() if s else 0
                      except Exception:
                          return 0
+
+                 arr = [normalize(x) for x in arr]
                  arr.sort(key=lambda x: (ts(x), json.dumps(x, sort_keys=True, ensure_ascii=False)))
                  entry[k] = arr
@@ -447,8 +384,11 @@ jobs:
              try:
                  with open(fp, "r", encoding="utf-8") as f:
                      doc = yaml.safe_load(f) or {}
+                 doc = normalize(doc)
+
                  if not isinstance(doc, dict) or doc.get("schema") != 1:
                      continue
+
                  page = str(doc.get("page","")).strip().strip("/")
                  paras = doc.get("paras") or {}
                  if not page or not isinstance(paras, dict):
@@ -473,7 +413,7 @@ jobs:
           out = {
               "schema": 1,
-              "generatedAt": datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).isoformat().replace("+00:00","Z"),
+              "generatedAt": dt.datetime.utcnow().replace(tzinfo=dt.timezone.utc).isoformat().replace("+00:00","Z"),
               "pages": pages,
               "stats": {
                   "pages": len(pages),
@@ -483,6 +423,8 @@ jobs:
               "errors": errors,
           }
 
+          out = normalize(out)
+
           with open("/tmp/annotations-index.json", "w", encoding="utf-8") as f:
               json.dump(out, f, ensure_ascii=False)