Merge pull request 'ci: fix hotpatch (yaml datetime -> json safe)' (#138) from chore/fix-hotpatch-json into main
Reviewed-on: #138
This commit was merged in pull request #138.
This commit is contained in:
@@ -106,8 +106,7 @@ jobs:
|
|||||||
exit 0
|
exit 0
|
||||||
fi
|
fi
|
||||||
|
|
||||||
# merge commit safe: -m => considère les parents (liste de fichiers plus fiable)
|
CHANGED="$(git show --name-only --pretty="" "$SHA" | sed '/^$/d' || true)"
|
||||||
CHANGED="$(git show -m --name-only --pretty="" "$SHA" | sed '/^$/d' || true)"
|
|
||||||
echo "== changed files =="
|
echo "== changed files =="
|
||||||
echo "$CHANGED" | sed -n '1,240p'
|
echo "$CHANGED" | sed -n '1,240p'
|
||||||
|
|
||||||
@@ -185,68 +184,24 @@ jobs:
|
|||||||
[[ "${GO:-0}" == "1" ]] || { echo "ℹ️ skipped"; exit 0; }
|
[[ "${GO:-0}" == "1" ]] || { echo "ℹ️ skipped"; exit 0; }
|
||||||
PROJ="${COMPOSE_PROJECT_NAME:-archicratie-web}"
|
PROJ="${COMPOSE_PROJECT_NAME:-archicratie-web}"
|
||||||
|
|
||||||
retry_url() {
|
|
||||||
local url="$1"; local tries="${2:-45}"; local delay="${3:-1}"
|
|
||||||
local i=1
|
|
||||||
while (( i <= tries )); do
|
|
||||||
if curl -fsS --max-time 5 --retry 2 --retry-delay 0 --retry-all-errors "$url" >/dev/null 2>&1; then
|
|
||||||
return 0
|
|
||||||
fi
|
|
||||||
echo "… wait [$i/$tries] $url"
|
|
||||||
sleep "$delay"
|
|
||||||
((i++))
|
|
||||||
done
|
|
||||||
return 1
|
|
||||||
}
|
|
||||||
|
|
||||||
fetch_html() {
|
|
||||||
local url="$1"; local tries="${2:-45}"; local delay="${3:-1}"
|
|
||||||
local i=1
|
|
||||||
while (( i <= tries )); do
|
|
||||||
local html
|
|
||||||
html="$(curl -fsS --max-time 8 --retry 2 --retry-delay 0 --retry-all-errors "$url" 2>/dev/null || true)"
|
|
||||||
if [[ -n "$html" ]]; then
|
|
||||||
printf "%s" "$html"
|
|
||||||
return 0
|
|
||||||
fi
|
|
||||||
echo "… wait HTML [$i/$tries] $url"
|
|
||||||
sleep "$delay"
|
|
||||||
((i++))
|
|
||||||
done
|
|
||||||
return 1
|
|
||||||
}
|
|
||||||
|
|
||||||
dump_container() {
|
|
||||||
local name="$1"
|
|
||||||
echo "== docker ps (filter=$name) =="
|
|
||||||
docker ps -a --filter "name=$name" --format 'table {{.Names}}\t{{.Status}}\t{{.Image}}' || true
|
|
||||||
echo "== docker logs (tail) $name =="
|
|
||||||
docker logs --tail 200 "$name" 2>/dev/null || true
|
|
||||||
}
|
|
||||||
|
|
||||||
TS="$(date -u +%Y%m%d-%H%M%S)"
|
TS="$(date -u +%Y%m%d-%H%M%S)"
|
||||||
echo "TS='$TS'" >> /tmp/deploy.env
|
echo "TS='$TS'" >> /tmp/deploy.env
|
||||||
docker image tag archicratie-web:blue "archicratie-web:blue.BAK.${TS}" || true
|
docker image tag archicratie-web:blue "archicratie-web:blue.BAK.${TS}" || true
|
||||||
docker image tag archicratie-web:green "archicratie-web:green.BAK.${TS}" || true
|
docker image tag archicratie-web:green "archicratie-web:green.BAK.${TS}" || true
|
||||||
|
|
||||||
docker compose -p "$PROJ" -f docker-compose.yml config >/dev/null
|
|
||||||
|
|
||||||
docker compose -p "$PROJ" -f docker-compose.yml build web_blue
|
docker compose -p "$PROJ" -f docker-compose.yml build web_blue
|
||||||
|
|
||||||
docker rm -f archicratie-web-blue || true
|
docker rm -f archicratie-web-blue || true
|
||||||
docker compose -p "$PROJ" -f docker-compose.yml up -d --force-recreate --remove-orphans web_blue
|
docker compose -p "$PROJ" -f docker-compose.yml up -d --force-recreate --remove-orphans web_blue
|
||||||
|
|
||||||
retry_url "http://127.0.0.1:8081/para-index.json" 60 1 || { dump_container archicratie-web-blue; exit 31; }
|
curl -fsS "http://127.0.0.1:8081/para-index.json" >/dev/null
|
||||||
retry_url "http://127.0.0.1:8081/annotations-index.json" 60 1 || { dump_container archicratie-web-blue; exit 32; }
|
curl -fsS "http://127.0.0.1:8081/annotations-index.json" >/dev/null
|
||||||
retry_url "http://127.0.0.1:8081/pagefind/pagefind.js" 60 1 || { dump_container archicratie-web-blue; exit 33; }
|
curl -fsS "http://127.0.0.1:8081/pagefind/pagefind.js" >/dev/null
|
||||||
|
|
||||||
HTML="$(fetch_html "http://127.0.0.1:8081/archicrat-ia/chapitre-1/" 60 1 || true)"
|
CANON="$(curl -fsS "http://127.0.0.1:8081/archicrat-ia/chapitre-1/" | grep -oE 'rel="canonical" href="[^"]+"' | head -n1 || true)"
|
||||||
CANON="$(echo "$HTML" | grep -oE 'rel="canonical" href="[^"]+"' | head -n1 || true)"
|
|
||||||
echo "canonical(blue)=$CANON"
|
echo "canonical(blue)=$CANON"
|
||||||
echo "$CANON" | grep -q 'https://staging\.archicratie\.trans-hands\.synology\.me/' || {
|
echo "$CANON" | grep -q 'https://staging\.archicratie\.trans-hands\.synology\.me/' || {
|
||||||
dump_container archicratie-web-blue
|
echo "❌ staging canonical mismatch"; exit 3;
|
||||||
echo "❌ staging canonical mismatch"
|
|
||||||
exit 34
|
|
||||||
}
|
}
|
||||||
|
|
||||||
echo "✅ staging OK"
|
echo "✅ staging OK"
|
||||||
@@ -263,45 +218,6 @@ jobs:
|
|||||||
PROJ="${COMPOSE_PROJECT_NAME:-archicratie-web}"
|
PROJ="${COMPOSE_PROJECT_NAME:-archicratie-web}"
|
||||||
TS="${TS:-$(date -u +%Y%m%d-%H%M%S)}"
|
TS="${TS:-$(date -u +%Y%m%d-%H%M%S)}"
|
||||||
|
|
||||||
retry_url() {
|
|
||||||
local url="$1"; local tries="${2:-45}"; local delay="${3:-1}"
|
|
||||||
local i=1
|
|
||||||
while (( i <= tries )); do
|
|
||||||
if curl -fsS --max-time 5 --retry 2 --retry-delay 0 --retry-all-errors "$url" >/dev/null 2>&1; then
|
|
||||||
return 0
|
|
||||||
fi
|
|
||||||
echo "… wait [$i/$tries] $url"
|
|
||||||
sleep "$delay"
|
|
||||||
((i++))
|
|
||||||
done
|
|
||||||
return 1
|
|
||||||
}
|
|
||||||
|
|
||||||
fetch_html() {
|
|
||||||
local url="$1"; local tries="${2:-45}"; local delay="${3:-1}"
|
|
||||||
local i=1
|
|
||||||
while (( i <= tries )); do
|
|
||||||
local html
|
|
||||||
html="$(curl -fsS --max-time 8 --retry 2 --retry-delay 0 --retry-all-errors "$url" 2>/dev/null || true)"
|
|
||||||
if [[ -n "$html" ]]; then
|
|
||||||
printf "%s" "$html"
|
|
||||||
return 0
|
|
||||||
fi
|
|
||||||
echo "… wait HTML [$i/$tries] $url"
|
|
||||||
sleep "$delay"
|
|
||||||
((i++))
|
|
||||||
done
|
|
||||||
return 1
|
|
||||||
}
|
|
||||||
|
|
||||||
dump_container() {
|
|
||||||
local name="$1"
|
|
||||||
echo "== docker ps (filter=$name) =="
|
|
||||||
docker ps -a --filter "name=$name" --format 'table {{.Names}}\t{{.Status}}\t{{.Image}}' || true
|
|
||||||
echo "== docker logs (tail) $name =="
|
|
||||||
docker logs --tail 200 "$name" 2>/dev/null || true
|
|
||||||
}
|
|
||||||
|
|
||||||
rollback() {
|
rollback() {
|
||||||
echo "⚠️ rollback green -> previous image tag (best effort)"
|
echo "⚠️ rollback green -> previous image tag (best effort)"
|
||||||
docker image tag "archicratie-web:green.BAK.${TS}" archicratie-web:green || true
|
docker image tag "archicratie-web:green.BAK.${TS}" archicratie-web:green || true
|
||||||
@@ -309,36 +225,24 @@ jobs:
|
|||||||
docker compose -p "$PROJ" -f docker-compose.yml up -d --force-recreate --remove-orphans web_green || true
|
docker compose -p "$PROJ" -f docker-compose.yml up -d --force-recreate --remove-orphans web_green || true
|
||||||
}
|
}
|
||||||
|
|
||||||
docker compose -p "$PROJ" -f docker-compose.yml config >/dev/null
|
set +e
|
||||||
|
docker compose -p "$PROJ" -f docker-compose.yml build web_green
|
||||||
if ! docker compose -p "$PROJ" -f docker-compose.yml build web_green; then
|
|
||||||
dump_container archicratie-web-green
|
|
||||||
rollback
|
|
||||||
exit 40
|
|
||||||
fi
|
|
||||||
|
|
||||||
docker rm -f archicratie-web-green || true
|
docker rm -f archicratie-web-green || true
|
||||||
if ! docker compose -p "$PROJ" -f docker-compose.yml up -d --force-recreate --remove-orphans web_green; then
|
docker compose -p "$PROJ" -f docker-compose.yml up -d --force-recreate --remove-orphans web_green
|
||||||
dump_container archicratie-web-green
|
|
||||||
rollback
|
|
||||||
exit 41
|
|
||||||
fi
|
|
||||||
|
|
||||||
retry_url "http://127.0.0.1:8082/para-index.json" 90 1 || { dump_container archicratie-web-green; rollback; exit 42; }
|
curl -fsS "http://127.0.0.1:8082/para-index.json" >/dev/null
|
||||||
retry_url "http://127.0.0.1:8082/annotations-index.json" 90 1 || { dump_container archicratie-web-green; rollback; exit 43; }
|
curl -fsS "http://127.0.0.1:8082/annotations-index.json" >/dev/null
|
||||||
retry_url "http://127.0.0.1:8082/pagefind/pagefind.js" 90 1 || { dump_container archicratie-web-green; rollback; exit 44; }
|
curl -fsS "http://127.0.0.1:8082/pagefind/pagefind.js" >/dev/null
|
||||||
|
|
||||||
HTML="$(fetch_html "http://127.0.0.1:8082/archicrat-ia/chapitre-1/" 90 1 || true)"
|
CANON="$(curl -fsS "http://127.0.0.1:8082/archicrat-ia/chapitre-1/" | grep -oE 'rel="canonical" href="[^"]+"' | head -n1 || true)"
|
||||||
CANON="$(echo "$HTML" | grep -oE 'rel="canonical" href="[^"]+"' | head -n1 || true)"
|
|
||||||
echo "canonical(green)=$CANON"
|
echo "canonical(green)=$CANON"
|
||||||
echo "$CANON" | grep -q 'https://archicratie\.trans-hands\.synology\.me/' || {
|
echo "$CANON" | grep -q 'https://archicratie\.trans-hands\.synology\.me/' || {
|
||||||
dump_container archicratie-web-green
|
echo "❌ live canonical mismatch"; rollback; exit 4;
|
||||||
echo "❌ live canonical mismatch"
|
|
||||||
rollback
|
|
||||||
exit 45
|
|
||||||
}
|
}
|
||||||
|
|
||||||
echo "✅ live OK"
|
echo "✅ live OK"
|
||||||
|
set -e
|
||||||
|
|
||||||
- name: Hotpatch annotations-index.json (deep merge shards) into blue+green
|
- name: Hotpatch annotations-index.json (deep merge shards) into blue+green
|
||||||
run: |
|
run: |
|
||||||
@@ -349,6 +253,7 @@ jobs:
|
|||||||
python3 - <<'PY'
|
python3 - <<'PY'
|
||||||
import os, re, json, glob, datetime
|
import os, re, json, glob, datetime
|
||||||
import yaml
|
import yaml
|
||||||
|
import datetime as dt
|
||||||
|
|
||||||
ROOT = os.getcwd()
|
ROOT = os.getcwd()
|
||||||
ANNO_ROOT = os.path.join(ROOT, "src", "annotations")
|
ANNO_ROOT = os.path.join(ROOT, "src", "annotations")
|
||||||
@@ -356,6 +261,26 @@ jobs:
|
|||||||
def is_obj(x): return isinstance(x, dict)
|
def is_obj(x): return isinstance(x, dict)
|
||||||
def is_arr(x): return isinstance(x, list)
|
def is_arr(x): return isinstance(x, list)
|
||||||
|
|
||||||
|
# --- KEY FIX: YAML timestamps -> datetime; JSON can't dump them
|
||||||
|
def iso_dt(x):
|
||||||
|
if isinstance(x, dt.datetime):
|
||||||
|
if x.tzinfo is None:
|
||||||
|
return x.isoformat()
|
||||||
|
return x.astimezone(dt.timezone.utc).isoformat().replace("+00:00","Z")
|
||||||
|
if isinstance(x, dt.date):
|
||||||
|
return x.isoformat()
|
||||||
|
return None
|
||||||
|
|
||||||
|
def normalize(x):
|
||||||
|
s = iso_dt(x)
|
||||||
|
if s is not None:
|
||||||
|
return s
|
||||||
|
if isinstance(x, dict):
|
||||||
|
return {str(k): normalize(v) for k, v in x.items()}
|
||||||
|
if isinstance(x, list):
|
||||||
|
return [normalize(v) for v in x]
|
||||||
|
return x
|
||||||
|
|
||||||
def key_media(it):
|
def key_media(it):
|
||||||
return str((it or {}).get("src",""))
|
return str((it or {}).get("src",""))
|
||||||
|
|
||||||
@@ -375,16 +300,19 @@ jobs:
|
|||||||
seen = set()
|
seen = set()
|
||||||
out = []
|
out = []
|
||||||
for x in (dst_list or []):
|
for x in (dst_list or []):
|
||||||
|
x = normalize(x)
|
||||||
k = key_fn(x)
|
k = key_fn(x)
|
||||||
if k and k not in seen:
|
if k and k not in seen:
|
||||||
seen.add(k); out.append(x)
|
seen.add(k); out.append(x)
|
||||||
for x in (src_list or []):
|
for x in (src_list or []):
|
||||||
|
x = normalize(x)
|
||||||
k = key_fn(x)
|
k = key_fn(x)
|
||||||
if k and k not in seen:
|
if k and k not in seen:
|
||||||
seen.add(k); out.append(x)
|
seen.add(k); out.append(x)
|
||||||
return out
|
return out
|
||||||
|
|
||||||
def deep_merge(dst, src):
|
def deep_merge(dst, src):
|
||||||
|
src = normalize(src)
|
||||||
for k, v in (src or {}).items():
|
for k, v in (src or {}).items():
|
||||||
if k in ("media", "refs", "comments_editorial") and is_arr(v):
|
if k in ("media", "refs", "comments_editorial") and is_arr(v):
|
||||||
if k == "media":
|
if k == "media":
|
||||||
@@ -397,7 +325,7 @@ jobs:
|
|||||||
|
|
||||||
if is_obj(v):
|
if is_obj(v):
|
||||||
if not is_obj(dst.get(k)):
|
if not is_obj(dst.get(k)):
|
||||||
dst[k] = dst.get(k) if is_obj(dst.get(k)) else {}
|
dst[k] = {} if not is_obj(dst.get(k)) else dst.get(k)
|
||||||
deep_merge(dst[k], v)
|
deep_merge(dst[k], v)
|
||||||
continue
|
continue
|
||||||
|
|
||||||
@@ -407,16 +335,20 @@ jobs:
|
|||||||
seen = set()
|
seen = set()
|
||||||
out = []
|
out = []
|
||||||
for x in cur:
|
for x in cur:
|
||||||
|
x = normalize(x)
|
||||||
s = json.dumps(x, sort_keys=True, ensure_ascii=False)
|
s = json.dumps(x, sort_keys=True, ensure_ascii=False)
|
||||||
if s not in seen:
|
if s not in seen:
|
||||||
seen.add(s); out.append(x)
|
seen.add(s); out.append(x)
|
||||||
for x in v:
|
for x in v:
|
||||||
|
x = normalize(x)
|
||||||
s = json.dumps(x, sort_keys=True, ensure_ascii=False)
|
s = json.dumps(x, sort_keys=True, ensure_ascii=False)
|
||||||
if s not in seen:
|
if s not in seen:
|
||||||
seen.add(s); out.append(x)
|
seen.add(s); out.append(x)
|
||||||
dst[k] = out
|
dst[k] = out
|
||||||
continue
|
continue
|
||||||
|
|
||||||
|
# scalaires: set seulement si absent / vide
|
||||||
|
v = normalize(v)
|
||||||
if k not in dst or dst.get(k) in (None, ""):
|
if k not in dst or dst.get(k) in (None, ""):
|
||||||
dst[k] = v
|
dst[k] = v
|
||||||
|
|
||||||
@@ -428,11 +360,16 @@ jobs:
|
|||||||
for k in ("media","refs","comments_editorial"):
|
for k in ("media","refs","comments_editorial"):
|
||||||
arr = entry.get(k)
|
arr = entry.get(k)
|
||||||
if not is_arr(arr): continue
|
if not is_arr(arr): continue
|
||||||
|
|
||||||
def ts(x):
|
def ts(x):
|
||||||
|
x = normalize(x)
|
||||||
try:
|
try:
|
||||||
return datetime.datetime.fromisoformat(str((x or {}).get("ts","")).replace("Z","+00:00")).timestamp()
|
s = str((x or {}).get("ts",""))
|
||||||
|
return dt.datetime.fromisoformat(s.replace("Z","+00:00")).timestamp() if s else 0
|
||||||
except Exception:
|
except Exception:
|
||||||
return 0
|
return 0
|
||||||
|
|
||||||
|
arr = [normalize(x) for x in arr]
|
||||||
arr.sort(key=lambda x: (ts(x), json.dumps(x, sort_keys=True, ensure_ascii=False)))
|
arr.sort(key=lambda x: (ts(x), json.dumps(x, sort_keys=True, ensure_ascii=False)))
|
||||||
entry[k] = arr
|
entry[k] = arr
|
||||||
|
|
||||||
@@ -447,8 +384,11 @@ jobs:
|
|||||||
try:
|
try:
|
||||||
with open(fp, "r", encoding="utf-8") as f:
|
with open(fp, "r", encoding="utf-8") as f:
|
||||||
doc = yaml.safe_load(f) or {}
|
doc = yaml.safe_load(f) or {}
|
||||||
|
doc = normalize(doc)
|
||||||
|
|
||||||
if not isinstance(doc, dict) or doc.get("schema") != 1:
|
if not isinstance(doc, dict) or doc.get("schema") != 1:
|
||||||
continue
|
continue
|
||||||
|
|
||||||
page = str(doc.get("page","")).strip().strip("/")
|
page = str(doc.get("page","")).strip().strip("/")
|
||||||
paras = doc.get("paras") or {}
|
paras = doc.get("paras") or {}
|
||||||
if not page or not isinstance(paras, dict):
|
if not page or not isinstance(paras, dict):
|
||||||
@@ -473,7 +413,7 @@ jobs:
|
|||||||
|
|
||||||
out = {
|
out = {
|
||||||
"schema": 1,
|
"schema": 1,
|
||||||
"generatedAt": datetime.datetime.utcnow().replace(tzinfo=datetime.timezone.utc).isoformat().replace("+00:00","Z"),
|
"generatedAt": dt.datetime.utcnow().replace(tzinfo=dt.timezone.utc).isoformat().replace("+00:00","Z"),
|
||||||
"pages": pages,
|
"pages": pages,
|
||||||
"stats": {
|
"stats": {
|
||||||
"pages": len(pages),
|
"pages": len(pages),
|
||||||
@@ -483,6 +423,8 @@ jobs:
|
|||||||
"errors": errors,
|
"errors": errors,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
out = normalize(out)
|
||||||
|
|
||||||
with open("/tmp/annotations-index.json", "w", encoding="utf-8") as f:
|
with open("/tmp/annotations-index.json", "w", encoding="utf-8") as f:
|
||||||
json.dump(out, f, ensure_ascii=False)
|
json.dump(out, f, ensure_ascii=False)
|
||||||
|
|
||||||
|
|||||||
Reference in New Issue
Block a user