Compare commits
24 Commits
chore/anno
...
chore/fix-
| Author | SHA1 | Date | |
|---|---|---|---|
| 0d5b790e52 | |||
| 4dec9e182b | |||
| 9b4584f70a | |||
| 7b64fb7401 | |||
|
|
57cb23ce8b | ||
| 708b87ff35 | |||
| 577cfd08e8 | |||
| de9edbe532 | |||
| 5e95dc9898 | |||
| 006fec7efd | |||
| 2b612214bb | |||
| 29a6c349aa | |||
|
|
33a227c401 | ||
| 396ad4df7c | |||
|
|
0b39427090 | ||
| 8fcb18cb46 | |||
| d03fc519de | |||
| 97dd3797d6 | |||
| 6c7b7ab6a0 | |||
| 105dfe1b5b | |||
| 82f6453538 | |||
| fe862102d3 | |||
| 6ef538a0c4 | |||
| 689612ff7f |
@@ -34,99 +34,98 @@ jobs:
|
|||||||
- name: Derive context (event.json / workflow_dispatch)
|
- name: Derive context (event.json / workflow_dispatch)
|
||||||
env:
|
env:
|
||||||
INPUT_ISSUE: ${{ inputs.issue }}
|
INPUT_ISSUE: ${{ inputs.issue }}
|
||||||
|
FORGE_API: ${{ vars.FORGE_API || vars.FORGE_BASE }}
|
||||||
run: |
|
run: |
|
||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
export EVENT_JSON="/var/run/act/workflow/event.json"
|
export EVENT_JSON="/var/run/act/workflow/event.json"
|
||||||
test -f "$EVENT_JSON" || { echo "❌ Missing $EVENT_JSON"; exit 1; }
|
test -f "$EVENT_JSON" || { echo "❌ Missing $EVENT_JSON"; exit 1; }
|
||||||
|
|
||||||
node --input-type=module - <<'NODE' > /tmp/anno.env
|
node --input-type=module - <<'NODE' > /tmp/anno.env
|
||||||
import fs from "node:fs";
|
import fs from "node:fs";
|
||||||
|
|
||||||
const ev = JSON.parse(fs.readFileSync(process.env.EVENT_JSON, "utf8"));
|
const ev = JSON.parse(fs.readFileSync(process.env.EVENT_JSON, "utf8"));
|
||||||
|
const repoObj = ev?.repository || {};
|
||||||
|
|
||||||
const repoObj = ev?.repository || {};
|
const cloneUrl =
|
||||||
const cloneUrl =
|
repoObj?.clone_url ||
|
||||||
repoObj?.clone_url ||
|
(repoObj?.html_url ? (repoObj.html_url.replace(/\/$/,"") + ".git") : "");
|
||||||
(repoObj?.html_url ? (repoObj.html_url.replace(/\/$/,"") + ".git") : "");
|
|
||||||
|
|
||||||
if (!cloneUrl) throw new Error("No repository clone_url/html_url in event.json");
|
if (!cloneUrl) throw new Error("No repository clone_url/html_url in event.json");
|
||||||
|
|
||||||
let owner =
|
let owner =
|
||||||
repoObj?.owner?.login ||
|
repoObj?.owner?.login ||
|
||||||
repoObj?.owner?.username ||
|
repoObj?.owner?.username ||
|
||||||
(repoObj?.full_name ? repoObj.full_name.split("/")[0] : "");
|
(repoObj?.full_name ? repoObj.full_name.split("/")[0] : "");
|
||||||
|
|
||||||
let repo =
|
let repo =
|
||||||
repoObj?.name ||
|
repoObj?.name ||
|
||||||
(repoObj?.full_name ? repoObj.full_name.split("/")[1] : "");
|
(repoObj?.full_name ? repoObj.full_name.split("/")[1] : "");
|
||||||
|
|
||||||
if (!owner || !repo) {
|
if (!owner || !repo) {
|
||||||
// fallback parse from clone url
|
const m = cloneUrl.match(/[:/](?<o>[^/]+)\/(?<r>[^/]+?)(?:\.git)?$/);
|
||||||
const m = cloneUrl.match(/[:/](?<o>[^/]+)\/(?<r>[^/]+?)(?:\.git)?$/);
|
if (m?.groups) { owner = owner || m.groups.o; repo = repo || m.groups.r; }
|
||||||
if (m?.groups) { owner = owner || m.groups.o; repo = repo || m.groups.r; }
|
}
|
||||||
}
|
if (!owner || !repo) throw new Error("Cannot infer owner/repo");
|
||||||
if (!owner || !repo) throw new Error("Cannot infer owner/repo");
|
|
||||||
|
|
||||||
const defaultBranch = repoObj?.default_branch || "master";
|
const defaultBranch = repoObj?.default_branch || "main";
|
||||||
|
|
||||||
const issueNumber =
|
const issueNumber =
|
||||||
ev?.issue?.number ||
|
ev?.issue?.number ||
|
||||||
ev?.issue?.index ||
|
ev?.issue?.index ||
|
||||||
(process.env.INPUT_ISSUE ? Number(process.env.INPUT_ISSUE) : 0);
|
(process.env.INPUT_ISSUE ? Number(process.env.INPUT_ISSUE) : 0);
|
||||||
|
|
||||||
if (!issueNumber || !Number.isFinite(Number(issueNumber))) {
|
if (!issueNumber || !Number.isFinite(Number(issueNumber))) {
|
||||||
throw new Error("No issue number in event.json or workflow_dispatch input");
|
throw new Error("No issue number in event.json or workflow_dispatch input");
|
||||||
}
|
}
|
||||||
|
|
||||||
const labelName =
|
const labelName =
|
||||||
ev?.label?.name ||
|
ev?.label?.name ||
|
||||||
ev?.label ||
|
ev?.label ||
|
||||||
"workflow_dispatch";
|
"workflow_dispatch";
|
||||||
|
|
||||||
const u = new URL(cloneUrl);
|
const u = new URL(cloneUrl);
|
||||||
const origin = u.origin; // https://gitea...
|
const origin = u.origin;
|
||||||
const apiBase = (process.env.FORGE_API && process.env.FORGE_API.trim())
|
|
||||||
? process.env.FORGE_API.trim().replace(/\/+$/,"")
|
|
||||||
: origin;
|
|
||||||
|
|
||||||
function sh(s){ return JSON.stringify(String(s)); }
|
const apiBase = (process.env.FORGE_API && String(process.env.FORGE_API).trim())
|
||||||
|
? String(process.env.FORGE_API).trim().replace(/\/+$/,"")
|
||||||
|
: origin;
|
||||||
|
|
||||||
process.stdout.write([
|
function sh(s){ return JSON.stringify(String(s)); }
|
||||||
`CLONE_URL=${sh(cloneUrl)}`,
|
|
||||||
`OWNER=${sh(owner)}`,
|
process.stdout.write([
|
||||||
`REPO=${sh(repo)}`,
|
`CLONE_URL=${sh(cloneUrl)}`,
|
||||||
`DEFAULT_BRANCH=${sh(defaultBranch)}`,
|
`OWNER=${sh(owner)}`,
|
||||||
`ISSUE_NUMBER=${sh(issueNumber)}`,
|
`REPO=${sh(repo)}`,
|
||||||
`LABEL_NAME=${sh(labelName)}`,
|
`DEFAULT_BRANCH=${sh(defaultBranch)}`,
|
||||||
`API_BASE=${sh(apiBase)}`,
|
`ISSUE_NUMBER=${sh(issueNumber)}`,
|
||||||
].join("\n") + "\n");
|
`LABEL_NAME=${sh(labelName)}`,
|
||||||
NODE
|
`API_BASE=${sh(apiBase)}`
|
||||||
|
].join("\n") + "\n");
|
||||||
|
NODE
|
||||||
|
|
||||||
echo "✅ context:"
|
echo "✅ context:"
|
||||||
sed -n '1,80p' /tmp/anno.env
|
sed -n '1,120p' /tmp/anno.env
|
||||||
|
|
||||||
- name: Gate on label state/approved
|
- name: Gate on label state/approved
|
||||||
run: |
|
run: |
|
||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
source /tmp/anno.env
|
source /tmp/anno.env
|
||||||
if [[ "$LABEL_NAME" != "state/approved" && "$LABEL_NAME" != "workflow_dispatch" ]]; then
|
if [[ "$LABEL_NAME" != "state/approved" && "$LABEL_NAME" != "workflow_dispatch" ]]; then
|
||||||
echo "ℹ️ label=$LABEL_NAME => skip (only state/approved triggers apply)"
|
echo "ℹ️ label=$LABEL_NAME => skip"
|
||||||
|
echo "SKIP=1" >> /tmp/anno.env
|
||||||
exit 0
|
exit 0
|
||||||
fi
|
fi
|
||||||
echo "✅ proceed (label=$LABEL_NAME issue=$ISSUE_NUMBER)"
|
echo "✅ proceed (issue=$ISSUE_NUMBER)"
|
||||||
|
|
||||||
- name: Checkout default branch (from event.json, no external actions)
|
- name: Checkout default branch
|
||||||
run: |
|
run: |
|
||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
source /tmp/anno.env
|
source /tmp/anno.env
|
||||||
|
[[ "${SKIP:-0}" != "1" ]] || { echo "ℹ️ skipped"; exit 0; }
|
||||||
|
|
||||||
rm -rf .git
|
rm -rf .git
|
||||||
git init -q
|
git init -q
|
||||||
git remote add origin "$CLONE_URL"
|
git remote add origin "$CLONE_URL"
|
||||||
|
|
||||||
echo "Repo URL: $CLONE_URL"
|
|
||||||
echo "Base: $DEFAULT_BRANCH"
|
|
||||||
|
|
||||||
git fetch --depth 1 origin "$DEFAULT_BRANCH"
|
git fetch --depth 1 origin "$DEFAULT_BRANCH"
|
||||||
git -c advice.detachedHead=false checkout -q FETCH_HEAD
|
git -c advice.detachedHead=false checkout -q FETCH_HEAD
|
||||||
git log -1 --oneline
|
git log -1 --oneline
|
||||||
@@ -134,9 +133,38 @@ NODE
|
|||||||
- name: Install deps
|
- name: Install deps
|
||||||
run: |
|
run: |
|
||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
|
source /tmp/anno.env
|
||||||
|
[[ "${SKIP:-0}" != "1" ]] || { echo "ℹ️ skipped"; exit 0; }
|
||||||
npm ci
|
npm ci
|
||||||
|
|
||||||
|
- name: Check apply script exists
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
source /tmp/anno.env
|
||||||
|
[[ "${SKIP:-0}" != "1" ]] || { echo "ℹ️ skipped"; exit 0; }
|
||||||
|
test -f scripts/apply-annotation-ticket.mjs || {
|
||||||
|
echo "❌ missing scripts/apply-annotation-ticket.mjs on $DEFAULT_BRANCH"
|
||||||
|
ls -la scripts | sed -n '1,200p' || true
|
||||||
|
exit 1
|
||||||
|
}
|
||||||
|
|
||||||
|
- name: Build dist (needed for --verify)
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
source /tmp/anno.env
|
||||||
|
[[ "${SKIP:-0}" != "1" ]] || { echo "ℹ️ skipped"; exit 0; }
|
||||||
|
|
||||||
|
npm run build:clean
|
||||||
|
|
||||||
|
test -f dist/para-index.json || {
|
||||||
|
echo "❌ missing dist/para-index.json after build"
|
||||||
|
ls -la dist | sed -n '1,200p' || true
|
||||||
|
exit 1
|
||||||
|
}
|
||||||
|
echo "✅ dist/para-index.json present"
|
||||||
|
|
||||||
- name: Apply ticket on bot branch (strict+verify, commit)
|
- name: Apply ticket on bot branch (strict+verify, commit)
|
||||||
|
continue-on-error: true
|
||||||
env:
|
env:
|
||||||
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
|
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
|
||||||
BOT_GIT_NAME: ${{ secrets.BOT_GIT_NAME }}
|
BOT_GIT_NAME: ${{ secrets.BOT_GIT_NAME }}
|
||||||
@@ -144,21 +172,19 @@ NODE
|
|||||||
run: |
|
run: |
|
||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
source /tmp/anno.env
|
source /tmp/anno.env
|
||||||
|
[[ "${SKIP:-0}" != "1" ]] || { echo "ℹ️ skipped"; exit 0; }
|
||||||
|
|
||||||
test -n "${FORGE_TOKEN:-}" || { echo "❌ Missing secret FORGE_TOKEN"; exit 1; }
|
test -n "${FORGE_TOKEN:-}" || { echo "❌ Missing secret FORGE_TOKEN"; exit 1; }
|
||||||
|
|
||||||
# git identity (required for commits)
|
|
||||||
git config user.name "${BOT_GIT_NAME:-archicratie-bot}"
|
git config user.name "${BOT_GIT_NAME:-archicratie-bot}"
|
||||||
git config user.email "${BOT_GIT_EMAIL:-bot@archicratie.local}"
|
git config user.email "${BOT_GIT_EMAIL:-bot@archicratie.local}"
|
||||||
|
|
||||||
START_SHA="$(git rev-parse HEAD)"
|
START_SHA="$(git rev-parse HEAD)"
|
||||||
|
|
||||||
TS="$(date -u +%Y%m%d-%H%M%S)"
|
TS="$(date -u +%Y%m%d-%H%M%S)"
|
||||||
BR="bot/anno-${ISSUE_NUMBER}-${TS}"
|
BR="bot/anno-${ISSUE_NUMBER}-${TS}"
|
||||||
echo "BRANCH=$BR" >> /tmp/anno.env
|
echo "BRANCH=$BR" >> /tmp/anno.env
|
||||||
git checkout -b "$BR"
|
git checkout -b "$BR"
|
||||||
|
|
||||||
# env for script
|
|
||||||
export FORGE_API="$API_BASE"
|
export FORGE_API="$API_BASE"
|
||||||
export GITEA_OWNER="$OWNER"
|
export GITEA_OWNER="$OWNER"
|
||||||
export GITEA_REPO="$REPO"
|
export GITEA_REPO="$REPO"
|
||||||
@@ -169,14 +195,16 @@ NODE
|
|||||||
RC=$?
|
RC=$?
|
||||||
set -e
|
set -e
|
||||||
|
|
||||||
|
echo "APPLY_RC=$RC" >> /tmp/anno.env
|
||||||
|
|
||||||
echo "== apply log (tail) =="
|
echo "== apply log (tail) =="
|
||||||
tail -n 120 "$LOG" || true
|
tail -n 180 "$LOG" || true
|
||||||
|
|
||||||
END_SHA="$(git rev-parse HEAD)"
|
END_SHA="$(git rev-parse HEAD)"
|
||||||
|
|
||||||
if [[ "$RC" -ne 0 ]]; then
|
if [[ "$RC" -ne 0 ]]; then
|
||||||
echo "APPLY_RC=$RC" >> /tmp/anno.env
|
echo "NOOP=0" >> /tmp/anno.env
|
||||||
exit "$RC"
|
exit 0
|
||||||
fi
|
fi
|
||||||
|
|
||||||
if [[ "$START_SHA" == "$END_SHA" ]]; then
|
if [[ "$START_SHA" == "$END_SHA" ]]; then
|
||||||
@@ -193,17 +221,18 @@ NODE
|
|||||||
run: |
|
run: |
|
||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
source /tmp/anno.env
|
source /tmp/anno.env
|
||||||
# si apply a échoué, la step précédente s'arrête => ce step tourne quand même (always)
|
[[ "${SKIP:-0}" != "1" ]] || { echo "ℹ️ skipped"; exit 0; }
|
||||||
if [[ -z "${APPLY_RC:-}" ]]; then
|
|
||||||
|
RC="${APPLY_RC:-0}"
|
||||||
|
if [[ "$RC" == "0" ]]; then
|
||||||
echo "ℹ️ no failure detected"
|
echo "ℹ️ no failure detected"
|
||||||
exit 0
|
exit 0
|
||||||
fi
|
fi
|
||||||
|
|
||||||
BODY="$(tail -n 120 /tmp/apply.log | sed 's/\r$//' )"
|
BODY="$(tail -n 160 /tmp/apply.log | sed 's/\r$//')"
|
||||||
MSG="❌ apply-annotation-ticket a échoué (rc=${APPLY_RC}).\n\n\`\`\`\n${BODY}\n\`\`\`\n"
|
MSG="❌ apply-annotation-ticket a échoué (rc=${RC}).\n\n\`\`\`\n${BODY}\n\`\`\`\n"
|
||||||
|
|
||||||
PAYLOAD="$(node --input-type=module -e 'console.log(JSON.stringify({body: process.env.MSG}))' \
|
PAYLOAD="$(node --input-type=module -e 'console.log(JSON.stringify({body: process.argv[1]||""}))' "$MSG")"
|
||||||
MSG="$MSG")"
|
|
||||||
|
|
||||||
curl -fsS -X POST \
|
curl -fsS -X POST \
|
||||||
-H "Authorization: token $FORGE_TOKEN" \
|
-H "Authorization: token $FORGE_TOKEN" \
|
||||||
@@ -211,21 +240,20 @@ NODE
|
|||||||
"$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE_NUMBER/comments" \
|
"$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE_NUMBER/comments" \
|
||||||
--data-binary "$PAYLOAD"
|
--data-binary "$PAYLOAD"
|
||||||
|
|
||||||
exit "${APPLY_RC}"
|
|
||||||
|
|
||||||
- name: Comment issue if no-op (already applied)
|
- name: Comment issue if no-op (already applied)
|
||||||
|
if: ${{ always() }}
|
||||||
env:
|
env:
|
||||||
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
|
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
|
||||||
run: |
|
run: |
|
||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
source /tmp/anno.env
|
source /tmp/anno.env
|
||||||
if [[ "${NOOP:-0}" != "1" ]]; then
|
[[ "${SKIP:-0}" != "1" ]] || exit 0
|
||||||
echo "ℹ️ changes exist -> will create PR"
|
|
||||||
exit 0
|
[[ "${APPLY_RC:-0}" == "0" ]] || exit 0
|
||||||
fi
|
[[ "${NOOP:-0}" == "1" ]] || exit 0
|
||||||
|
|
||||||
MSG="ℹ️ Ticket #${ISSUE_NUMBER} : rien à appliquer (déjà présent / dédupliqué)."
|
MSG="ℹ️ Ticket #${ISSUE_NUMBER} : rien à appliquer (déjà présent / dédupliqué)."
|
||||||
PAYLOAD="$(node --input-type=module -e 'console.log(JSON.stringify({body: process.env.MSG}))' MSG="$MSG")"
|
PAYLOAD="$(node --input-type=module -e 'console.log(JSON.stringify({body: process.argv[1]||""}))' "$MSG")"
|
||||||
|
|
||||||
curl -fsS -X POST \
|
curl -fsS -X POST \
|
||||||
-H "Authorization: token $FORGE_TOKEN" \
|
-H "Authorization: token $FORGE_TOKEN" \
|
||||||
@@ -233,48 +261,48 @@ NODE
|
|||||||
"$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE_NUMBER/comments" \
|
"$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE_NUMBER/comments" \
|
||||||
--data-binary "$PAYLOAD"
|
--data-binary "$PAYLOAD"
|
||||||
|
|
||||||
echo "✅ no-op handled"
|
|
||||||
exit 0
|
|
||||||
|
|
||||||
- name: Push bot branch
|
- name: Push bot branch
|
||||||
|
if: ${{ always() }}
|
||||||
env:
|
env:
|
||||||
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
|
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
|
||||||
run: |
|
run: |
|
||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
source /tmp/anno.env
|
source /tmp/anno.env
|
||||||
test "${NOOP:-0}" = "0" || { echo "ℹ️ no-op -> skip push"; exit 0; }
|
[[ "${SKIP:-0}" != "1" ]] || exit 0
|
||||||
|
|
||||||
|
[[ "${APPLY_RC:-0}" == "0" ]] || { echo "ℹ️ apply failed -> skip push"; exit 0; }
|
||||||
|
[[ "${NOOP:-0}" == "0" ]] || { echo "ℹ️ no-op -> skip push"; exit 0; }
|
||||||
|
|
||||||
# auth remote (Gitea supports oauth2:<token>)
|
|
||||||
AUTH_URL="$(node --input-type=module -e '
|
AUTH_URL="$(node --input-type=module -e '
|
||||||
const u = new URL(process.env.CLONE_URL);
|
const [clone, tok] = process.argv.slice(1);
|
||||||
|
const u = new URL(clone);
|
||||||
u.username = "oauth2";
|
u.username = "oauth2";
|
||||||
u.password = process.env.FORGE_TOKEN;
|
u.password = tok;
|
||||||
console.log(u.toString());
|
console.log(u.toString());
|
||||||
' CLONE_URL="$CLONE_URL" FORGE_TOKEN="$FORGE_TOKEN")"
|
' "$CLONE_URL" "$FORGE_TOKEN")"
|
||||||
|
|
||||||
git remote set-url origin "$AUTH_URL"
|
git remote set-url origin "$AUTH_URL"
|
||||||
git push -u origin "$BRANCH"
|
git push -u origin "$BRANCH"
|
||||||
|
|
||||||
- name: Create PR + comment issue
|
- name: Create PR + comment issue
|
||||||
|
if: ${{ always() }}
|
||||||
env:
|
env:
|
||||||
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
|
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
|
||||||
run: |
|
run: |
|
||||||
set -euo pipefail
|
set -euo pipefail
|
||||||
source /tmp/anno.env
|
source /tmp/anno.env
|
||||||
test "${NOOP:-0}" = "0" || { echo "ℹ️ no-op -> skip PR"; exit 0; }
|
[[ "${SKIP:-0}" != "1" ]] || exit 0
|
||||||
|
|
||||||
|
[[ "${APPLY_RC:-0}" == "0" ]] || { echo "ℹ️ apply failed -> skip PR"; exit 0; }
|
||||||
|
[[ "${NOOP:-0}" == "0" ]] || { echo "ℹ️ no-op -> skip PR"; exit 0; }
|
||||||
|
|
||||||
PR_TITLE="anno: apply ticket #${ISSUE_NUMBER}"
|
PR_TITLE="anno: apply ticket #${ISSUE_NUMBER}"
|
||||||
PR_BODY="PR générée automatiquement à partir du ticket #${ISSUE_NUMBER} (label state/approved).\n\n- Branche: ${BRANCH}\n- Commit: ${END_SHA}\n\nMerge si CI OK."
|
PR_BODY="PR auto depuis ticket #${ISSUE_NUMBER} (state/approved).\n\n- Branche: ${BRANCH}\n- Commit: ${END_SHA}\n\nMerge si CI OK."
|
||||||
|
|
||||||
PR_PAYLOAD="$(node --input-type=module -e '
|
PR_PAYLOAD="$(node --input-type=module -e '
|
||||||
console.log(JSON.stringify({
|
const [title, body, base, head] = process.argv.slice(1);
|
||||||
title: process.env.PR_TITLE,
|
console.log(JSON.stringify({ title, body, base, head, allow_maintainer_edit: true }));
|
||||||
body: process.env.PR_BODY,
|
' "$PR_TITLE" "$PR_BODY" "$DEFAULT_BRANCH" "${OWNER}:${BRANCH}")"
|
||||||
base: process.env.DEFAULT_BRANCH,
|
|
||||||
head: `${process.env.OWNER}:${process.env.BRANCH}`,
|
|
||||||
allow_maintainer_edit: true
|
|
||||||
}));
|
|
||||||
' PR_TITLE="$PR_TITLE" PR_BODY="$PR_BODY" OWNER="$OWNER" BRANCH="$BRANCH" DEFAULT_BRANCH="$DEFAULT_BRANCH")"
|
|
||||||
|
|
||||||
PR_JSON="$(curl -fsS -X POST \
|
PR_JSON="$(curl -fsS -X POST \
|
||||||
-H "Authorization: token $FORGE_TOKEN" \
|
-H "Authorization: token $FORGE_TOKEN" \
|
||||||
@@ -283,14 +311,14 @@ NODE
|
|||||||
--data-binary "$PR_PAYLOAD")"
|
--data-binary "$PR_PAYLOAD")"
|
||||||
|
|
||||||
PR_URL="$(node --input-type=module -e '
|
PR_URL="$(node --input-type=module -e '
|
||||||
const pr = JSON.parse(process.env.PR_JSON);
|
const pr = JSON.parse(process.argv[1] || "{}");
|
||||||
console.log(pr.html_url || pr.url || "");
|
console.log(pr.html_url || pr.url || "");
|
||||||
' PR_JSON="$PR_JSON")"
|
' "$PR_JSON")"
|
||||||
|
|
||||||
test -n "$PR_URL" || { echo "❌ PR URL missing. Raw: $PR_JSON"; exit 1; }
|
test -n "$PR_URL" || { echo "❌ PR URL missing. Raw: $PR_JSON"; exit 1; }
|
||||||
|
|
||||||
MSG="✅ PR créée pour ticket #${ISSUE_NUMBER} : ${PR_URL}"
|
MSG="✅ PR créée pour ticket #${ISSUE_NUMBER} : ${PR_URL}"
|
||||||
C_PAYLOAD="$(node --input-type=module -e 'console.log(JSON.stringify({body: process.env.MSG}))' MSG="$MSG")"
|
C_PAYLOAD="$(node --input-type=module -e 'console.log(JSON.stringify({body: process.argv[1]||""}))' "$MSG")"
|
||||||
|
|
||||||
curl -fsS -X POST \
|
curl -fsS -X POST \
|
||||||
-H "Authorization: token $FORGE_TOKEN" \
|
-H "Authorization: token $FORGE_TOKEN" \
|
||||||
@@ -298,4 +326,18 @@ NODE
|
|||||||
"$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE_NUMBER/comments" \
|
"$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE_NUMBER/comments" \
|
||||||
--data-binary "$C_PAYLOAD"
|
--data-binary "$C_PAYLOAD"
|
||||||
|
|
||||||
echo "✅ PR: $PR_URL"
|
echo "✅ PR: $PR_URL"
|
||||||
|
|
||||||
|
- name: Finalize (fail job if apply failed)
|
||||||
|
if: ${{ always() }}
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
source /tmp/anno.env || true
|
||||||
|
[[ "${SKIP:-0}" != "1" ]] || { echo "ℹ️ skipped"; exit 0; }
|
||||||
|
|
||||||
|
RC="${APPLY_RC:-0}"
|
||||||
|
if [[ "$RC" != "0" ]]; then
|
||||||
|
echo "❌ apply failed (rc=$RC)"
|
||||||
|
exit "$RC"
|
||||||
|
fi
|
||||||
|
echo "✅ apply ok"
|
||||||
@@ -25,46 +25,44 @@ jobs:
|
|||||||
test -f "$EVENT_JSON" || { echo "❌ Missing $EVENT_JSON"; exit 1; }
|
test -f "$EVENT_JSON" || { echo "❌ Missing $EVENT_JSON"; exit 1; }
|
||||||
|
|
||||||
node --input-type=module - <<'NODE' > /tmp/reject.env
|
node --input-type=module - <<'NODE' > /tmp/reject.env
|
||||||
import fs from "node:fs";
|
import fs from "node:fs";
|
||||||
|
|
||||||
const ev = JSON.parse(fs.readFileSync(process.env.EVENT_JSON, "utf8"));
|
const ev = JSON.parse(fs.readFileSync(process.env.EVENT_JSON, "utf8"));
|
||||||
const repoObj = ev?.repository || {};
|
const repoObj = ev?.repository || {};
|
||||||
const cloneUrl =
|
const cloneUrl =
|
||||||
repoObj?.clone_url ||
|
repoObj?.clone_url ||
|
||||||
(repoObj?.html_url ? (repoObj.html_url.replace(/\/$/,"") + ".git") : "");
|
(repoObj?.html_url ? (repoObj.html_url.replace(/\/$/,"") + ".git") : "");
|
||||||
if (!cloneUrl) throw new Error("No repository url");
|
if (!cloneUrl) throw new Error("No repository url");
|
||||||
|
|
||||||
let owner =
|
let owner =
|
||||||
repoObj?.owner?.login ||
|
repoObj?.owner?.login ||
|
||||||
repoObj?.owner?.username ||
|
repoObj?.owner?.username ||
|
||||||
(repoObj?.full_name ? repoObj.full_name.split("/")[0] : "");
|
(repoObj?.full_name ? repoObj.full_name.split("/")[0] : "");
|
||||||
let repo =
|
let repo =
|
||||||
repoObj?.name ||
|
repoObj?.name ||
|
||||||
(repoObj?.full_name ? repoObj.full_name.split("/")[1] : "");
|
(repoObj?.full_name ? repoObj.full_name.split("/")[1] : "");
|
||||||
|
|
||||||
if (!owner || !repo) {
|
if (!owner || !repo) {
|
||||||
const m = cloneUrl.match(/[:/](?<o>[^/]+)\/(?<r>[^/]+?)(?:\.git)?$/);
|
const m = cloneUrl.match(/[:/](?<o>[^/]+)\/(?<r>[^/]+?)(?:\.git)?$/);
|
||||||
if (m?.groups) { owner = owner || m.groups.o; repo = repo || m.groups.r; }
|
if (m?.groups) { owner = owner || m.groups.o; repo = repo || m.groups.r; }
|
||||||
}
|
}
|
||||||
if (!owner || !repo) throw new Error("Cannot infer owner/repo");
|
if (!owner || !repo) throw new Error("Cannot infer owner/repo");
|
||||||
|
|
||||||
const issueNumber = ev?.issue?.number || ev?.issue?.index;
|
const issueNumber = ev?.issue?.number || ev?.issue?.index;
|
||||||
if (!issueNumber) throw new Error("No issue number");
|
if (!issueNumber) throw new Error("No issue number");
|
||||||
|
|
||||||
const labelName = ev?.label?.name || ev?.label || "";
|
const labelName = ev?.label?.name || ev?.label || "";
|
||||||
|
const u = new URL(cloneUrl);
|
||||||
|
|
||||||
const u = new URL(cloneUrl);
|
function sh(s){ return JSON.stringify(String(s)); }
|
||||||
const apiBase = u.origin;
|
process.stdout.write([
|
||||||
|
`OWNER=${sh(owner)}`,
|
||||||
function sh(s){ return JSON.stringify(String(s)); }
|
`REPO=${sh(repo)}`,
|
||||||
process.stdout.write([
|
`ISSUE_NUMBER=${sh(issueNumber)}`,
|
||||||
`OWNER=${sh(owner)}`,
|
`LABEL_NAME=${sh(labelName)}`,
|
||||||
`REPO=${sh(repo)}`,
|
`API_BASE=${sh(u.origin)}`
|
||||||
`ISSUE_NUMBER=${sh(issueNumber)}`,
|
].join("\n") + "\n");
|
||||||
`LABEL_NAME=${sh(labelName)}`,
|
NODE
|
||||||
`API_BASE=${sh(apiBase)}`
|
|
||||||
].join("\n") + "\n");
|
|
||||||
NODE
|
|
||||||
|
|
||||||
- name: Gate on label state/rejected
|
- name: Gate on label state/rejected
|
||||||
run: |
|
run: |
|
||||||
@@ -85,7 +83,7 @@ NODE
|
|||||||
test -n "${FORGE_TOKEN:-}" || { echo "❌ Missing secret FORGE_TOKEN"; exit 1; }
|
test -n "${FORGE_TOKEN:-}" || { echo "❌ Missing secret FORGE_TOKEN"; exit 1; }
|
||||||
|
|
||||||
MSG="❌ Ticket #${ISSUE_NUMBER} refusé (label state/rejected)."
|
MSG="❌ Ticket #${ISSUE_NUMBER} refusé (label state/rejected)."
|
||||||
PAYLOAD="$(node --input-type=module -e 'console.log(JSON.stringify({body: process.env.MSG}))' MSG="$MSG")"
|
PAYLOAD="$(node --input-type=module -e 'console.log(JSON.stringify({body: process.argv[1]||""}))' "$MSG")"
|
||||||
|
|
||||||
curl -fsS -X POST \
|
curl -fsS -X POST \
|
||||||
-H "Authorization: token $FORGE_TOKEN" \
|
-H "Authorization: token $FORGE_TOKEN" \
|
||||||
@@ -97,6 +95,4 @@ NODE
|
|||||||
-H "Authorization: token $FORGE_TOKEN" \
|
-H "Authorization: token $FORGE_TOKEN" \
|
||||||
-H "Content-Type: application/json" \
|
-H "Content-Type: application/json" \
|
||||||
"$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE_NUMBER" \
|
"$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE_NUMBER" \
|
||||||
--data-binary '{"state":"closed"}'
|
--data-binary '{"state":"closed"}'
|
||||||
|
|
||||||
echo "✅ closed #$ISSUE_NUMBER"
|
|
||||||
247
.gitea/workflows/deploy-staging-live.yml
Normal file
247
.gitea/workflows/deploy-staging-live.yml
Normal file
@@ -0,0 +1,247 @@
|
|||||||
|
name: Deploy staging+live (annotations)
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches: [main]
|
||||||
|
workflow_dispatch:
|
||||||
|
inputs:
|
||||||
|
force:
|
||||||
|
description: "Force deploy even if gate would skip (1=yes, 0=no)"
|
||||||
|
required: false
|
||||||
|
default: "0"
|
||||||
|
|
||||||
|
env:
|
||||||
|
NODE_OPTIONS: --dns-result-order=ipv4first
|
||||||
|
DOCKER_API_VERSION: "1.43"
|
||||||
|
COMPOSE_VERSION: "2.29.7"
|
||||||
|
|
||||||
|
defaults:
|
||||||
|
run:
|
||||||
|
shell: bash
|
||||||
|
|
||||||
|
concurrency:
|
||||||
|
group: deploy-staging-live-main
|
||||||
|
cancel-in-progress: false
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
deploy:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
container:
|
||||||
|
image: mcr.microsoft.com/devcontainers/javascript-node:22-bookworm
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- name: Tools sanity
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
git --version
|
||||||
|
node --version
|
||||||
|
npm --version
|
||||||
|
|
||||||
|
- name: Checkout (push or workflow_dispatch, no external actions)
|
||||||
|
env:
|
||||||
|
EVENT_JSON: /var/run/act/workflow/event.json
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
test -f "$EVENT_JSON" || { echo "❌ Missing $EVENT_JSON"; exit 1; }
|
||||||
|
|
||||||
|
node --input-type=module <<'NODE'
|
||||||
|
import fs from "node:fs";
|
||||||
|
const ev = JSON.parse(fs.readFileSync(process.env.EVENT_JSON, "utf8"));
|
||||||
|
const repoObj = ev?.repository || {};
|
||||||
|
const cloneUrl =
|
||||||
|
repoObj?.clone_url ||
|
||||||
|
(repoObj?.html_url ? (repoObj.html_url.replace(/\/$/,"") + ".git") : "");
|
||||||
|
if (!cloneUrl) throw new Error("No repository clone_url/html_url in event.json");
|
||||||
|
|
||||||
|
const defaultBranch = repoObj?.default_branch || "main";
|
||||||
|
const sha =
|
||||||
|
(process.env.GITHUB_SHA && String(process.env.GITHUB_SHA).trim()) ||
|
||||||
|
ev?.after ||
|
||||||
|
ev?.sha ||
|
||||||
|
ev?.head_commit?.id ||
|
||||||
|
ev?.pull_request?.head?.sha ||
|
||||||
|
"";
|
||||||
|
|
||||||
|
const shq = (s) => "'" + String(s).replace(/'/g, "'\\''") + "'";
|
||||||
|
fs.writeFileSync("/tmp/deploy.env", [
|
||||||
|
`REPO_URL=${shq(cloneUrl)}`,
|
||||||
|
`DEFAULT_BRANCH=${shq(defaultBranch)}`,
|
||||||
|
`SHA=${shq(sha)}`
|
||||||
|
].join("\n") + "\n");
|
||||||
|
NODE
|
||||||
|
|
||||||
|
source /tmp/deploy.env
|
||||||
|
echo "Repo URL: $REPO_URL"
|
||||||
|
echo "Default branch: $DEFAULT_BRANCH"
|
||||||
|
echo "SHA: ${SHA:-<empty>}"
|
||||||
|
|
||||||
|
rm -rf .git
|
||||||
|
git init -q
|
||||||
|
git remote add origin "$REPO_URL"
|
||||||
|
|
||||||
|
if [[ -n "${SHA:-}" ]]; then
|
||||||
|
git fetch --depth 1 origin "$SHA"
|
||||||
|
git -c advice.detachedHead=false checkout -q FETCH_HEAD
|
||||||
|
else
|
||||||
|
git fetch --depth 1 origin "$DEFAULT_BRANCH"
|
||||||
|
git -c advice.detachedHead=false checkout -q "origin/$DEFAULT_BRANCH"
|
||||||
|
SHA="$(git rev-parse HEAD)"
|
||||||
|
echo "SHA='$SHA'" >> /tmp/deploy.env
|
||||||
|
echo "Resolved SHA: $SHA"
|
||||||
|
fi
|
||||||
|
|
||||||
|
git log -1 --oneline
|
||||||
|
|
||||||
|
- name: Gate — auto deploy only on annotations/media changes
|
||||||
|
env:
|
||||||
|
INPUT_FORCE: ${{ inputs.force }}
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
source /tmp/deploy.env
|
||||||
|
|
||||||
|
FORCE="${INPUT_FORCE:-0}"
|
||||||
|
if [[ "$FORCE" == "1" ]]; then
|
||||||
|
echo "✅ force=1 -> bypass gate -> deploy allowed"
|
||||||
|
echo "GO=1" >> /tmp/deploy.env
|
||||||
|
exit 0
|
||||||
|
fi
|
||||||
|
|
||||||
|
CHANGED="$(git show --name-only --pretty="" "$SHA" | sed '/^$/d' || true)"
|
||||||
|
echo "== changed files =="
|
||||||
|
echo "$CHANGED" | sed -n '1,240p'
|
||||||
|
|
||||||
|
if echo "$CHANGED" | grep -qE '^(src/annotations/|public/media/)'; then
|
||||||
|
echo "GO=1" >> /tmp/deploy.env
|
||||||
|
echo "✅ deploy allowed (annotations/media change detected)"
|
||||||
|
else
|
||||||
|
echo "GO=0" >> /tmp/deploy.env
|
||||||
|
echo "ℹ️ no annotations/media change -> skip deploy"
|
||||||
|
fi
|
||||||
|
|
||||||
|
- name: Install docker client + docker compose plugin (v2)
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
source /tmp/deploy.env
|
||||||
|
[[ "${GO:-0}" == "1" ]] || { echo "ℹ️ skipped"; exit 0; }
|
||||||
|
|
||||||
|
apt-get -o Acquire::Retries=5 -o Acquire::ForceIPv4=true update
|
||||||
|
apt-get install -y --no-install-recommends ca-certificates curl docker.io
|
||||||
|
rm -rf /var/lib/apt/lists/*
|
||||||
|
|
||||||
|
mkdir -p /usr/local/lib/docker/cli-plugins
|
||||||
|
curl -fsSL \
|
||||||
|
"https://github.com/docker/compose/releases/download/v${COMPOSE_VERSION}/docker-compose-linux-x86_64" \
|
||||||
|
-o /usr/local/lib/docker/cli-plugins/docker-compose
|
||||||
|
chmod +x /usr/local/lib/docker/cli-plugins/docker-compose
|
||||||
|
|
||||||
|
docker version
|
||||||
|
docker compose version
|
||||||
|
|
||||||
|
# 🔥 KEY FIX: reuse existing compose project name if containers already exist
|
||||||
|
PROJ="$(docker inspect archicratie-web-blue --format '{{ index .Config.Labels "com.docker.compose.project" }}' 2>/dev/null || true)"
|
||||||
|
if [[ -z "${PROJ:-}" ]]; then
|
||||||
|
PROJ="$(docker inspect archicratie-web-green --format '{{ index .Config.Labels "com.docker.compose.project" }}' 2>/dev/null || true)"
|
||||||
|
fi
|
||||||
|
if [[ -z "${PROJ:-}" ]]; then PROJ="archicratie-web"; fi
|
||||||
|
echo "COMPOSE_PROJECT_NAME='$PROJ'" >> /tmp/deploy.env
|
||||||
|
echo "✅ Using COMPOSE_PROJECT_NAME=$PROJ"
|
||||||
|
|
||||||
|
- name: Assert required vars (PUBLIC_GITEA_*)
|
||||||
|
env:
|
||||||
|
PUBLIC_GITEA_BASE: ${{ vars.PUBLIC_GITEA_BASE }}
|
||||||
|
PUBLIC_GITEA_OWNER: ${{ vars.PUBLIC_GITEA_OWNER }}
|
||||||
|
PUBLIC_GITEA_REPO: ${{ vars.PUBLIC_GITEA_REPO }}
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
source /tmp/deploy.env
|
||||||
|
[[ "${GO:-0}" == "1" ]] || { echo "ℹ️ skipped"; exit 0; }
|
||||||
|
|
||||||
|
test -n "${PUBLIC_GITEA_BASE:-}" || { echo "❌ missing repo var PUBLIC_GITEA_BASE"; exit 2; }
|
||||||
|
test -n "${PUBLIC_GITEA_OWNER:-}" || { echo "❌ missing repo var PUBLIC_GITEA_OWNER"; exit 2; }
|
||||||
|
test -n "${PUBLIC_GITEA_REPO:-}" || { echo "❌ missing repo var PUBLIC_GITEA_REPO"; exit 2; }
|
||||||
|
echo "✅ vars OK"
|
||||||
|
|
||||||
|
- name: Assert deploy files exist
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
source /tmp/deploy.env
|
||||||
|
[[ "${GO:-0}" == "1" ]] || { echo "ℹ️ skipped"; exit 0; }
|
||||||
|
|
||||||
|
test -f docker-compose.yml
|
||||||
|
test -f Dockerfile
|
||||||
|
test -f nginx.conf
|
||||||
|
echo "✅ deploy files OK"
|
||||||
|
|
||||||
|
- name: Build + deploy staging (blue) then smoke
|
||||||
|
env:
|
||||||
|
PUBLIC_GITEA_BASE: ${{ vars.PUBLIC_GITEA_BASE }}
|
||||||
|
PUBLIC_GITEA_OWNER: ${{ vars.PUBLIC_GITEA_OWNER }}
|
||||||
|
PUBLIC_GITEA_REPO: ${{ vars.PUBLIC_GITEA_REPO }}
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
source /tmp/deploy.env
|
||||||
|
[[ "${GO:-0}" == "1" ]] || { echo "ℹ️ skipped"; exit 0; }
|
||||||
|
PROJ="${COMPOSE_PROJECT_NAME:-archicratie-web}"
|
||||||
|
|
||||||
|
TS="$(date -u +%Y%m%d-%H%M%S)"
|
||||||
|
echo "TS='$TS'" >> /tmp/deploy.env
|
||||||
|
docker image tag archicratie-web:blue "archicratie-web:blue.BAK.${TS}" || true
|
||||||
|
docker image tag archicratie-web:green "archicratie-web:green.BAK.${TS}" || true
|
||||||
|
|
||||||
|
# ✅ use cache (DO NOT --no-cache)
|
||||||
|
docker compose -p "$PROJ" -f docker-compose.yml build web_blue
|
||||||
|
|
||||||
|
# ✅ hard fix: remove existing container if name conflicts
|
||||||
|
docker rm -f archicratie-web-blue || true
|
||||||
|
|
||||||
|
docker compose -p "$PROJ" -f docker-compose.yml up -d --force-recreate --remove-orphans web_blue
|
||||||
|
|
||||||
|
curl -fsS "http://127.0.0.1:8081/para-index.json" >/dev/null
|
||||||
|
curl -fsS "http://127.0.0.1:8081/annotations-index.json" >/dev/null
|
||||||
|
curl -fsS "http://127.0.0.1:8081/pagefind/pagefind.js" >/dev/null
|
||||||
|
|
||||||
|
CANON="$(curl -fsS "http://127.0.0.1:8081/archicrat-ia/chapitre-1/" | grep -oE 'rel="canonical" href="[^"]+"' | head -n1 || true)"
|
||||||
|
echo "canonical(blue)=$CANON"
|
||||||
|
echo "$CANON" | grep -q 'https://staging\.archicratie\.trans-hands\.synology\.me/' || {
|
||||||
|
echo "❌ staging canonical mismatch"; exit 3;
|
||||||
|
}
|
||||||
|
|
||||||
|
echo "✅ staging OK"
|
||||||
|
|
||||||
|
- name: Build + deploy live (green) then smoke + rollback if needed
|
||||||
|
env:
|
||||||
|
PUBLIC_GITEA_BASE: ${{ vars.PUBLIC_GITEA_BASE }}
|
||||||
|
PUBLIC_GITEA_OWNER: ${{ vars.PUBLIC_GITEA_OWNER }}
|
||||||
|
PUBLIC_GITEA_REPO: ${{ vars.PUBLIC_GITEA_REPO }}
|
||||||
|
run: |
|
||||||
|
set -euo pipefail
|
||||||
|
source /tmp/deploy.env
|
||||||
|
[[ "${GO:-0}" == "1" ]] || { echo "ℹ️ skipped"; exit 0; }
|
||||||
|
PROJ="${COMPOSE_PROJECT_NAME:-archicratie-web}"
|
||||||
|
TS="${TS:-$(date -u +%Y%m%d-%H%M%S)}"
|
||||||
|
|
||||||
|
rollback() {
|
||||||
|
echo "⚠️ rollback green -> previous image tag (best effort)"
|
||||||
|
docker image tag "archicratie-web:green.BAK.${TS}" archicratie-web:green || true
|
||||||
|
docker rm -f archicratie-web-green || true
|
||||||
|
docker compose -p "$PROJ" -f docker-compose.yml up -d --force-recreate --remove-orphans web_green || true
|
||||||
|
}
|
||||||
|
|
||||||
|
set +e
|
||||||
|
docker compose -p "$PROJ" -f docker-compose.yml build web_green
|
||||||
|
|
||||||
|
docker rm -f archicratie-web-green || true
|
||||||
|
docker compose -p "$PROJ" -f docker-compose.yml up -d --force-recreate --remove-orphans web_green
|
||||||
|
|
||||||
|
curl -fsS "http://127.0.0.1:8082/para-index.json" >/dev/null
|
||||||
|
curl -fsS "http://127.0.0.1:8082/annotations-index.json" >/dev/null
|
||||||
|
curl -fsS "http://127.0.0.1:8082/pagefind/pagefind.js" >/dev/null
|
||||||
|
|
||||||
|
CANON="$(curl -fsS "http://127.0.0.1:8082/archicrat-ia/chapitre-1/" | grep -oE 'rel="canonical" href="[^"]+"' | head -n1 || true)"
|
||||||
|
echo "canonical(green)=$CANON"
|
||||||
|
echo "$CANON" | grep -q 'https://archicratie\.trans-hands\.synology\.me/' || {
|
||||||
|
echo "❌ live canonical mismatch"; rollback; exit 4;
|
||||||
|
}
|
||||||
|
|
||||||
|
echo "✅ live OK"
|
||||||
|
set -e
|
||||||
Binary file not shown.
|
After Width: | Height: | Size: 816 KiB |
Binary file not shown.
|
After Width: | Height: | Size: 822 KiB |
Binary file not shown.
|
After Width: | Height: | Size: 822 KiB |
688
scripts/apply-annotation-ticket.mjs
Normal file
688
scripts/apply-annotation-ticket.mjs
Normal file
@@ -0,0 +1,688 @@
|
|||||||
|
#!/usr/bin/env node
|
||||||
|
// scripts/apply-annotation-ticket.mjs
|
||||||
|
// Applique un ticket Gitea "type/media | type/reference | type/comment" vers src/annotations + public/media
|
||||||
|
// Robuste, idempotent, non destructif
|
||||||
|
//
|
||||||
|
// DRY RUN par défaut si --dry-run
|
||||||
|
// Options: --dry-run --no-download --verify --strict --commit --close
|
||||||
|
//
|
||||||
|
// Env requis:
|
||||||
|
// FORGE_API = base API Gitea (LAN) ex: http://192.168.1.20:3000
|
||||||
|
// FORGE_TOKEN = PAT Gitea (repo + issues)
|
||||||
|
//
|
||||||
|
// Env optionnel:
|
||||||
|
// GITEA_OWNER / GITEA_REPO (sinon auto-détecté via git remote)
|
||||||
|
// ANNO_DIR (défaut: src/annotations)
|
||||||
|
// PUBLIC_DIR (défaut: public)
|
||||||
|
// MEDIA_ROOT (défaut URL: /media)
|
||||||
|
//
|
||||||
|
// Ticket attendu (body):
|
||||||
|
// Chemin: /archicrat-ia/chapitre-4/
|
||||||
|
// Ancre: #p-0-xxxxxxxx
|
||||||
|
// Type: type/media | type/reference | type/comment
|
||||||
|
//
|
||||||
|
// Exit codes:
|
||||||
|
// 0 ok
|
||||||
|
// 1 erreur fatale
|
||||||
|
// 2 refus (strict/verify/usage)
|
||||||
|
|
||||||
|
import fs from "node:fs/promises";
|
||||||
|
import path from "node:path";
|
||||||
|
import process from "node:process";
|
||||||
|
import { spawnSync } from "node:child_process";
|
||||||
|
import YAML from "yaml";
|
||||||
|
|
||||||
|
/* ---------------------------------- usage --------------------------------- */
|
||||||
|
|
||||||
|
/**
 * Print the CLI help text and terminate the process.
 * @param {number} [exitCode=0] - process exit code (0 for --help, 2 for usage errors).
 */
function usage(exitCode = 0) {
  console.log(`
apply-annotation-ticket — applique un ticket SidePanel (media/ref/comment) vers src/annotations/

Usage:
  node scripts/apply-annotation-ticket.mjs <issue_number> [--dry-run] [--no-download] [--verify] [--strict] [--commit] [--close]

Flags:
  --dry-run     : n'écrit rien (affiche un aperçu)
  --no-download : n'essaie pas de télécharger les pièces jointes (media)
  --verify      : tente de vérifier que (page, ancre) existent (baseline/dist si dispo)
  --strict      : refuse si URL ref invalide (http/https) OU caption media vide
  --commit      : git add + git commit (le script commit dans la branche courante)
  --close       : ferme le ticket (nécessite --commit)

Env requis:
  FORGE_API   = base API Gitea (LAN) ex: http://192.168.1.20:3000
  FORGE_TOKEN = PAT Gitea (repo + issues)

Env optionnel:
  GITEA_OWNER / GITEA_REPO (sinon auto-détecté via git remote)
  ANNO_DIR (défaut: src/annotations)
  PUBLIC_DIR (défaut: public)
  MEDIA_ROOT (défaut URL: /media) -> écrit dans public/media/...

Exit codes:
  0 ok
  1 erreur fatale
  2 refus (strict/verify/close sans commit / incohérence)
`);
  process.exit(exitCode);
}
|
||||||
|
|
||||||
|
/* ---------------------------------- args ---------------------------------- */
|
||||||
|
|
||||||
|
// First positional argument = Gitea issue number; remaining args are flags.
const argv = process.argv.slice(2);
if (argv.length === 0 || argv.includes("--help") || argv.includes("-h")) usage(0);

const issueNum = Number(argv[0]);
if (!Number.isFinite(issueNum) || issueNum <= 0) {
  console.error("❌ Numéro de ticket invalide.");
  usage(2);
}

// Flag switches (all optional, order-independent).
const DRY_RUN = argv.includes("--dry-run");
const NO_DOWNLOAD = argv.includes("--no-download");
const DO_VERIFY = argv.includes("--verify");
const STRICT = argv.includes("--strict");
const DO_COMMIT = argv.includes("--commit");
const DO_CLOSE = argv.includes("--close");

// --close needs a commit SHA to reference in the closing comment.
if (DO_CLOSE && !DO_COMMIT) {
  console.error("❌ --close nécessite --commit.");
  process.exit(2);
}

// Global fetch() ships with Node 18+; bail out early on older runtimes.
if (typeof fetch !== "function") {
  console.error("❌ fetch() indisponible. Utilise Node 18+.");
  process.exit(1);
}
|
||||||
|
|
||||||
|
/* --------------------------------- config --------------------------------- */
|
||||||
|
|
||||||
|
const CWD = process.cwd();
// ANNO_DIR is documented (header + usage()) as the FULL annotations directory
// with default "src/annotations". The previous code joined the env value with a
// hard-coded "annotations" segment, so `ANNO_DIR=src/annotations` produced
// "src/annotations/annotations". Default behavior is unchanged.
const ANNO_DIR = path.join(CWD, process.env.ANNO_DIR || "src/annotations");
const PUBLIC_DIR = path.join(CWD, process.env.PUBLIC_DIR || "public");
// URL prefix (not a filesystem path) under which downloaded media are served;
// trailing slashes are stripped so later joins don't double up.
const MEDIA_URL_ROOT = String(process.env.MEDIA_ROOT || "/media").replace(/\/+$/, "");
|
||||||
|
|
||||||
|
/**
 * Read an environment variable, falling back when unset, and trim whitespace.
 * @param {string} name - environment variable name
 * @param {string} [fallback=""] - value used when the variable is null/undefined
 * @returns {string} trimmed value
 */
function getEnv(name, fallback = "") {
  const raw = process.env[name];
  const value = raw ?? fallback;
  return value.trim();
}
|
||||||
|
|
||||||
|
/**
 * Run a command with inherited stdio; throw on spawn error or non-zero exit.
 * @param {string} cmd - executable to run
 * @param {string[]} args - command arguments
 * @param {object} [opts={}] - extra spawnSync options (e.g. cwd)
 * @throws {Error} when the process cannot spawn or exits non-zero
 */
function run(cmd, args, opts = {}) {
  const result = spawnSync(cmd, args, { stdio: "inherit", ...opts });
  if (result.error) throw result.error;
  if (result.status !== 0) {
    throw new Error(`Command failed: ${cmd} ${args.join(" ")}`);
  }
}
|
||||||
|
|
||||||
|
/**
 * Run a command capturing output; return stdout, throw on failure.
 * On non-zero exit the error message includes captured stdout+stderr.
 * @param {string} cmd - executable to run
 * @param {string[]} args - command arguments
 * @param {object} [opts={}] - extra spawnSync options
 * @returns {string} captured stdout ("" when none)
 * @throws {Error} on spawn error or non-zero exit
 */
function runQuiet(cmd, args, opts = {}) {
  const result = spawnSync(cmd, args, { encoding: "utf8", stdio: "pipe", ...opts });
  if (result.error) throw result.error;
  if (result.status !== 0) {
    const combined = (result.stdout || "") + (result.stderr || "");
    throw new Error(`Command failed: ${cmd} ${args.join(" ")}\n${combined}`);
  }
  return result.stdout || "";
}
|
||||||
|
|
||||||
|
/**
 * True when a path is accessible on disk (file or directory), false otherwise.
 * @param {string} p - path to probe
 * @returns {Promise<boolean>}
 */
async function exists(p) {
  try {
    await fs.access(p);
    return true;
  } catch {
    return false;
  }
}
|
||||||
|
|
||||||
|
/**
 * Derive { owner, repo } from `git remote get-url origin`.
 * Handles both SSH (git@host:owner/repo.git) and HTTP(S) remote URL shapes.
 * @returns {{owner: string, repo: string} | null} null when the remote is
 *   missing (non-zero git exit) or the URL does not match owner/repo.
 */
function inferOwnerRepoFromGit() {
  const r = spawnSync("git", ["remote", "get-url", "origin"], { encoding: "utf-8" });
  if (r.status !== 0) return null;
  const u = (r.stdout || "").trim();
  // Matches the trailing "<owner>/<repo>" of the URL, optionally ".git".
  const m = u.match(/[:/](?<owner>[^/]+)\/(?<repo>[^/]+?)(?:\.git)?$/);
  if (!m?.groups) return null;
  return { owner: m.groups.owner, repo: m.groups.repo };
}
|
||||||
|
|
||||||
|
/**
 * True when the git index contains staged changes.
 * `git diff --cached --quiet` exits 1 exactly when differences exist.
 * @returns {boolean}
 */
function gitHasStagedChanges() {
  const r = spawnSync("git", ["diff", "--cached", "--quiet"]);
  return r.status === 1;
}
|
||||||
|
|
||||||
|
/* ------------------------------ gitea helpers ------------------------------ */
|
||||||
|
|
||||||
|
/**
 * Strip trailing slashes from the Gitea API base URL so path joins are clean.
 * @param {string} forgeApiBase - e.g. "http://192.168.1.20:3000/"
 * @returns {string} base without trailing "/"
 */
function apiBaseNorm(forgeApiBase) {
  let base = forgeApiBase;
  while (base.endsWith("/")) {
    base = base.slice(0, -1);
  }
  return base;
}
|
||||||
|
|
||||||
|
/**
 * GET a Gitea API URL with token auth and parse the JSON response.
 * @param {string} url - absolute API URL
 * @param {string} token - Gitea personal access token
 * @returns {Promise<any>} parsed JSON body
 * @throws {Error} on any non-2xx status; response text is appended best-effort
 */
async function giteaGET(url, token) {
  const res = await fetch(url, {
    headers: {
      Authorization: `token ${token}`,
      Accept: "application/json",
      "User-Agent": "archicratie-apply-annotation/1.0",
    },
  });
  if (!res.ok) {
    // Reading the error body may itself fail; fall back to "".
    const t = await res.text().catch(() => "");
    throw new Error(`HTTP ${res.status} GET ${url}\n${t}`);
  }
  return await res.json();
}
|
||||||
|
|
||||||
|
/**
 * Fetch a single issue from the Gitea API.
 * @param {{forgeApiBase: string, owner: string, repo: string, token: string, issueNum: number}} p
 * @returns {Promise<any>} the issue object
 * @throws {Error} propagated from giteaGET on HTTP errors
 */
async function fetchIssue({ forgeApiBase, owner, repo, token, issueNum }) {
  const base = apiBaseNorm(forgeApiBase);
  const issueUrl = `${base}/api/v1/repos/${owner}/${repo}/issues/${issueNum}`;
  return giteaGET(issueUrl, token);
}
|
||||||
|
|
||||||
|
/**
 * List attachments of an issue via the Gitea /issues/{index}/assets endpoint.
 * Best-effort: any HTTP error or non-array payload yields an empty list.
 * @param {{forgeApiBase: string, owner: string, repo: string, token: string, issueNum: number}} p
 * @returns {Promise<any[]>} asset objects (possibly empty)
 */
async function fetchIssueAssets({ forgeApiBase, owner, repo, token, issueNum }) {
  // ✅ Gitea: /issues/{index}/assets
  const assetsUrl = `${apiBaseNorm(forgeApiBase)}/api/v1/repos/${owner}/${repo}/issues/${issueNum}/assets`;
  try {
    const payload = await giteaGET(assetsUrl, token);
    if (Array.isArray(payload)) return payload;
    return [];
  } catch {
    // Attachments are optional; swallow errors and return nothing.
    return [];
  }
}
|
||||||
|
|
||||||
|
/**
 * POST a comment onto a Gitea issue.
 * @param {{forgeApiBase: string, owner: string, repo: string, token: string,
 *          issueNum: number, comment: string}} p
 * @returns {Promise<void>}
 * @throws {Error} on any non-2xx status
 */
async function postIssueComment({ forgeApiBase, owner, repo, token, issueNum, comment }) {
  const url = `${apiBaseNorm(forgeApiBase)}/api/v1/repos/${owner}/${repo}/issues/${issueNum}/comments`;
  const res = await fetch(url, {
    method: "POST",
    headers: {
      Authorization: `token ${token}`,
      Accept: "application/json",
      "Content-Type": "application/json",
      "User-Agent": "archicratie-apply-annotation/1.0",
    },
    body: JSON.stringify({ body: comment }),
  });
  if (!res.ok) {
    const t = await res.text().catch(() => "");
    throw new Error(`HTTP ${res.status} POST comment ${url}\n${t}`);
  }
}
|
||||||
|
|
||||||
|
/**
 * Close a Gitea issue, optionally posting a comment first.
 * @param {{forgeApiBase: string, owner: string, repo: string, token: string,
 *          issueNum: number, comment?: string}} p - comment is posted before closing when truthy
 * @returns {Promise<void>}
 * @throws {Error} when the comment POST or the close PATCH returns non-2xx
 */
async function closeIssue({ forgeApiBase, owner, repo, token, issueNum, comment }) {
  if (comment) await postIssueComment({ forgeApiBase, owner, repo, token, issueNum, comment });

  const url = `${apiBaseNorm(forgeApiBase)}/api/v1/repos/${owner}/${repo}/issues/${issueNum}`;
  const res = await fetch(url, {
    method: "PATCH",
    headers: {
      Authorization: `token ${token}`,
      Accept: "application/json",
      "Content-Type": "application/json",
      "User-Agent": "archicratie-apply-annotation/1.0",
    },
    body: JSON.stringify({ state: "closed" }),
  });
  if (!res.ok) {
    const t = await res.text().catch(() => "");
    throw new Error(`HTTP ${res.status} closing issue: ${url}\n${t}`);
  }
}
|
||||||
|
|
||||||
|
/* ------------------------------ parsing helpers ---------------------------- */
|
||||||
|
|
||||||
|
/**
 * Escape regex metacharacters so a string can be embedded literally in a RegExp.
 * @param {*} s - coerced to string
 * @returns {string} escaped string
 */
function escapeRegExp(s) {
  const SPECIALS = ".*+?^${}()|[]\\";
  let out = "";
  for (const ch of String(s)) {
    out += SPECIALS.includes(ch) ? "\\" + ch : ch;
  }
  return out;
}
|
||||||
|
|
||||||
|
/**
 * Extract the value of a "Key: value" line from a ticket body
 * (case-insensitive key, multiline search, value trimmed).
 * @param {string} body - issue body text
 * @param {string} key - field name to look for
 * @returns {string} trimmed value, or "" when the line is absent
 */
function pickLine(body, key) {
  const pattern = new RegExp(`^\\s*${escapeRegExp(key)}\\s*:\\s*([^\\n\\r]+)`, "mi");
  const hit = pattern.exec(String(body || ""));
  if (!hit) return "";
  return hit[1].trim();
}
|
||||||
|
|
||||||
|
/**
 * Extract the text that follows the earliest of several section markers,
 * stopping at the next section boundary (heading, "---", known labels).
 * Matching is case-insensitive; CRLF is normalized to LF first.
 * @param {string} body - issue body text
 * @param {string[]} markers - candidate section headers (e.g. "Commentaire:")
 * @returns {string} trimmed section text, or "" when no marker is present
 */
function pickSection(body, markers) {
  const text = String(body || "").replace(/\r\n/g, "\n");
  const lower = text.toLowerCase();

  // Pick the marker occurring earliest in the text (first marker wins ties).
  let best = null;
  for (const marker of markers) {
    const pos = lower.indexOf(marker.toLowerCase());
    if (pos < 0) continue;
    if (best === null || pos < best.pos) best = { marker, pos };
  }
  if (best === null) return "";

  const tail = text.slice(best.pos + best.marker.length);

  // Section ends at the closest of these boundaries (case-insensitive).
  const stops = ["\n## ", "\n---", "\nJustification", "\nProposition", "\nSources"];
  const tailLower = tail.toLowerCase();
  let end = tail.length;
  for (const stop of stops) {
    const j = tailLower.indexOf(stop.toLowerCase());
    if (j >= 0 && j < end) end = j;
  }
  return tail.slice(0, end).trim();
}
|
||||||
|
|
||||||
|
/**
 * Normalize a page path: trim, ensure exactly one leading and trailing "/",
 * and collapse repeated slashes. Empty input stays "".
 * @param {string} chemin - raw "Chemin:" value from the ticket
 * @returns {string} normalized path like "/archicrat-ia/chapitre-4/"
 */
function normalizeChemin(chemin) {
  const trimmed = String(chemin || "").trim();
  if (trimmed === "") return "";
  const withLead = trimmed.startsWith("/") ? trimmed : `/${trimmed}`;
  const withTail = withLead.endsWith("/") ? withLead : `${withLead}/`;
  return withTail.replace(/\/{2,}/g, "/");
}
|
||||||
|
|
||||||
|
/**
 * Derive the page key (no leading/trailing slashes) from a chemin.
 * @param {string} chemin - raw path from the ticket
 * @returns {string} e.g. "archicrat-ia/chapitre-4"
 */
function normalizePageKeyFromChemin(chemin) {
  const normalized = normalizeChemin(chemin);
  return normalized.replace(/^\/+|\/+$/g, "");
}
|
||||||
|
|
||||||
|
/**
 * Normalize an anchor value: trim and drop a single leading "#".
 * @param {string} s - raw "Ancre:" value (e.g. "#p-0-xxxxxxxx")
 * @returns {string} anchor id without the hash prefix
 */
function normalizeAnchorId(s) {
  const trimmed = String(s || "").trim();
  return trimmed.startsWith("#") ? trimmed.slice(1) : trimmed;
}
|
||||||
|
|
||||||
|
/**
 * Local assertion helper: throw an Error tagged with __exitCode when the
 * condition is falsy. The top-level catch in main() maps __exitCode to
 * process.exit (1 = fatal, 2 = refusal).
 * @param {*} cond - condition expected truthy
 * @param {string} msg - error message
 * @param {number} [code=1] - exit code to attach to the thrown error
 * @throws {Error} with a __exitCode property when cond is falsy
 */
function assert(cond, msg, code = 1) {
  if (!cond) {
    const e = new Error(msg);
    e.__exitCode = code;
    throw e;
  }
}
|
||||||
|
|
||||||
|
/**
 * True for non-null, non-array objects (the shape of a YAML mapping).
 * @param {*} x - value to test
 * @returns {boolean}
 */
function isPlainObject(x) {
  if (!x) return false;
  if (Array.isArray(x)) return false;
  return typeof x === "object";
}
|
||||||
|
|
||||||
|
/* ----------------------------- verify helpers ------------------------------ */
|
||||||
|
|
||||||
|
/**
 * Extract the numeric paragraph index from an anchor id like "p-12-abc".
 * @param {string} id - anchor id
 * @returns {number} the index, or NaN when the id does not match "p-<n>-"
 */
function paraIndexFromId(id) {
  const match = /^p-(\d+)-/i.exec(String(id));
  if (!match) return Number.NaN;
  return Number(match[1]);
}
|
||||||
|
|
||||||
|
/**
 * Try to confirm that an anchor id exists on a page, using whichever local
 * source of truth is available.
 * Tri-state result:
 *   true  -> anchor found (dist index byId, or baseline candidates contain it)
 *   false -> baseline had candidates for the page but the anchor was absent
 *   null  -> no usable source of truth (neither file present / no candidates)
 * Note: a dist index that lacks the id does NOT return false — it falls
 * through to the baseline check.
 * @param {string} pageKey - page key like "archicrat-ia/chapitre-4"
 * @param {string} anchorId - anchor id like "p-0-xxxxxxxx"
 * @returns {Promise<boolean|null>}
 */
async function tryVerifyAnchor(pageKey, anchorId) {
  // 1) dist/para-index.json (present only when a build has already run)
  const distIdx = path.join(CWD, "dist", "para-index.json");
  if (await exists(distIdx)) {
    const raw = await fs.readFile(distIdx, "utf8");
    const idx = JSON.parse(raw);
    const byId = idx?.byId;
    if (byId && typeof byId === "object" && byId[anchorId] != null) return true;
  }

  // 2) tests/anchors-baseline.json (if available)
  const base = path.join(CWD, "tests", "anchors-baseline.json");
  if (await exists(base)) {
    const raw = await fs.readFile(base, "utf8");
    const j = JSON.parse(raw);

    // Tolerant parsing: collect any array of ids associated with the page.
    const candidates = [];

    // shape 1: j.pages = { "<path>": [ids...] }
    if (j?.pages && typeof j.pages === "object") {
      for (const [k, v] of Object.entries(j.pages)) {
        if (!Array.isArray(v)) continue;
        // Loose match: pageKey contained anywhere in the path key.
        if (String(k).includes(pageKey)) candidates.push(...v);
      }
    }

    // shape 2: j.entries = [{ page, ids }]
    if (Array.isArray(j?.entries)) {
      for (const it of j.entries) {
        const p = String(it?.page || "");
        const ids = it?.ids;
        if (Array.isArray(ids) && p.includes(pageKey)) candidates.push(...ids);
      }
    }

    if (candidates.length) {
      return candidates.some((x) => String(x) === anchorId);
    }
  }

  // No source of truth available.
  return null;
}
|
||||||
|
|
||||||
|
/* ----------------------------- annotations I/O ----------------------------- */
|
||||||
|
|
||||||
|
/**
 * Load (or initialize) the YAML annotations document for a page.
 * Missing file -> fresh { schema: 1, page, paras: {} }.
 * Existing file must be a schema-1 mapping with an object "paras" key; its
 * "page" (slash-trimmed) must match pageKey, or it is filled in when absent.
 * @param {string} fileAbs - absolute path to the .yml file
 * @param {string} pageKey - expected page key (no leading/trailing slashes)
 * @returns {Promise<object>} the parsed (or fresh) document
 * @throws {Error} on YAML parse failure or schema/page mismatch (via assert)
 */
async function loadAnnoDoc(fileAbs, pageKey) {
  if (!(await exists(fileAbs))) {
    return { schema: 1, page: pageKey, paras: {} };
  }

  const raw = await fs.readFile(fileAbs, "utf8");
  let doc;
  try {
    doc = YAML.parse(raw);
  } catch (e) {
    throw new Error(`${path.relative(CWD, fileAbs)}: parse failed: ${String(e?.message ?? e)}`);
  }

  assert(isPlainObject(doc), `${path.relative(CWD, fileAbs)}: doc must be an object`);
  assert(doc.schema === 1, `${path.relative(CWD, fileAbs)}: schema must be 1`);
  assert(isPlainObject(doc.paras), `${path.relative(CWD, fileAbs)}: missing object key "paras"`);

  if (doc.page != null) {
    // Compare with slashes stripped so "/x/" and "x" are equivalent.
    const got = String(doc.page).replace(/^\/+/, "").replace(/\/+$/, "");
    assert(got === pageKey, `${path.relative(CWD, fileAbs)}: page mismatch (page="${doc.page}" vs path="${pageKey}")`);
  } else {
    doc.page = pageKey;
  }

  return doc;
}
|
||||||
|
|
||||||
|
/**
 * Return a copy of the paras mapping with keys ordered by numeric paragraph
 * index (from "p-<n>-..."), falling back to locale string order.
 * @param {object} paras - anchor-id -> entry mapping
 * @returns {object} new object with sorted key insertion order
 */
function sortParasObject(paras) {
  const compareKeys = (a, b) => {
    const na = paraIndexFromId(a);
    const nb = paraIndexFromId(b);
    if (Number.isFinite(na) && Number.isFinite(nb) && na !== nb) return na - nb;
    return String(a).localeCompare(String(b));
  };
  const sorted = {};
  for (const key of Object.keys(paras || {}).sort(compareKeys)) {
    sorted[key] = paras[key];
  }
  return sorted;
}
|
||||||
|
|
||||||
|
/**
 * Persist an annotations document as YAML, creating parent directories and
 * sorting paras by paragraph index first (mutates doc.paras in place).
 * @param {string} fileAbs - absolute destination path
 * @param {object} doc - annotations document (schema 1)
 * @returns {Promise<void>}
 */
async function saveAnnoDocYaml(fileAbs, doc) {
  await fs.mkdir(path.dirname(fileAbs), { recursive: true });
  doc.paras = sortParasObject(doc.paras);
  const out = YAML.stringify(doc);
  await fs.writeFile(fileAbs, out, "utf8");
}
|
||||||
|
|
||||||
|
/* ------------------------------ apply per type ----------------------------- */
|
||||||
|
|
||||||
|
/**
 * Get the entry object for an anchor id, creating an empty one when it is
 * missing or not a plain object (mutates doc.paras).
 * @param {object} doc - annotations document
 * @param {string} paraId - anchor id
 * @returns {object} the (possibly fresh) entry
 */
function ensureEntry(doc, paraId) {
  const current = doc.paras[paraId];
  if (!current || !isPlainObject(current)) {
    doc.paras[paraId] = {};
  }
  return doc.paras[paraId];
}
|
||||||
|
|
||||||
|
/**
 * Push an item onto an array unless an element with the same key already
 * exists (mutates arr).
 * @param {Array} arr - target array
 * @param {*} item - candidate element
 * @param {Function} keyFn - maps an element to its dedup key
 * @returns {boolean} true when the item was appended
 */
function uniqPush(arr, item, keyFn) {
  const key = keyFn(item);
  for (const existing of arr) {
    if (keyFn(existing) === key) return false;
  }
  arr.push(item);
  return true;
}
|
||||||
|
|
||||||
|
/**
 * Sort an array of entries in place by their "ts" timestamp (ascending);
 * unparsable timestamps count as epoch 0. Ties fall back to a deterministic
 * JSON comparison. Non-arrays are ignored.
 * @param {Array|*} arr - entries carrying an optional ts ISO string
 */
function stableSortByTs(arr) {
  if (!Array.isArray(arr)) return;
  const tsOf = (x) => Date.parse(x?.ts || "") || 0;
  arr.sort((a, b) => {
    const diff = tsOf(a) - tsOf(b);
    if (diff !== 0) return diff;
    return JSON.stringify(a).localeCompare(JSON.stringify(b));
  });
}
|
||||||
|
|
||||||
|
/**
 * Parse the "Référence" section of a ticket body into its fields.
 * Looks first for the "(à compléter)" variant of the header, then the plain
 * one; within the block, reads "- Key: value" bullet lines (case-insensitive).
 * @param {string} body - issue body text
 * @returns {{url: string, label: string, kind: string, citation: string, rawBlock: string}}
 *   all fields "" when absent; citation falls back to Passage/Extrait bullets
 */
function parseReferenceBlock(body) {
  const block =
    pickSection(body, ["Référence (à compléter):", "Reference (à compléter):"]) ||
    pickSection(body, ["Référence:", "Reference:"]);

  const lines = String(block || "").split(/\r?\n/).map((l) => l.trim());
  // Find the first "- Key: value" (or "* Key: value") bullet for a field.
  const get = (k) => {
    const re = new RegExp(`^[-*]\\s*${escapeRegExp(k)}\\s*:\\s*(.*)$`, "i");
    const m = lines.map((l) => l.match(re)).find(Boolean);
    return (m?.[1] ?? "").trim();
  };

  return {
    url: get("URL") || "",
    label: get("Label") || "",
    kind: get("Kind") || "",
    citation: get("Citation") || get("Passage") || get("Extrait") || "",
    rawBlock: block || "",
  };
}
|
||||||
|
|
||||||
|
/**
 * Classify a media file by its extension (case-insensitive).
 * @param {string} name - filename
 * @returns {"image"|"video"|"audio"|"link"} "link" when unrecognized
 */
function inferMediaTypeFromFilename(name) {
  const lower = String(name || "").toLowerCase();
  const table = [
    ["image", /\.(png|jpe?g|webp|gif|svg)$/],
    ["video", /\.(mp4|webm|mov|m4v)$/],
    ["audio", /\.(mp3|wav|ogg|m4a)$/],
  ];
  for (const [kind, re] of table) {
    if (re.test(lower)) return kind;
  }
  return "link";
}
|
||||||
|
|
||||||
|
/**
 * Make an attachment name safe as a filesystem/URL path segment:
 * path separators and non-word characters become "_", runs of "_" collapse,
 * and the result is capped at 180 characters. Empty input becomes "file".
 * @param {string} name - raw attachment name
 * @returns {string} sanitized name
 */
function sanitizeFilename(name) {
  let out = String(name || "file");
  out = out.replace(/[\/\\]/g, "_");     // path separators
  out = out.replace(/[^\w.\-]+/g, "_");  // everything but [A-Za-z0-9_.-]
  out = out.replace(/_+/g, "_");         // collapse runs
  return out.slice(0, 180);
}
|
||||||
|
|
||||||
|
/**
 * True when the value parses as an absolute http:// or https:// URL.
 * @param {*} u - candidate URL (coerced to string)
 * @returns {boolean}
 */
function isHttpUrl(u) {
  let parsed;
  try {
    parsed = new URL(String(u));
  } catch {
    return false;
  }
  return ["http:", "https:"].includes(parsed.protocol);
}
|
||||||
|
|
||||||
|
/**
 * Download a URL to a file, creating parent directories.
 * @param {string} url - download URL (redirects followed)
 * @param {string} token - Gitea token sent as Authorization header
 * @param {string} destAbs - absolute destination path
 * @returns {Promise<number>} number of bytes written
 * @throws {Error} on any non-2xx status
 */
async function downloadToFile(url, token, destAbs) {
  const res = await fetch(url, {
    headers: {
      // Most /attachments URLs are public, but keep the token just in case.
      Authorization: `token ${token}`,
      "User-Agent": "archicratie-apply-annotation/1.0",
    },
    redirect: "follow",
  });
  if (!res.ok) {
    const t = await res.text().catch(() => "");
    throw new Error(`download failed HTTP ${res.status}: ${url}\n${t}`);
  }
  // Whole body is buffered in memory before writing.
  const buf = Buffer.from(await res.arrayBuffer());
  await fs.mkdir(path.dirname(destAbs), { recursive: true });
  await fs.writeFile(destAbs, buf);
  return buf.length;
}
|
||||||
|
|
||||||
|
/* ----------------------------------- main ---------------------------------- */
|
||||||
|
|
||||||
|
/**
 * Entry point: fetch the ticket, parse its Type/Chemin/Ancre header lines,
 * apply the annotation (comment / reference / media) to the page's YAML file,
 * then optionally commit and close the ticket.
 * Exit codes are signalled via assert()/__exitCode and handled by the
 * top-level .catch(): 1 = fatal, 2 = refusal.
 */
async function main() {
  // --- required environment ---
  const token = getEnv("FORGE_TOKEN");
  assert(token, "❌ FORGE_TOKEN manquant.", 2);

  const forgeApiBase = getEnv("FORGE_API") || getEnv("FORGE_BASE");
  assert(forgeApiBase, "❌ FORGE_API (ou FORGE_BASE) manquant.", 2);

  // owner/repo: explicit env wins, otherwise derived from `git remote`.
  const inferred = inferOwnerRepoFromGit() || {};
  const owner = getEnv("GITEA_OWNER", inferred.owner || "");
  const repo = getEnv("GITEA_REPO", inferred.repo || "");
  assert(owner && repo, "❌ Impossible de déterminer owner/repo. Fix: export GITEA_OWNER=... GITEA_REPO=...", 2);

  console.log(`🔎 Fetch ticket #${issueNum} from ${owner}/${repo} …`);
  const issue = await fetchIssue({ forgeApiBase, owner, repo, token, issueNum });

  // Pull requests share the issue index space; refuse them.
  if (issue?.pull_request) {
    console.error(`❌ #${issueNum} est une Pull Request, pas un ticket annotations.`);
    process.exit(2);
  }

  const body = String(issue.body || "").replace(/\r\n/g, "\n");
  const title = String(issue.title || "");

  // --- ticket header fields ---
  const type = pickLine(body, "Type").toLowerCase();
  const chemin = normalizeChemin(pickLine(body, "Chemin"));
  const ancre = normalizeAnchorId(pickLine(body, "Ancre"));

  assert(chemin, "Ticket: Chemin manquant.", 2);
  assert(ancre && /^p-\d+-/i.test(ancre), `Ticket: Ancre invalide ("${ancre}")`, 2);
  assert(type, "Ticket: Type manquant.", 2);

  const pageKey = normalizePageKeyFromChemin(chemin);
  assert(pageKey, "Ticket: impossible de dériver pageKey.", 2);

  // --- optional anchor verification (dist/baseline) ---
  if (DO_VERIFY) {
    const ok = await tryVerifyAnchor(pageKey, ancre);
    if (ok === false) {
      throw Object.assign(new Error(`Ticket verify: ancre introuvable pour page "${pageKey}" => ${ancre}`), { __exitCode: 2 });
    }
    if (ok === null) {
      // No source of truth available: refuse only under --strict.
      if (STRICT) throw Object.assign(new Error(`Ticket verify (strict): impossible de vérifier (pas de baseline/dist)`), { __exitCode: 2 });
      console.warn("⚠️ verify: impossible de vérifier (pas de baseline/dist) — on continue.");
    }
  }

  const annoFileAbs = path.join(ANNO_DIR, `${pageKey}.yml`);
  const annoFileRel = path.relative(CWD, annoFileAbs).replace(/\\/g, "/");

  console.log("✅ Parsed:", { type, chemin, ancre: `#${ancre}`, pageKey, annoFile: annoFileRel });

  const doc = await loadAnnoDoc(annoFileAbs, pageKey);
  const entry = ensureEntry(doc, ancre);

  const touchedFiles = [];
  const notes = [];

  let changed = false;
  const nowIso = new Date().toISOString();

  // --- type/comment: append a deduped editorial comment ---
  if (type === "type/comment") {
    const comment = pickSection(body, ["Commentaire:", "Comment:", "Commentaires:"]) || "";
    const text = comment.trim();
    assert(text.length >= 3, "Ticket comment: bloc 'Commentaire:' introuvable ou trop court.", 2);

    if (!Array.isArray(entry.comments_editorial)) entry.comments_editorial = [];
    const item = { text, status: "new", ts: nowIso, fromIssue: issueNum };

    // Dedup key: the trimmed comment text.
    const added = uniqPush(entry.comments_editorial, item, (x) => `${(x?.text || "").trim()}`);
    if (added) { changed = true; notes.push(`+ comment added (len=${text.length})`); }
    else notes.push(`~ comment already present (dedup)`);

    stableSortByTs(entry.comments_editorial);
  }

  // --- type/reference: append a deduped reference entry ---
  else if (type === "type/reference") {
    const ref = parseReferenceBlock(body);

    assert(ref.url || ref.label, "Ticket reference: renseigne au moins - URL: ou - Label: dans le ticket.", 2);

    if (STRICT && ref.url && !isHttpUrl(ref.url)) {
      throw Object.assign(new Error(`Ticket reference (strict): URL invalide (http/https requis): "${ref.url}"`), { __exitCode: 2 });
    }

    if (!Array.isArray(entry.refs)) entry.refs = [];
    const item = {
      url: ref.url || "",
      label: ref.label || (ref.url ? ref.url : "Référence"),
      kind: ref.kind || "",
      ts: nowIso,
      fromIssue: issueNum,
    };
    if (ref.citation) item.citation = ref.citation;

    // Dedup key: url + label + kind + citation.
    const added = uniqPush(entry.refs, item, (x) => `${x?.url || ""}||${x?.label || ""}||${x?.kind || ""}||${x?.citation || ""}`);
    if (added) { changed = true; notes.push(`+ reference added (${item.url ? "url" : "label"})`); }
    else notes.push(`~ reference already present (dedup)`);

    stableSortByTs(entry.refs);
  }

  // --- type/media: download attachments and record media entries ---
  else if (type === "type/media") {
    if (!Array.isArray(entry.media)) entry.media = [];

    const atts = NO_DOWNLOAD ? [] : await fetchIssueAssets({ forgeApiBase, owner, repo, token, issueNum });

    if (!atts.length) {
      notes.push("! no assets found (nothing to download).");
    }

    for (const a of atts) {
      const name = sanitizeFilename(a?.name || `asset-${a?.id || "x"}`);
      const dl = a?.browser_download_url || a?.download_url || "";
      if (!dl) { notes.push(`! asset missing download url: ${name}`); continue; }

      // caption = ticket title (fallback ".")
      const caption = (title || "").trim() || ".";
      if (STRICT && !caption.trim()) {
        throw Object.assign(new Error("Ticket media (strict): caption vide."), { __exitCode: 2 });
      }

      const mediaDirAbs = path.join(PUBLIC_DIR, "media", pageKey, ancre);
      const destAbs = path.join(mediaDirAbs, name);
      const urlPath = `${MEDIA_URL_ROOT}/${pageKey}/${ancre}/${name}`.replace(/\/{2,}/g, "/");

      // Idempotent: never overwrite an existing file.
      if (await exists(destAbs)) {
        notes.push(`~ media already exists: ${urlPath}`);
      } else if (!DRY_RUN) {
        const bytes = await downloadToFile(dl, token, destAbs);
        notes.push(`+ downloaded ${name} (${bytes} bytes) -> ${urlPath}`);
        touchedFiles.push(path.relative(CWD, destAbs).replace(/\\/g, "/"));
      } else {
        notes.push(`(dry) would download ${name} -> ${urlPath}`);
      }

      const item = {
        type: inferMediaTypeFromFilename(name),
        src: urlPath,
        caption,
        credit: "",
        ts: nowIso,
        fromIssue: issueNum,
      };

      // Dedup key: the media src path.
      const added = uniqPush(entry.media, item, (x) => String(x?.src || ""));
      if (added) changed = true;
    }

    stableSortByTs(entry.media);
  }

  else {
    throw Object.assign(new Error(`Type non supporté: "${type}"`), { __exitCode: 2 });
  }

  if (!changed) {
    console.log("ℹ️ No changes to apply.");
    for (const n of notes) console.log("  ", n);
    return;
  }

  // --- dry-run preview: show the resulting entry without writing ---
  if (DRY_RUN) {
    console.log("\n--- DRY RUN (no write) ---");
    console.log(`Would update: ${annoFileRel}`);
    for (const n of notes) console.log("  ", n);
    console.log("\nExcerpt (resulting entry):");
    console.log(YAML.stringify({ [ancre]: doc.paras[ancre] }).trimEnd());
    console.log("\n✅ Dry-run terminé.");
    return;
  }

  await saveAnnoDocYaml(annoFileAbs, doc);
  // The YAML file leads the commit file list.
  touchedFiles.unshift(annoFileRel);

  console.log(`✅ Updated: ${annoFileRel}`);
  for (const n of notes) console.log("  ", n);

  // --- optional commit (+ optional ticket close) ---
  if (DO_COMMIT) {
    run("git", ["add", ...touchedFiles], { cwd: CWD });

    if (!gitHasStagedChanges()) {
      console.log("ℹ️ Nothing to commit (aucun changement staged).");
      return;
    }

    const msg = `anno: apply ticket #${issueNum} (${pageKey}#${ancre} ${type})`;
    run("git", ["commit", "-m", msg], { cwd: CWD });

    const sha = runQuiet("git", ["rev-parse", "--short", "HEAD"], { cwd: CWD }).trim();
    console.log(`✅ Committed: ${msg} (${sha})`);

    if (DO_CLOSE) {
      const comment = `✅ Appliqué par apply-annotation-ticket.\nCommit: ${sha}`;
      await closeIssue({ forgeApiBase, owner, repo, token, issueNum, comment });
      console.log(`✅ Ticket #${issueNum} fermé.`);
    }
  } else {
    // Print the manual follow-up commands when not committing.
    console.log("\nNext (manuel) :");
    console.log(`  git diff -- ${touchedFiles[0]}`);
    console.log(`  git add ${touchedFiles.join(" ")}`);
    console.log(`  git commit -m "anno: apply ticket #${issueNum} (${pageKey}#${ancre} ${type})"`);
  }
}
|
||||||
|
|
||||||
|
// Top-level runner: map an error's __exitCode (set by assert()/refusals) to
// the process exit status; anything untagged is a fatal error (1).
main().catch((e) => {
  const code = e?.__exitCode || 1;
  console.error("💥", e?.message || e);
  process.exit(code);
});
|
||||||
30
src/annotations/archicrat-ia/chapitre-4.yml
Normal file
30
src/annotations/archicrat-ia/chapitre-4.yml
Normal file
@@ -0,0 +1,30 @@
|
|||||||
|
# Annotation store for page "archicrat-ia/chapitre-4" (schema 1).
# Maintained by scripts/apply-annotation-ticket.mjs:
#   paras: <anchor-id> -> { media / refs / comments_editorial }
# Entries are appended idempotently from Gitea tickets (fromIssue = ticket #).
schema: 1
page: archicrat-ia/chapitre-4
paras:
  p-2-31b12529:
    media:
      - type: image
        src: /media/archicrat-ia/chapitre-4/p-2-31b12529/Capture_d_e_cran_2026-02-16_a_13.05.58.png
        caption: "[Media] p-2-31b12529 — Chapitre 4 — Histoire archicratique des révolutions industrielles"
        credit: ""
        ts: 2026-02-25T18:58:32.359Z
        fromIssue: 115
  p-7-1da4a458:
    media:
      - type: image
        src: /media/archicrat-ia/chapitre-4/p-7-1da4a458/Capture_d_e_cran_2026-02-16_a_13.05.58.png
        caption: "[Media] p-7-1da4a458 — Chapitre 4 — Histoire archicratique des révolutions industrielles"
        credit: ""
        ts: 2026-02-25T19:11:32.634Z
        fromIssue: 121
  p-11-67c14c09:
    media:
      - type: image
        src: /media/archicrat-ia/chapitre-4/p-11-67c14c09/Capture_d_e_cran_2026-02-16_a_13.07.35.png
        caption: "[Media] p-11-67c14c09 — Chapitre 4 — Histoire archicratique des révolutions industrielles"
        credit: ""
        ts: 2026-02-26T13:17:41.286Z
        fromIssue: 129
|
||||||
Reference in New Issue
Block a user