Compare commits

...

314 Commits

Author SHA1 Message Date
8605b7198f Merge pull request 'reset(archicrat-ia): resynchronise les sources DOCX/MDX canoniques' (#322) from chore/archicrat-ia-canonical-docx-mdx-resync-20260423 into main
All checks were successful
Proposer Apply (Queue) / apply-proposer (push) Successful in 19s
CI / build-and-anchors (push) Successful in 36s
SMOKE / smoke (push) Successful in 8s
Deploy staging+live (annotations) / deploy (push) Successful in 8m5s
Reviewed-on: #322
2026-04-23 11:53:07 +00:00
d41aed040f reset(archicrat-ia): resynchronise les sources DOCX/MDX canoniques
All checks were successful
SMOKE / smoke (push) Successful in 3s
CI / build-and-anchors (push) Successful in 33s
CI / build-and-anchors (pull_request) Successful in 37s
2026-04-23 13:51:27 +02:00
bf01a83268 Merge pull request 'Synchronise les contenus glossaire et ajoute les scripts de conversion DOCX/MDX' (#321) from chore/reinject-docx-mdx-and-glossaire-sync-20260423 into main
All checks were successful
CI / build-and-anchors (push) Successful in 35s
Proposer Apply (Queue) / apply-proposer (push) Successful in 28s
SMOKE / smoke (push) Successful in 7s
Deploy staging+live (annotations) / deploy (push) Successful in 9m56s
Reviewed-on: #321
2026-04-23 10:25:49 +00:00
5b427d5602 Synchronise les contenus glossaire et ajoute les scripts de conversion DOCX/MDX
All checks were successful
SMOKE / smoke (push) Successful in 4s
CI / build-and-anchors (push) Successful in 37s
CI / build-and-anchors (pull_request) Successful in 34s
2026-04-23 12:04:31 +02:00
fa46971e76 Merge pull request 'reset(archicrat-ia): canonical docx import and anchors baseline' (#320) from reset/archicrat-ia-canonical-import-anchors into main
All checks were successful
CI / build-and-anchors (push) Successful in 54s
SMOKE / smoke (push) Successful in 12s
Deploy staging+live (annotations) / deploy (push) Successful in 8m25s
Proposer Apply (Queue) / apply-proposer (push) Successful in 8s
Reviewed-on: #320
2026-04-19 17:50:59 +02:00
c313587b26 reset(archicrat-ia): canonical docx import and anchors baseline
All checks were successful
SMOKE / smoke (push) Successful in 7s
CI / build-and-anchors (push) Successful in 54s
CI / build-and-anchors (pull_request) Successful in 54s
2026-04-19 16:25:42 +02:00
4976ddcc16 Merge pull request 'fix/ch4-docx-sync-20260330' (#319) from fix/ch4-docx-sync-20260330 into main
All checks were successful
CI / build-and-anchors (push) Successful in 50s
Proposer Apply (Queue) / apply-proposer (push) Successful in 44s
SMOKE / smoke (push) Successful in 22s
Deploy staging+live (annotations) / deploy (push) Successful in 12m42s
Reviewed-on: #319
2026-03-30 16:08:47 +02:00
17e11f0322 fix(archicrat-ia): remove duplicated chapitre 4 heading
All checks were successful
SMOKE / smoke (push) Successful in 5s
CI / build-and-anchors (push) Successful in 47s
CI / build-and-anchors (pull_request) Successful in 48s
2026-03-30 16:06:22 +02:00
7df18adfa8 fix(archicrat-ia): sync chapitre 4 from official docx and accept anchor reset
All checks were successful
SMOKE / smoke (push) Successful in 9s
CI / build-and-anchors (push) Successful in 58s
2026-03-30 15:57:43 +02:00
535c5108e2 Merge pull request 'fix(archicrat-ia): drop duplicate chapter 3 heading' (#318) from fix/ch3-docx-sync-20260329 into main
All checks were successful
Proposer Apply (Queue) / apply-proposer (push) Successful in 28s
CI / build-and-anchors (push) Successful in 47s
SMOKE / smoke (push) Successful in 12s
Deploy staging+live (annotations) / deploy (push) Successful in 11m38s
Reviewed-on: #318
2026-03-30 10:30:19 +02:00
20705f6c90 fix(archicrat-ia): drop duplicate chapter 3 heading
All checks were successful
SMOKE / smoke (push) Successful in 2s
CI / build-and-anchors (push) Successful in 47s
CI / build-and-anchors (pull_request) Successful in 43s
2026-03-30 10:26:22 +02:00
eabd2f5f29 Merge pull request 'fix(archicrat-ia): sync chapitre 3 from official docx and accept anchor reset' (#317) from fix/ch3-docx-sync-20260329 into main
All checks were successful
Proposer Apply (Queue) / apply-proposer (push) Successful in 23s
CI / build-and-anchors (push) Successful in 47s
SMOKE / smoke (push) Successful in 9s
Deploy staging+live (annotations) / deploy (push) Successful in 9m38s
Reviewed-on: #317
2026-03-29 21:57:44 +02:00
482151c31c fix(archicrat-ia): sync chapitre 3 from official docx and accept anchor reset
All checks were successful
SMOKE / smoke (push) Successful in 4s
CI / build-and-anchors (push) Successful in 46s
CI / build-and-anchors (pull_request) Successful in 43s
2026-03-29 21:51:25 +02:00
6d9d5a460e Merge pull request 'fix(archicrat-ia): restore chapitre 1 doctrinal opening' (#315) from fix/ch1-restore-opening-20260329-144234 into main
All checks were successful
CI / build-and-anchors (push) Successful in 46s
Proposer Apply (Queue) / apply-proposer (push) Successful in 27s
SMOKE / smoke (push) Successful in 12s
Deploy staging+live (annotations) / deploy (push) Successful in 10m31s
Reviewed-on: #315
2026-03-29 14:53:31 +02:00
89d06ade16 fix(archicrat-ia): restore chapitre 1 doctrinal opening
All checks were successful
SMOKE / smoke (push) Successful in 7s
CI / build-and-anchors (push) Successful in 1m25s
CI / build-and-anchors (pull_request) Successful in 43s
2026-03-29 14:46:51 +02:00
69b35df10c Merge pull request 'fix(archicrat-ia): restore advanced chapitre 1/2 sources and accept chapter 2 anchor reset' (#314) from fix/restore-ch1-ch2-sources-and-churn-20260329 into main
All checks were successful
Proposer Apply (Queue) / apply-proposer (push) Successful in 43s
Deploy staging+live (annotations) / deploy (push) Successful in 12m16s
SMOKE / smoke (push) Successful in 10s
CI / build-and-anchors (push) Successful in 58s
Reviewed-on: #314
2026-03-29 11:31:33 +02:00
b5475e9be1 fix(archicrat-ia): restore advanced chapitre 1/2 sources and accept chapter 2 anchor reset
All checks were successful
SMOKE / smoke (push) Successful in 15s
CI / build-and-anchors (push) Successful in 49s
CI / build-and-anchors (pull_request) Successful in 1m4s
2026-03-29 11:28:19 +02:00
fdd3aace5a Merge pull request 'chore(tooling): add docx source audit and repair helpers' (#313) from chore/docx-tooling-source-audit-and-fix-v3-20260328 into main
All checks were successful
CI / build-and-anchors (push) Successful in 47s
Proposer Apply (Queue) / apply-proposer (push) Successful in 28s
SMOKE / smoke (push) Successful in 6s
Deploy staging+live (annotations) / deploy (push) Successful in 16m22s
Reviewed-on: #313
2026-03-28 23:55:55 +01:00
f86704d67e chore(tooling): add docx source audit and repair helpers
All checks were successful
SMOKE / smoke (push) Successful in 7s
CI / build-and-anchors (push) Successful in 48s
CI / build-and-anchors (pull_request) Successful in 47s
2026-03-28 23:34:42 +01:00
ec8e29a313 Merge pull request 'content(archicrat-ia): drop duplicate chapter H1 in chapitres 1 and 2' (#311) from fix/archicrat-ia-drop-duplicate-h1-ch1-ch2-20260328 into main
All checks were successful
Proposer Apply (Queue) / apply-proposer (push) Successful in 25s
Deploy staging+live (annotations) / deploy (push) Successful in 8m10s
SMOKE / smoke (push) Successful in 6s
CI / build-and-anchors (push) Successful in 45s
CI / build-and-anchors (pull_request) Successful in 51s
Reviewed-on: #311
2026-03-28 23:21:55 +01:00
1dc9a60580 content(archicrat-ia): drop duplicate chapter H1 in chapitres 1 and 2
All checks were successful
SMOKE / smoke (push) Successful in 5s
CI / build-and-anchors (push) Successful in 48s
CI / build-and-anchors (pull_request) Successful in 48s
2026-03-28 23:20:01 +01:00
ee18b26d03 Merge pull request 'content(archicrat-ia): refresh official chapitre 2 docx' (#309) from chore/chapitre-2-docx-refresh-20260328 into main
All checks were successful
Proposer Apply (Queue) / apply-proposer (push) Successful in 22s
Deploy staging+live (annotations) / deploy (push) Successful in 9m29s
SMOKE / smoke (push) Successful in 6s
CI / build-and-anchors (push) Successful in 46s
CI / build-and-anchors (pull_request) Successful in 43s
Reviewed-on: #309
2026-03-28 22:46:24 +01:00
5f4a0f74db content(archicrat-ia): refresh official chapitre 2 docx
All checks were successful
SMOKE / smoke (push) Successful in 5s
CI / build-and-anchors (push) Successful in 46s
CI / build-and-anchors (pull_request) Successful in 49s
2026-03-28 22:42:26 +01:00
6b17df7320 Merge pull request 'content(archicrat-ia): refresh official chapitre 1 docx and anchors baseline' (#308) from chore/chapitre-1-docx-refresh-20260328 into main
All checks were successful
Proposer Apply (Queue) / apply-proposer (push) Successful in 18s
CI / build-and-anchors (push) Successful in 44s
SMOKE / smoke (push) Successful in 7s
Deploy staging+live (annotations) / deploy (push) Successful in 9m26s
Reviewed-on: #308
2026-03-28 19:14:50 +01:00
0c33495342 content(archicrat-ia): refresh official chapitre 1 docx and anchors baseline
All checks were successful
SMOKE / smoke (push) Successful in 6s
CI / build-and-anchors (push) Successful in 46s
CI / build-and-anchors (pull_request) Successful in 44s
2026-03-28 19:12:21 +01:00
d8a09b1def Merge pull request 'chore/prologue-docx-corrections-20260328' (#307) from chore/prologue-docx-corrections-20260328 into main
All checks were successful
Proposer Apply (Queue) / apply-proposer (push) Successful in 27s
CI / build-and-anchors (push) Successful in 49s
SMOKE / smoke (push) Successful in 7s
Deploy staging+live (annotations) / deploy (push) Successful in 9m15s
Reviewed-on: #307
2026-03-28 14:49:48 +01:00
39af501ea0 test(anchors): refresh prologue baseline after intentional text cleanup
All checks were successful
SMOKE / smoke (push) Successful in 5s
CI / build-and-anchors (push) Successful in 51s
CI / build-and-anchors (pull_request) Successful in 47s
2026-03-28 14:44:16 +01:00
4c821d9e83 content(archicrat-ia): refresh official prologue docx corrections 2026-03-28 14:44:16 +01:00
deb4a91348 Merge pull request 'chore/prologue-reimport-final-20260327' (#306) from chore/prologue-reimport-final-20260327 into main
All checks were successful
CI / build-and-anchors (push) Successful in 46s
Proposer Apply (Queue) / apply-proposer (push) Successful in 31s
SMOKE / smoke (push) Successful in 12s
Deploy staging+live (annotations) / deploy (push) Successful in 9m18s
Reviewed-on: #306
2026-03-27 23:20:58 +01:00
5b36b8e54e test(anchors): refresh baseline after prologue reimport
All checks were successful
SMOKE / smoke (push) Successful in 14s
CI / build-and-anchors (push) Successful in 50s
CI / build-and-anchors (pull_request) Successful in 54s
2026-03-27 23:11:01 +01:00
eda5a877ef content(archicrat-ia): reimport official prologue and align importer defaults 2026-03-27 23:11:01 +01:00
5b615a6999 Merge pull request 'fix(glossaire): align reading follow top actions with glossary navigation' (#305) from fix/glossaire-reading-follow-relations-h2 into main
All checks were successful
Proposer Apply (Queue) / apply-proposer (push) Successful in 20s
CI / build-and-anchors (push) Successful in 42s
SMOKE / smoke (push) Successful in 9s
Deploy staging+live (annotations) / deploy (push) Successful in 8m16s
Reviewed-on: #305
2026-03-26 22:33:10 +01:00
99cf0947da fix(glossaire): align reading follow top actions with glossary navigation
All checks were successful
SMOKE / smoke (push) Successful in 5s
CI / build-and-anchors (push) Successful in 47s
CI / build-and-anchors (pull_request) Successful in 43s
2026-03-26 22:30:05 +01:00
dbd1e14e4e Merge pull request 'fix(glossaire): include standalone relation headings in reading follow' (#304) from fix/glossaire-reading-follow-relations-h2 into main
All checks were successful
Proposer Apply (Queue) / apply-proposer (push) Successful in 21s
CI / build-and-anchors (push) Successful in 42s
SMOKE / smoke (push) Successful in 9s
Deploy staging+live (annotations) / deploy (push) Successful in 7m55s
Reviewed-on: #304
2026-03-26 21:35:00 +01:00
7033354011 fix(glossaire): include standalone relation headings in reading follow
All checks were successful
SMOKE / smoke (push) Successful in 3s
CI / build-and-anchors (push) Successful in 42s
CI / build-and-anchors (pull_request) Successful in 44s
2026-03-26 21:29:26 +01:00
7345730e3c Merge pull request 'fix(glossaire): expose relations heading to reading follow' (#303) from fix/glossaire-relations-follow-heading into main
All checks were successful
Proposer Apply (Queue) / apply-proposer (push) Successful in 24s
CI / build-and-anchors (push) Successful in 50s
SMOKE / smoke (push) Successful in 8s
Deploy staging+live (annotations) / deploy (push) Successful in 9m19s
Reviewed-on: #303
2026-03-26 20:47:02 +01:00
cea94c56db fix(glossaire): expose relations heading to reading follow
All checks were successful
SMOKE / smoke (push) Successful in 4s
CI / build-and-anchors (push) Successful in 41s
CI / build-and-anchors (pull_request) Successful in 43s
2026-03-26 20:42:01 +01:00
c1e24736e3 Merge pull request 'feat(glossaire): deduplicate entry aside relation groups' (#302) from feat/glossaire-entry-aside-dedup into main
All checks were successful
Proposer Apply (Queue) / apply-proposer (push) Successful in 21s
CI / build-and-anchors (push) Successful in 49s
SMOKE / smoke (push) Successful in 4s
Deploy staging+live (annotations) / deploy (push) Successful in 9m41s
Reviewed-on: #302
2026-03-26 20:27:31 +01:00
24bbfbc17f feat(glossaire): deduplicate entry aside relation groups
All checks were successful
SMOKE / smoke (push) Successful in 6s
CI / build-and-anchors (push) Successful in 47s
CI / build-and-anchors (pull_request) Successful in 46s
2026-03-26 20:24:49 +01:00
a11e2f1d18 Merge pull request 'fix(glossaire): compact sticky entry hero on glossary pages' (#301) from fix/glossaire-entry-sticky-hero-collapse into main
All checks were successful
Proposer Apply (Queue) / apply-proposer (push) Successful in 32s
CI / build-and-anchors (push) Successful in 45s
SMOKE / smoke (push) Successful in 8s
Deploy staging+live (annotations) / deploy (push) Successful in 9m18s
Reviewed-on: #301
2026-03-26 18:32:19 +01:00
630b146d02 fix(glossaire): compact sticky entry hero on glossary pages
All checks were successful
SMOKE / smoke (push) Successful in 5s
CI / build-and-anchors (push) Successful in 50s
CI / build-and-anchors (pull_request) Successful in 46s
2026-03-26 18:30:34 +01:00
551360db83 Merge pull request 'fix(ci): use local pagefind binary instead of npx wrapper' (#300) from fix/ci-pagefind-local-bin into main
All checks were successful
Proposer Apply (Queue) / apply-proposer (push) Successful in 26s
CI / build-and-anchors (push) Successful in 44s
SMOKE / smoke (push) Successful in 8s
Deploy staging+live (annotations) / deploy (push) Successful in 9m45s
Reviewed-on: #300
2026-03-26 14:45:29 +01:00
a96c282780 fix(ci): use local pagefind binary instead of npx wrapper
All checks were successful
SMOKE / smoke (push) Successful in 5s
CI / build-and-anchors (push) Successful in 45s
CI / build-and-anchors (pull_request) Successful in 46s
2026-03-26 14:41:02 +01:00
d2e0f147c2 Merge pull request 'audit(glossaire): tighten portal exposure and cross-page coherence' (#299) from audit/glossaire-transverse-coherence-fix into main
All checks were successful
Proposer Apply (Queue) / apply-proposer (push) Successful in 33s
SMOKE / smoke (push) Successful in 10s
Deploy staging+live (annotations) / deploy (push) Successful in 10m22s
CI / build-and-anchors (push) Successful in 45s
Reviewed-on: #299
2026-03-26 14:20:37 +01:00
ad95364021 audit(glossaire): tighten portal exposure and cross-page coherence
All checks were successful
SMOKE / smoke (push) Successful in 5s
CI / build-and-anchors (push) Successful in 1m11s
CI / build-and-anchors (pull_request) Successful in 1m17s
2026-03-26 14:16:05 +01:00
e48e322363 Merge pull request 'feat(glossaire): harmonize portal pages and sticky reading ux' (#298) from feat/glossaire-portal-polish-final into main
All checks were successful
Proposer Apply (Queue) / apply-proposer (push) Successful in 27s
Deploy staging+live (annotations) / deploy (push) Successful in 10m51s
SMOKE / smoke (push) Successful in 7s
CI / build-and-anchors (push) Successful in 43s
Reviewed-on: #298
2026-03-26 13:01:46 +01:00
a9f2a5bbd4 feat(glossaire): harmonize portal pages and sticky reading ux
All checks were successful
SMOKE / smoke (push) Successful in 5s
CI / build-and-anchors (push) Successful in 54s
CI / build-and-anchors (pull_request) Successful in 43s
2026-03-26 12:58:17 +01:00
0cba8f868e Merge pull request 'feat(glossaire): harmonize portal pages with shared components' (#297) from feat/glossaire-portals-harmonization into main
All checks were successful
Proposer Apply (Queue) / apply-proposer (push) Successful in 28s
CI / build-and-anchors (push) Successful in 43s
SMOKE / smoke (push) Successful in 7s
Deploy staging+live (annotations) / deploy (push) Successful in 10m36s
Reviewed-on: #297
2026-03-25 23:52:36 +01:00
f8e3ee4cca feat(glossaire): harmonize portal pages with shared components
All checks were successful
SMOKE / smoke (push) Successful in 5s
CI / build-and-anchors (push) Successful in 47s
CI / build-and-anchors (pull_request) Successful in 46s
2026-03-25 23:49:00 +01:00
92e0ad01c6 Merge pull request 'refactor(glossaire): componentize glossary entry page' (#296) from refactor/glossaire-entry-componentization into main
All checks were successful
Proposer Apply (Queue) / apply-proposer (push) Successful in 22s
CI / build-and-anchors (push) Successful in 48s
SMOKE / smoke (push) Successful in 6s
Deploy staging+live (annotations) / deploy (push) Successful in 9m57s
Reviewed-on: #296
2026-03-25 19:28:10 +01:00
e6c18d6b16 refactor(glossaire): componentize glossary entry page
All checks were successful
SMOKE / smoke (push) Successful in 5s
CI / build-and-anchors (push) Successful in 45s
CI / build-and-anchors (pull_request) Successful in 44s
2026-03-25 19:18:58 +01:00
a3092f5d5b Merge pull request 'refactor(glossaire): componentize glossary home sections' (#295) from refactor/glossaire-home-componentization into main
All checks were successful
Proposer Apply (Queue) / apply-proposer (push) Successful in 32s
Deploy staging+live (annotations) / deploy (push) Successful in 11m41s
SMOKE / smoke (push) Successful in 5s
CI / build-and-anchors (push) Successful in 46s
Reviewed-on: #295
2026-03-25 18:30:14 +01:00
7187b69935 refactor(glossaire): componentize glossary home sections
All checks were successful
SMOKE / smoke (push) Successful in 4s
CI / build-and-anchors (push) Successful in 45s
CI / build-and-anchors (pull_request) Successful in 48s
2026-03-25 18:26:43 +01:00
4ba4453661 Merge pull request 'refactor(glossaire): centralize aside and home data' (#294) from feat/glossaire-relational-asides-and-home into main
All checks were successful
Proposer Apply (Queue) / apply-proposer (push) Successful in 21s
CI / build-and-anchors (push) Successful in 42s
SMOKE / smoke (push) Successful in 7s
Deploy staging+live (annotations) / deploy (push) Successful in 9m5s
Reviewed-on: #294
2026-03-25 16:50:31 +01:00
ee42e391e3 refactor(glossaire): centralize aside and home data
All checks were successful
SMOKE / smoke (push) Successful in 5s
CI / build-and-anchors (push) Successful in 48s
CI / build-and-anchors (pull_request) Successful in 43s
2026-03-25 16:48:43 +01:00
f7756be59e Merge pull request 'feat/glossaire-entry-relations-rendering' (#293) from feat/glossaire-entry-relations-rendering into main
All checks were successful
Proposer Apply (Queue) / apply-proposer (push) Successful in 27s
CI / build-and-anchors (push) Successful in 42s
SMOKE / smoke (push) Successful in 6s
Deploy staging+live (annotations) / deploy (push) Successful in 9m40s
Reviewed-on: #293
2026-03-25 15:39:41 +01:00
4abe70e10e refactor(glossaire): extract entry relations rendering
All checks were successful
SMOKE / smoke (push) Successful in 4s
CI / build-and-anchors (push) Successful in 42s
CI / build-and-anchors (pull_request) Successful in 41s
2026-03-25 15:30:51 +01:00
b2b4ec35c0 refactor(glossaire): preserve editorial order for entry relations 2026-03-25 15:20:39 +01:00
b255436958 Merge pull request 'refactor(glossaire): centralize glossary relation helpers' (#292) from feat/glossaire-ui-relations-foundation into main
All checks were successful
Proposer Apply (Queue) / apply-proposer (push) Successful in 24s
Deploy staging+live (annotations) / deploy (push) Successful in 9m2s
SMOKE / smoke (push) Successful in 5s
CI / build-and-anchors (push) Successful in 43s
Reviewed-on: #292
2026-03-25 14:17:11 +01:00
ad06b34a85 refactor(glossaire): centralize glossary relation helpers
All checks were successful
CI / build-and-anchors (pull_request) Successful in 42s
SMOKE / smoke (push) Successful in 3s
CI / build-and-anchors (push) Successful in 41s
2026-03-25 14:15:39 +01:00
a38f585f3d Merge pull request 'feat(glossaire): strengthen paradigms and support theories cross-links' (#291) from chore/glossaire-paradigmes-and-support-theories-mesh into main
All checks were successful
Proposer Apply (Queue) / apply-proposer (push) Successful in 14s
Deploy staging+live (annotations) / deploy (push) Successful in 9m54s
SMOKE / smoke (push) Successful in 6s
CI / build-and-anchors (push) Successful in 47s
CI / build-and-anchors (pull_request) Successful in 48s
Reviewed-on: #291
2026-03-25 10:30:37 +01:00
bf0dc125d1 feat(glossaire): strengthen paradigms and support theories cross-links
All checks were successful
SMOKE / smoke (push) Successful in 3s
CI / build-and-anchors (push) Successful in 44s
CI / build-and-anchors (pull_request) Successful in 1m19s
2026-03-25 10:28:42 +01:00
f61dc15b47 Merge pull request 'feat(glossaire): strengthen meta-regimes and archicrations cross-links' (#290) from chore/glossaire-meta-regimes-and-archicrations-mesh into main
All checks were successful
Proposer Apply (Queue) / apply-proposer (push) Successful in 16s
Deploy staging+live (annotations) / deploy (push) Successful in 8m31s
SMOKE / smoke (push) Successful in 5s
CI / build-and-anchors (push) Successful in 42s
CI / build-and-anchors (pull_request) Successful in 45s
Reviewed-on: #290
2026-03-24 20:41:52 +01:00
1ac3d91a19 fix(glossaire): repair malformed normativo-politiques frontmatter
All checks were successful
SMOKE / smoke (push) Successful in 6s
CI / build-and-anchors (push) Successful in 44s
CI / build-and-anchors (pull_request) Successful in 43s
2026-03-24 20:40:10 +01:00
100ba10409 feat(glossaire): strengthen meta-regimes and archicrations cross-links
Some checks failed
SMOKE / smoke (push) Successful in 4s
CI / build-and-anchors (push) Failing after 27s
2026-03-24 20:33:39 +01:00
5f14785abb Merge pull request 'chore/glossaire-pathologies-figures-and-reinstitution-mesh' (#289) from chore/glossaire-pathologies-figures-and-reinstitution-mesh into main
All checks were successful
CI / build-and-anchors (push) Successful in 47s
Proposer Apply (Queue) / apply-proposer (push) Successful in 32s
SMOKE / smoke (push) Successful in 8s
Deploy staging+live (annotations) / deploy (push) Successful in 11m12s
Reviewed-on: #289
2026-03-24 18:35:06 +01:00
c7043ae9d5 fix(glossaire): repair malformed seeAlso frontmatter
All checks were successful
SMOKE / smoke (push) Successful in 7s
CI / build-and-anchors (push) Successful in 48s
CI / build-and-anchors (pull_request) Successful in 51s
2026-03-24 18:32:55 +01:00
bd1235f8c3 feat(glossaire): strengthen pathologies, figures, and reinstitution cross-links 2026-03-24 18:32:12 +01:00
7ae7b4dca3 Merge pull request 'chore/glossaire-scenes-topologies-audit-and-mesh2' (#288) from chore/glossaire-scenes-topologies-audit-and-mesh into main
All checks were successful
Proposer Apply (Queue) / apply-proposer (push) Successful in 18s
Deploy staging+live (annotations) / deploy (push) Successful in 8m20s
SMOKE / smoke (push) Successful in 7s
CI / build-and-anchors (push) Successful in 43s
Reviewed-on: #288
2026-03-24 17:57:50 +01:00
f088db57d4 feat(glossaire): strengthen scenes, topologies, and IA audit cross-links
All checks were successful
SMOKE / smoke (push) Successful in 3s
CI / build-and-anchors (push) Successful in 43s
CI / build-and-anchors (pull_request) Successful in 42s
2026-03-24 17:56:13 +01:00
311e94ed91 Merge pull request 'feat(glossaire): strengthen tensions cross-links' (#287) from chore/glossaire-tensions-audit-and-mesh into main
All checks were successful
Proposer Apply (Queue) / apply-proposer (push) Successful in 18s
Deploy staging+live (annotations) / deploy (push) Successful in 9m53s
SMOKE / smoke (push) Successful in 3s
CI / build-and-anchors (push) Successful in 42s
CI / build-and-anchors (pull_request) Successful in 41s
Reviewed-on: #287
2026-03-24 14:37:57 +01:00
e078f3f9ab feat(glossaire): strengthen tensions cross-links
All checks were successful
CI / build-and-anchors (push) Successful in 45s
CI / build-and-anchors (pull_request) Successful in 45s
SMOKE / smoke (push) Successful in 5s
2026-03-24 14:32:44 +01:00
7c4bb5a2cf Merge pull request 'fix(glossaire): sanitize imported artefacts and relation metadata' (#286) from chore/glossaire-sanitize-corpus into main
All checks were successful
Proposer Apply (Queue) / apply-proposer (push) Successful in 30s
Deploy staging+live (annotations) / deploy (push) Successful in 11m10s
SMOKE / smoke (push) Successful in 3s
CI / build-and-anchors (push) Successful in 41s
CI / build-and-anchors (pull_request) Successful in 41s
Reviewed-on: #286
2026-03-24 13:37:47 +01:00
214e174635 fix(glossaire): sanitize imported artefacts and relation metadata
All checks were successful
SMOKE / smoke (push) Successful in 6s
CI / build-and-anchors (push) Successful in 45s
CI / build-and-anchors (pull_request) Successful in 47s
2026-03-24 13:34:03 +01:00
f1b2f4605f Merge pull request 'feat/glossaire-sticky-entry-and-aside-polish-20260324' (#285) from feat/glossaire-sticky-entry-and-aside-polish-20260324 into main
All checks were successful
CI / build-and-anchors (push) Successful in 48s
Proposer Apply (Queue) / apply-proposer (push) Successful in 34s
SMOKE / smoke (push) Successful in 16s
Deploy staging+live (annotations) / deploy (push) Successful in 10m2s
Reviewed-on: #285
2026-03-24 00:32:36 +01:00
87955adf5d feat(glossaire): polish sticky entry flow and aside navigation
All checks were successful
SMOKE / smoke (push) Successful in 18s
CI / build-and-anchors (push) Successful in 48s
CI / build-and-anchors (pull_request) Successful in 48s
2026-03-24 00:29:39 +01:00
e39a0c547d feat(glossaire): compress paradigme hero when reading follow is active 2026-03-23 10:27:28 +01:00
c89ddf7237 chore(glossaire): checkpoint before portal hero compression 2026-03-23 00:33:19 +01:00
615effe8bf Merge pull request 'fix(glossaire): description précise du correctif' (#284) from fix/nom-court-et-clair into main
All checks were successful
Proposer Apply (Queue) / apply-proposer (push) Successful in 15s
SMOKE / smoke (push) Successful in 7s
CI / build-and-anchors (push) Successful in 44s
Deploy staging+live (annotations) / deploy (push) Successful in 8m29s
Reviewed-on: #284
2026-03-21 21:41:19 +01:00
e952b344a0 fix(glossaire): description précise du correctif
All checks were successful
SMOKE / smoke (push) Successful in 2s
CI / build-and-anchors (push) Successful in 44s
CI / build-and-anchors (pull_request) Successful in 39s
2026-03-21 21:39:02 +01:00
bb0572cc1a Merge pull request 'fix(glossaire): harmonize portal sticky hero and follow behavior' (#283) from fix/glossaire-portails-sticky-follow-20260321 into main
All checks were successful
Proposer Apply (Queue) / apply-proposer (push) Successful in 20s
CI / build-and-anchors (push) Successful in 44s
SMOKE / smoke (push) Successful in 9s
Deploy staging+live (annotations) / deploy (push) Successful in 9m19s
Reviewed-on: #283
2026-03-21 20:22:43 +01:00
f6a2347278 fix(glossaire): harmonize portal sticky hero and follow behavior
All checks were successful
SMOKE / smoke (push) Successful in 3s
CI / build-and-anchors (push) Successful in 45s
CI / build-and-anchors (pull_request) Successful in 39s
2026-03-21 20:15:13 +01:00
d902c2bf98 Merge pull request 'feat(glossaire): refine portal pages and contextual asides' (#282) from feat/glossaire-portails-asides-polish into main
All checks were successful
Proposer Apply (Queue) / apply-proposer (push) Successful in 17s
CI / build-and-anchors (push) Successful in 48s
SMOKE / smoke (push) Successful in 7s
Deploy staging+live (annotations) / deploy (push) Successful in 8m30s
Reviewed-on: #282
2026-03-20 17:46:21 +01:00
baa2082f51 feat(glossaire): refine portal pages and contextual asides
All checks were successful
SMOKE / smoke (push) Successful in 9s
CI / build-and-anchors (push) Successful in 49s
CI / build-and-anchors (pull_request) Successful in 45s
2026-03-20 17:42:09 +01:00
2f249b420f Merge pull request 'feat(glossaire): enrich entries and refine glossary navigation' (#281) from feat/glossaire-enrich-navigation into main
All checks were successful
CI / build-and-anchors (push) Successful in 48s
Proposer Apply (Queue) / apply-proposer (push) Successful in 29s
SMOKE / smoke (push) Successful in 7s
Deploy staging+live (annotations) / deploy (push) Successful in 10m44s
Reviewed-on: #281
2026-03-19 22:18:49 +01:00
d6b4eb82f4 feat(glossaire): enrich entries and refine glossary navigation
All checks were successful
SMOKE / smoke (push) Successful in 9s
CI / build-and-anchors (push) Successful in 55s
CI / build-and-anchors (pull_request) Successful in 45s
2026-03-19 21:53:33 +01:00
bfa44fecda Merge pull request 'Add archicrations-esthetico-symboliques.md' (#280) from chore/add-archicrations-esthetico-symboliques into main
All checks were successful
Proposer Apply (Queue) / apply-proposer (push) Successful in 23s
CI / build-and-anchors (push) Successful in 47s
SMOKE / smoke (push) Successful in 8s
Deploy staging+live (annotations) / deploy (push) Successful in 10m53s
Reviewed-on: #280
2026-03-18 23:53:32 +01:00
e329235aa9 Add archicrations-esthetico-symboliques.md
All checks were successful
SMOKE / smoke (push) Successful in 7s
CI / build-and-anchors (push) Successful in 51s
CI / build-and-anchors (pull_request) Successful in 45s
2026-03-18 23:51:48 +01:00
8cbaa5117c Merge pull request 'Document cockpit local, NAS supervision, and ops workflow' (#279) from docs-and-ops/cockpit-finalization into main
All checks were successful
Proposer Apply (Queue) / apply-proposer (push) Successful in 28s
Deploy staging+live (annotations) / deploy (push) Successful in 49s
CI / build-and-anchors (push) Successful in 44s
SMOKE / smoke (push) Successful in 4s
Reviewed-on: #279
2026-03-18 18:24:44 +01:00
3086f333ed Document cockpit local, NAS supervision, and ops workflow
All checks were successful
SMOKE / smoke (push) Successful in 6s
CI / build-and-anchors (push) Successful in 49s
CI / build-and-anchors (pull_request) Successful in 42s
2026-03-18 18:21:52 +01:00
c1c3c19d13 Merge pull request 'Use Europe/Paris build time for ops health manifest' (#278) from ops/builtat-paris-time into main
All checks were successful
Proposer Apply (Queue) / apply-proposer (push) Successful in 30s
Deploy staging+live (annotations) / deploy (push) Successful in 46s
CI / build-and-anchors (push) Successful in 44s
SMOKE / smoke (push) Successful in 2s
Reviewed-on: #278
2026-03-18 11:14:11 +01:00
ddcd0acd4d Use Europe/Paris build time for ops health manifest
All checks were successful
SMOKE / smoke (push) Successful in 9s
CI / build-and-anchors (push) Successful in 48s
CI / build-and-anchors (pull_request) Successful in 49s
2026-03-18 11:11:54 +01:00
9bc4eeb3e7 Merge pull request 'Add ops health manifest to deploy pipeline' (#277) from ops/ops-health-deploy into main
All checks were successful
CI / build-and-anchors (push) Successful in 44s
Proposer Apply (Queue) / apply-proposer (push) Successful in 19s
SMOKE / smoke (push) Successful in 7s
Deploy staging+live (annotations) / deploy (push) Successful in 11m8s
Reviewed-on: #277
2026-03-18 10:31:32 +01:00
7a9a5319ac Add ops health manifest to deploy pipeline
All checks were successful
SMOKE / smoke (push) Successful in 11s
CI / build-and-anchors (push) Successful in 58s
CI / build-and-anchors (pull_request) Successful in 1m3s
2026-03-18 10:28:13 +01:00
7d75de5c9f Merge pull request 'docs: formalize localhost auto-sync architecture' (#276) from docs/localhost-auto-sync into main
All checks were successful
CI / build-and-anchors (push) Successful in 51s
Proposer Apply (Queue) / apply-proposer (push) Successful in 19s
SMOKE / smoke (push) Successful in 6s
Deploy staging+live (annotations) / deploy (push) Successful in 46s
Reviewed-on: #276
2026-03-16 21:19:51 +01:00
69c91cb661 docs: formalize localhost auto-sync architecture
All checks were successful
SMOKE / smoke (push) Successful in 9s
CI / build-and-anchors (push) Successful in 47s
CI / build-and-anchors (pull_request) Successful in 45s
2026-03-16 21:14:41 +01:00
a1bfbf4405 Merge pull request 'proposer: apply 2 tickets on /archicrat-ia/prologue/' (#275) from bot/proposer-273-ce91b3d8cf02 into main
All checks were successful
CI / build-and-anchors (push) Successful in 44s
Proposer Apply (Queue) / apply-proposer (push) Successful in 33s
SMOKE / smoke (push) Successful in 12s
Deploy staging+live (annotations) / deploy (push) Successful in 9m57s
Reviewed-on: #275
2026-03-16 16:46:20 +01:00
archicratie-bot
be26b425d8 edit: apply ticket #274 (/archicrat-ia/prologue/#p-23-d91a7b78)
All checks were successful
CI / build-and-anchors (push) Successful in 51s
SMOKE / smoke (push) Successful in 9s
CI / build-and-anchors (pull_request) Successful in 42s
2026-03-16 15:43:09 +00:00
archicratie-bot
abf88e7037 edit: apply ticket #273 (/archicrat-ia/prologue/#p-22-a416d473) 2026-03-16 15:42:47 +00:00
04fee32fdb Merge pull request 'proposer: apply 2 tickets on /archicrat-ia/prologue/' (#272) from bot/proposer-270-39655773c199 into main
All checks were successful
Proposer Apply (Queue) / apply-proposer (push) Successful in 26s
CI / build-and-anchors (push) Successful in 43s
SMOKE / smoke (push) Successful in 8s
Deploy staging+live (annotations) / deploy (push) Successful in 9m20s
Reviewed-on: #272
2026-03-16 13:57:16 +01:00
archicratie-bot
fbddf5c3fc edit: apply ticket #271 (/archicrat-ia/prologue/#p-17-b8c5bf21)
All checks were successful
CI / build-and-anchors (push) Successful in 50s
SMOKE / smoke (push) Successful in 6s
CI / build-and-anchors (pull_request) Successful in 46s
2026-03-16 12:54:46 +00:00
archicratie-bot
bad748df3a edit: apply ticket #270 (/archicrat-ia/prologue/#p-7-64a0ca9c) 2026-03-16 12:54:21 +00:00
0066cf8601 Merge pull request 'fix(actions): harden proposer queue against duplicate batch PRs' (#269) from hotfix/proposer-close-verify into main
All checks were successful
Proposer Apply (Queue) / apply-proposer (push) Successful in 21s
Deploy staging+live (annotations) / deploy (push) Successful in 33s
CI / build-and-anchors (push) Successful in 44s
SMOKE / smoke (push) Successful in 4s
Reviewed-on: #269
2026-03-16 13:42:52 +01:00
5d3473d66c fix(actions): harden proposer queue against duplicate batch PRs
All checks were successful
SMOKE / smoke (push) Successful in 2s
CI / build-and-anchors (push) Successful in 40s
CI / build-and-anchors (pull_request) Successful in 39s
2026-03-16 13:39:09 +01:00
f9d34110e4 Merge pull request 'proposer: apply 2 tickets on /archicrat-ia/prologue/' (#266) from bot/proposer-264-20260316-120249 into main
All checks were successful
Proposer Apply (Queue) / apply-proposer (push) Successful in 20s
CI / build-and-anchors (push) Successful in 41s
SMOKE / smoke (push) Successful in 7s
Deploy staging+live (annotations) / deploy (push) Successful in 7m47s
Reviewed-on: #266
2026-03-16 13:18:27 +01:00
archicratie-bot
84e9c3ead4 edit: apply ticket #265 (/archicrat-ia/prologue/#p-5-85126fa5)
All checks were successful
CI / build-and-anchors (pull_request) Successful in 43s
SMOKE / smoke (push) Successful in 4s
CI / build-and-anchors (push) Successful in 39s
2026-03-16 12:03:37 +00:00
archicratie-bot
72e59175fc edit: apply ticket #264 (/archicrat-ia/prologue/#p-4-8ed4f807) 2026-03-16 12:03:13 +00:00
81b69ac6d5 Merge pull request 'fix(actions): verify proposer issue closure after PR creation' (#263) from hotfix/proposer-close-verify into main
All checks were successful
CI / build-and-anchors (push) Successful in 43s
Deploy staging+live (annotations) / deploy (push) Successful in 47s
Proposer Apply (Queue) / apply-proposer (push) Successful in 33s
SMOKE / smoke (push) Successful in 2s
Reviewed-on: #263
2026-03-16 12:54:26 +01:00
513ae72e85 fix(actions): verify proposer issue closure after PR creation
All checks were successful
SMOKE / smoke (push) Successful in 5s
CI / build-and-anchors (push) Successful in 43s
CI / build-and-anchors (pull_request) Successful in 47s
2026-03-16 12:52:47 +01:00
4c4dd1c515 Merge pull request 'fix(actions): remove fragile heredocs from proposer PR step' (#262) from hotfix/proposer-no-heredoc-pr-step into main
All checks were successful
Deploy staging+live (annotations) / deploy (push) Successful in 39s
CI / build-and-anchors (push) Successful in 46s
SMOKE / smoke (push) Successful in 7s
Proposer Apply (Queue) / apply-proposer (push) Successful in 1m30s
Reviewed-on: #262
2026-03-16 12:34:12 +01:00
46b15ed6ab fix(actions): remove fragile heredocs from proposer PR step
All checks were successful
SMOKE / smoke (push) Successful in 5s
CI / build-and-anchors (push) Successful in 44s
CI / build-and-anchors (pull_request) Successful in 40s
2026-03-16 12:30:43 +01:00
a015e72f7c Merge pull request 'proposer: apply ticket #257' (#261) from bot/proposer-257-20260316-111742 into main
All checks were successful
CI / build-and-anchors (push) Successful in 42s
SMOKE / smoke (push) Successful in 8s
Proposer Apply (Queue) / apply-proposer (push) Successful in 1m26s
Deploy staging+live (annotations) / deploy (push) Successful in 7m52s
Reviewed-on: #261
2026-03-16 12:21:55 +01:00
archicratie-bot
d5df7d77a0 edit: apply ticket #257 (/archicrat-ia/prologue/#p-0-d7974f88)
All checks were successful
CI / build-and-anchors (push) Successful in 42s
CI / build-and-anchors (pull_request) Successful in 40s
SMOKE / smoke (push) Successful in 4s
2026-03-16 11:18:06 +00:00
ec3ceee862 Merge pull request 'fix(actions): tolerate empty label payload in proposer gate' (#260) from debug/proposer-257 into main
Some checks failed
CI / build-and-anchors (push) Successful in 46s
Deploy staging+live (annotations) / deploy (push) Successful in 48s
SMOKE / smoke (push) Successful in 2s
Proposer Apply (Queue) / apply-proposer (push) Failing after 1m39s
Reviewed-on: #260
2026-03-16 12:16:12 +01:00
867475c3ff fix(actions): tolerate empty label payload in proposer gate
All checks were successful
SMOKE / smoke (push) Successful in 8s
CI / build-and-anchors (push) Successful in 45s
CI / build-and-anchors (pull_request) Successful in 39s
2026-03-16 12:14:01 +01:00
b024c5557c Merge pull request 'fix(editorial): preserve frontmatter in apply-ticket' (#259) from hotfix/preserve-frontmatter-apply-ticket into main
All checks were successful
CI / build-and-anchors (push) Successful in 47s
Proposer Apply (Queue) / apply-proposer (push) Successful in 38s
SMOKE / smoke (push) Successful in 11s
Deploy staging+live (annotations) / deploy (push) Successful in 9m38s
Reviewed-on: #259
2026-03-16 11:52:07 +01:00
93306f360d fix(editorial): preserve frontmatter in apply-ticket
All checks were successful
SMOKE / smoke (push) Successful in 9s
CI / build-and-anchors (push) Successful in 51s
CI / build-and-anchors (pull_request) Successful in 44s
2026-03-16 11:48:08 +01:00
52847d999d Merge pull request 'fix(actions): silence anno workflow on proposer tickets' (#258) from hotfix/fix-proposer-runtime-v2 into main
Some checks failed
Deploy staging+live (annotations) / deploy (push) Successful in 51s
CI / build-and-anchors (push) Successful in 49s
SMOKE / smoke (push) Successful in 5s
Proposer Apply (Queue) / apply-proposer (push) Failing after 48s
Reviewed-on: #258
2026-03-16 11:11:28 +01:00
b9629b43ff fix(actions): silence anno workflow on proposer tickets
All checks were successful
SMOKE / smoke (push) Successful in 9s
CI / build-and-anchors (push) Successful in 49s
CI / build-and-anchors (pull_request) Successful in 44s
2026-03-16 11:06:11 +01:00
06482a9f8d Merge pull request 'fix(actions): make proposer queue runtime-safe' (#254) from hotfix/fix-proposer-runtime-v2 into main
All checks were successful
Proposer Apply (Queue) / apply-proposer (push) Successful in 21s
CI / build-and-anchors (push) Successful in 46s
SMOKE / smoke (push) Successful in 6s
Deploy staging+live (annotations) / deploy (push) Successful in 42s
Reviewed-on: #254
2026-03-16 00:59:50 +01:00
f2e4ae5ac2 fix(actions): make proposer queue runtime-safe
All checks were successful
SMOKE / smoke (push) Successful in 7s
CI / build-and-anchors (push) Successful in 44s
CI / build-and-anchors (pull_request) Successful in 42s
2026-03-16 00:58:10 +01:00
71baf0f6da Merge pull request 'fix(actions): repair proposer workflow yaml' (#253) from hotfix/fix-proposer-workflow into main
Some checks failed
CI / build-and-anchors (push) Successful in 48s
Deploy staging+live (annotations) / deploy (push) Successful in 1m0s
SMOKE / smoke (push) Successful in 4s
Proposer Apply (Queue) / apply-proposer (push) Failing after 4s
Reviewed-on: #253
2026-03-16 00:42:17 +01:00
d02b6fc347 fix(actions): repair proposer workflow yaml
All checks were successful
SMOKE / smoke (push) Successful in 9s
CI / build-and-anchors (push) Successful in 43s
CI / build-and-anchors (pull_request) Successful in 45s
2026-03-16 00:38:46 +01:00
431f1e347b Merge pull request 'chore(editorial): harden proposer queue and apply-ticket' (#252) from chore/editorial-hardening-v1-clean into main
Some checks are pending
Deploy staging+live (annotations) / deploy (push) Waiting to run
CI / build-and-anchors (push) Successful in 1m6s
SMOKE / smoke (push) Successful in 39s
Reviewed-on: #252
2026-03-16 00:02:31 +01:00
ab6f45ed5c chore(editorial): harden proposer queue and apply-ticket
All checks were successful
SMOKE / smoke (push) Successful in 6s
CI / build-and-anchors (push) Successful in 43s
CI / build-and-anchors (pull_request) Successful in 46s
2026-03-15 23:58:11 +01:00
02c060d239 Merge pull request 'chore/reset-from-docx-clean-base' (#251) from chore/reset-from-docx-clean-base into main
All checks were successful
SMOKE / smoke (push) Successful in 8s
CI / build-and-anchors (push) Successful in 45s
Deploy staging+live (annotations) / deploy (push) Successful in 9m50s
Reviewed-on: #251
2026-03-15 21:19:24 +01:00
be2029de82 ci(anchors): accept intentional chapitre 1 anchor reset
All checks were successful
SMOKE / smoke (push) Successful in 4s
CI / build-and-anchors (push) Successful in 42s
CI / build-and-anchors (pull_request) Successful in 44s
2026-03-15 21:11:20 +01:00
e148eaeaf3 content(archicrat-ia): refresh chapitre 1 from official docx
Some checks failed
SMOKE / smoke (push) Successful in 7s
CI / build-and-anchors (push) Failing after 42s
2026-03-15 20:50:35 +01:00
c63a1e6ce4 chore(reset): reimport chapitre 1 from docx clean base
All checks were successful
SMOKE / smoke (push) Successful in 7s
CI / build-and-anchors (push) Successful in 44s
2026-03-15 20:33:49 +01:00
b3a73a7781 Merge pull request 'chore(reset): reimport chapitre 1 from docx clean base' (#250) from chore/reset-from-docx-clean-base into main
All checks were successful
SMOKE / smoke (push) Successful in 8s
CI / build-and-anchors (push) Successful in 37s
Deploy staging+live (annotations) / deploy (push) Successful in 9m32s
Reviewed-on: #250
2026-03-15 15:31:37 +01:00
1968585d0f chore(reset): reimport chapitre 1 from docx clean base
All checks were successful
CI / build-and-anchors (push) Successful in 57s
CI / build-and-anchors (pull_request) Successful in 50s
SMOKE / smoke (push) Successful in 5s
2026-03-15 15:29:09 +01:00
b33c758411 Merge pull request 'proposer: apply ticket #236' (#243) from bot/proposer-236-20260315-112808 into main
All checks were successful
Deploy staging+live (annotations) / deploy (push) Successful in 12m1s
SMOKE / smoke (push) Successful in 4s
CI / build-and-anchors (push) Successful in 44s
CI / build-and-anchors (pull_request) Successful in 43s
Reviewed-on: #243
2026-03-15 12:55:33 +01:00
afa543125c Merge pull request 'proposer: apply ticket #235' (#242) from bot/proposer-235-20260315-112515 into main
Some checks are pending
Deploy staging+live (annotations) / deploy (push) Has started running
SMOKE / smoke (push) Successful in 7s
CI / build-and-anchors (push) Successful in 51s
Reviewed-on: #242
2026-03-15 12:47:26 +01:00
archicratie-bot
0d0252cac0 edit: apply ticket #236 (/archicrat-ia/chapitre-1/#p-400-8959d62e)
All checks were successful
CI / build-and-anchors (push) Successful in 45s
SMOKE / smoke (push) Successful in 3s
CI / build-and-anchors (pull_request) Successful in 39s
2026-03-15 11:28:36 +00:00
archicratie-bot
a8bd9aeed5 edit: apply ticket #235 (/archicrat-ia/chapitre-1/#p-139-caa4e99d)
All checks were successful
CI / build-and-anchors (pull_request) Successful in 47s
CI / build-and-anchors (push) Successful in 41s
SMOKE / smoke (push) Successful in 3s
2026-03-15 11:25:43 +00:00
d277c61afd Merge pull request 'proposer: apply ticket #229' (#232) from bot/proposer-229-20260315-105537 into main
Some checks failed
CI / build-and-anchors (push) Successful in 51s
SMOKE / smoke (push) Successful in 10s
Deploy staging+live (annotations) / deploy (push) Has been cancelled
Reviewed-on: #232
2026-03-15 12:24:34 +01:00
archicratie-bot
86479952d1 edit: apply ticket #229 (/archicrat-ia/chapitre-1/#p-10-1a706744)
All checks were successful
CI / build-and-anchors (pull_request) Successful in 42s
CI / build-and-anchors (push) Successful in 42s
SMOKE / smoke (push) Successful in 5s
2026-03-15 10:56:03 +00:00
c94024a8ae Merge pull request 'feat(glossaire): add advanced external paradigms from chapter 3' (#228) from feat/glossaire-archicrations-cartographie into main
All checks were successful
SMOKE / smoke (push) Successful in 7s
CI / build-and-anchors (push) Successful in 45s
Deploy staging+live (annotations) / deploy (push) Successful in 9m53s
Reviewed-on: #228
2026-03-15 10:08:48 +01:00
70611d16f8 feat(glossaire): add advanced external paradigms from chapter 3
All checks were successful
SMOKE / smoke (push) Successful in 11s
CI / build-and-anchors (push) Successful in 44s
CI / build-and-anchors (pull_request) Successful in 44s
2026-03-15 10:05:32 +01:00
354db231b8 Merge pull request 'feat(glossaire): enrichit la cartographie archicratique et les archicrations' (#227) from feat/glossaire-archicrations-cartographie into main
All checks were successful
SMOKE / smoke (push) Successful in 14s
CI / build-and-anchors (push) Successful in 45s
Deploy staging+live (annotations) / deploy (push) Successful in 9m37s
Reviewed-on: #227
2026-03-14 19:57:38 +01:00
9d8d60d00f feat(glossaire): enrichit la cartographie archicratique et les archicrations
All checks were successful
SMOKE / smoke (push) Successful in 4s
CI / build-and-anchors (push) Successful in 42s
CI / build-and-anchors (pull_request) Successful in 38s
2026-03-14 19:55:16 +01:00
f5d25abbec Merge pull request 'feat(glossaire): add advanced external paradigms from chapter 3' (#226) from feat/glossaire-paradigmes-externes into main
All checks were successful
SMOKE / smoke (push) Successful in 10s
CI / build-and-anchors (push) Successful in 44s
Deploy staging+live (annotations) / deploy (push) Successful in 7m13s
Reviewed-on: #226
2026-03-13 18:55:10 +01:00
8e9f7314f5 feat(glossaire): add advanced external paradigms from chapter 3
All checks were successful
SMOKE / smoke (push) Successful in 3s
CI / build-and-anchors (push) Successful in 41s
CI / build-and-anchors (pull_request) Successful in 40s
2026-03-13 18:48:33 +01:00
03b88b944d Merge pull request 'feat(glossaire): integrate paradigms portal into glossary navigation' (#225) from feat/glossaire-paradigmes-externes into main
All checks were successful
SMOKE / smoke (push) Successful in 21s
CI / build-and-anchors (push) Successful in 43s
Deploy staging+live (annotations) / deploy (push) Successful in 7m38s
Reviewed-on: #225
2026-03-13 18:06:43 +01:00
385c36f660 feat(glossaire): integrate paradigms portal into glossary navigation
All checks were successful
SMOKE / smoke (push) Successful in 7s
CI / build-and-anchors (push) Successful in 40s
CI / build-and-anchors (pull_request) Successful in 41s
2026-03-13 18:04:13 +01:00
cfa092cd38 Merge pull request 'feat(glossaire): introduce doctrine as ontology type' (#224) from feat/glossaire-paradigmes-externes into main
All checks were successful
SMOKE / smoke (push) Successful in 11s
CI / build-and-anchors (push) Successful in 43s
Deploy staging+live (annotations) / deploy (push) Successful in 8m36s
Reviewed-on: #224
2026-03-13 16:47:52 +01:00
1a762f8f54 feat(glossaire): introduce doctrine as ontology type
All checks were successful
SMOKE / smoke (push) Successful in 5s
CI / build-and-anchors (push) Successful in 41s
CI / build-and-anchors (pull_request) Successful in 38s
2026-03-13 16:45:29 +01:00
fbdaf72775 Merge pull request 'refactor(glossaire): introduce dedicated GlossaryLayout wrapper' (#223) from feat/glossaire-paradigmes-externes into main
All checks were successful
SMOKE / smoke (push) Successful in 10s
CI / build-and-anchors (push) Successful in 41s
Deploy staging+live (annotations) / deploy (push) Successful in 8m12s
Reviewed-on: #223
2026-03-13 15:15:32 +01:00
67128a9ca1 refactor(glossaire): introduce dedicated GlossaryLayout wrapper
All checks were successful
SMOKE / smoke (push) Successful in 4s
CI / build-and-anchors (push) Successful in 41s
CI / build-and-anchors (pull_request) Successful in 39s
2026-03-13 15:11:39 +01:00
898759db3d Merge pull request 'feat(glossaire): refactor core archicratic concepts' (#222) from feat/glossaire-paradigmes-externes into main
All checks were successful
SMOKE / smoke (push) Successful in 9s
CI / build-and-anchors (push) Successful in 39s
Deploy staging+live (annotations) / deploy (push) Successful in 8m6s
Reviewed-on: #222
2026-03-13 14:36:40 +01:00
4f009a9557 feat(glossaire): refactor core archicratic concepts
All checks were successful
SMOKE / smoke (push) Successful in 4s
CI / build-and-anchors (push) Successful in 40s
CI / build-and-anchors (pull_request) Successful in 37s
2026-03-13 14:32:41 +01:00
378d0981f0 Merge pull request 'refactor(glossaire): disable reading-only controls in glossary' (#221) from feat/glossaire-paradigmes-externes into main
All checks were successful
SMOKE / smoke (push) Successful in 20s
CI / build-and-anchors (push) Successful in 41s
Deploy staging+live (annotations) / deploy (push) Successful in 9m36s
Reviewed-on: #221
2026-03-12 19:27:16 +01:00
8f3702f803 refactor(glossaire): disable reading-only controls in glossary
All checks were successful
SMOKE / smoke (push) Successful in 5s
CI / build-and-anchors (push) Successful in 40s
CI / build-and-anchors (pull_request) Successful in 41s
2026-03-12 19:23:50 +01:00
cfd303fc85 Merge pull request 'Nouveaux Ajouts - Glossaire' (#220) from feat/glossaire-paradigmes-externes into main
All checks were successful
SMOKE / smoke (push) Successful in 8s
CI / build-and-anchors (push) Successful in 39s
Deploy staging+live (annotations) / deploy (push) Successful in 8m26s
Reviewed-on: #220
2026-03-12 17:45:03 +01:00
0fc0976f8a refactor(glossaire): polish contextual aside navigation
All checks were successful
SMOKE / smoke (push) Successful in 7s
CI / build-and-anchors (push) Successful in 41s
CI / build-and-anchors (pull_request) Successful in 42s
2026-03-12 17:42:16 +01:00
e247ea8ead Merge pull request 'feat(glossaire): strengthen conceptual and paradigmatic cross-links' (#219) from feat/glossaire-paradigmes-externes into main
All checks were successful
SMOKE / smoke (push) Successful in 12s
CI / build-and-anchors (push) Successful in 43s
Deploy staging+live (annotations) / deploy (push) Successful in 7m49s
Reviewed-on: #219
2026-03-12 14:44:17 +01:00
0c57c4bc6d feat(glossaire): strengthen conceptual and paradigmatic cross-links
All checks were successful
SMOKE / smoke (push) Successful in 5s
CI / build-and-anchors (push) Successful in 37s
CI / build-and-anchors (pull_request) Successful in 41s
2026-03-12 14:39:46 +01:00
9b7998e1c3 Merge pull request 'feat(glossaire): add external paradigms and theoretical landscape' (#218) from feat/glossaire-paradigmes-externes into main
All checks were successful
SMOKE / smoke (push) Successful in 6s
CI / build-and-anchors (push) Successful in 42s
Deploy staging+live (annotations) / deploy (push) Successful in 7m44s
Reviewed-on: #218
2026-03-12 13:57:00 +01:00
8997a00413 feat(glossaire): add external paradigms and theoretical landscape
All checks were successful
SMOKE / smoke (push) Successful in 10s
CI / build-and-anchors (push) Successful in 42s
CI / build-and-anchors (pull_request) Successful in 39s
2026-03-12 13:53:11 +01:00
a2e6f6185f Merge pull request 'fix(glossaire): adjust section anchor offset for sticky header' (#217) from feat/glossaire-paradigmes-externes into main
All checks were successful
SMOKE / smoke (push) Successful in 13s
CI / build-and-anchors (push) Successful in 37s
Deploy staging+live (annotations) / deploy (push) Successful in 9m8s
Reviewed-on: #217
2026-03-12 13:35:39 +01:00
c2715b01d7 fix(glossaire): adjust section anchor offset for sticky header
All checks were successful
SMOKE / smoke (push) Successful in 5s
CI / build-and-anchors (push) Successful in 39s
CI / build-and-anchors (pull_request) Successful in 43s
2026-03-12 13:31:07 +01:00
6f09dfcd12 Merge pull request 'feat(glossaire): extend taxonomy and align Astro 6 content config' (#216) from feat/glossaire-paradigmes into main
All checks were successful
SMOKE / smoke (push) Successful in 12s
CI / build-and-anchors (push) Successful in 42s
Deploy staging+live (annotations) / deploy (push) Successful in 8m58s
Reviewed-on: #216
2026-03-12 12:08:15 +01:00
bb9f55a3b5 feat(glossaire): extend taxonomy and align Astro 6 content config
All checks were successful
SMOKE / smoke (push) Successful in 6s
CI / build-and-anchors (push) Successful in 44s
CI / build-and-anchors (pull_request) Successful in 41s
2026-03-12 12:04:46 +01:00
298ee7492c Merge pull request 'EditionToc Modif' (#215) from chore/astro6-upgrade into main
All checks were successful
SMOKE / smoke (push) Successful in 9s
CI / build-and-anchors (push) Successful in 42s
Deploy staging+live (annotations) / deploy (push) Successful in 7m16s
Reviewed-on: #215
2026-03-11 17:58:17 +01:00
37cb836246 chore(astro): upgrade to Astro 6 and fix dynamic collection routes
All checks were successful
SMOKE / smoke (push) Successful in 3s
CI / build-and-anchors (push) Successful in 42s
CI / build-and-anchors (pull_request) Successful in 44s
2026-03-11 17:56:53 +01:00
19e3318125 Merge pull request 'chore(astro): upgrade to astro 6 with legacy content compatibility' (#214) from chore/astro6-upgrade into main
All checks were successful
SMOKE / smoke (push) Successful in 9s
CI / build-and-anchors (push) Successful in 47s
Deploy staging+live (annotations) / deploy (push) Successful in 9m33s
Reviewed-on: #214
2026-03-11 17:19:04 +01:00
683b02f4a0 chore(astro): upgrade to astro 6 with legacy content compatibility
All checks were successful
SMOKE / smoke (push) Successful in 11s
CI / build-and-anchors (push) Successful in 40s
CI / build-and-anchors (pull_request) Successful in 38s
2026-03-11 17:17:02 +01:00
20aecc30b1 Merge pull request 'feat(glossary): add core glossarial taxonomy and foundational entries' (#213) from feat/glossaire-core2 into main
All checks were successful
SMOKE / smoke (push) Successful in 18s
CI / build-and-anchors (push) Successful in 45s
Deploy staging+live (annotations) / deploy (push) Successful in 8m42s
Reviewed-on: #213
2026-03-11 13:04:24 +01:00
daf57aa152 feat(glossary): add core glossarial taxonomy and foundational entries
All checks were successful
SMOKE / smoke (push) Successful in 5s
CI / build-and-anchors (push) Successful in 42s
CI / build-and-anchors (pull_request) Successful in 45s
2026-03-11 12:50:17 +01:00
bfd693de92 Merge pull request 'refactor(import): unify core editorial manifest' (#212) from refactor/unify-manifest-core into main
All checks were successful
SMOKE / smoke (push) Successful in 10s
CI / build-and-anchors (push) Successful in 40s
Deploy staging+live (annotations) / deploy (push) Successful in 8m0s
Reviewed-on: #212
2026-03-11 11:36:47 +01:00
ea2ad0017b refactor(import): unify core editorial manifest
All checks were successful
SMOKE / smoke (push) Successful in 3s
CI / build-and-anchors (push) Successful in 35s
CI / build-and-anchors (pull_request) Successful in 39s
2026-03-11 11:33:37 +01:00
82e7473cac Merge pull request 'fix(content): declare commencer collection and remove implicit ia collection' (#211) from fix/content-collections-stability into main
All checks were successful
SMOKE / smoke (push) Successful in 17s
CI / build-and-anchors (push) Successful in 39s
Deploy staging+live (annotations) / deploy (push) Successful in 8m57s
Reviewed-on: #211
2026-03-11 11:09:39 +01:00
315523e80f refactor(site): depublish non-core sections and refresh anchors baseline
All checks were successful
SMOKE / smoke (push) Successful in 6s
CI / build-and-anchors (push) Successful in 41s
CI / build-and-anchors (pull_request) Successful in 40s
2026-03-11 11:07:21 +01:00
569b6de154 fix(content): declare commencer collection and remove implicit ia collection
Some checks failed
SMOKE / smoke (push) Successful in 5s
CI / build-and-anchors (push) Failing after 41s
CI / build-and-anchors (pull_request) Failing after 46s
2026-03-11 10:54:27 +01:00
95f8159554 Merge pull request 'proposer: apply ticket #209' (#210) from bot/proposer-209-20260311-090431 into main
All checks were successful
SMOKE / smoke (push) Successful in 12s
CI / build-and-anchors (push) Successful in 37s
Deploy staging+live (annotations) / deploy (push) Successful in 8m16s
Reviewed-on: #210
2026-03-11 10:06:49 +01:00
archicratie-bot
5698c494f1 edit: apply ticket #209 (/cas-ia/introduction/#p-16-615e3d61)
All checks were successful
CI / build-and-anchors (pull_request) Successful in 43s
SMOKE / smoke (push) Successful in 3s
CI / build-and-anchors (push) Successful in 37s
2026-03-11 09:04:53 +00:00
e640e66b8d Merge pull request 'proposer: apply ticket #207' (#208) from bot/proposer-207-20260311-082736 into main
All checks were successful
SMOKE / smoke (push) Successful in 6s
CI / build-and-anchors (push) Successful in 37s
Deploy staging+live (annotations) / deploy (push) Successful in 7m40s
Reviewed-on: #208
2026-03-11 09:29:21 +01:00
archicratie-bot
9be7d170c6 edit: apply ticket #207 (/cas-ia/introduction/#p-10-ceba29a2)
All checks were successful
CI / build-and-anchors (pull_request) Successful in 40s
SMOKE / smoke (push) Successful in 2s
CI / build-and-anchors (push) Successful in 41s
2026-03-11 08:27:58 +00:00
c2c98c516b Merge pull request 'refactor(editorial): recentrer le site sur le noyau archicratique' (#206) from refactor/recentrage-noyau-archicratique into main
All checks were successful
SMOKE / smoke (push) Successful in 9s
CI / build-and-anchors (push) Successful in 37s
Deploy staging+live (annotations) / deploy (push) Successful in 8m54s
Reviewed-on: #206
2026-03-10 20:43:16 +01:00
32554f5998 refactor(editorial): recentrer le site sur le noyau archicratique
All checks were successful
CI / build-and-anchors (pull_request) Successful in 39s
SMOKE / smoke (push) Successful in 6s
CI / build-and-anchors (push) Successful in 41s
2026-03-10 20:36:33 +01:00
308f4f92bc Merge pull request 'chore: merge main into fix branch' (#205) from fix/annotations-index-fail-open-20260304-223909 into main
All checks were successful
Deploy staging+live (annotations) / deploy (push) Successful in 53s
SMOKE / smoke (push) Successful in 4s
CI / build-and-anchors (push) Successful in 35s
Reviewed-on: #205
2026-03-06 11:18:04 +01:00
4dfd3b026b chore: merge main into fix branch
All checks were successful
SMOKE / smoke (push) Successful in 5s
CI / build-and-anchors (push) Successful in 41s
CI / build-and-anchors (pull_request) Successful in 38s
2026-03-06 11:14:48 +01:00
c93f274f41 Merge pull request 'fix(annotations): fail-open when src/annotations is missing' (#204) from fix/annotations-index-fail-open-20260304-223909 into main
All checks were successful
SMOKE / smoke (push) Successful in 14s
CI / build-and-anchors (push) Successful in 46s
Deploy staging+live (annotations) / deploy (push) Successful in 8m58s
Reviewed-on: #204
2026-03-04 22:42:13 +01:00
dfa311fb5b fix(annotations): fail-open when src/annotations is missing
All checks were successful
SMOKE / smoke (push) Successful in 18s
CI / build-and-anchors (push) Successful in 40s
CI / build-and-anchors (pull_request) Successful in 41s
2026-03-04 22:39:09 +01:00
3ef1dc2801 Merge pull request 'fix(annotations): fail-open when src/annotations missing + keep dir tracked' (#203) from chore/remove-test-anno-media-20260304-204810 into main
All checks were successful
SMOKE / smoke (push) Successful in 13s
CI / build-and-anchors (push) Successful in 44s
Deploy staging+live (annotations) / deploy (push) Successful in 8m47s
Reviewed-on: #203
2026-03-04 21:39:04 +01:00
435e41ed4d fix(annotations): fail-open when src/annotations missing + keep dir tracked
All checks were successful
SMOKE / smoke (push) Successful in 7s
CI / build-and-anchors (push) Successful in 43s
CI / build-and-anchors (pull_request) Successful in 41s
2026-03-04 21:31:53 +01:00
8825932159 Merge pull request 'chore: keep public/media directory (gitkeep)' (#202) from chore/remove-test-anno-media-20260304-204810 into main
All checks were successful
SMOKE / smoke (push) Successful in 10s
Deploy staging+live (annotations) / deploy (push) Successful in 34s
CI / build-and-anchors (push) Successful in 45s
Reviewed-on: #202
2026-03-04 21:06:45 +01:00
b55decbea4 chore: keep public/media directory (gitkeep)
All checks were successful
SMOKE / smoke (push) Successful in 4s
CI / build-and-anchors (push) Successful in 46s
CI / build-and-anchors (pull_request) Successful in 38s
2026-03-04 21:03:55 +01:00
414a848db3 Merge pull request 'chore: keep src/annotations directory (gitkeep)' (#201) from chore/remove-test-anno-media-20260304-204810 into main
All checks were successful
SMOKE / smoke (push) Successful in 5s
Deploy staging+live (annotations) / deploy (push) Successful in 39s
CI / build-and-anchors (push) Successful in 44s
Reviewed-on: #201
2026-03-04 21:03:39 +01:00
cbd4f3a57f chore: keep src/annotations directory (gitkeep)
All checks were successful
SMOKE / smoke (push) Successful in 5s
CI / build-and-anchors (push) Successful in 36s
CI / build-and-anchors (pull_request) Successful in 36s
2026-03-04 21:02:19 +01:00
49f8d6a95e Merge pull request 'chore: remove test annotations and media' (#200) from chore/remove-test-anno-media-20260304-204810 into main
Some checks failed
CI / build-and-anchors (push) Failing after 34s
Deploy staging+live (annotations) / deploy (push) Successful in 47s
SMOKE / smoke (push) Successful in 20s
Reviewed-on: #200
2026-03-04 20:59:21 +01:00
5afa5cbfda chore: remove test annotations and media
Some checks failed
SMOKE / smoke (push) Successful in 3s
CI / build-and-anchors (push) Failing after 32s
CI / build-and-anchors (pull_request) Failing after 34s
2026-03-04 20:48:26 +01:00
a1b1df38ba Merge pull request 'chore: remove test annotations and media' (#196) from chore/remove-test-anno-media-20260304-202811 into main
Some checks failed
Deploy staging+live (annotations) / deploy (push) Failing after 54s
SMOKE / smoke (push) Successful in 4s
CI / build-and-anchors (push) Failing after 33s
CI / build-and-anchors (pull_request) Failing after 30s
Reviewed-on: #196
2026-03-04 20:35:47 +01:00
d3f7d74da7 Merge pull request 'chore: remove test annotations and media' (#197) from chore/remove-test-anno-media-20260304-202451 into main
Some checks are pending
Deploy staging+live (annotations) / deploy (push) Has started running
CI / build-and-anchors (push) Has started running
SMOKE / smoke (push) Successful in 8s
Reviewed-on: #197
2026-03-04 20:35:40 +01:00
6919190107 chore: remove test annotations and media
Some checks failed
SMOKE / smoke (push) Successful in 6s
CI / build-and-anchors (push) Failing after 36s
CI / build-and-anchors (pull_request) Failing after 36s
2026-03-04 20:28:29 +01:00
021ef5abd7 chore: remove test annotations and media
Some checks failed
SMOKE / smoke (push) Successful in 6s
CI / build-and-anchors (push) Failing after 31s
CI / build-and-anchors (pull_request) Failing after 38s
2026-03-04 20:25:03 +01:00
76cdc85f9c Merge pull request 'ci(deploy): treat src/public/package changes as FULL rebuild' (#193) from chore/supprimer-annotations-20260304-191252 into main
Some checks failed
SMOKE / smoke (push) Successful in 16s
CI / build-and-anchors (push) Failing after 35s
Deploy staging+live (annotations) / deploy (push) Successful in 48s
Reviewed-on: #193
2026-03-04 19:22:36 +01:00
f2f6df2127 ci(deploy): treat src/public/package changes as FULL rebuild
Some checks failed
SMOKE / smoke (push) Successful in 4s
CI / build-and-anchors (push) Failing after 36s
CI / build-and-anchors (pull_request) Failing after 37s
2026-03-04 19:13:25 +01:00
dfe13757f7 Merge pull request 'ci(deploy): treat src/public/package changes as FULL rebuild' (#191) from chore/fix-deploy-gate-full-src-public-20260304-181357 into main
All checks were successful
SMOKE / smoke (push) Successful in 8s
CI / build-and-anchors (push) Successful in 37s
Deploy staging+live (annotations) / deploy (push) Successful in 34s
Reviewed-on: #191
2026-03-04 18:16:30 +01:00
148ac997df ci(deploy): treat src/public/package changes as FULL rebuild
All checks were successful
SMOKE / smoke (push) Successful in 2s
CI / build-and-anchors (push) Successful in 40s
CI / build-and-anchors (pull_request) Successful in 32s
2026-03-04 18:14:48 +01:00
84492d2741 Merge pull request 'style(mobile): widen reading in landscape on small screens (css-only)' (#190) from feat/mobile-landscape-reading-YYYYMMDD into main
All checks were successful
SMOKE / smoke (push) Successful in 16s
CI / build-and-anchors (push) Successful in 43s
Deploy staging+live (annotations) / deploy (push) Successful in 34s
Reviewed-on: #190
2026-03-04 17:39:11 +01:00
81baadd57f style(mobile): widen reading in landscape on small screens (css-only)
All checks were successful
SMOKE / smoke (push) Successful in 9s
CI / build-and-anchors (push) Successful in 45s
CI / build-and-anchors (pull_request) Successful in 36s
2026-03-04 17:36:56 +01:00
63d0ffc5fc Merge pull request 'chore: <résumé clair de la modif>' (#189) from chore/xxx-20260303-221031 into main
All checks were successful
SMOKE / smoke (push) Successful in 7s
Deploy staging+live (annotations) / deploy (push) Successful in 29s
CI / build-and-anchors (push) Successful in 39s
Reviewed-on: #189
2026-03-03 22:15:31 +01:00
24143fc2c4 chore: <résumé clair de la modif>
All checks were successful
SMOKE / smoke (push) Successful in 3s
CI / build-and-anchors (push) Successful in 36s
CI / build-and-anchors (pull_request) Successful in 37s
2026-03-03 22:12:01 +01:00
55370b704f Merge pull request 'chore: cleanup testA/testB markers' (#188) from chore/cleanup-testA-testB-20260303-213115 into main
All checks were successful
SMOKE / smoke (push) Successful in 13s
CI / build-and-anchors (push) Successful in 37s
Deploy staging+live (annotations) / deploy (push) Successful in 8m35s
Reviewed-on: #188
2026-03-03 21:38:00 +01:00
b8a3ce1337 chore: cleanup testA/testB markers
All checks were successful
SMOKE / smoke (push) Successful in 5s
CI / build-and-anchors (push) Successful in 43s
CI / build-and-anchors (pull_request) Successful in 40s
2026-03-03 21:36:26 +01:00
7f9baedf41 Merge pull request 'test: B hotpatch-auto gate (touch src/annotations)' (#187) from testB-hotpatch-auto-20260303-211919 into main
All checks were successful
SMOKE / smoke (push) Successful in 10s
Deploy staging+live (annotations) / deploy (push) Successful in 38s
CI / build-and-anchors (push) Successful in 37s
Reviewed-on: #187
2026-03-03 21:22:04 +01:00
1adbe1c7a3 test: B hotpatch-auto gate (touch src/annotations)
All checks were successful
SMOKE / smoke (push) Successful in 3s
CI / build-and-anchors (push) Successful in 37s
CI / build-and-anchors (pull_request) Successful in 37s
2026-03-03 21:20:07 +01:00
107a26352f Merge pull request 'test: A full-auto gate (touch src/content)' (#186) from testA-full-auto-20260303-205324 into main
All checks were successful
SMOKE / smoke (push) Successful in 16s
CI / build-and-anchors (push) Successful in 46s
Deploy staging+live (annotations) / deploy (push) Successful in 7m5s
Reviewed-on: #186
2026-03-03 20:57:09 +01:00
1c2b9ddbb6 test: A full-auto gate (touch src/content)
All checks were successful
SMOKE / smoke (push) Successful in 10s
CI / build-and-anchors (push) Successful in 47s
CI / build-and-anchors (pull_request) Successful in 44s
2026-03-03 20:54:46 +01:00
be99460d4d Merge pull request 'ci(deploy): fix gate bash + robust merge-proof diff + full/hotpatch auto' (#185) from chore/fix-deploy-gate-bash-20260303-204603 into main
All checks were successful
SMOKE / smoke (push) Successful in 13s
CI / build-and-anchors (push) Successful in 40s
Deploy staging+live (annotations) / deploy (push) Successful in 44s
Reviewed-on: #185
2026-03-03 20:48:02 +01:00
9e1b704aa6 ci(deploy): fix gate bash + robust merge-proof diff + full/hotpatch auto
All checks were successful
SMOKE / smoke (push) Successful in 4s
CI / build-and-anchors (push) Successful in 35s
CI / build-and-anchors (pull_request) Successful in 44s
2026-03-03 20:46:03 +01:00
941fbf5845 Merge pull request 'ci(deploy): make gate merge-proof (diff before..after)' (#184) from chore/deploy-gate-mergeproof-20260303-195827 into main
Some checks failed
SMOKE / smoke (push) Successful in 6s
CI / build-and-anchors (push) Successful in 40s
Deploy staging+live (annotations) / deploy (push) Failing after 35s
Reviewed-on: #184
2026-03-03 20:00:37 +01:00
0b4a31a432 ci(deploy): make gate merge-proof (diff before..after)
All checks were successful
SMOKE / smoke (push) Successful in 7s
CI / build-and-anchors (push) Successful in 43s
CI / build-and-anchors (pull_request) Successful in 37s
2026-03-03 19:58:27 +01:00
c617dc3979 Merge pull request 'test: B hotpatch-auto gate (touch src/annotations)' (#183) from testB-hotpatch-auto-20260303-183745 into main
All checks were successful
SMOKE / smoke (push) Successful in 7s
CI / build-and-anchors (push) Successful in 38s
Deploy staging+live (annotations) / deploy (push) Successful in 7m50s
Reviewed-on: #183
2026-03-03 18:40:39 +01:00
1b95161de0 test: B hotpatch-auto gate (touch src/annotations)
All checks were successful
SMOKE / smoke (push) Successful in 7s
CI / build-and-anchors (push) Successful in 42s
CI / build-and-anchors (pull_request) Successful in 41s
2026-03-03 18:38:52 +01:00
ebd976bd46 Merge pull request 'chore: cleanup testA/testB markers' (#182) from chore/cleanup-testA-testB-20260303-175846 into main
All checks were successful
SMOKE / smoke (push) Successful in 11s
CI / build-and-anchors (push) Successful in 42s
Deploy staging+live (annotations) / deploy (push) Successful in 8m55s
Reviewed-on: #182
2026-03-03 18:01:40 +01:00
f8d57d8fe0 chore: cleanup testA/testB markers
All checks were successful
SMOKE / smoke (push) Successful in 6s
CI / build-and-anchors (push) Successful in 41s
CI / build-and-anchors (pull_request) Successful in 38s
2026-03-03 18:00:01 +01:00
09a4d2c472 Merge pull request 'test: B hotpatch-auto gate (touch src/annotations)' (#181) from testB-hotpatch-auto-20260303-174037 into main
All checks were successful
SMOKE / smoke (push) Successful in 9s
CI / build-and-anchors (push) Successful in 41s
Deploy staging+live (annotations) / deploy (push) Successful in 8m0s
Reviewed-on: #181
2026-03-03 17:43:31 +01:00
1f6dc874d0 test: B hotpatch-auto gate (touch src/annotations)
All checks were successful
SMOKE / smoke (push) Successful in 2s
CI / build-and-anchors (push) Successful in 36s
CI / build-and-anchors (pull_request) Successful in 36s
2026-03-03 17:42:04 +01:00
4dd63945ee Merge pull request 'test: A full-auto gate (touch src/content)' (#180) from testA-full-auto-20260303-173032 into main
Some checks failed
SMOKE / smoke (push) Successful in 15s
CI / build-and-anchors (push) Successful in 37s
Deploy staging+live (annotations) / deploy (push) Has been cancelled
Reviewed-on: #180
2026-03-03 17:36:14 +01:00
ba64b0694b test: A full-auto gate (touch src/content)
All checks were successful
SMOKE / smoke (push) Successful in 4s
CI / build-and-anchors (push) Successful in 40s
CI / build-and-anchors (pull_request) Successful in 41s
2026-03-03 17:34:32 +01:00
58e5ceda59 Merge pull request 'ci(deploy): auto FULL when content/anchors/pages/scripts change' (#179) from chore/deploy-gate-full-on-content-anchors-pages-scripts-20260303-171645 into main
All checks were successful
SMOKE / smoke (push) Successful in 15s
CI / build-and-anchors (push) Successful in 40s
Deploy staging+live (annotations) / deploy (push) Successful in 7m35s
Reviewed-on: #179
2026-03-03 17:20:36 +01:00
08f826ee01 ci(deploy): auto FULL when content/anchors/pages/scripts change
All checks were successful
SMOKE / smoke (push) Successful in 6s
CI / build-and-anchors (push) Successful in 46s
CI / build-and-anchors (pull_request) Successful in 42s
2026-03-03 17:16:45 +01:00
3358d280ec Merge pull request 'edit: apply ticket #174 (/archicrat-ia/chapitre-3/#p-1-60c7ea48)' (#178) from chore/migrate-content-archicrat-ia-root-20260303-132407 into main
All checks were successful
SMOKE / smoke (push) Successful in 9s
CI / build-and-anchors (push) Successful in 38s
Deploy staging+live (annotations) / deploy (push) Successful in 48s
Reviewed-on: #178
2026-03-03 15:04:07 +01:00
9cb0d5e416 content: wire archicrat-ia as first-class collection (routes + toc + schema)
All checks were successful
CI / build-and-anchors (push) Successful in 38s
CI / build-and-anchors (pull_request) Successful in 37s
SMOKE / smoke (push) Successful in 5s
2026-03-03 15:02:50 +01:00
a46f058917 edit: apply ticket #174 (/archicrat-ia/chapitre-3/#p-1-60c7ea48)
Some checks failed
SMOKE / smoke (push) Successful in 6s
CI / build-and-anchors (push) Failing after 39s
CI / build-and-anchors (pull_request) Failing after 36s
2026-03-03 14:27:35 +01:00
604b2199da Merge pull request 'ci: fix proposer apply workflow (checkout before APP_DIR detect)' (#177) from chore/fix-proposer-apply-checkout-order-20260303-122611 into main
All checks were successful
SMOKE / smoke (push) Successful in 14s
CI / build-and-anchors (push) Successful in 39s
Deploy staging+live (annotations) / deploy (push) Successful in 1m6s
Reviewed-on: #177
2026-03-03 12:32:34 +01:00
d153f71be6 ci: fix proposer apply workflow (checkout before APP_DIR detect)
All checks were successful
SMOKE / smoke (push) Successful in 5s
CI / build-and-anchors (push) Successful in 41s
CI / build-and-anchors (pull_request) Successful in 39s
2026-03-03 12:26:11 +01:00
8f64e4b098 Merge pull request 'ci: fix proposer workflow (auto APP_DIR + guards)' (#176) from chore/fix-proposer-workflow-appdir-20260303-115843 into main
All checks were successful
SMOKE / smoke (push) Successful in 12s
CI / build-and-anchors (push) Successful in 38s
Deploy staging+live (annotations) / deploy (push) Successful in 1m10s
Reviewed-on: #176
2026-03-03 12:01:18 +01:00
459bf195d8 ci: fix proposer workflow (auto APP_DIR + guards)
All checks were successful
SMOKE / smoke (push) Successful in 7s
CI / build-and-anchors (push) Successful in 40s
CI / build-and-anchors (pull_request) Successful in 42s
2026-03-03 11:58:43 +01:00
0c46b0d19b Merge pull request 'ci: add Proposer Apply workflow (apply-ticket -> PR bot)' (#175) from chore/proposer-apply-workflow-20260302-234255 into main
All checks were successful
SMOKE / smoke (push) Successful in 11s
CI / build-and-anchors (push) Successful in 41s
Deploy staging+live (annotations) / deploy (push) Successful in 48s
Reviewed-on: #175
2026-03-02 23:49:57 +01:00
bfbdc7b688 ci: add Proposer Apply workflow (apply-ticket -> PR bot)
All checks were successful
SMOKE / smoke (push) Successful in 6s
CI / build-and-anchors (push) Successful in 42s
CI / build-and-anchors (pull_request) Successful in 43s
2026-03-02 23:42:55 +01:00
8fd53dd4d2 Merge pull request 'anno: apply ticket #172' (#173) from bot/anno-172-20260302-200155 into main
All checks were successful
SMOKE / smoke (push) Successful in 12s
CI / build-and-anchors (push) Successful in 37s
Deploy staging+live (annotations) / deploy (push) Successful in 48s
Reviewed-on: #173
2026-03-02 21:03:36 +01:00
archicratie-bot
c8bbee4f74 anno: apply ticket #172 (archicrat-ia/chapitre-3#p-1-60c7ea48 type/reference)
All checks were successful
CI / build-and-anchors (push) Successful in 45s
CI / build-and-anchors (pull_request) Successful in 39s
SMOKE / smoke (push) Successful in 5s
2026-03-02 20:01:55 +00:00
04cdf54eb7 Merge pull request 'anno: apply ticket #169' (#171) from bot/anno-169-20260302-195320 into main
All checks were successful
SMOKE / smoke (push) Successful in 11s
CI / build-and-anchors (push) Successful in 43s
Deploy staging+live (annotations) / deploy (push) Successful in 56s
Reviewed-on: #171
2026-03-02 20:59:08 +01:00
archicratie-bot
d6bf645ae9 anno: apply ticket #169 (archicrat-ia/chapitre-3#p-0-ace27175 type/reference)
All checks were successful
CI / build-and-anchors (push) Successful in 47s
SMOKE / smoke (push) Successful in 4s
CI / build-and-anchors (pull_request) Successful in 42s
2026-03-02 19:53:21 +00:00
1ca6bcbd81 Merge pull request 'ci: make anno apply/reject gates API-hard (approved/rejected label present)' (#170) from chore/fix-anno-apply-approved-gate-v1 into main
All checks were successful
SMOKE / smoke (push) Successful in 13s
Deploy staging+live (annotations) / deploy (push) Successful in 46s
CI / build-and-anchors (push) Successful in 42s
Reviewed-on: #170
2026-03-02 20:17:58 +01:00
dec5f8eba7 ci: make anno apply/reject gates API-hard (approved/rejected label present)
All checks were successful
SMOKE / smoke (push) Successful in 7s
CI / build-and-anchors (push) Successful in 40s
CI / build-and-anchors (pull_request) Successful in 39s
2026-03-02 20:12:29 +01:00
716c887045 Merge pull request 'ci: fix auto-label (no array fallback, retries, post-verify)' (#167) from chore/fix-auto-label-422-v1 into main
All checks were successful
SMOKE / smoke (push) Successful in 9s
CI / build-and-anchors (push) Successful in 39s
Deploy staging+live (annotations) / deploy (push) Successful in 45s
Reviewed-on: #167
2026-03-02 19:37:43 +01:00
9b1789a164 ci: fix auto-label (no array fallback, retries, post-verify)
All checks were successful
SMOKE / smoke (push) Successful in 7s
CI / build-and-anchors (push) Successful in 50s
CI / build-and-anchors (pull_request) Successful in 43s
2026-03-02 19:36:09 +01:00
17fa39c7ff Merge pull request 'ci: hard-gate anno apply/reject + fix JSON parsing' (#164) from chore/fix-anno-workflows-jsonparse-v3 into main
All checks were successful
SMOKE / smoke (push) Successful in 15s
CI / build-and-anchors (push) Successful in 45s
Deploy staging+live (annotations) / deploy (push) Successful in 55s
Reviewed-on: #164
2026-03-02 18:53:19 +01:00
8132e315f4 ci: hard-gate anno apply/reject + fix JSON parsing
All checks were successful
SMOKE / smoke (push) Successful in 10s
CI / build-and-anchors (push) Successful in 49s
CI / build-and-anchors (pull_request) Successful in 43s
2026-03-02 18:48:39 +01:00
8d993915d7 Merge pull request 'ci: stabilize anno apply/reject (event parsing + strict gating)' (#161) from chore/fix-anno-workflows-jsonparse-v2 into main
All checks were successful
SMOKE / smoke (push) Successful in 8s
CI / build-and-anchors (push) Successful in 44s
Deploy staging+live (annotations) / deploy (push) Successful in 1m10s
Reviewed-on: #161
2026-03-02 12:49:18 +01:00
497bddd05d ci: fix anno apply/reject JSON parsing + hard gates
All checks were successful
SMOKE / smoke (push) Successful in 8s
CI / build-and-anchors (push) Successful in 48s
CI / build-and-anchors (pull_request) Successful in 40s
2026-03-02 12:47:37 +01:00
7c8e49c1a9 ci: stabilize anno apply/reject (event parsing + strict gating)
Some checks failed
CI / build-and-anchors (push) Successful in 52s
CI / build-and-anchors (pull_request) Successful in 41s
SMOKE / smoke (push) Failing after 12m55s
2026-03-02 11:10:53 +01:00
901d28b89b Merge pull request 'deploy: pin nas-deploy image by digest' (#159) from chore/pin-nas-deploy-image-digest into main
All checks were successful
SMOKE / smoke (push) Successful in 12s
CI / build-and-anchors (push) Successful in 36s
Deploy staging+live (annotations) / deploy (push) Successful in 45s
Reviewed-on: #159
2026-02-28 20:24:46 +01:00
43e2862c89 deploy: pin nas-deploy image by digest
All checks were successful
SMOKE / smoke (push) Successful in 6s
CI / build-and-anchors (push) Successful in 39s
CI / build-and-anchors (pull_request) Successful in 37s
2026-02-28 20:22:17 +01:00
73fb38c4d1 Merge pull request 'deploy: use prebaked nas-deploy image; remove apt-get step' (#158) from chore/deploy-use-prebaked-image into main
All checks were successful
SMOKE / smoke (push) Successful in 9s
CI / build-and-anchors (push) Successful in 36s
Deploy staging+live (annotations) / deploy (push) Successful in 39s
Reviewed-on: #158
2026-02-28 19:51:27 +01:00
a81d206aba deploy: use prebaked nas-deploy image; remove apt-get step
All checks were successful
SMOKE / smoke (push) Successful in 3s
CI / build-and-anchors (push) Successful in 39s
CI / build-and-anchors (pull_request) Successful in 36s
2026-02-28 19:49:25 +01:00
9801ea3cea Merge pull request 'ci: lock deploy workflow to nas-deploy runner' (#157) from chore/lock-nas-deploy into main
All checks were successful
SMOKE / smoke (push) Successful in 16s
CI / build-and-anchors (push) Successful in 44s
Deploy staging+live (annotations) / deploy (push) Successful in 3m40s
Reviewed-on: #157
2026-02-28 17:45:35 +01:00
c11189fe11 ci: lock deploy workflow to nas-deploy runner
All checks were successful
SMOKE / smoke (push) Successful in 7s
CI / build-and-anchors (push) Successful in 44s
CI / build-and-anchors (pull_request) Successful in 40s
2026-02-28 17:43:19 +01:00
b47edb24cf Merge pull request 'ci: fix YAML newlines after runs-on (mac-ci)' (#156) from chore/fix-yaml-runs-on-newlines into main
Some checks failed
CI / build-and-anchors (push) Successful in 41s
SMOKE / smoke (push) Successful in 3s
Deploy staging+live (annotations) / deploy (push) Has been cancelled
Reviewed-on: #156
2026-02-28 15:54:48 +01:00
be191b09a0 ci: fix YAML newlines after runs-on (mac-ci)
All checks were successful
SMOKE / smoke (push) Successful in 25s
CI / build-and-anchors (push) Successful in 1m6s
CI / build-and-anchors (pull_request) Successful in 37s
2026-02-28 15:50:48 +01:00
e06587478d Merge pull request 'ci: route CI/bots to mac runner; keep deploy on NAS' (#155) from chore/route-ci-to-mac-runner into main
All checks were successful
Deploy staging+live (annotations) / deploy (push) Successful in 1m58s
Reviewed-on: #155
2026-02-28 15:29:35 +01:00
402ffb04cd ci: route CI/bots to mac runner; keep deploy on NAS 2026-02-28 15:28:49 +01:00
1cbfc02670 Merge pull request 'ci: harden anno-reject (dispatch + conflict guard) and keep deploy concurrency safe' (#153) from chore/fix-anno-reject-close-guard into main
All checks were successful
CI / build-and-anchors (push) Successful in 2m49s
Deploy staging+live (annotations) / deploy (push) Successful in 3m7s
SMOKE / smoke (push) Successful in 19s
Reviewed-on: #153
2026-02-28 10:05:26 +01:00
28d2fbbd2f ci: harden anno-reject (dispatch + conflict guard) and keep deploy concurrency safe
All checks were successful
CI / build-and-anchors (push) Successful in 2m21s
SMOKE / smoke (push) Successful in 19s
2026-02-28 09:55:37 +01:00
225368a952 Merge pull request 'ci: anno-apply gate on supported types (skip proposer)' (#149) from chore/fix-anno-apply-gate-types into main
All checks were successful
Deploy staging+live (annotations) / deploy (push) Successful in 2m12s
CI / build-and-anchors (push) Successful in 1m45s
SMOKE / smoke (push) Successful in 17s
Reviewed-on: #149
2026-02-27 20:06:57 +01:00
3574695041 ci: anno-apply gate on supported types (skip proposer)
All checks were successful
CI / build-and-anchors (push) Successful in 1m52s
SMOKE / smoke (push) Successful in 15s
2026-02-27 20:03:16 +01:00
ea68025a1d Merge pull request 'anno: apply ticket #144' (#148) from bot/anno-144-20260227-124313 into main
All checks were successful
CI / build-and-anchors (push) Successful in 2m5s
Deploy staging+live (annotations) / deploy (push) Successful in 2m8s
SMOKE / smoke (push) Successful in 13s
Reviewed-on: #148
2026-02-27 15:49:11 +01:00
3a08698003 Merge pull request 'anno: apply ticket #143' (#147) from bot/anno-143-20260227-124037 into main
Some checks failed
CI / build-and-anchors (push) Has been cancelled
Deploy staging+live (annotations) / deploy (push) Has been cancelled
SMOKE / smoke (push) Has been cancelled
Reviewed-on: #147
2026-02-27 15:48:28 +01:00
3d583608c2 Merge pull request 'anno: apply ticket #142' (#146) from bot/anno-142-20260227-123430 into main
Some checks failed
CI / build-and-anchors (push) Has been cancelled
Deploy staging+live (annotations) / deploy (push) Has been cancelled
SMOKE / smoke (push) Successful in 21s
Reviewed-on: #146
2026-02-27 15:47:44 +01:00
archicratie-bot
01ae95ab43 anno: apply ticket #144 (archicrat-ia/chapitre-3#p-0-ace27175 type/media)
All checks were successful
CI / build-and-anchors (push) Successful in 2m0s
SMOKE / smoke (push) Successful in 18s
2026-02-27 12:43:16 +00:00
archicratie-bot
0d5821c640 anno: apply ticket #143 (archicrat-ia/chapitre-1#p-1-8a6c18bf type/comment)
All checks were successful
CI / build-and-anchors (push) Successful in 1m54s
SMOKE / smoke (push) Successful in 16s
2026-02-27 12:40:39 +00:00
archicratie-bot
2bcea39558 anno: apply ticket #142 (archicrat-ia/chapitre-1#p-0-8d27a7f5 type/reference)
All checks were successful
CI / build-and-anchors (push) Successful in 1m53s
SMOKE / smoke (push) Successful in 14s
2026-02-27 12:34:32 +00:00
af85970d4a Merge pull request 'chore/fix-build-annotations-index-shards' (#141) from chore/fix-build-annotations-index-shards into main
All checks were successful
CI / build-and-anchors (push) Successful in 1m45s
Deploy staging+live (annotations) / deploy (push) Successful in 1m53s
SMOKE / smoke (push) Successful in 13s
Reviewed-on: #141
2026-02-27 13:21:43 +01:00
210f621487 ci: support shard annotations in checks + endpoint (pageKey inference)
All checks were successful
CI / build-and-anchors (push) Successful in 1m58s
SMOKE / smoke (push) Successful in 13s
2026-02-27 13:13:31 +01:00
8ad960dc69 anno: build-annotations-index supports shard annotations
Some checks failed
SMOKE / smoke (push) Successful in 16s
CI / build-and-anchors (push) Failing after 1m48s
2026-02-27 12:27:35 +01:00
d45a8b285f anno: support shard annotations in annotations-index endpoint
Some checks failed
CI / build-and-anchors (push) Failing after 1m45s
SMOKE / smoke (push) Successful in 15s
2026-02-27 12:09:40 +01:00
b6e04a9138 anno: robust verify for para-index (normalize page keys)
Some checks failed
SMOKE / smoke (push) Successful in 33s
CI / build-and-anchors (push) Failing after 1m53s
2026-02-27 10:20:49 +01:00
dcf1fc2d0b anno: apply ticket #127 (archicrat-ia/chapitre-4#p-11-67c14c09 type/media) 2026-02-27 10:17:06 +01:00
41b0517c6c Merge pull request 'ci: deploy hotpatch-only + full rebuild warmup' (#139) from chore/fix-deploy-hotpatch-stable into main
All checks were successful
CI / build-and-anchors (push) Successful in 1m50s
Deploy staging+live (annotations) / deploy (push) Successful in 2m12s
SMOKE / smoke (push) Successful in 19s
Reviewed-on: #139
2026-02-26 22:00:20 +01:00
6b43eb199d ci: deploy hotpatch-only + full rebuild warmup
All checks were successful
CI / build-and-anchors (push) Successful in 1m46s
SMOKE / smoke (push) Successful in 20s
2026-02-26 21:56:42 +01:00
d40f24e92d Merge pull request 'ci: fix hotpatch (yaml datetime -> json safe)' (#138) from chore/fix-hotpatch-json into main
Some checks failed
CI / build-and-anchors (push) Successful in 1m54s
Deploy staging+live (annotations) / deploy (push) Failing after 5m35s
SMOKE / smoke (push) Successful in 16s
Reviewed-on: #138
2026-02-26 21:32:56 +01:00
480a61b071 ci: fix hotpatch (yaml datetime -> json safe)
All checks were successful
CI / build-and-anchors (push) Successful in 1m45s
SMOKE / smoke (push) Successful in 22s
2026-02-26 21:29:52 +01:00
a5d68d6a7e Merge pull request 'ci: fix deploy workflow (warmup + hotpatch)' (#137) from chore/fix-deploy-warmup into main
Some checks failed
CI / build-and-anchors (push) Successful in 1m38s
Deploy staging+live (annotations) / deploy (push) Failing after 8m16s
SMOKE / smoke (push) Successful in 15s
Reviewed-on: #137
2026-02-26 21:09:21 +01:00
390f2c33e5 ci: fix deploy workflow (warmup + hotpatch)
All checks were successful
CI / build-and-anchors (push) Successful in 2m2s
SMOKE / smoke (push) Successful in 17s
2026-02-26 21:06:01 +01:00
16485dc4a9 Merge pull request 'ci: shard annotations by para + deploy deep-merge hotpatch' (#135) from chore/anno-shard-deepmerge into main
Some checks failed
CI / build-and-anchors (push) Successful in 1m54s
Deploy staging+live (annotations) / deploy (push) Failing after 7m20s
SMOKE / smoke (push) Successful in 13s
Reviewed-on: #135
2026-02-26 19:58:22 +01:00
a43ce5f188 ci: shard annotations by para + deploy deep-merge hotpatch
All checks were successful
CI / build-and-anchors (push) Successful in 1m51s
SMOKE / smoke (push) Successful in 29s
2026-02-26 19:54:46 +01:00
0519ae2dd0 Merge pull request 'ci: fix deploy checkout (no eval) + workflow_dispatch fallback' (#134) from chore/fix-deploy-container-conflict into main
All checks were successful
CI / build-and-anchors (push) Successful in 1m52s
Deploy staging+live (annotations) / deploy (push) Successful in 8m48s
SMOKE / smoke (push) Successful in 19s
Reviewed-on: #134
2026-02-26 18:51:53 +01:00
0d5b790e52 ci: fix deploy checkout (no eval) + workflow_dispatch fallback
All checks were successful
CI / build-and-anchors (push) Successful in 1m50s
SMOKE / smoke (push) Successful in 17s
2026-02-26 18:48:12 +01:00
342e21b9ea Merge pull request 'ci: fix deploy checkout (no eval) + workflow_dispatch fallback' (#133) from chore/fix-deploy-checkout-quote into main
Some checks failed
CI / build-and-anchors (push) Successful in 1m40s
Deploy staging+live (annotations) / deploy (push) Failing after 10m11s
SMOKE / smoke (push) Successful in 16s
Reviewed-on: #133
2026-02-26 18:01:24 +01:00
4dec9e182b ci: fix deploy checkout (no eval) + workflow_dispatch fallback
All checks were successful
CI / build-and-anchors (push) Successful in 1m38s
SMOKE / smoke (push) Successful in 18s
2026-02-26 17:58:58 +01:00
c7ae883c6a Merge pull request 'ci: fix deploy workflow (workflow_dispatch checkout + gate)' (#132) from chore/fix-deploy-workflow into main
Some checks failed
CI / build-and-anchors (push) Successful in 1m37s
Deploy staging+live (annotations) / deploy (push) Failing after 19s
SMOKE / smoke (push) Successful in 17s
Reviewed-on: #132
2026-02-26 17:44:28 +01:00
9b4584f70a ci: fix deploy workflow (workflow_dispatch checkout + gate)
All checks were successful
CI / build-and-anchors (push) Successful in 1m53s
SMOKE / smoke (push) Successful in 19s
2026-02-26 17:39:51 +01:00
7b64fb7401 Merge pull request 'anno: apply ticket #129' (#131) from bot/anno-129-20260226-131740 into main
All checks were successful
CI / build-and-anchors (push) Successful in 1m46s
Deploy staging+live (annotations) / deploy (push) Successful in 22s
SMOKE / smoke (push) Successful in 18s
Reviewed-on: #131
2026-02-26 14:23:46 +01:00
archicratie-bot
57cb23ce8b anno: apply ticket #129 (archicrat-ia/chapitre-4#p-11-67c14c09 type/media)
All checks were successful
CI / build-and-anchors (push) Successful in 1m39s
SMOKE / smoke (push) Successful in 18s
2026-02-26 13:17:43 +00:00
708b87ff35 Merge pull request 'ci: deploy staging+live (annotations) with manual force' (#126) from chore/deploy-staging-live into main
All checks were successful
CI / build-and-anchors (push) Successful in 1m47s
Deploy staging+live (annotations) / deploy (push) Successful in 20s
SMOKE / smoke (push) Successful in 19s
Reviewed-on: #126
2026-02-26 12:41:21 +01:00
577cfd08e8 ci: deploy staging+live (annotations) with manual force
All checks were successful
CI / build-and-anchors (push) Successful in 1m51s
SMOKE / smoke (push) Successful in 15s
2026-02-26 12:40:47 +01:00
de9edbe532 Merge pull request 'ci: fix deploy (install docker compose plugin)' (#125) from chore/fix-deploy-compose into main
All checks were successful
CI / build-and-anchors (push) Successful in 1m39s
Deploy staging+live (annotations) / deploy (push) Successful in 31s
SMOKE / smoke (push) Successful in 17s
Reviewed-on: #125
2026-02-26 11:16:19 +01:00
5e95dc9898 ci: fix deploy (install docker compose plugin)
All checks were successful
CI / build-and-anchors (push) Successful in 1m42s
SMOKE / smoke (push) Successful in 23s
2026-02-26 11:15:53 +01:00
006fec7efd Merge pull request 'ci: auto deploy staging+live (annotations-only)' (#124) from chore/deploy-auto into main
Some checks failed
CI / build-and-anchors (push) Successful in 1m48s
Deploy (staging + live) — annotations / deploy (push) Failing after 20s
SMOKE / smoke (push) Successful in 24s
Reviewed-on: #124
2026-02-26 10:29:50 +01:00
2b612214bb ci: auto deploy staging+live (annotations-only)
All checks were successful
CI / build-and-anchors (push) Successful in 1m55s
SMOKE / smoke (push) Successful in 18s
2026-02-26 10:29:17 +01:00
29a6c349aa Merge pull request 'anno: apply ticket #121' (#122) from bot/anno-121-20260225-191130 into main
All checks were successful
CI / build-and-anchors (push) Successful in 2m18s
SMOKE / smoke (push) Successful in 19s
Reviewed-on: #122
2026-02-26 09:01:52 +01:00
archicratie-bot
33a227c401 anno: apply ticket #121 (archicrat-ia/chapitre-4#p-7-1da4a458 type/media)
All checks were successful
CI / build-and-anchors (push) Successful in 1m46s
SMOKE / smoke (push) Successful in 21s
2026-02-25 19:11:36 +00:00
396ad4df7c Merge pull request 'anno: apply ticket #115' (#120) from bot/anno-115-20260225-185830 into main
All checks were successful
CI / build-and-anchors (push) Successful in 1m45s
SMOKE / smoke (push) Successful in 15s
Reviewed-on: #120
2026-02-25 20:00:04 +01:00
archicratie-bot
0b39427090 anno: apply ticket #115 (archicrat-ia/chapitre-4#p-2-31b12529 type/media)
All checks were successful
CI / build-and-anchors (push) Successful in 1m42s
SMOKE / smoke (push) Successful in 15s
2026-02-25 18:58:34 +00:00
8fcb18cb46 Merge pull request 'ci: anno apply workflow builds dist for strict verify' (#119) from chore/fix-anno-verify-build2 into main
All checks were successful
CI / build-and-anchors (push) Successful in 1m49s
SMOKE / smoke (push) Successful in 20s
Reviewed-on: #119
2026-02-25 19:51:24 +01:00
d03fc519de ci: anno apply workflow builds dist for strict verify
All checks were successful
CI / build-and-anchors (push) Successful in 1m50s
SMOKE / smoke (push) Successful in 19s
2026-02-25 19:50:47 +01:00
97dd3797d6 Merge pull request 'ci: anno apply workflow builds dist for strict verify' (#118) from chore/fix-anno-verify-build into main
All checks were successful
CI / build-and-anchors (push) Successful in 1m35s
SMOKE / smoke (push) Successful in 20s
Reviewed-on: #118
2026-02-25 19:31:45 +01:00
6c7b7ab6a0 ci: anno apply workflow builds dist for strict verify
All checks were successful
CI / build-and-anchors (push) Successful in 1m59s
SMOKE / smoke (push) Successful in 23s
2026-02-25 19:29:36 +01:00
105dfe1b5b Merge pull request 'ci: add apply-annotation-ticket script for anno bot' (#117) from chore/add-apply-annotation-ticket into main
All checks were successful
CI / build-and-anchors (push) Successful in 1m47s
SMOKE / smoke (push) Successful in 20s
Reviewed-on: #117
2026-02-25 19:02:33 +01:00
82f6453538 ci: add apply-annotation-ticket script for anno bot
All checks were successful
CI / build-and-anchors (push) Successful in 1m43s
SMOKE / smoke (push) Successful in 17s
2026-02-25 19:02:03 +01:00
fe862102d3 Merge pull request 'ci: fix anno apply/reject workflows (yaml valid)' (#116) from chore/fix-anno-workflows into main
All checks were successful
CI / build-and-anchors (push) Successful in 1m49s
SMOKE / smoke (push) Successful in 17s
Reviewed-on: #116
2026-02-25 18:28:36 +01:00
6ef538a0c4 ci: fix anno apply/reject workflows (yaml valid)
All checks were successful
CI / build-and-anchors (push) Successful in 1m36s
SMOKE / smoke (push) Successful in 20s
2026-02-25 18:26:03 +01:00
689612ff7f Merge pull request 'anno-workflows' (#114) from chore/anno-workflows into main
All checks were successful
CI / build-and-anchors (push) Successful in 1m52s
SMOKE / smoke (push) Successful in 18s
Reviewed-on: #114
2026-02-25 17:51:16 +01:00
7b135a4707 ci: add anno apply bot workflow
All checks were successful
CI / build-and-anchors (push) Successful in 2m0s
SMOKE / smoke (push) Successful in 16s
2026-02-25 17:50:45 +01:00
0cb8a54195 Merge pull request 'fix: remove specimen media from prologue annotations' (#108) from chore/Fix_whoami into main
Some checks failed
CI / build-and-anchors (push) Has started running
SMOKE / smoke (push) Has been cancelled
Reviewed-on: #108
2026-02-23 12:37:39 +01:00
a7a333397d fix: remove specimen media from prologue annotations
All checks were successful
CI / build-and-anchors (push) Successful in 1m36s
SMOKE / smoke (push) Successful in 15s
2026-02-23 12:33:55 +01:00
eb1d444776 Merge pull request 'fix: …' (#107) from chore/Fix_whoami into main
Some checks failed
CI / build-and-anchors (push) Failing after 1m45s
SMOKE / smoke (push) Successful in 15s
Reviewed-on: #107
2026-02-23 12:17:21 +01:00
68c3416594 fix: …
Some checks failed
CI / build-and-anchors (push) Failing after 2m6s
SMOKE / smoke (push) Successful in 13s
2026-02-23 12:07:01 +01:00
ae809e0152 Merge pull request 'docs: add pro runbooks (deploy/edge/public_site) + annotations spec + start-here v2' (#106) from fix/canonical-public-site into main
All checks were successful
CI / build-and-anchors (push) Successful in 1m25s
SMOKE / smoke (push) Successful in 13s
Reviewed-on: #106
2026-02-21 15:36:07 +01:00
9bbebf5886 Merge pull request 'fix(seo): enforce PUBLIC_SITE at docker build (canonical/sitemap) + set per blue/green' (#105) from fix/canonical-public-site into main
All checks were successful
CI / build-and-anchors (push) Successful in 1m55s
SMOKE / smoke (push) Successful in 23s
Reviewed-on: #105
2026-02-21 12:32:08 +01:00
261 changed files with 70340 additions and 14156 deletions

View File

@@ -3,7 +3,7 @@ name: "Correction paragraphe"
about: "Proposer une correction ciblée (un paragraphe) avec justification." about: "Proposer une correction ciblée (un paragraphe) avec justification."
--- ---
## Chemin (ex: /archicratie/prologue/) ## Chemin (ex: /archicrat-ia/prologue/)
<!-- obligatoire --> <!-- obligatoire -->
/... /...

View File

@@ -3,7 +3,7 @@ name: "Vérification factuelle / sources"
about: "Signaler une assertion à sourcer ou à corriger (preuves, références)." about: "Signaler une assertion à sourcer ou à corriger (preuves, références)."
--- ---
## Chemin (ex: /archicratie/prologue/) ## Chemin (ex: /archicrat-ia/prologue/)
<!-- obligatoire --> <!-- obligatoire -->
/... /...

View File

@@ -0,0 +1,450 @@
name: Anno Apply (PR)
on:
issues:
types: [labeled]
workflow_dispatch:
inputs:
issue:
description: "Issue number to apply"
required: true
env:
NODE_OPTIONS: --dns-result-order=ipv4first
defaults:
run:
shell: bash
concurrency:
group: anno-apply-${{ github.event.issue.number || github.event.issue.index || inputs.issue || 'manual' }}
cancel-in-progress: true
jobs:
apply-approved:
runs-on: mac-ci
container:
image: mcr.microsoft.com/devcontainers/javascript-node:22-bookworm
steps:
- name: Tools sanity
run: |
set -euo pipefail
git --version
node --version
npm --version
- name: Derive context (event.json / workflow_dispatch)
env:
INPUT_ISSUE: ${{ inputs.issue }}
FORGE_API: ${{ vars.FORGE_API || vars.FORGE_BASE || vars.FORGE_BASE_URL }}
run: |
set -euo pipefail
export EVENT_JSON="/var/run/act/workflow/event.json"
test -f "$EVENT_JSON" || { echo "Missing $EVENT_JSON"; exit 1; }
node --input-type=module - <<'NODE' > /tmp/anno.env
import fs from "node:fs";
const ev = JSON.parse(fs.readFileSync(process.env.EVENT_JSON, "utf8"));
const repoObj = ev?.repository || {};
const cloneUrl =
repoObj?.clone_url ||
(repoObj?.html_url ? (repoObj.html_url.replace(/\/$/,"") + ".git") : "");
if (!cloneUrl) throw new Error("No repository clone_url/html_url in event.json");
let owner =
repoObj?.owner?.login ||
repoObj?.owner?.username ||
(repoObj?.full_name ? repoObj.full_name.split("/")[0] : "");
let repo =
repoObj?.name ||
(repoObj?.full_name ? repoObj.full_name.split("/")[1] : "");
if (!owner || !repo) {
const m = cloneUrl.match(/[:/](?<o>[^/]+)\/(?<r>[^/]+?)(?:\.git)?$/);
if (m?.groups) {
owner = owner || m.groups.o;
repo = repo || m.groups.r;
}
}
if (!owner || !repo) throw new Error("Cannot infer owner/repo");
const defaultBranch = repoObj?.default_branch || "main";
const issueNumber =
ev?.issue?.number ||
ev?.issue?.index ||
(process.env.INPUT_ISSUE ? Number(process.env.INPUT_ISSUE) : 0);
if (!issueNumber || !Number.isFinite(Number(issueNumber))) {
throw new Error("No issue number in event.json or workflow_dispatch input");
}
let labelName = "workflow_dispatch";
const lab = ev?.label;
if (typeof lab === "string") labelName = lab;
else if (lab && typeof lab === "object" && typeof lab.name === "string") labelName = lab.name;
else if (ev?.label?.name) labelName = ev.label.name;
const u = new URL(cloneUrl);
const origin = u.origin;
const apiBase = (process.env.FORGE_API && String(process.env.FORGE_API).trim())
? String(process.env.FORGE_API).trim().replace(/\/+$/,"")
: origin;
function sh(s) { return JSON.stringify(String(s)); }
process.stdout.write([
`CLONE_URL=${sh(cloneUrl)}`,
`OWNER=${sh(owner)}`,
`REPO=${sh(repo)}`,
`DEFAULT_BRANCH=${sh(defaultBranch)}`,
`ISSUE_NUMBER=${sh(issueNumber)}`,
`LABEL_NAME=${sh(labelName)}`,
`API_BASE=${sh(apiBase)}`
].join("\n") + "\n");
NODE
echo "context:"
sed -n '1,120p' /tmp/anno.env
- name: Early gate (label event fast-skip, but tolerant)
run: |
set -euo pipefail
source /tmp/anno.env
echo "event label = $LABEL_NAME"
if [[ "$LABEL_NAME" != "state/approved" && "$LABEL_NAME" != "workflow_dispatch" && "$LABEL_NAME" != "" && "$LABEL_NAME" != "[object Object]" ]]; then
echo "label=$LABEL_NAME => skip early"
echo "SKIP=1" >> /tmp/anno.env
echo "SKIP_REASON=\"label_not_approved_event\"" >> /tmp/anno.env
exit 0
fi
echo "continue to API gating (issue=$ISSUE_NUMBER)"
- name: Fetch issue + hard gate on labels + Type
env:
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
run: |
set -euo pipefail
source /tmp/anno.env
[[ "${SKIP:-0}" != "1" ]] || { echo "skipped"; exit 0; }
test -n "${FORGE_TOKEN:-}" || { echo "Missing secret FORGE_TOKEN"; exit 1; }
curl -fsS \
-H "Authorization: token $FORGE_TOKEN" \
-H "Accept: application/json" \
"$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE_NUMBER" \
-o /tmp/issue.json
node --input-type=module - <<'NODE' >> /tmp/anno.env
import fs from "node:fs";
const issue = JSON.parse(fs.readFileSync("/tmp/issue.json", "utf8"));
const body = String(issue.body || "").replace(/\r\n/g, "\n");
const labels = Array.isArray(issue.labels)
? issue.labels.map(l => String(l.name || "")).filter(Boolean)
: [];
const hasApproved = labels.includes("state/approved");
function pickLine(key) {
const re = new RegExp(`^\\s*${key}\\s*:\\s*([^\\n\\r]+)`, "mi");
const m = body.match(re);
return m ? m[1].trim() : "";
}
const typeRaw = pickLine("Type");
const type = String(typeRaw || "").trim().toLowerCase();
const allowedAnno = new Set(["type/media", "type/reference", "type/comment"]);
const proposerTypes = new Set(["type/correction", "type/fact-check"]);
const out = [];
out.push(`ISSUE_TYPE=${JSON.stringify(type)}`);
if (!hasApproved) {
out.push(`SKIP=1`);
out.push(`SKIP_REASON=${JSON.stringify("not_approved_label_present")}`);
process.stdout.write(out.join("\n") + "\n");
process.exit(0);
}
if (!type) {
out.push(`SKIP=1`);
out.push(`SKIP_REASON=${JSON.stringify("missing_type")}`);
} else if (allowedAnno.has(type)) {
// proceed
} else if (proposerTypes.has(type)) {
out.push(`SKIP=1`);
out.push(`SKIP_REASON=${JSON.stringify("proposer_type:" + type)}`);
} else {
out.push(`SKIP=1`);
out.push(`SKIP_REASON=${JSON.stringify("unsupported_type:" + type)}`);
}
process.stdout.write(out.join("\n") + "\n");
NODE
echo "gating result:"
grep -E '^(ISSUE_TYPE|SKIP|SKIP_REASON)=' /tmp/anno.env || true
- name: Comment issue if skipped (unsupported / missing Type only)
if: ${{ always() }}
env:
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
run: |
set -euo pipefail
source /tmp/anno.env || true
[[ "${SKIP:-0}" == "1" ]] || exit 0
if [[ "${SKIP_REASON:-}" == "not_approved_label_present" || "${SKIP_REASON:-}" == "label_not_approved_event" ]]; then
echo "skip reason=${SKIP_REASON} -> no comment"
exit 0
fi
if [[ "${SKIP_REASON:-}" == proposer_type:* ]]; then
echo "proposer ticket detected -> anno stays silent"
exit 0
fi
test -n "${FORGE_TOKEN:-}" || exit 0
REASON="${SKIP_REASON:-}"
TYPE="${ISSUE_TYPE:-}"
if [[ "$REASON" == unsupported_type:* ]]; then
MSG="Ticket #${ISSUE_NUMBER} ignored: unsupported Type (${TYPE}). Supported types: type/media, type/reference, type/comment."
else
MSG="Ticket #${ISSUE_NUMBER} ignored: missing or unreadable 'Type:'. Expected: type/media|type/reference|type/comment"
fi
PAYLOAD="$(node --input-type=module -e 'console.log(JSON.stringify({body: process.argv[1] || ""}))' "$MSG")"
curl -fsS -X POST \
-H "Authorization: token $FORGE_TOKEN" \
-H "Content-Type: application/json" \
"$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE_NUMBER/comments" \
--data-binary "$PAYLOAD"
- name: Checkout default branch
run: |
set -euo pipefail
source /tmp/anno.env
[[ "${SKIP:-0}" != "1" ]] || { echo "skipped"; exit 0; }
rm -rf .git
git init -q
git remote add origin "$CLONE_URL"
git fetch --depth 1 origin "$DEFAULT_BRANCH"
git -c advice.detachedHead=false checkout -q FETCH_HEAD
git log -1 --oneline
- name: Install deps
run: |
set -euo pipefail
source /tmp/anno.env
[[ "${SKIP:-0}" != "1" ]] || { echo "skipped"; exit 0; }
npm ci --no-audit --no-fund
- name: Check apply script exists
run: |
set -euo pipefail
source /tmp/anno.env
[[ "${SKIP:-0}" != "1" ]] || { echo "skipped"; exit 0; }
test -f scripts/apply-annotation-ticket.mjs || {
echo "missing scripts/apply-annotation-ticket.mjs on $DEFAULT_BRANCH"
ls -la scripts | sed -n '1,200p' || true
exit 1
}
- name: Build dist (needed for --verify)
run: |
set -euo pipefail
source /tmp/anno.env
[[ "${SKIP:-0}" != "1" ]] || { echo "skipped"; exit 0; }
npm run build
test -f dist/para-index.json || {
echo "missing dist/para-index.json after build"
ls -la dist | sed -n '1,200p' || true
exit 1
}
echo "dist/para-index.json present"
- name: Apply ticket on bot branch (strict+verify, commit)
continue-on-error: true
env:
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
BOT_GIT_NAME: ${{ secrets.BOT_GIT_NAME }}
BOT_GIT_EMAIL: ${{ secrets.BOT_GIT_EMAIL }}
run: |
set -euo pipefail
source /tmp/anno.env
[[ "${SKIP:-0}" != "1" ]] || { echo "skipped"; exit 0; }
test -d .git || { echo "not a git repo (checkout failed)"; echo "APPLY_RC=90" >> /tmp/anno.env; exit 0; }
test -n "${FORGE_TOKEN:-}" || { echo "Missing secret FORGE_TOKEN"; exit 1; }
git config user.name "${BOT_GIT_NAME:-archicratie-bot}"
git config user.email "${BOT_GIT_EMAIL:-bot@archicratie.local}"
START_SHA="$(git rev-parse HEAD)"
TS="$(date -u +%Y%m%d-%H%M%S)"
BR="bot/anno-${ISSUE_NUMBER}-${TS}"
echo "BRANCH=$BR" >> /tmp/anno.env
git checkout -b "$BR"
export FORGE_API="$API_BASE"
export GITEA_OWNER="$OWNER"
export GITEA_REPO="$REPO"
LOG="/tmp/apply.log"
set +e
node scripts/apply-annotation-ticket.mjs "$ISSUE_NUMBER" --strict --verify --commit >"$LOG" 2>&1
RC=$?
set -e
echo "APPLY_RC=$RC" >> /tmp/anno.env
echo "== apply log (tail) =="
tail -n 180 "$LOG" || true
END_SHA="$(git rev-parse HEAD)"
if [[ "$RC" -ne 0 ]]; then
echo "NOOP=0" >> /tmp/anno.env
exit 0
fi
if [[ "$START_SHA" == "$END_SHA" ]]; then
echo "NOOP=1" >> /tmp/anno.env
else
echo "NOOP=0" >> /tmp/anno.env
echo "END_SHA=$END_SHA" >> /tmp/anno.env
fi
- name: Comment issue on failure (strict/verify/etc)
if: ${{ always() }}
env:
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
run: |
set -euo pipefail
source /tmp/anno.env || true
[[ "${SKIP:-0}" != "1" ]] || { echo "skipped"; exit 0; }
RC="${APPLY_RC:-0}"
if [[ "$RC" == "0" ]]; then
echo "no failure detected"
exit 0
fi
test -n "${FORGE_TOKEN:-}" || exit 0
if [[ -f /tmp/apply.log ]]; then
BODY="$(tail -n 160 /tmp/apply.log | sed 's/\r$//')"
else
BODY="(no apply log found)"
fi
MSG="apply-annotation-ticket failed (rc=${RC}).\n\n\`\`\`\n${BODY}\n\`\`\`\n"
PAYLOAD="$(node --input-type=module -e 'console.log(JSON.stringify({body: process.argv[1] || ""}))' "$MSG")"
curl -fsS -X POST \
-H "Authorization: token $FORGE_TOKEN" \
-H "Content-Type: application/json" \
"$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE_NUMBER/comments" \
--data-binary "$PAYLOAD"
- name: Push bot branch
if: ${{ always() }}
env:
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
run: |
set -euo pipefail
source /tmp/anno.env || true
[[ "${SKIP:-0}" != "1" ]] || exit 0
[[ "${APPLY_RC:-0}" == "0" ]] || { echo "apply failed -> skip push"; exit 0; }
[[ "${NOOP:-0}" == "0" ]] || { echo "no-op -> skip push"; exit 0; }
test -d .git || { echo "no git repo -> skip push"; exit 0; }
AUTH_URL="$(node --input-type=module -e '
const [clone, tok] = process.argv.slice(1);
const u = new URL(clone);
u.username = "oauth2";
u.password = tok;
console.log(u.toString());
' "$CLONE_URL" "$FORGE_TOKEN")"
git remote set-url origin "$AUTH_URL"
git push -u origin "$BRANCH"
- name: Create PR + comment issue
if: ${{ always() }}
env:
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
run: |
set -euo pipefail
source /tmp/anno.env || true
[[ "${SKIP:-0}" != "1" ]] || exit 0
[[ "${APPLY_RC:-0}" == "0" ]] || { echo "apply failed -> skip PR"; exit 0; }
[[ "${NOOP:-0}" == "0" ]] || { echo "no-op -> skip PR"; exit 0; }
PR_TITLE="anno: apply ticket #${ISSUE_NUMBER}"
PR_BODY="PR auto depuis ticket #${ISSUE_NUMBER} (state/approved).\n\n- Branche: ${BRANCH}\n- Commit: ${END_SHA}\n\nMerge si CI OK."
PR_PAYLOAD="$(node --input-type=module -e '
const [title, body, base, head] = process.argv.slice(1);
console.log(JSON.stringify({ title, body, base, head, allow_maintainer_edit: true }));
' "$PR_TITLE" "$PR_BODY" "$DEFAULT_BRANCH" "${OWNER}:${BRANCH}")"
PR_JSON="$(curl -fsS -X POST \
-H "Authorization: token $FORGE_TOKEN" \
-H "Content-Type: application/json" \
"$API_BASE/api/v1/repos/$OWNER/$REPO/pulls" \
--data-binary "$PR_PAYLOAD")"
PR_URL="$(node --input-type=module -e '
const pr = JSON.parse(process.argv[1] || "{}");
console.log(pr.html_url || pr.url || "");
' "$PR_JSON")"
test -n "$PR_URL" || { echo "PR URL missing. Raw: $PR_JSON"; exit 1; }
MSG="PR created for ticket #${ISSUE_NUMBER}: ${PR_URL}"
C_PAYLOAD="$(node --input-type=module -e 'console.log(JSON.stringify({body: process.argv[1] || ""}))' "$MSG")"
curl -fsS -X POST \
-H "Authorization: token $FORGE_TOKEN" \
-H "Content-Type: application/json" \
"$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE_NUMBER/comments" \
--data-binary "$C_PAYLOAD"
echo "PR: $PR_URL"
- name: Finalize (fail job if apply failed)
if: ${{ always() }}
run: |
set -euo pipefail
source /tmp/anno.env || true
[[ "${SKIP:-0}" != "1" ]] || { echo "skipped"; exit 0; }
RC="${APPLY_RC:-0}"
if [[ "$RC" != "0" ]]; then
echo "apply failed (rc=$RC)"
exit "$RC"
fi
echo "apply ok"

View File

@@ -0,0 +1,181 @@
name: Anno Reject (close issue)
on:
issues:
types: [labeled]
workflow_dispatch:
inputs:
issue:
description: "Issue number to reject/close"
required: true
env:
NODE_OPTIONS: --dns-result-order=ipv4first
defaults:
run:
shell: bash
concurrency:
group: anno-reject-${{ github.event.issue.number || github.event.issue.index || inputs.issue || 'manual' }}
cancel-in-progress: true
jobs:
reject:
runs-on: mac-ci
container:
image: mcr.microsoft.com/devcontainers/javascript-node:22-bookworm
steps:
- name: Tools sanity
run: |
set -euo pipefail
node --version
- name: Derive context (event.json / workflow_dispatch)
env:
INPUT_ISSUE: ${{ inputs.issue }}
FORGE_API: ${{ vars.FORGE_API || vars.FORGE_BASE || vars.FORGE_BASE_URL }}
run: |
set -euo pipefail
export EVENT_JSON="/var/run/act/workflow/event.json"
test -f "$EVENT_JSON" || { echo "❌ Missing $EVENT_JSON"; exit 1; }
node --input-type=module - <<'NODE' > /tmp/reject.env
import fs from "node:fs";
const ev = JSON.parse(fs.readFileSync(process.env.EVENT_JSON, "utf8"));
const repoObj = ev?.repository || {};
const cloneUrl =
repoObj?.clone_url ||
(repoObj?.html_url ? (repoObj.html_url.replace(/\/$/,"") + ".git") : "");
let owner =
repoObj?.owner?.login ||
repoObj?.owner?.username ||
(repoObj?.full_name ? repoObj.full_name.split("/")[0] : "");
let repo =
repoObj?.name ||
(repoObj?.full_name ? repoObj.full_name.split("/")[1] : "");
if ((!owner || !repo) && cloneUrl) {
const m = cloneUrl.match(/[:/](?<o>[^/]+)\/(?<r>[^/]+?)(?:\.git)?$/);
if (m?.groups) { owner = owner || m.groups.o; repo = repo || m.groups.r; }
}
if (!owner || !repo) throw new Error("Cannot infer owner/repo");
const issueNumber =
ev?.issue?.number ||
ev?.issue?.index ||
(process.env.INPUT_ISSUE ? Number(process.env.INPUT_ISSUE) : 0);
if (!issueNumber || !Number.isFinite(Number(issueNumber))) {
throw new Error("No issue number in event.json or workflow_dispatch input");
}
// label name: best-effort (non-bloquant)
let labelName = "workflow_dispatch";
const lab = ev?.label;
if (typeof lab === "string") labelName = lab;
else if (lab && typeof lab === "object" && typeof lab.name === "string") labelName = lab.name;
let apiBase = "";
if (process.env.FORGE_API && String(process.env.FORGE_API).trim()) {
apiBase = String(process.env.FORGE_API).trim().replace(/\/+$/,"");
} else if (cloneUrl) {
apiBase = new URL(cloneUrl).origin;
} else {
apiBase = "";
}
function sh(s){ return JSON.stringify(String(s)); }
process.stdout.write([
`OWNER=${sh(owner)}`,
`REPO=${sh(repo)}`,
`ISSUE_NUMBER=${sh(issueNumber)}`,
`LABEL_NAME=${sh(labelName)}`,
`API_BASE=${sh(apiBase)}`
].join("\n") + "\n");
NODE
echo "✅ context:"
sed -n '1,120p' /tmp/reject.env
- name: Early gate (fast-skip, tolerant)
run: |
set -euo pipefail
source /tmp/reject.env
echo " event label = $LABEL_NAME"
if [[ "$LABEL_NAME" != "state/rejected" && "$LABEL_NAME" != "workflow_dispatch" && "$LABEL_NAME" != "" && "$LABEL_NAME" != "[object Object]" ]]; then
echo " label=$LABEL_NAME => skip early"
echo "SKIP=1" >> /tmp/reject.env
echo "SKIP_REASON=\"label_not_rejected_event\"" >> /tmp/reject.env
exit 0
fi
- name: Comment + close (only if label state/rejected is PRESENT now, and no conflict)
env:
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
run: |
set -euo pipefail
source /tmp/reject.env
[[ "${SKIP:-0}" != "1" ]] || { echo " skipped"; exit 0; }
test -n "${FORGE_TOKEN:-}" || { echo "❌ Missing secret FORGE_TOKEN"; exit 1; }
test -n "${API_BASE:-}" || { echo "❌ Missing API_BASE"; exit 1; }
curl -fsS \
-H "Authorization: token $FORGE_TOKEN" \
-H "Accept: application/json" \
"$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE_NUMBER" \
-o /tmp/reject.issue.json
node --input-type=module - <<'NODE' > /tmp/reject.flags
import fs from "node:fs";
const issue = JSON.parse(fs.readFileSync("/tmp/reject.issue.json","utf8"));
const labels = Array.isArray(issue.labels) ? issue.labels.map(l => String(l.name || "")).filter(Boolean) : [];
const hasApproved = labels.includes("state/approved");
const hasRejected = labels.includes("state/rejected");
process.stdout.write(`HAS_APPROVED=${hasApproved ? "1":"0"}\nHAS_REJECTED=${hasRejected ? "1":"0"}\n`);
NODE
source /tmp/reject.flags
# Do nothing unless state/rejected is truly present now (anti payload weird)
if [[ "${HAS_REJECTED:-0}" != "1" ]]; then
echo " state/rejected not present -> skip"
exit 0
fi
if [[ "${HAS_APPROVED:-0}" == "1" && "${HAS_REJECTED:-0}" == "1" ]]; then
MSG="⚠️ Conflit d'état sur le ticket #${ISSUE_NUMBER} : labels **state/approved** et **state/rejected** présents.\n\n➡ Action manuelle requise : retirer l'un des deux labels avant relance."
PAYLOAD="$(node --input-type=module -e 'console.log(JSON.stringify({body: process.argv[1]||""}))' "$MSG")"
curl -fsS -X POST \
-H "Authorization: token $FORGE_TOKEN" \
-H "Content-Type: application/json" \
"$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE_NUMBER/comments" \
--data-binary "$PAYLOAD"
echo " conflict => stop"
exit 0
fi
MSG="❌ Ticket #${ISSUE_NUMBER} refusé (label state/rejected)."
PAYLOAD="$(node --input-type=module -e 'console.log(JSON.stringify({body: process.argv[1]||""}))' "$MSG")"
curl -fsS -X POST \
-H "Authorization: token $FORGE_TOKEN" \
-H "Content-Type: application/json" \
"$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE_NUMBER/comments" \
--data-binary "$PAYLOAD"
curl -fsS -X PATCH \
-H "Authorization: token $FORGE_TOKEN" \
-H "Content-Type: application/json" \
"$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE_NUMBER" \
--data-binary '{"state":"closed"}'
echo "✅ rejected+closed"

View File

@@ -4,22 +4,37 @@ on:
issues: issues:
types: [opened, edited] types: [opened, edited]
concurrency:
group: auto-label-${{ github.event.issue.number || github.event.issue.index || 'manual' }}
cancel-in-progress: true
jobs: jobs:
label: label:
runs-on: ubuntu-latest runs-on: mac-ci
container:
image: mcr.microsoft.com/devcontainers/javascript-node:22-bookworm
steps: steps:
- name: Apply labels from Type/State/Category - name: Apply labels from Type/State/Category
env: env:
FORGE_BASE: ${{ vars.FORGE_API || vars.FORGE_BASE }} # IMPORTANT: préfère FORGE_BASE (LAN) si défini, sinon FORGE_API
FORGE_BASE: ${{ vars.FORGE_BASE || vars.FORGE_API || vars.FORGE_API_BASE }}
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }} FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
REPO_FULL: ${{ gitea.repository }} REPO_FULL: ${{ gitea.repository }}
EVENT_PATH: ${{ github.event_path }} EVENT_PATH: ${{ github.event_path }}
NODE_OPTIONS: --dns-result-order=ipv4first
run: | run: |
python3 - <<'PY' python3 - <<'PY'
import json, os, re, urllib.request, urllib.error import json, os, re, time, urllib.request, urllib.error, socket
forge = (os.environ.get("FORGE_BASE") or "").rstrip("/")
if not forge:
raise SystemExit("Missing FORGE_BASE/FORGE_API repo variable (e.g. http://192.168.1.20:3000)")
token = os.environ.get("FORGE_TOKEN") or ""
if not token:
raise SystemExit("Missing secret FORGE_TOKEN")
forge = os.environ["FORGE_BASE"].rstrip("/")
token = os.environ["FORGE_TOKEN"]
owner, repo = os.environ["REPO_FULL"].split("/", 1) owner, repo = os.environ["REPO_FULL"].split("/", 1)
event_path = os.environ["EVENT_PATH"] event_path = os.environ["EVENT_PATH"]
@@ -46,12 +61,9 @@ jobs:
print("PARSED:", {"Type": t, "State": s, "Category": c}) print("PARSED:", {"Type": t, "State": s, "Category": c})
# 1) explicite depuis le body # 1) explicite depuis le body
if t: if t: desired.add(t)
desired.add(t) if s: desired.add(s)
if s: if c: desired.add(c)
desired.add(s)
if c:
desired.add(c)
# 2) fallback depuis le titre si Type absent # 2) fallback depuis le titre si Type absent
if not t: if not t:
@@ -76,42 +88,56 @@ jobs:
"Authorization": f"token {token}", "Authorization": f"token {token}",
"Accept": "application/json", "Accept": "application/json",
"Content-Type": "application/json", "Content-Type": "application/json",
"User-Agent": "archicratie-auto-label/1.0", "User-Agent": "archicratie-auto-label/1.1",
} }
def jreq(method, url, payload=None): def jreq(method, url, payload=None, timeout=60, retries=4, backoff=2.0):
data = None if payload is None else json.dumps(payload).encode("utf-8") data = None if payload is None else json.dumps(payload).encode("utf-8")
req = urllib.request.Request(url, data=data, headers=headers, method=method) last_err = None
try: for i in range(retries):
with urllib.request.urlopen(req, timeout=20) as r: req = urllib.request.Request(url, data=data, headers=headers, method=method)
b = r.read() try:
return json.loads(b.decode("utf-8")) if b else None with urllib.request.urlopen(req, timeout=timeout) as r:
except urllib.error.HTTPError as e: b = r.read()
b = e.read().decode("utf-8", errors="replace") return json.loads(b.decode("utf-8")) if b else None
raise RuntimeError(f"HTTP {e.code} {method} {url}\n{b}") from e except urllib.error.HTTPError as e:
b = e.read().decode("utf-8", errors="replace")
raise RuntimeError(f"HTTP {e.code} {method} {url}\n{b}") from e
except (TimeoutError, socket.timeout, urllib.error.URLError) as e:
last_err = e
# retry only on network/timeout
time.sleep(backoff * (i + 1))
raise RuntimeError(f"Network/timeout after retries: {method} {url}\n{last_err}")
# labels repo # labels repo
labels = jreq("GET", f"{api}/repos/{owner}/{repo}/labels?limit=1000") or [] labels = jreq("GET", f"{api}/repos/{owner}/{repo}/labels?limit=1000", timeout=60) or []
name_to_id = {x.get("name"): x.get("id") for x in labels} name_to_id = {x.get("name"): x.get("id") for x in labels}
missing = [x for x in desired if x not in name_to_id] missing = [x for x in desired if x not in name_to_id]
if missing: if missing:
raise SystemExit("Missing labels in repo: " + ", ".join(sorted(missing))) raise SystemExit("Missing labels in repo: " + ", ".join(sorted(missing)))
wanted_ids = [name_to_id[x] for x in desired] wanted_ids = sorted({int(name_to_id[x]) for x in desired})
# labels actuels de l'issue # labels actuels de l'issue
current = jreq("GET", f"{api}/repos/{owner}/{repo}/issues/{number}/labels") or [] current = jreq("GET", f"{api}/repos/{owner}/{repo}/issues/{number}/labels", timeout=60) or []
current_ids = {x.get("id") for x in current if x.get("id") is not None} current_ids = {int(x.get("id")) for x in current if x.get("id") is not None}
final_ids = sorted(current_ids.union(wanted_ids)) final_ids = sorted(current_ids.union(wanted_ids))
# set labels = union (n'enlève rien) # Replace labels = union (n'enlève rien)
url = f"{api}/repos/{owner}/{repo}/issues/{number}/labels" url = f"{api}/repos/{owner}/{repo}/issues/{number}/labels"
try:
jreq("PUT", url, {"labels": final_ids}) # IMPORTANT: on n'envoie JAMAIS une liste brute ici (ça a causé le 422)
except Exception: jreq("PUT", url, {"labels": final_ids}, timeout=90, retries=4)
jreq("PUT", url, final_ids)
# vérif post-apply (anti "timeout mais appliqué")
post = jreq("GET", f"{api}/repos/{owner}/{repo}/issues/{number}/labels", timeout=60) or []
post_ids = {int(x.get("id")) for x in post if x.get("id") is not None}
missing_ids = [i for i in wanted_ids if i not in post_ids]
if missing_ids:
raise RuntimeError(f"Labels not applied after PUT (missing ids): {missing_ids}")
print(f"OK labels #{number}: {sorted(desired)}") print(f"OK labels #{number}: {sorted(desired)}")
PY PY

View File

@@ -3,7 +3,7 @@ name: CI
on: on:
push: push:
pull_request: pull_request:
branches: [master] branches: [main]
workflow_dispatch: workflow_dispatch:
env: env:
@@ -15,7 +15,7 @@ defaults:
jobs: jobs:
build-and-anchors: build-and-anchors:
runs-on: ubuntu-latest runs-on: mac-ci
container: container:
image: mcr.microsoft.com/devcontainers/javascript-node:22-bookworm image: mcr.microsoft.com/devcontainers/javascript-node:22-bookworm
@@ -79,22 +79,7 @@ jobs:
set -euo pipefail set -euo pipefail
npm ci npm ci
- name: Inline scripts syntax check - name: Full test suite (CI=1)
run: | run: |
set -euo pipefail set -euo pipefail
node scripts/check-inline-js.mjs npm run ci
- name: Build (includes postbuild injection + pagefind)
run: |
set -euo pipefail
npm run build
- name: Anchors contract
run: |
set -euo pipefail
npm run test:anchors
- name: Verify anchor aliases injected in dist
run: |
set -euo pipefail
node scripts/verify-anchor-aliases-in-dist.mjs

View File

@@ -1,103 +0,0 @@
name: CI
on:
push: {}
pull_request:
branches: ["master"]
workflow_dispatch: {}
env:
NODE_OPTIONS: --dns-result-order=ipv4first
defaults:
run:
shell: bash
jobs:
build-and-anchors:
runs-on: ubuntu-latest
container:
image: mcr.microsoft.com/devcontainers/javascript-node:22-bookworm
steps:
- name: Tools sanity
run: |
set -euo pipefail
git --version
node --version
npm --version
npm ping --registry=https://registry.npmjs.org
- name: Checkout (from event.json, no external actions)
run: |
set -euo pipefail
EVENT_JSON="/var/run/act/workflow/event.json"
test -f "$EVENT_JSON" || (echo "❌ Missing $EVENT_JSON" && exit 1)
eval "$(node - <<'NODE'
import fs from "node:fs";
const ev = JSON.parse(fs.readFileSync("/var/run/act/workflow/event.json","utf8"));
const repo =
ev?.repository?.clone_url ||
(ev?.repository?.html_url ? (ev.repository.html_url.replace(/\/$/,'') + ".git") : "");
const sha =
ev?.after ||
ev?.pull_request?.head?.sha ||
ev?.head_commit?.id ||
ev?.sha ||
"";
if (!repo) { console.error("No repository.clone_url/html_url in event.json"); process.exit(1); }
if (!sha) { console.error("No sha/after/pull_request.head.sha in event.json"); process.exit(1); }
console.log(`REPO_URL=${JSON.stringify(repo)}`);
console.log(`SHA=${JSON.stringify(sha)}`);
NODE
)"
echo "Repo URL: $REPO_URL"
echo "SHA: $SHA"
rm -rf .git
git init
git remote add origin "$REPO_URL"
git fetch --depth 1 origin "$SHA"
git checkout -q FETCH_HEAD
git log -1 --oneline
- name: Anchor aliases schema
run: |
set -euo pipefail
node scripts/check-anchor-aliases.mjs
- name: NPM harden
run: |
set -euo pipefail
npm config set fetch-retries 5
npm config set fetch-retry-mintimeout 20000
npm config set fetch-retry-maxtimeout 120000
npm config set registry https://registry.npmjs.org
npm config get registry
- name: Install deps
run: |
set -euo pipefail
npm ci
- name: Inline scripts syntax check
run: |
set -euo pipefail
node scripts/check-inline-js.mjs
- name: Build (includes postbuild injection + pagefind)
run: |
set -euo pipefail
npm run build
- name: Anchors contract
run: |
set -euo pipefail
npm run test:anchors
- name: Verify anchor aliases injected in dist
run: |
set -euo pipefail
node scripts/verify-anchor-aliases-in-dist.mjs

View File

@@ -0,0 +1,613 @@
name: Deploy staging+live (annotations)
on:
push:
branches: [main]
workflow_dispatch:
inputs:
force:
description: "Force FULL deploy (rebuild+restart) even if gate would hotpatch-only (1=yes, 0=no)"
required: false
default: "0"
env:
NODE_OPTIONS: --dns-result-order=ipv4first
DOCKER_API_VERSION: "1.43"
COMPOSE_VERSION: "2.29.7"
ASTRO_TELEMETRY_DISABLED: "1"
defaults:
run:
shell: bash
concurrency:
group: deploy-staging-live-main
cancel-in-progress: false
jobs:
deploy:
runs-on: nas-deploy
container:
image: localhost:5000/archicratie/nas-deploy-node22@sha256:fefa8bb307005cebec07796661ab25528dc319c33a8f1e480e1d66f90cd5cff6
steps:
- name: Tools sanity
run: |
set -euo pipefail
git --version
node --version
npm --version
- name: Checkout (push or workflow_dispatch, no external actions)
env:
EVENT_JSON: /var/run/act/workflow/event.json
run: |
set -euo pipefail
test -f "$EVENT_JSON" || { echo "❌ Missing $EVENT_JSON"; exit 1; }
node --input-type=module <<'NODE'
import fs from "node:fs";
const ev = JSON.parse(fs.readFileSync(process.env.EVENT_JSON, "utf8"));
const repoObj = ev?.repository || {};
const cloneUrl =
repoObj?.clone_url ||
(repoObj?.html_url ? (repoObj.html_url.replace(/\/$/,"") + ".git") : "");
if (!cloneUrl) throw new Error("No repository clone_url/html_url in event.json");
const defaultBranch = repoObj?.default_branch || "main";
// Push-range (most reliable for change detection)
const before = String(ev?.before || "").trim();
const after =
(process.env.GITHUB_SHA && String(process.env.GITHUB_SHA).trim()) ||
String(ev?.after || ev?.sha || ev?.head_commit?.id || ev?.pull_request?.head?.sha || "").trim();
const shq = (s) => "'" + String(s).replace(/'/g, "'\\''") + "'";
fs.writeFileSync("/tmp/deploy.env", [
`REPO_URL=${shq(cloneUrl)}`,
`DEFAULT_BRANCH=${shq(defaultBranch)}`,
`BEFORE=${shq(before)}`,
`AFTER=${shq(after)}`
].join("\n") + "\n");
NODE
source /tmp/deploy.env
echo "Repo URL: $REPO_URL"
echo "Default branch: $DEFAULT_BRANCH"
echo "BEFORE: ${BEFORE:-<empty>}"
echo "AFTER: ${AFTER:-<empty>}"
rm -rf .git
git init -q
git remote add origin "$REPO_URL"
# Checkout AFTER (or default branch if missing)
if [[ -n "${AFTER:-}" ]]; then
git fetch --depth 50 origin "$AFTER"
git -c advice.detachedHead=false checkout -q FETCH_HEAD
else
git fetch --depth 50 origin "$DEFAULT_BRANCH"
git -c advice.detachedHead=false checkout -q "origin/$DEFAULT_BRANCH"
AFTER="$(git rev-parse HEAD)"
echo "AFTER='$AFTER'" >> /tmp/deploy.env
echo "Resolved AFTER: $AFTER"
fi
git log -1 --oneline
- name: Gate — decide SKIP vs HOTPATCH vs FULL rebuild
env:
INPUT_FORCE: ${{ inputs.force }}
EVENT_JSON: /var/run/act/workflow/event.json
run: |
set -euo pipefail
source /tmp/deploy.env
FORCE="${INPUT_FORCE:-0}"
# Keep the AFTER resolved during checkout (sourced from /tmp/deploy.env)
# as the fallback before it is overwritten from event.json below.
CHECKOUT_AFTER="${AFTER:-}"
# Read push before/after from event.json (merge-proof)
node --input-type=module <<'NODE'
import fs from "node:fs";
const ev = JSON.parse(fs.readFileSync(process.env.EVENT_JSON, "utf8"));
const before = ev?.before || "";
const after = ev?.after || ev?.sha || "";
// Single-quote shell escaping so sourcing cannot expand $ or backticks.
const shq = (s) => "'" + String(s).replace(/'/g, "'\\''") + "'";
fs.writeFileSync("/tmp/gate.env", [
`EV_BEFORE=${shq(before)}`,
`EV_AFTER=${shq(after)}`
].join("\n") + "\n");
NODE
source /tmp/gate.env
BEFORE="${EV_BEFORE:-}"
AFTER="${EV_AFTER:-}"
if [[ -z "${AFTER:-}" ]]; then
# FIX: fall back to the commit resolved at checkout time.
# The previous code used "${SHA:-}", a variable that is never set.
AFTER="${CHECKOUT_AFTER:-}"
fi
echo "Gate ctx: BEFORE=${BEFORE:-<empty>} AFTER=${AFTER:-<empty>} FORCE=${FORCE}"
# Produce a reliable CHANGED list:
# - valid BEFORE/AFTER -> git diff before..after
# - otherwise fallback -> diff parent1..after, or show after
CHANGED=""
Z40="0000000000000000000000000000000000000000"
if [[ -n "${BEFORE:-}" && "${BEFORE}" != "${Z40}" ]] \
&& git cat-file -e "${BEFORE}^{commit}" 2>/dev/null \
&& git cat-file -e "${AFTER}^{commit}" 2>/dev/null; then
CHANGED="$(git diff --name-only "${BEFORE}" "${AFTER}" || true)"
else
P1="$(git rev-parse "${AFTER}^" 2>/dev/null || true)"
if [[ -n "${P1:-}" ]] && git cat-file -e "${P1}^{commit}" 2>/dev/null; then
CHANGED="$(git diff --name-only "${P1}" "${AFTER}" || true)"
else
CHANGED="$(git show --name-only --pretty="" "${AFTER}" | sed '/^$/d' || true)"
fi
fi
printf "%s\n" "${CHANGED}" > /tmp/changed.txt
echo "== changed files (first 200) =="
sed -n '1,200p' /tmp/changed.txt || true
# Flags
HAS_FULL=0
HAS_HOTPATCH=0
# HOTPATCH when annotations/media are touched
if grep -qE '^(src/annotations/|public/media/)' /tmp/changed.txt; then
HAS_HOTPATCH=1
fi
# FULL when the change is build-impacting (robust)
# 1) anything under src/ EXCEPT src/annotations/
# FIX: filter src/ paths first, then exclude annotations. The old form
# (`grep -q '^src/' f && grep -qv '^src/annotations/' f`) set HAS_FULL
# whenever ANY unrelated file (e.g. README.md) was in the changeset.
SRC_NON_ANNO="$(grep -E '^src/' /tmp/changed.txt | grep -Ev '^src/annotations/' || true)"
if [[ -n "${SRC_NON_ANNO}" ]]; then
HAS_FULL=1
fi
# 2) scripts/
if grep -qE '^scripts/' /tmp/changed.txt; then
HAS_FULL=1
fi
# 3) anything under public/ EXCEPT public/media/
# FIX: same piped form as for src/ above.
PUB_NON_MEDIA="$(grep -E '^public/' /tmp/changed.txt | grep -Ev '^public/media/' || true)"
if [[ -n "${PUB_NON_MEDIA}" ]]; then
HAS_FULL=1
fi
# 4) root files that change the build / the image
if grep -qE '^(package\.json|package-lock\.json|astro\.config\.mjs|tsconfig\.json|\.npmrc|\.nvmrc|Dockerfile|docker-compose\.yml|nginx\.conf)$' /tmp/changed.txt; then
HAS_FULL=1
fi
echo "Gate flags: HAS_FULL=${HAS_FULL} HAS_HOTPATCH=${HAS_HOTPATCH}"
# Decision
if [[ "${FORCE}" == "1" ]]; then
GO=1
MODE="full"
echo "✅ force=1 -> MODE=full (rebuild+restart)"
elif [[ "${HAS_FULL}" == "1" ]]; then
GO=1
MODE="full"
echo "✅ build-impacting change -> MODE=full (rebuild+restart)"
elif [[ "${HAS_HOTPATCH}" == "1" ]]; then
GO=1
MODE="hotpatch"
echo "✅ annotations/media change -> MODE=hotpatch"
else
GO=0
MODE="skip"
echo " no relevant change -> skip deploy"
fi
echo "GO=${GO}" >> /tmp/deploy.env
echo "MODE='${MODE}'" >> /tmp/deploy.env
- name: Toolchain sanity + resolve COMPOSE_PROJECT_NAME
run: |
set -euo pipefail
source /tmp/deploy.env
[[ "${GO:-0}" == "1" ]] || { echo " skipped"; exit 0; }
# All tooling is prebaked into the runner image; fail fast if anything is off.
git --version
docker version
docker compose version
python3 -c 'import yaml; print("PyYAML OK")'
# Reuse the compose project label of whichever color container already
# exists (blue first, then green); fall back to the default project name.
PROJ=""
for c in archicratie-web-blue archicratie-web-green; do
if [[ -z "$PROJ" ]]; then
PROJ="$(docker inspect "$c" --format '{{ index .Config.Labels "com.docker.compose.project" }}' 2>/dev/null || true)"
fi
done
PROJ="${PROJ:-archicratie-web}"
echo "COMPOSE_PROJECT_NAME='$PROJ'" >> /tmp/deploy.env
echo "✅ Using COMPOSE_PROJECT_NAME=$PROJ"
# Both color containers must already exist (the hotpatch path relies on them).
for c in archicratie-web-blue archicratie-web-green; do
docker inspect "$c" >/dev/null 2>&1 || { echo "❌ missing container $c"; exit 5; }
done
- name: Assert required vars (PUBLIC_GITEA_*) — only needed for MODE=full
env:
PUBLIC_GITEA_BASE: ${{ vars.PUBLIC_GITEA_BASE }}
PUBLIC_GITEA_OWNER: ${{ vars.PUBLIC_GITEA_OWNER }}
PUBLIC_GITEA_REPO: ${{ vars.PUBLIC_GITEA_REPO }}
run: |
set -euo pipefail
source /tmp/deploy.env
[[ "${GO:-0}" == "1" ]] || { echo " skipped"; exit 0; }
[[ "${MODE:-hotpatch}" == "full" ]] || { echo " hotpatch mode -> vars not required"; exit 0; }
# Every PUBLIC_GITEA_* repo var must be non-empty (checked via bash indirection).
for v in PUBLIC_GITEA_BASE PUBLIC_GITEA_OWNER PUBLIC_GITEA_REPO; do
test -n "${!v:-}" || { echo "❌ missing repo var $v"; exit 2; }
done
echo "✅ vars OK"
- name: Assert deploy files exist — only needed for MODE=full
run: |
set -euo pipefail
source /tmp/deploy.env
[[ "${GO:-0}" == "1" ]] || { echo " skipped"; exit 0; }
[[ "${MODE:-hotpatch}" == "full" ]] || { echo " hotpatch mode -> files not required"; exit 0; }
# A full rebuild needs the compose/image/proxy config at the repo root.
for f in docker-compose.yml Dockerfile nginx.conf; do
test -f "$f"
done
echo "✅ deploy files OK"
- name: FULL — Build + deploy staging (blue) then warmup+smoke
env:
PUBLIC_GITEA_BASE: ${{ vars.PUBLIC_GITEA_BASE }}
PUBLIC_GITEA_OWNER: ${{ vars.PUBLIC_GITEA_OWNER }}
PUBLIC_GITEA_REPO: ${{ vars.PUBLIC_GITEA_REPO }}
run: |
set -euo pipefail
source /tmp/deploy.env
[[ "${GO:-0}" == "1" ]] || { echo " skipped"; exit 0; }
[[ "${MODE:-hotpatch}" == "full" ]] || { echo " MODE=$MODE -> skip full rebuild"; exit 0; }
PROJ="${COMPOSE_PROJECT_NAME:-archicratie-web}"
# Poll a URL until it answers 2xx or the attempts run out (1s between tries).
wait_url() {
local url="$1"
local label="$2"
local tries="${3:-60}"
for i in $(seq 1 "$tries"); do
if curl -fsS --max-time 4 "$url" >/dev/null; then
echo "✅ $label OK ($url)"
return 0
fi
echo "… warmup $label ($i/$tries)"
sleep 1
done
echo "❌ timeout $label ($url)"
return 1
}
# Timestamp shared with the green step (via deploy.env) for .BAK image tags.
TS="$(date -u +%Y%m%d-%H%M%S)"
echo "TS='$TS'" >> /tmp/deploy.env
# Snapshot the current blue/green images so the green step can roll back.
docker image tag archicratie-web:blue "archicratie-web:blue.BAK.${TS}" || true
docker image tag archicratie-web:green "archicratie-web:green.BAK.${TS}" || true
# Paris-local build time; splice ":" into the numeric offset (+0200 -> +02:00).
BUILD_TIME_RAW="$(TZ=Europe/Paris date '+%Y-%m-%dT%H:%M:%S%z')"
BUILD_TIME="${BUILD_TIME_RAW:0:${#BUILD_TIME_RAW}-2}:${BUILD_TIME_RAW:${#BUILD_TIME_RAW}-2}"
# Generate the staging health descriptor baked into the blue image.
PUBLIC_OPS_ENV=staging \
PUBLIC_OPS_UPSTREAM=web_blue \
PUBLIC_BUILD_SHA="${AFTER}" \
PUBLIC_BUILD_TIME="${BUILD_TIME}" \
node scripts/write-ops-health.mjs
test -f public/__ops/health.json
echo "=== public/__ops/health.json (blue/staging) ==="
cat public/__ops/health.json
docker compose -p "$PROJ" -f docker-compose.yml build web_blue
docker rm -f archicratie-web-blue || true
docker compose -p "$PROJ" -f docker-compose.yml up -d --force-recreate --remove-orphans web_blue
# warmup endpoints
wait_url "http://127.0.0.1:8081/para-index.json" "blue para-index"
wait_url "http://127.0.0.1:8081/annotations-index.json" "blue annotations-index"
wait_url "http://127.0.0.1:8081/pagefind/pagefind.js" "blue pagefind.js"
wait_url "http://127.0.0.1:8081/__ops/health.json" "blue ops health"
curl -fsS --max-time 6 "http://127.0.0.1:8081/__ops/health.json" \
| python3 -c 'import sys, json; j=json.load(sys.stdin); print("env=", j.get("env")); print("upstream=", j.get("upstream")); print("buildSha=", j.get("buildSha")); print("builtAt=", j.get("builtAt"))'
# Smoke check: the staging page must advertise the staging canonical host.
CANON="$(curl -fsS --max-time 6 "http://127.0.0.1:8081/archicrat-ia/chapitre-1/" | grep -oE 'rel="canonical" href="[^"]+"' | head -n1 || true)"
echo "canonical(blue)=$CANON"
echo "$CANON" | grep -q 'https://staging\.archicratie\.trans-hands\.synology\.me/' || {
echo "❌ staging canonical mismatch"
docker logs --tail 120 archicratie-web-blue || true
exit 3
}
echo "✅ staging OK"
- name: FULL — Build + deploy live (green) then warmup+smoke + rollback if needed
env:
PUBLIC_GITEA_BASE: ${{ vars.PUBLIC_GITEA_BASE }}
PUBLIC_GITEA_OWNER: ${{ vars.PUBLIC_GITEA_OWNER }}
PUBLIC_GITEA_REPO: ${{ vars.PUBLIC_GITEA_REPO }}
run: |
set -euo pipefail
source /tmp/deploy.env
[[ "${GO:-0}" == "1" ]] || { echo " skipped"; exit 0; }
[[ "${MODE:-hotpatch}" == "full" ]] || { echo " MODE=$MODE -> skip full rebuild"; exit 0; }
PROJ="${COMPOSE_PROJECT_NAME:-archicratie-web}"
# TS normally comes from the blue step (for .BAK tags); re-derive if absent.
TS="${TS:-$(date -u +%Y%m%d-%H%M%S)}"
# Poll a URL until it answers 2xx or the attempts run out (1s between tries).
wait_url() {
local url="$1"
local label="$2"
local tries="${3:-60}"
for i in $(seq 1 "$tries"); do
if curl -fsS --max-time 4 "$url" >/dev/null; then
echo "✅ $label OK ($url)"
return 0
fi
echo "… warmup $label ($i/$tries)"
sleep 1
done
echo "❌ timeout $label ($url)"
return 1
}
# Best-effort rollback: restore the pre-deploy green image and restart it.
rollback() {
echo "⚠️ rollback green -> previous image tag (best effort)"
docker image tag "archicratie-web:green.BAK.${TS}" archicratie-web:green || true
docker rm -f archicratie-web-green || true
docker compose -p "$PROJ" -f docker-compose.yml up -d --force-recreate --remove-orphans web_green || true
}
# Paris-local build time; splice ":" into the numeric offset (+0200 -> +02:00).
BUILD_TIME_RAW="$(TZ=Europe/Paris date '+%Y-%m-%dT%H:%M:%S%z')"
BUILD_TIME="${BUILD_TIME_RAW:0:${#BUILD_TIME_RAW}-2}:${BUILD_TIME_RAW:${#BUILD_TIME_RAW}-2}"
# Generate the production health descriptor baked into the green image.
PUBLIC_OPS_ENV=prod \
PUBLIC_OPS_UPSTREAM=web_green \
PUBLIC_BUILD_SHA="${AFTER}" \
PUBLIC_BUILD_TIME="${BUILD_TIME}" \
node scripts/write-ops-health.mjs
test -f public/__ops/health.json
echo "=== public/__ops/health.json (green/prod) ==="
cat public/__ops/health.json
# build/restart green
if ! docker compose -p "$PROJ" -f docker-compose.yml build web_green; then
echo "❌ build green failed"; rollback; exit 4
fi
docker rm -f archicratie-web-green || true
docker compose -p "$PROJ" -f docker-compose.yml up -d --force-recreate --remove-orphans web_green
# warmup endpoints — any timeout triggers rollback of the live container
if ! wait_url "http://127.0.0.1:8082/para-index.json" "green para-index"; then rollback; exit 4; fi
if ! wait_url "http://127.0.0.1:8082/annotations-index.json" "green annotations-index"; then rollback; exit 4; fi
if ! wait_url "http://127.0.0.1:8082/pagefind/pagefind.js" "green pagefind.js"; then rollback; exit 4; fi
if ! wait_url "http://127.0.0.1:8082/__ops/health.json" "green ops health"; then rollback; exit 4; fi
curl -fsS --max-time 6 "http://127.0.0.1:8082/__ops/health.json" \
| python3 -c 'import sys, json; j=json.load(sys.stdin); print("env=", j.get("env")); print("upstream=", j.get("upstream")); print("buildSha=", j.get("buildSha")); print("builtAt=", j.get("builtAt"))'
# Smoke check: the live page must advertise the production canonical host.
CANON="$(curl -fsS --max-time 6 "http://127.0.0.1:8082/archicrat-ia/chapitre-1/" | grep -oE 'rel="canonical" href="[^"]+"' | head -n1 || true)"
echo "canonical(green)=$CANON"
echo "$CANON" | grep -q 'https://archicratie\.trans-hands\.synology\.me/' || {
echo "❌ live canonical mismatch"
docker logs --tail 120 archicratie-web-green || true
rollback
exit 4
}
echo "✅ live OK"
- name: HOTPATCH — deep merge shards -> annotations-index + copy changed media into blue+green
run: |
set -euo pipefail
source /tmp/deploy.env
[[ "${GO:-0}" == "1" ]] || { echo " skipped"; exit 0; }
# NOTE(review): unlike the FULL steps, only GO is checked here (not MODE),
# so this also runs after a full rebuild — presumably intentional so the
# merged shard data always lands in the running containers; confirm.
# Merge every YAML annotation shard into a single annotations-index.json.
python3 - <<'PY'
import os, re, json, glob
import yaml
import datetime as dt
ROOT = os.getcwd()
# All shard files live under src/annotations (scanned recursively below).
ANNO_ROOT = os.path.join(ROOT, "src", "annotations")
# Tiny type predicates used throughout the merge logic.
def is_obj(x): return isinstance(x, dict)
def is_arr(x): return isinstance(x, list)
def iso_dt(x):
    # Render datetime/date values as ISO-8601 strings; None for other types.
    # Aware datetimes are converted to UTC with a trailing "Z"; naive ones
    # keep their plain isoformat; dates become YYYY-MM-DD.
    if isinstance(x, dt.datetime):
        if x.tzinfo is not None:
            as_utc = x.astimezone(dt.timezone.utc)
            return as_utc.isoformat().replace("+00:00","Z")
        return x.isoformat()
    if isinstance(x, dt.date):
        return x.isoformat()
    return None
def normalize(x):
    # Canonicalize a YAML-loaded value: dates become ISO strings (via
    # iso_dt), dicts/lists are rebuilt recursively with stringified keys,
    # and all other scalars pass through unchanged.
    rendered = iso_dt(x)
    if rendered is not None:
        return rendered
    if isinstance(x, dict):
        return {str(key): normalize(val) for key, val in x.items()}
    if isinstance(x, list):
        return [normalize(item) for item in x]
    return x
def key_media(it):
    # Dedup key for media entries: the src path.
    return str((it or {}).get("src", ""))

def key_ref(it):
    # Dedup key for reference entries: url/label/kind/citation joined by "||".
    entry = it or {}
    fields = ("url", "label", "kind", "citation")
    return "||".join(str(entry.get(f, "")) for f in fields)

def key_comment(it):
    # Dedup key for editorial comments: the stripped comment text.
    return str((it or {}).get("text", "")).strip()
def dedup_extend(dst_list, src_list, key_fn):
    # Concatenate dst then src, normalizing every element and keeping only
    # the first occurrence of each non-empty key (empty keys are dropped).
    merged = []
    seen_keys = set()
    for item in list(dst_list or []) + list(src_list or []):
        item = normalize(item)
        key = key_fn(item)
        if key and key not in seen_keys:
            seen_keys.add(key)
            merged.append(item)
    return merged
# Merge src into dst in place. Curated list keys get key-based dedup,
# nested dicts recurse, other lists get JSON-canonical dedup, and scalars
# only fill missing/empty destination slots (existing dst values win).
def deep_merge(dst, src):
src = normalize(src)
for k, v in (src or {}).items():
# Curated list fields: dedup by a semantic key, preserving order.
if k in ("media","refs","comments_editorial") and is_arr(v):
if k == "media": dst[k] = dedup_extend(dst.get(k, []), v, key_media)
elif k == "refs": dst[k] = dedup_extend(dst.get(k, []), v, key_ref)
else: dst[k] = dedup_extend(dst.get(k, []), v, key_comment)
continue
# Nested objects: recurse, creating the destination dict if needed.
if is_obj(v):
if not is_obj(dst.get(k)): dst[k] = {}
deep_merge(dst[k], v)
continue
# Generic lists: dedup on the canonical JSON form of each element,
# destination elements first so their order is preserved.
if is_arr(v):
cur = dst.get(k, [])
if not is_arr(cur): cur = []
seen = set(); out = []
for x in cur:
x = normalize(x)
s = json.dumps(x, sort_keys=True, ensure_ascii=False)
if s not in seen: seen.add(s); out.append(x)
for x in v:
x = normalize(x)
s = json.dumps(x, sort_keys=True, ensure_ascii=False)
if s not in seen: seen.add(s); out.append(x)
dst[k] = out
continue
# Scalars: only fill in when dst has no usable value (first writer wins).
v = normalize(v)
if k not in dst or dst.get(k) in (None, ""):
dst[k] = v
def para_num(pid):
    # Numeric sort key extracted from paragraph ids shaped like "p-12-...";
    # ids that do not match sort last via a large sentinel.
    match = re.match(r"^p-(\d+)-", str(pid))
    if match:
        return int(match.group(1))
    return 10**9
# Sort the curated list fields of one paragraph entry chronologically.
# Items order by their "ts" timestamp (0 when absent or unparseable),
# with the canonical JSON form as a deterministic tie-breaker.
def sort_lists(entry):
for k in ("media","refs","comments_editorial"):
arr = entry.get(k)
if not is_arr(arr): continue
def ts(x):
x = normalize(x)
try:
# "Z" suffix is normalized for fromisoformat (pre-3.11 compat).
s = str((x or {}).get("ts",""))
return dt.datetime.fromisoformat(s.replace("Z","+00:00")).timestamp() if s else 0
except Exception:
return 0
arr = [normalize(x) for x in arr]
arr.sort(key=lambda x: (ts(x), json.dumps(x, sort_keys=True, ensure_ascii=False)))
entry[k] = arr
if not os.path.isdir(ANNO_ROOT):
raise SystemExit(f"Missing annotations root: {ANNO_ROOT}")
pages = {}
errors = []
# Deterministic processing order: sorted recursive glob over shard files.
files = sorted(glob.glob(os.path.join(ANNO_ROOT, "**", "*.yml"), recursive=True))
for fp in files:
try:
with open(fp, "r", encoding="utf-8") as f:
doc = yaml.safe_load(f) or {}
doc = normalize(doc)
# Only schema v1 shards with a page and a paras mapping are merged.
if not isinstance(doc, dict) or doc.get("schema") != 1:
continue
page = str(doc.get("page","")).strip().strip("/")
paras = doc.get("paras") or {}
if not page or not isinstance(paras, dict):
continue
pg = pages.setdefault(page, {"paras": {}})
for pid, entry in paras.items():
pid = str(pid)
if pid not in pg["paras"] or not isinstance(pg["paras"].get(pid), dict):
pg["paras"][pid] = {}
if isinstance(entry, dict):
deep_merge(pg["paras"][pid], entry)
sort_lists(pg["paras"][pid])
except Exception as e:
# Collect per-file errors instead of aborting the whole merge.
errors.append({"file": os.path.relpath(fp, ROOT), "error": str(e)})
# Order paragraphs numerically (p-<n>-...) within each page.
for page, obj in pages.items():
keys = list((obj.get("paras") or {}).keys())
keys.sort(key=lambda k: (para_num(k), k))
obj["paras"] = {k: obj["paras"][k] for k in keys}
out = {
"schema": 1,
# NOTE(review): datetime.utcnow() is deprecated since Python 3.12;
# dt.datetime.now(dt.timezone.utc) is the modern equivalent.
"generatedAt": dt.datetime.utcnow().replace(tzinfo=dt.timezone.utc).isoformat().replace("+00:00","Z"),
"pages": pages,
"stats": {
"pages": len(pages),
"paras": sum(len(v.get("paras") or {}) for v in pages.values()),
"errors": len(errors),
},
"errors": errors,
}
with open("/tmp/annotations-index.json", "w", encoding="utf-8") as f:
json.dump(out, f, ensure_ascii=False)
print("OK: wrote /tmp/annotations-index.json pages=", out["stats"]["pages"], "paras=", out["stats"]["paras"], "errors=", out["stats"]["errors"])
PY
# patch JSON into running containers
for c in archicratie-web-blue archicratie-web-green; do
echo "== patch annotations-index.json into $c =="
docker cp /tmp/annotations-index.json "${c}:/usr/share/nginx/html/annotations-index.json"
done
# copy changed media files into containers (so new media appears without rebuild)
if [[ -s /tmp/changed.txt ]]; then
while IFS= read -r f; do
[[ -n "$f" ]] || continue
if [[ "$f" == public/media/* ]]; then
# Strip the public/ prefix to map onto the nginx docroot.
dest="/usr/share/nginx/html/${f#public/}" # => /usr/share/nginx/html/media/...
for c in archicratie-web-blue archicratie-web-green; do
echo "== copy media into $c: $f -> $dest =="
docker exec "$c" sh -lc "mkdir -p \"$(dirname "$dest")\""
docker cp "$f" "$c:$dest"
done
fi
done < /tmp/changed.txt
fi
# smoke after patch — both ports must serve the freshly merged index
for p in 8081 8082; do
echo "== smoke annotations-index on $p =="
curl -fsS --max-time 6 "http://127.0.0.1:${p}/annotations-index.json" \
| python3 -c 'import sys,json; j=json.load(sys.stdin); print("generatedAt:", j.get("generatedAt")); print("pages:", len(j.get("pages") or {})); print("paras:", j.get("stats",{}).get("paras"))'
done
echo "✅ hotpatch done"
- name: Debug on failure (containers status/logs)
if: ${{ failure() }}
run: |
set -euo pipefail
# Dump container status and recent logs to ease post-mortem of failed runs.
echo "== docker ps =="
docker ps --format 'table {{.Names}}\t{{.Status}}\t{{.Image}}' | head -n 80 || true
for c in archicratie-web-blue archicratie-web-green; do
echo "== logs $c (tail 200) =="
docker logs --tail 200 "$c" || true
done

View File

@@ -0,0 +1,788 @@
# Applies approved proposer tickets as batched bot PRs, one batch at a time.
name: Proposer Apply (Queue)
on:
issues:
types: [labeled]
push:
branches: [main]
workflow_dispatch:
inputs:
issue:
description: "Issue number to prioritize (optional)"
required: false
default: ""
env:
# Prefer IPv4 DNS results for node (avoids IPv6-first resolution stalls).
NODE_OPTIONS: --dns-result-order=ipv4first
defaults:
run:
shell: bash
# Serialize queue workers: one run at a time, never cancelled mid-run.
concurrency:
group: proposer-queue-main
cancel-in-progress: false
jobs:
apply-proposer:
runs-on: mac-ci
container:
image: mcr.microsoft.com/devcontainers/javascript-node:22-bookworm
steps:
- name: Tools sanity
run: |
set -euo pipefail
# Verify the container image ships the expected toolchain.
for tool in git node npm; do
"$tool" --version
done
- name: Derive context (event.json / workflow_dispatch / push)
env:
INPUT_ISSUE: ${{ inputs.issue }}
EVENT_NAME_IN: ${{ github.event_name }}
FORGE_API: ${{ vars.FORGE_API || vars.FORGE_BASE }}
run: |
set -euo pipefail
export EVENT_JSON="/var/run/act/workflow/event.json"
test -f "$EVENT_JSON" || { echo "Missing $EVENT_JSON"; exit 1; }
# Extract repo/issue/event context from event.json into /tmp/proposer.env.
node --input-type=module - <<'NODE' > /tmp/proposer.env
import fs from "node:fs";
const ev = JSON.parse(fs.readFileSync(process.env.EVENT_JSON, "utf8"));
const repoObj = ev?.repository || {};
const cloneUrl =
repoObj?.clone_url ||
(repoObj?.html_url ? (repoObj.html_url.replace(/\/$/, "") + ".git") : "");
if (!cloneUrl) throw new Error("No repository clone_url/html_url in event.json");
let owner =
repoObj?.owner?.login ||
repoObj?.owner?.username ||
(repoObj?.full_name ? repoObj.full_name.split("/")[0] : "");
let repo =
repoObj?.name ||
(repoObj?.full_name ? repoObj.full_name.split("/")[1] : "");
if (!owner || !repo) {
// Fallback: parse owner/repo out of the clone URL path.
const m = cloneUrl.match(/[:/](?<o>[^/]+)\/(?<r>[^/]+?)(?:\.git)?$/);
if (m?.groups) {
owner = owner || m.groups.o;
repo = repo || m.groups.r;
}
}
if (!owner || !repo) throw new Error("Cannot infer owner/repo");
const defaultBranch = repoObj?.default_branch || "main";
// Issue number: event payload first, then the workflow_dispatch input.
const issueNumber =
ev?.issue?.number ||
ev?.issue?.index ||
(process.env.INPUT_ISSUE ? Number(process.env.INPUT_ISSUE) : 0) ||
0;
const labelName =
ev?.label?.name ||
(typeof ev?.label === "string" ? ev.label : "") ||
"";
// Event name: trust the runner, else infer from the payload shape.
const eventName =
String(process.env.EVENT_NAME_IN || "").trim() ||
(ev?.issue ? "issues" : (ev?.before || ev?.after ? "push" : "workflow_dispatch"));
const u = new URL(cloneUrl);
const origin = u.origin;
const apiBase =
(process.env.FORGE_API && String(process.env.FORGE_API).trim())
? String(process.env.FORGE_API).trim().replace(/\/+$/, "")
: origin;
// FIX: single-quote shell escaping (same scheme as the deploy workflow's
// shq). JSON.stringify emitted double-quoted values, and bash performs
// $-expansion inside double quotes when this file is sourced.
function sh(s) {
return "'" + String(s).replace(/'/g, "'\\''") + "'";
}
process.stdout.write([
`CLONE_URL=${sh(cloneUrl)}`,
`OWNER=${sh(owner)}`,
`REPO=${sh(repo)}`,
`DEFAULT_BRANCH=${sh(defaultBranch)}`,
`ISSUE_NUMBER=${sh(issueNumber)}`,
`LABEL_NAME=${sh(labelName)}`,
`EVENT_NAME=${sh(eventName)}`,
`API_BASE=${sh(apiBase)}`
].join("\n") + "\n");
NODE
echo "Context:"
sed -n '1,200p' /tmp/proposer.env
- name: Early gate (tolerant on empty issue label payload)
run: |
set -euo pipefail
source /tmp/proposer.env
echo "event=$EVENT_NAME label=${LABEL_NAME:-<empty>}"
# Skip only when an issues event carries an explicit label other than
# state/approved; empty label payloads fall through to API-side gating.
if [[ "$EVENT_NAME" == "issues" && -n "${LABEL_NAME:-}" && "$LABEL_NAME" != "state/approved" ]]; then
echo "issues/labeled with explicit non-approved label=$LABEL_NAME -> skip"
echo 'SKIP=1' >> /tmp/proposer.env
echo 'SKIP_REASON="label_not_state_approved_event"' >> /tmp/proposer.env
exit 0
fi
echo "Proceed to API-based selection/gating"
- name: Checkout default branch
run: |
set -euo pipefail
source /tmp/proposer.env
[[ "${SKIP:-0}" != "1" ]] || { echo "Skipped"; exit 0; }
# Fresh shallow checkout of the default-branch tip (detached HEAD).
rm -rf .git
git init -q
git remote add origin "$CLONE_URL"
git fetch --depth 1 origin "$DEFAULT_BRANCH"
git -c advice.detachedHead=false checkout -q FETCH_HEAD
git log -1 --oneline
- name: Detect app dir (repo-root vs ./site)
run: |
set -euo pipefail
source /tmp/proposer.env
[[ "${SKIP:-0}" != "1" ]] || { echo "Skipped"; exit 0; }
# Prefer ./site when it looks like an npm package; otherwise the repo root.
if [[ -d "site" && -f "site/package.json" ]]; then
APP_DIR="site"
else
APP_DIR="."
fi
echo "APP_DIR=$APP_DIR" >> /tmp/proposer.env
echo "APP_DIR=$APP_DIR"
if ! test -f "$APP_DIR/package.json"; then
echo "package.json missing in APP_DIR=$APP_DIR"
exit 1
fi
if ! test -d "$APP_DIR/scripts"; then
echo "scripts/ missing in APP_DIR=$APP_DIR"
exit 1
fi
- name: Select next proposer batch (by path)
env:
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
run: |
set -euo pipefail
source /tmp/proposer.env
[[ "${SKIP:-0}" != "1" ]] || { echo "Skipped"; exit 0; }
test -n "${FORGE_TOKEN:-}" || {
echo "Missing secret FORGE_TOKEN"
exit 1
}
# Env consumed by scripts/pick-proposer-issue.mjs.
export GITEA_OWNER="$OWNER"
export GITEA_REPO="$REPO"
export FORGE_API="$API_BASE"
cd "$APP_DIR"
test -f scripts/pick-proposer-issue.mjs || {
echo "missing scripts/pick-proposer-issue.mjs in APP_DIR=$APP_DIR"
ls -la scripts | sed -n '1,200p' || true
exit 1
}
# The picker emits KEY=VALUE lines (TARGET_* vars) describing the batch.
node scripts/pick-proposer-issue.mjs "${ISSUE_NUMBER:-0}" > /tmp/proposer.pick.env
cat /tmp/proposer.pick.env >> /tmp/proposer.env
source /tmp/proposer.pick.env
if [[ "${TARGET_FOUND:-0}" != "1" ]]; then
echo 'SKIP=1' >> /tmp/proposer.env
echo "SKIP_REASON=${TARGET_REASON:-no_target}" >> /tmp/proposer.env
echo "No target batch"
exit 0
fi
echo "Target batch:"
grep -E '^(TARGET_PRIMARY_ISSUE|TARGET_ISSUES|TARGET_COUNT|TARGET_CHEMIN)=' /tmp/proposer.env
- name: Derive deterministic batch identity
run: |
set -euo pipefail
source /tmp/proposer.env
[[ "${SKIP:-0}" != "1" ]] || { echo "Skipped"; exit 0; }
export TARGET_ISSUES TARGET_CHEMIN
node --input-type=module - <<'NODE'
import fs from "node:fs";
import crypto from "node:crypto";
// Sort issue numbers so the same set always hashes identically.
const issues = String(process.env.TARGET_ISSUES || "")
.trim()
.split(/\s+/)
.filter(Boolean)
.sort((a, b) => Number(a) - Number(b));
const chemin = String(process.env.TARGET_CHEMIN || "").trim();
// Batch key = path + sorted issue list; short sha1 keeps branch names compact.
const keySource = `${chemin}::${issues.join(",")}`;
const hash = crypto.createHash("sha1").update(keySource).digest("hex").slice(0, 12);
const primary = issues[0] || "0";
const batchBranch = `bot/proposer-${primary}-${hash}`;
fs.appendFileSync(
"/tmp/proposer.env",
[
`BATCH_KEY=${JSON.stringify(keySource)}`,
`BATCH_HASH=${JSON.stringify(hash)}`,
`BATCH_BRANCH=${JSON.stringify(batchBranch)}`
].join("\n") + "\n"
);
NODE
echo "Batch identity:"
grep -E '^(BATCH_KEY|BATCH_HASH|BATCH_BRANCH)=' /tmp/proposer.env
- name: Inspect open proposer PRs
env:
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
run: |
set -euo pipefail
source /tmp/proposer.env
[[ "${SKIP:-0}" != "1" ]] || { echo "Skipped"; exit 0; }
# List open PRs to detect an existing PR for this batch or a busy queue.
curl -fsS \
-H "Authorization: token $FORGE_TOKEN" \
-H "Accept: application/json" \
"$API_BASE/api/v1/repos/$OWNER/$REPO/pulls?state=open&limit=100" \
-o /tmp/open_pulls.json
export TARGET_ISSUES="${TARGET_ISSUES:-}"
export BATCH_BRANCH="${BATCH_BRANCH:-}"
export BATCH_KEY="${BATCH_KEY:-}"
node --input-type=module - <<'NODE' >> /tmp/proposer.env
import fs from "node:fs";
const pulls = JSON.parse(fs.readFileSync("/tmp/open_pulls.json", "utf8"));
const issues = String(process.env.TARGET_ISSUES || "")
.trim()
.split(/\s+/)
.filter(Boolean);
const batchBranch = String(process.env.BATCH_BRANCH || "");
const batchKey = String(process.env.BATCH_KEY || "");
// Only bot-created proposer PRs are relevant to queue gating.
const proposerOpen = Array.isArray(pulls)
? pulls.filter((pr) => String(pr?.head?.ref || "").startsWith("bot/proposer-"))
: [];
// A PR matches this batch by branch name, embedded Batch-Key, or issue refs.
const sameBatch = proposerOpen.find((pr) => {
const ref = String(pr?.head?.ref || "");
const title = String(pr?.title || "");
const body = String(pr?.body || "");
if (batchBranch && ref === batchBranch) return true;
if (batchKey && body.includes(`Batch-Key: ${batchKey}`)) return true;
return issues.some((n) =>
ref.startsWith(`bot/proposer-${n}-`) ||
title.includes(`#${n}`) ||
body.includes(`#${n}`) ||
body.includes(`ticket #${n}`)
);
});
const out = [];
if (sameBatch) {
out.push("SKIP=1");
out.push(`SKIP_REASON=${JSON.stringify("issue_already_has_open_pr")}`);
out.push(`OPEN_PR_URL=${JSON.stringify(String(sameBatch.html_url || sameBatch.url || ""))}`);
out.push(`OPEN_PR_BRANCH=${JSON.stringify(String(sameBatch?.head?.ref || ""))}`);
} else if (proposerOpen.length > 0) {
// Queue is busy: some other proposer PR is still open.
const first = proposerOpen[0];
out.push("SKIP=1");
out.push(`SKIP_REASON=${JSON.stringify("queue_busy_open_proposer_pr")}`);
out.push(`OPEN_PR_URL=${JSON.stringify(String(first.html_url || first.url || ""))}`);
out.push(`OPEN_PR_BRANCH=${JSON.stringify(String(first?.head?.ref || ""))}`);
}
process.stdout.write(out.join("\n") + (out.length ? "\n" : ""));
NODE
- name: Guard on remote batch branch before heavy work
run: |
set -euo pipefail
source /tmp/proposer.env
[[ "${SKIP:-0}" != "1" ]] || { echo "Skipped"; exit 0; }
# A pre-existing remote branch for this batch means another run already
# materialized it; bail out instead of producing a duplicate.
if ! git ls-remote --exit-code --heads origin "$BATCH_BRANCH" >/dev/null 2>&1; then
echo "Remote batch branch is free"
else
echo 'SKIP=1' >> /tmp/proposer.env
echo 'SKIP_REASON="batch_branch_exists_without_pr"' >> /tmp/proposer.env
echo "OPEN_PR_BRANCH=${BATCH_BRANCH}" >> /tmp/proposer.env
echo "Remote batch branch already exists -> skip duplicate materialization"
fi
- name: Comment issue if queued / skipped
if: ${{ always() }}
env:
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
run: |
set -euo pipefail
source /tmp/proposer.env || true
# Only comment when this run was skipped, and never for plain push events.
[[ "${SKIP:-0}" == "1" ]] || exit 0
[[ "${EVENT_NAME:-}" != "push" ]] || exit 0
if [[ "${SKIP_REASON:-}" == "label_not_state_approved_event" || "${SKIP_REASON:-}" == "label_not_state_approved" ]]; then
echo "Skip reason=${SKIP_REASON} -> no comment"
exit 0
fi
test -n "${FORGE_TOKEN:-}" || exit 0
# Comment on the explicit issue if given, else on the batch's primary issue.
ISSUE_TO_COMMENT="${ISSUE_NUMBER:-0}"
if [[ "$ISSUE_TO_COMMENT" == "0" || -z "$ISSUE_TO_COMMENT" ]]; then
ISSUE_TO_COMMENT="${TARGET_PRIMARY_ISSUE:-0}"
fi
[[ "$ISSUE_TO_COMMENT" != "0" ]] || exit 0
# Map each skip reason to a human-readable message for the issue comment.
case "${SKIP_REASON:-}" in
queue_busy_open_proposer_pr)
MSG="Ticket queued in proposer queue. An open proposer PR already exists: ${OPEN_PR_URL:-"(URL unavailable)"}. The workflow will resume after merge on main."
;;
issue_already_has_open_pr)
MSG="This batch already has an open proposer PR: ${OPEN_PR_URL:-"(URL unavailable)"}"
;;
batch_branch_exists_without_pr)
MSG="This batch already has a remote batch branch (${OPEN_PR_BRANCH:-"(unknown branch)"}). Manual inspection is required before any new proposer PR is created."
;;
batch_branch_already_materialized)
MSG="This batch was already materialized by another run on branch ${OPEN_PR_BRANCH:-"(unknown branch)"}. No duplicate PR was created."
;;
explicit_issue_missing_chemin)
MSG="Proposer Apply: cannot process this ticket automatically because field Chemin is missing or unreadable."
;;
explicit_issue_missing_type)
MSG="Proposer Apply: cannot process this ticket automatically because field Type is missing or unreadable."
;;
explicit_issue_not_approved)
MSG="Proposer Apply: this ticket is not currently labeled state/approved."
;;
explicit_issue_rejected)
MSG="Proposer Apply: this ticket has state/rejected and is not eligible for the proposer queue."
;;
no_open_approved_proposer_issue)
MSG="No approved proposer ticket is currently waiting."
;;
*)
MSG="Proposer Apply: skip - ${SKIP_REASON:-unspecified reason}."
;;
esac
export MSG
# JSON-encode the message via node so escaping is always correct.
node --input-type=module - <<'NODE' > /tmp/proposer.skip.comment.json
const msg = process.env.MSG || "";
process.stdout.write(JSON.stringify({ body: msg }));
NODE
curl -fsS -X POST \
-H "Authorization: token $FORGE_TOKEN" \
-H "Content-Type: application/json" \
"$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE_TO_COMMENT/comments" \
--data-binary @/tmp/proposer.skip.comment.json || true
- name: NPM harden
run: |
set -euo pipefail
source /tmp/proposer.env
[[ "${SKIP:-0}" != "1" ]] || exit 0
cd "$APP_DIR"
# Retry-friendly npm settings plus a pinned public registry.
while read -r key value; do
npm config set "$key" "$value"
done <<'CFG'
fetch-retries 5
fetch-retry-mintimeout 20000
fetch-retry-maxtimeout 120000
registry https://registry.npmjs.org
CFG
- name: Install deps
run: |
set -euo pipefail
source /tmp/proposer.env
[[ "${SKIP:-0}" != "1" ]] || exit 0
cd "$APP_DIR"
# Reproducible install from the lockfile; skip audit/funding noise.
npm ci --no-audit --no-fund
- name: Build dist baseline
run: |
set -euo pipefail
source /tmp/proposer.env
[[ "${SKIP:-0}" != "1" ]] || exit 0
cd "$APP_DIR"
# NOTE(review): presumably validates that the tree builds before any
# ticket is applied on top of it — confirm.
npm run build
- name: Apply proposer batch on bot branch
continue-on-error: true
env:
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
BOT_GIT_NAME: ${{ secrets.BOT_GIT_NAME }}
BOT_GIT_EMAIL: ${{ secrets.BOT_GIT_EMAIL }}
run: |
set -euo pipefail
source /tmp/proposer.env
[[ "${SKIP:-0}" != "1" ]] || { echo "Skipped"; exit 0; }
git config user.name "${BOT_GIT_NAME:-archicratie-bot}"
git config user.email "${BOT_GIT_EMAIL:-bot@archicratie.local}"
START_SHA="$(git rev-parse HEAD)"
BR="$BATCH_BRANCH"
echo "BRANCH=$BR" >> /tmp/proposer.env
git checkout -b "$BR"
# Env consumed by scripts/apply-ticket.mjs.
export GITEA_OWNER="$OWNER"
export GITEA_REPO="$REPO"
export FORGE_API="$API_BASE"
LOG="/tmp/proposer-apply.log"
: > "$LOG"
RC=0
FAILED_ISSUE=""
# Apply tickets one by one; stop at the first failure and record which
# ticket broke (set +e/-e so the non-zero rc can be captured under -e).
for ISSUE in $TARGET_ISSUES; do
echo "" >> "$LOG"
echo "== ticket #$ISSUE ==" >> "$LOG"
set +e
(cd "$APP_DIR" && node scripts/apply-ticket.mjs "$ISSUE" --alias --commit) >> "$LOG" 2>&1
STEP_RC=$?
set -e
if [[ "$STEP_RC" -ne 0 ]]; then
RC="$STEP_RC"
FAILED_ISSUE="$ISSUE"
break
fi
done
echo "APPLY_RC=$RC" >> /tmp/proposer.env
echo "FAILED_ISSUE=${FAILED_ISSUE}" >> /tmp/proposer.env
echo "Apply log (tail):"
tail -n 220 "$LOG" || true
# NOOP=1 when no ticket produced a commit (HEAD unchanged).
END_SHA="$(git rev-parse HEAD)"
if [[ "$RC" -ne 0 ]]; then
echo "NOOP=0" >> /tmp/proposer.env
exit 0
fi
if [[ "$START_SHA" == "$END_SHA" ]]; then
echo "NOOP=1" >> /tmp/proposer.env
else
echo "NOOP=0" >> /tmp/proposer.env
echo "END_SHA=$END_SHA" >> /tmp/proposer.env
fi
- name: Rebase bot branch on latest main
continue-on-error: true
run: |
set -euo pipefail
source /tmp/proposer.env || true
[[ "${SKIP:-0}" != "1" ]] || exit 0
[[ "${APPLY_RC:-0}" == "0" ]] || exit 0
[[ "${NOOP:-0}" == "0" ]] || exit 0
LOG="/tmp/proposer-apply.log"
git fetch origin "$DEFAULT_BRANCH"
# A rebase may legitimately fail on conflicts; record the rc instead of
# dying, and abort the half-done rebase so the worktree stays clean.
set +e
git rebase "origin/$DEFAULT_BRANCH" >> "$LOG" 2>&1
RC=$?
set -e
if [[ "$RC" -ne 0 ]]; then
git rebase --abort || true
fi
echo "REBASE_RC=$RC" >> /tmp/proposer.env
echo "Rebase log (tail):"
tail -n 220 "$LOG" || true
- name: Comment issues on failure
if: ${{ always() }}
env:
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
run: |
set -euo pipefail
source /tmp/proposer.env || true
[[ "${SKIP:-0}" != "1" ]] || exit 0
APPLY_RC="${APPLY_RC:-0}"
REBASE_RC="${REBASE_RC:-0}"
if [[ "$APPLY_RC" == "0" && "$REBASE_RC" == "0" ]]; then
echo "No failure detected"
exit 0
fi
test -n "${FORGE_TOKEN:-}" || exit 0
# Attach the tail of the apply/rebase log (CRs stripped) to the comment.
if [[ -f /tmp/proposer-apply.log ]]; then
BODY="$(tail -n 160 /tmp/proposer-apply.log | sed 's/\r$//')"
else
BODY="(no proposer log found)"
fi
export BODY APPLY_RC REBASE_RC FAILED_ISSUE
if [[ "$APPLY_RC" != "0" ]]; then
export FAILURE_KIND="apply"
else
export FAILURE_KIND="rebase"
fi
# Build the JSON comment payload with node for correct escaping.
node --input-type=module - <<'NODE' > /tmp/proposer.failure.comment.json
const body = process.env.BODY || "";
const applyRc = process.env.APPLY_RC || "0";
const rebaseRc = process.env.REBASE_RC || "0";
const failedIssue = process.env.FAILED_ISSUE || "unknown";
const kind = process.env.FAILURE_KIND || "apply";
const msg =
kind === "apply"
? `Batch proposer failed on ticket #${failedIssue} (rc=${applyRc}).\n\n\`\`\`\n${body}\n\`\`\`\n`
: `Rebase proposer failed on main (rc=${rebaseRc}).\n\n\`\`\`\n${body}\n\`\`\`\n`;
process.stdout.write(JSON.stringify({ body: msg }));
NODE
# Notify every ticket in the batch (best effort; comment failures ignored).
for ISSUE in ${TARGET_ISSUES:-}; do
curl -fsS -X POST \
-H "Authorization: token $FORGE_TOKEN" \
-H "Content-Type: application/json" \
"$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE/comments" \
--data-binary @/tmp/proposer.failure.comment.json || true
done
- name: Late guard against duplicate batch materialization
run: |
set -euo pipefail
source /tmp/proposer.env || true
[[ "${SKIP:-0}" != "1" ]] || exit 0
[[ "${APPLY_RC:-0}" == "0" ]] || exit 0
[[ "${REBASE_RC:-0}" == "0" ]] || exit 0
[[ "${NOOP:-0}" == "0" ]] || exit 0
# Another run may have pushed this batch branch while we were working;
# if the remote tip differs from our local result, yield to that run.
REMOTE_SHA="$(git ls-remote --heads origin "$BATCH_BRANCH" | awk 'NR==1 {print $1}')"
if [[ -n "${REMOTE_SHA:-}" && "${REMOTE_SHA}" != "${END_SHA:-}" ]]; then
echo 'SKIP=1' >> /tmp/proposer.env
echo 'SKIP_REASON="batch_branch_already_materialized"' >> /tmp/proposer.env
echo "OPEN_PR_BRANCH=${BATCH_BRANCH}" >> /tmp/proposer.env
echo "Remote batch branch already exists at $REMOTE_SHA -> skip duplicate push/PR"
exit 0
fi
echo "Late guard OK"
- name: Push bot branch
if: ${{ always() }}
env:
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
run: |
set -euo pipefail
source /tmp/proposer.env || true
[[ "${SKIP:-0}" != "1" ]] || exit 0
[[ "${APPLY_RC:-0}" == "0" ]] || { echo "Apply failed -> skip push"; exit 0; }
[[ "${REBASE_RC:-0}" == "0" ]] || { echo "Rebase failed -> skip push"; exit 0; }
[[ "${NOOP:-0}" == "0" ]] || { echo "No-op -> skip push"; exit 0; }
[[ -n "${BRANCH:-}" ]] || { echo "BRANCH unset -> skip push"; exit 0; }
# Build an authenticated clone URL (oauth2:<token>@host) via node's URL
# parser so the token is URL-encoded correctly.
AUTH_URL="$(node --input-type=module -e '
const [clone, tok] = process.argv.slice(1);
const u = new URL(clone);
u.username = "oauth2";
u.password = tok;
console.log(u.toString());
' "$CLONE_URL" "$FORGE_TOKEN")"
git remote set-url origin "$AUTH_URL"
git push -u origin "$BRANCH"
# Create the batch PR on the forge, then for every target issue: post a
# closing comment, close the issue via PATCH, and verify the close took
# effect. Idempotent: if an open PR for this batch already exists (matched
# by head branch or by the "Batch-Key:" marker in the PR body), exit early.
- name: Create PR + comment issues + close issues
if: ${{ always() }}
env:
FORGE_TOKEN: ${{ secrets.FORGE_TOKEN }}
run: |
set -euo pipefail
source /tmp/proposer.env || true
# Same success-exit guard chain as the push step.
[[ "${SKIP:-0}" != "1" ]] || exit 0
[[ "${APPLY_RC:-0}" == "0" ]] || exit 0
[[ "${REBASE_RC:-0}" == "0" ]] || exit 0
[[ "${NOOP:-0}" == "0" ]] || exit 0
[[ -n "${BRANCH:-}" ]] || { echo "BRANCH unset -> skip PR"; exit 0; }
# Unlike the guards above, a missing token is a hard failure (exit 1).
test -n "${FORGE_TOKEN:-}" || { echo "Missing FORGE_TOKEN"; exit 1; }
# Fetch currently-open PRs to deduplicate against.
# NOTE(review): limit=100 — assumes fewer than 100 open PRs; confirm.
OPEN_PRS_JSON="$(curl -fsS \
-H "Authorization: token $FORGE_TOKEN" \
-H "Accept: application/json" \
"$API_BASE/api/v1/repos/$OWNER/$REPO/pulls?state=open&limit=100")"
# Data is passed to Node via the environment (not argv) to avoid any
# shell-quoting issues with arbitrary JSON.
export OPEN_PRS_JSON BATCH_BRANCH BATCH_KEY
EXISTING_PR_URL="$(node --input-type=module -e '
const pulls = JSON.parse(process.env.OPEN_PRS_JSON || "[]");
const branch = String(process.env.BATCH_BRANCH || "");
const key = String(process.env.BATCH_KEY || "");
const current = Array.isArray(pulls)
? pulls.find((pr) => {
const ref = String(pr?.head?.ref || "");
const body = String(pr?.body || "");
return (branch && ref === branch) || (key && body.includes(`Batch-Key: ${key}`));
})
: null;
process.stdout.write(current ? String(current.html_url || current.url || "") : "");
')"
if [[ -n "${EXISTING_PR_URL:-}" ]]; then
echo "PR already exists for this batch: $EXISTING_PR_URL"
exit 0
fi
# Title: single-ticket batches reference the ticket number directly.
if [[ "${TARGET_COUNT:-0}" == "1" ]]; then
PR_TITLE="proposer: apply ticket #${TARGET_PRIMARY_ISSUE}"
else
PR_TITLE="proposer: apply ${TARGET_COUNT} tickets on ${TARGET_CHEMIN}"
fi
export PR_TITLE TARGET_CHEMIN TARGET_ISSUES BRANCH END_SHA DEFAULT_BRANCH OWNER BATCH_KEY
# Build the PR creation payload as JSON on disk; the "Batch-Key:" line in
# the body is what the dedupe scan above keys on for future runs.
node --input-type=module -e '
import fs from "node:fs";
const issues = String(process.env.TARGET_ISSUES || "")
.trim()
.split(/\s+/)
.filter(Boolean);
const body = [
`PR auto depuis ticket${issues.length > 1 ? "s" : ""} ${issues.map((n) => `#${n}`).join(", ")} (state/approved).`,
"",
`- Chemin: ${process.env.TARGET_CHEMIN || "(inconnu)"}`,
"- Tickets:",
...issues.map((n) => ` - #${n}`),
`- Branche: ${process.env.BRANCH || ""}`,
`- Commit: ${process.env.END_SHA || "unknown"}`,
`- Batch-Key: ${process.env.BATCH_KEY || ""}`,
"",
"Merge si CI OK."
].join("\n");
fs.writeFileSync(
"/tmp/proposer.pr.json",
JSON.stringify({
title: process.env.PR_TITLE || "proposer: apply tickets",
body,
base: process.env.DEFAULT_BRANCH || "main",
head: `${process.env.OWNER}:${process.env.BRANCH}`,
allow_maintainer_edit: true
})
);
'
# Create the PR; -f makes curl fail (and the step abort) on HTTP errors.
PR_JSON="$(curl -fsS -X POST \
-H "Authorization: token $FORGE_TOKEN" \
-H "Content-Type: application/json" \
"$API_BASE/api/v1/repos/$OWNER/$REPO/pulls" \
--data-binary @/tmp/proposer.pr.json)"
# Response JSON is passed as argv[1]; with `node -e`, argv[1] is the first
# script argument.
PR_URL="$(node --input-type=module -e 'const pr = JSON.parse(process.argv[1] || "{}"); console.log(pr.html_url || pr.url || "");' "$PR_JSON")"
test -n "$PR_URL" || {
echo "PR URL missing. Raw: $PR_JSON"
exit 1
}
# Per-issue: comment with the PR link, close, then verify closure.
for ISSUE in $TARGET_ISSUES; do
export ISSUE PR_URL
node --input-type=module -e '
import fs from "node:fs";
const issue = process.env.ISSUE || "";
const url = process.env.PR_URL || "";
const msg =
`PR proposer creee pour le ticket #${issue} : ${url}\n\n` +
`Le ticket est cloture automatiquement ; la discussion peut se poursuivre dans la PR.`;
fs.writeFileSync(
"/tmp/proposer.issue.close.comment.json",
JSON.stringify({ body: msg })
);
'
curl -fsS -X POST \
-H "Authorization: token $FORGE_TOKEN" \
-H "Content-Type: application/json" \
"$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE/comments" \
--data-binary @/tmp/proposer.issue.close.comment.json
curl -fsS -X PATCH \
-H "Authorization: token $FORGE_TOKEN" \
-H "Content-Type: application/json" \
"$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE" \
--data-binary '{"state":"closed"}'
# Read back the issue state; streaming stdin into Node avoids argv limits.
ISSUE_STATE="$(curl -fsS \
-H "Authorization: token $FORGE_TOKEN" \
-H "Accept: application/json" \
"$API_BASE/api/v1/repos/$OWNER/$REPO/issues/$ISSUE" | \
node --input-type=module -e 'let s=""; process.stdin.on("data", d => s += d); process.stdin.on("end", () => { const j = JSON.parse(s || "{}"); process.stdout.write(String(j.state || "")); });')"
# Hard-fail if the PATCH did not actually close the issue.
[[ "$ISSUE_STATE" == "closed" ]] || {
echo "Issue #$ISSUE is still not closed after PATCH"
exit 1
}
done
echo "PR: $PR_URL"
# Final step: since earlier steps run under always() and swallow failures
# to keep their guards simple, this step re-reads the recorded return codes
# and propagates the first non-zero one so the job reports red correctly.
- name: Finalize
if: ${{ always() }}
run: |
set -euo pipefail
source /tmp/proposer.env || true
# A deliberate skip is a success.
[[ "${SKIP:-0}" != "1" ]] || exit 0
if [[ "${APPLY_RC:-0}" != "0" ]]; then
echo "Apply failed (rc=${APPLY_RC})"
exit "${APPLY_RC}"
fi
if [[ "${REBASE_RC:-0}" != "0" ]]; then
echo "Rebase failed (rc=${REBASE_RC})"
exit "${REBASE_RC}"
fi
echo "Proposer queue OK"

View File

@@ -3,7 +3,7 @@ on: [push, workflow_dispatch]
jobs: jobs:
smoke: smoke:
runs-on: ubuntu-latest runs-on: mac-ci
steps: steps:
- run: node -v && npm -v - run: node -v && npm -v
- run: echo "runner OK" - run: echo "runner OK"

8
.gitignore vendored
View File

@@ -3,6 +3,10 @@
.env.* .env.*
!.env.example !.env.example
# dev-only
public/_auth/whoami
public/_auth/whoami/*
# --- local backups --- # --- local backups ---
*.bak *.bak
*.bak.* *.bak.*
@@ -24,3 +28,7 @@ public/favicon_io.zip
# macOS # macOS
.DS_Store .DS_Store
# local temp workspace
.tmp/
public/__ops/health.json

View File

@@ -86,6 +86,10 @@ function rehypeDedupeIds() {
} }
export default defineConfig({ export default defineConfig({
legacy: {
collectionsBackwardsCompat: true,
},
output: "static", output: "static",
trailingSlash: "always", trailingSlash: "always",
site: process.env.PUBLIC_SITE ?? "http://localhost:4321", site: process.env.PUBLIC_SITE ?? "http://localhost:4321",

View File

@@ -0,0 +1,11 @@
{
"accepted_resets": {
"archicrat-ia/prologue/index.html": "Reset intentionnel des ancres après réimport DOCX et révision substantielle du prologue depuis la source officielle. Site neuf, sans annotations ni compatibilité descendante à préserver.",
"archicrat-ia/chapitre-1/index.html": "Reset intentionnel des ancres après révision doctrinale substantielle du chapitre 1. Site neuf, sans annotations ni compatibilité descendante à préserver.",
"archicrat-ia/chapitre-2/index.html": "Reset intentionnel des ancres après restauration doctrinale substantielle du chapitre 2 depuis la bonne source officielle. Site neuf, sans annotations ni compatibilité descendante à préserver.",
"archicrat-ia/chapitre-3/index.html": "Reset intentionnel des ancres après réimport DOCX et perfectionnement doctrinal substantiel du chapitre 3 depuis la source officielle. Site neuf, sans annotations ni compatibilité descendante à préserver.",
"archicrat-ia/chapitre-4/index.html": "Reset intentionnel des ancres après réimport DOCX et stabilisation doctrinale substantielle du chapitre 4 depuis la source officielle. Site neuf, sans annotations ni compatibilité descendante à préserver.",
"archicrat-ia/chapitre-5/index.html": "Reset intentionnel des ancres après réimport DOCX et stabilisation doctrinale substantielle du chapitre 5 depuis la source officielle. Site neuf, sans annotations ni compatibilité descendante à préserver.",
"archicrat-ia/conclusion/index.html": "Reset intentionnel des ancres après réimport DOCX et révision substantielle de la conclusion depuis la source officielle. Site neuf, sans annotations ni compatibilité descendante à préserver."
}
}

View File

@@ -25,6 +25,19 @@ Objectif : déployer une nouvelle version du site sur le NAS (DS220+) sans jamai
➡️ Déploiement = `docs/DEPLOY_PROD_SYNOLOGY_DS220.md` (procédure détaillée, à jour). ➡️ Déploiement = `docs/DEPLOY_PROD_SYNOLOGY_DS220.md` (procédure détaillée, à jour).
## Mise à jour (2026-03-03) — Gate CI de déploiement (SKIP / HOTPATCH / FULL) + preuves A/B
La procédure de déploiement “vivante” est désormais pilotée par **Gitea Actions** via le workflow :
- `.gitea/workflows/deploy-staging-live.yml`
Ce workflow décide automatiquement :
- **FULL** (rebuild + restart blue + green) dès qu’un changement impacte le build (ex: `src/content/`, `src/pages/`, `scripts/`, `src/anchors/`, etc.)
- **HOTPATCH** (patch JSON + copie media) quand le changement ne concerne que `src/annotations/` et/ou `public/media/`
- **SKIP** sinon
Les preuves et la procédure de test reproductible A/B sont documentées dans :
➡️ `docs/runbooks/DEPLOY-BLUE-GREEN.md` → section “CI Deploy gate (merge-proof) + Tests A/B + preuve alias injection”.
## Schéma (résumé, sans commandes) ## Schéma (résumé, sans commandes)
- Ne jamais toucher au slot live. - Ne jamais toucher au slot live.

File diff suppressed because it is too large Load Diff

View File

@@ -202,4 +202,33 @@ docker compose logs --tail=200 web_blue
docker compose logs --tail=200 web_green docker compose logs --tail=200 web_green
# Si tu veux suivre en live : # Si tu veux suivre en live :
docker compose logs -f web_green docker compose logs -f web_green
## Historique synthétique (2026-03-03) — Stabilisation CI/CD “zéro surprise”
### Problème initial observé
- Déploiement parfois lancé en “hotpatch” alors qu’un rebuild était nécessaire.
- Sur merge commits, la détection de fichiers modifiés pouvait être ambiguë.
- Résultat : besoin de `force=1` manuel pour éviter des incohérences.
### Correctif appliqué
- Gate CI rendu **merge-proof** :
- lecture de `BEFORE` et `AFTER` depuis `event.json`
- calcul des fichiers modifiés via `git diff --name-only BEFORE AFTER`
- Politique de décision stabilisée :
- FULL auto dès qu’un changement impacte build/runtime (content/pages/scripts/anchors/etc.)
- HOTPATCH auto uniquement pour annotations/media
### Preuves
- Test A (touch src/content) :
- Gate flags: HAS_FULL=1 HAS_HOTPATCH=0 → MODE=full
- Test B (touch src/annotations) :
- Gate flags: HAS_FULL=0 HAS_HOTPATCH=1 → MODE=hotpatch
### Audit post-déploiement (preuves côté NAS)
- 8081 + 8082 répondent HTTP 200
- `/para-index.json` + `/annotations-index.json` OK
- Aliases injectés visibles dans HTML via `.para-alias` quand alias présent

View File

@@ -1,51 +1,147 @@
# START-HERE — Archicratie / Édition Web (v2) # START-HERE — Archicratie / Édition Web (v3)
> Onboarding + exploitation “nickel chrome” (DEV → Gitea → CI → Release → Blue/Green → Edge/SSO) > Onboarding + exploitation “nickel chrome” (DEV → Gitea → CI → Release → Blue/Green → Edge/SSO → localhost auto-sync)
## 0) TL;DR (la règle dor) ## 0) TL;DR (la règle dor)
- **Gitea = source canonique**.
- **main est protégé** : toute modification passe par **branche → PR → CI → merge**. - **Gitea = source canonique**.
- **Le NAS nest pas la source** : si un hotfix est fait sur NAS, on **backporte** via PR immédiatement. - **`main` est protégée** : toute modification passe par **branche → PR → CI → merge**.
- **Le site est statique Astro** : la prod sert du HTML (nginx), laccès est contrôlé au niveau reverse-proxy (Traefik + Authelia). - **Le NAS nest pas la source** : si un hotfix est fait sur NAS, il doit être **backporté immédiatement** via PR.
- **Le site est statique Astro** : la prod sert du HTML via nginx ; laccès est contrôlé au niveau reverse-proxy (Traefik + Authelia).
- **Le localhost automatique nest pas le repo de dev** : il tourne depuis un **worktree dédié**, synchronisé sur `origin/main`.
---
## 1) Architecture mentale (ultra simple) ## 1) Architecture mentale (ultra simple)
- **DEV (Mac Studio)** : édition + tests + commit + push
- **Gitea** : dépôt canon + PR + CI (CI.yaml) - **DEV canonique (Mac Studio)** : édition, dev, tests, commits, pushes
- **NAS (DS220+)** : déploiement “blue/green” - **Gitea** : dépôt canonique, PR, CI, workflows éditoriaux
- `web_blue` (staging upstream) → `127.0.0.1:8081` - **NAS (DS220+)** : déploiement blue/green
- `web_green` (live upstream)`127.0.0.1:8082` - `web_blue` → staging upstream → `127.0.0.1:8081`
- **Edge (Traefik)** : route les hosts - `web_green` → live upstream → `127.0.0.1:8082`
- **Edge (Traefik)** : routage des hosts
- `staging.archicratie...` → 8081 - `staging.archicratie...` → 8081
- `archicratie...` → 8082 - `archicratie...` → 8082
- **Authelia** devant, via middleware `chain-auth@file` - **Authelia** devant, via middleware `chain-auth@file`
- **Localhost auto-sync**
- un **repo canonique de développement**
- un **worktree localhost miroir de `origin/main`**
- un **agent de sync**
- un **agent Astro**
---
## 2) Répertoires & conventions (repo) ## 2) Répertoires & conventions (repo)
### 2.1 Contenu canon (édition) ### 2.1 Contenu canon (édition)
- `src/content/**` : contenu MD / MDX canon (Astro content collections)
- `src/pages/**` : routes Astro (index, [...slug], etc.) - `src/content/**` : contenu MD / MDX canon
- `src/components/**` : composants UI (SiteNav, TOC, SidePanel, etc.) - `src/pages/**` : routes Astro
- `src/layouts/**` : layouts (EditionLayout, SiteLayout) - `src/components/**` : composants UI
- `src/layouts/**` : layouts
- `src/styles/**` : CSS global - `src/styles/**` : CSS global
### 2.2 Annotations (pré-Édition “tickets”) ### 2.2 Annotations (pré-Édition “tickets”)
- `src/annotations/<workKey>/<slug>.yml` - `src/annotations/<workKey>/<slug>.yml`
- Exemple : `src/annotations/archicrat-ia/prologue.yml` - Exemple :
- Objectif : stocker “Références / Médias / Commentaires” par page et par paragraphe (`p-...`). `src/annotations/archicrat-ia/prologue.yml`
Objectif :
stocker “Références / Médias / Commentaires” par page et par paragraphe (`p-...`).
### 2.3 Scripts (tooling / build) ### 2.3 Scripts (tooling / build)
- `scripts/inject-anchor-aliases.mjs` : injection aliases dans dist
- `scripts/dedupe-ids-dist.mjs` : retire IDs dupliqués dans dist - `scripts/inject-anchor-aliases.mjs` : injection aliases dans `dist`
- `scripts/build-para-index.mjs` : index paragraphes (postbuild / predev) - `scripts/dedupe-ids-dist.mjs` : retrait IDs dupliqués
- `scripts/build-annotations-index.mjs` : index annotations (postbuild / predev) - `scripts/build-para-index.mjs` : index paragraphes
- `scripts/check-anchors.mjs` : contrat stabilité dancres (CI) - `scripts/build-annotations-index.mjs` : index annotations
- `scripts/check-anchors.mjs` : contrat stabilité dancres
- `scripts/check-annotations*.mjs` : sanity YAML + médias - `scripts/check-annotations*.mjs` : sanity YAML + médias
> Important : les scripts sont **partie intégrante** de la stabilité (IDs/ancres/indexation). > Important : ces scripts ne sont pas accessoires.
> On évite “la magie” : tout est scripté + vérifié. > Ils font partie du contrat de stabilité éditoriale.
## 3) Workflow Git “pro” (main protégé) ---
### 3.1 Cycle standard (toute modif)
en bash :
## 3) Les trois espaces à ne jamais confondre
### 3.1 Repo canonique de développement
```text
/Volumes/FunIA/dev/archicratie-edition/site
```
Usage :
- développement normal
- branches de travail
- nouvelles fonctionnalités
- corrections manuelles
- commits
- pushes
- PR
### 3.2 Worktree localhost miroir de `main`
```text
/Users/s-funia/ops-local/archicratie/localhost-worktree
```
Branche attendue :
```text
localhost-sync
```
Usage :
- exécuter le localhost automatique
- refléter `origin/main`
- ne jamais servir despace de développement
### 3.3 Ops local hors repo
```text
/Users/s-funia/ops-local/archicratie
```
Usage :
- scripts dexploitation
- état
- logs
- automatisation `launchd`
---
## 4) Pourquoi cette séparation existe
Il ne faut pas utiliser le repo canonique de développement comme serveur localhost permanent.
Sinon on mélange :
- travail en cours
- commits non poussés
- essais temporaires
- état réellement publié sur `main`
Le résultat devient ambigu.
La séparation retenue est donc :
- **repo canonique** = espace de développement
- **worktree localhost** = miroir exécutable de `origin/main`
- **ops local** = scripts et automatisation
Cest cette séparation qui rend le système lisible, robuste et opérable.
---
## 5) Workflow Git “pro” (main protégée)
### 5.1 Cycle standard (toute modif)
```bash
git checkout main git checkout main
git pull --ff-only git pull --ff-only
@@ -60,37 +156,48 @@ npm run test:anchors
git add -A git add -A
git commit -m "xxx: description claire" git commit -m "xxx: description claire"
git push -u origin "$BR" git push -u origin "$BR"
```
### 3.2 PR vers main ### 5.2 PR vers `main`
Ouvrir PR dans Gitea - ouvrir une PR dans Gitea
- attendre une CI verte
- merger
- laisser les workflows faire le reste
CI doit être verte ### 5.3 Cas spécial : hotfix prod (NAS)
Merge PR → main On peut faire un hotfix durgence côté NAS si nécessaire.
### 3.3 Cas spécial : hotfix prod (NAS) Mais létat final doit toujours revenir dans Gitea :
On peut faire un hotfix “urgence” en prod/staging si nécessaire… - branche
- PR
- CI
- merge
MAIS : létat final doit revenir dans Gitea : branche → PR → CI → merge. ---
## 4) Déploiement (NAS) — principe ## 6) Déploiement (NAS) — principe
### 4.1 Release pack
On génère un pack “reproductible” (source + config + scripts) puis on déploie. ### 6.1 Release pack
### 4.2 Blue/Green On génère un pack reproductible, puis on déploie.
web_blue = staging upstream (8081) ### 6.2 Blue/Green
web_green = live upstream (8082) - `web_blue` = staging (`8081`)
- `web_green` = live (`8082`)
Edge Traefik sélectionne quel host pointe vers quel upstream. Le reverse-proxy choisit lupstream selon le host demandé.
## 5) Check-list “≤ 10 commandes” (happy path complet) ---
### 5.1 DEV (Mac)
## 7) Happy path complet
### 7.1 DEV (Mac)
```bash
git checkout main && git pull --ff-only git checkout main && git pull --ff-only
git checkout -b chore/my-change-$(date +%Y%m%d) git checkout -b chore/my-change-$(date +%Y%m%d)
@@ -99,55 +206,258 @@ rm -rf .astro node_modules/.vite dist
npm run build npm run build
npm run test:anchors npm run test:anchors
npm run dev npm run dev
```
### 5.2 Push + PR ### 7.2 Push + PR
```bash
git add -A git add -A
git commit -m "chore: my change" git commit -m "chore: my change"
git push -u origin chore/my-change-YYYYMMDD git push -u origin chore/my-change-YYYYMMDD
# ouvrir PR dans Gitea ```
### 5.3 Déploiement NAS (résumé) Puis ouvrir la PR dans Gitea.
Voir docs/runbooks/DEPLOY-BLUE-GREEN.md. ### 7.3 Déploiement NAS
## 6) Problèmes “classiques” + diagnostic rapide Voir :
### 6.1 “Le staging ne ressemble pas au local”
# Comparer upstream direct 8081 vs 8082 : ```text
docs/runbooks/DEPLOY-BLUE-GREEN.md
```
---
## 8) Localhost auto-sync — ce quil faut retenir
Le localhost automatique sert à voir **la vérité de `main`**, pas à développer du neuf.
### 8.1 Scripts principaux
#### Script de sync
```text
~/ops-local/archicratie/auto-sync-localhost.sh
```
Rôle :
- fetch `origin/main`
- réaligner le worktree localhost
- lancer `npm ci` si besoin
- redéclencher lagent Astro si nécessaire
#### Script Astro
```text
~/ops-local/archicratie/run-astro-localhost.sh
```
Rôle :
- lancer `astro dev`
- depuis le bon worktree
- avec le bon runtime Node
- sur `127.0.0.1:4321`
> Oui : ce script est nécessaire.
> Il isole proprement le lancement du serveur Astro dans un contexte `launchd` stable.
### 8.2 LaunchAgents
#### Agent sync
```text
~/Library/LaunchAgents/me.archicratie.localhost-sync.plist
```
#### Agent Astro
```text
~/Library/LaunchAgents/me.archicratie.localhost-astro.plist
```
### 8.3 Document de référence
Pour tout le détail dexploitation du localhost automatique, lire :
```text
docs/OPS-LOCALHOST-AUTO-SYNC.md
```
---
## 9) Règle dor : il y a deux usages locaux distincts
### 9.1 Voir ce qui est réellement sur `main`
Utiliser :
```text
http://127.0.0.1:4321
```
Ce localhost doit être considéré comme :
**un miroir local exécutable de `origin/main`**
### 9.2 Développer / tester une nouvelle fonctionnalité
Utiliser le repo canonique :
```bash
cd /Volumes/FunIA/dev/archicratie-edition/site
npm run dev
```
Donc :
- **localhost auto-sync** = vérité de `main`
- **localhost de dev manuel** = expérimentation en cours
Il ne faut pas les confondre.
---
## 10) Ce quil ne faut pas faire
### 10.1 Ne pas développer dans le worktree localhost
Le worktree localhost est piloté automatiquement.
Il peut être :
- réaligné
- nettoyé
- redémarré
Donc :
- pas de commits dedans
- pas de dev feature dedans
- pas dexpérimentation de fond dedans
### 10.2 Ne pas utiliser le repo canonique comme miroir auto-sync
Sinon on mélange :
- espace de dev
- état publié
- serveur local permanent
### 10.3 Ne pas remettre les scripts ops sur un volume externe
Les scripts dops doivent rester sous `HOME`.
Le fait de les mettre sous `/Volumes/...` a déjà provoqué des erreurs du type :
```text
Operation not permitted
```
### 10.4 Ne pas supprimer `run-astro-localhost.sh`
Ce script fait partie de larchitecture actuelle.
Le supprimer reviendrait à réintroduire le flou entre sync Git et exécution dAstro.
---
## 11) Commandes de contrôle essentielles
### 11.1 État global
```bash
~/ops-local/archicratie/doctor-localhost.sh
```
### 11.2 État Git
```bash
git -C ~/ops-local/archicratie/localhost-worktree rev-parse HEAD
git -C /Volumes/FunIA/dev/archicratie-edition/site ls-remote origin refs/heads/main
git -C ~/ops-local/archicratie/localhost-worktree branch --show-current
```
### 11.3 État LaunchAgents
```bash
launchctl print "gui/$(id -u)/me.archicratie.localhost-sync" | sed -n '1,160p'
launchctl print "gui/$(id -u)/me.archicratie.localhost-astro" | sed -n '1,160p'
```
### 11.4 État logs
```bash
tail -n 120 ~/ops-local/archicratie/logs/auto-sync-localhost.log
tail -n 120 ~/ops-local/archicratie/logs/astro-localhost.log
tail -n 80 ~/Library/Logs/archicratie-localhost-sync.err.log
tail -n 80 ~/Library/Logs/archicratie-localhost-astro.err.log
```
### 11.5 État serveur
```bash
lsof -nP -iTCP:4321 -sTCP:LISTEN
PID="$(lsof -tiTCP:4321 -sTCP:LISTEN | head -n 1)"
ps -p "$PID" -o pid=,command=
lsof -a -p "$PID" -d cwd
```
### 11.6 Vérification contenu
```bash
curl -s http://127.0.0.1:4321/archicrat-ia/prologue/ | grep -n "taxe Zucman"
```
---
## 12) Problèmes classiques + diagnostic
### 12.1 “Le staging ne ressemble pas au local”
Comparer les upstream directs :
```bash
curl -sS http://127.0.0.1:8081/ | head -n 2 curl -sS http://127.0.0.1:8081/ | head -n 2
curl -sS http://127.0.0.1:8082/ | head -n 2 curl -sS http://127.0.0.1:8082/ | head -n 2
```
# Vérifier quel routeur edge répond (header diag) : Vérifier le routeur edge :
```bash
curl -sSI -H 'Host: staging.archicratie.trans-hands.synology.me' http://127.0.0.1:18080/ \ curl -sSI -H 'Host: staging.archicratie.trans-hands.synology.me' http://127.0.0.1:18080/ \
| grep -iE 'HTTP/|location:|x-archi-router' | grep -iE 'HTTP/|location:|x-archi-router'
```
# Lire docs/runbooks/EDGE-TRAEFIK.md. Voir :
### 6.2 Canonical incorrect (localhost en prod) ```text
docs/runbooks/EDGE-TRAEFIK.md
```
Cause racine : site dans Astro = PUBLIC_SITE non injecté au build. ### 12.2 Canonical incorrect
Fix canonique : voir docs/runbooks/ENV-PUBLIC_SITE.md. Cause probable : `PUBLIC_SITE` mal injecté au build.
Test : Test :
```bash
curl -sS http://127.0.0.1:8082/ | grep -oE 'rel="canonical" href="[^"]+"' | head -1 curl -sS http://127.0.0.1:8082/ | grep -oE 'rel="canonical" href="[^"]+"' | head -1
```
### 6.3 Contrat “anchors” en échec après migration dURL Voir :
Quand on déplace des routes (ex: /archicratie/archicrat-ia/* → /archicrat-ia/*), le test dancres peut échouer même si les IDs nont pas changé, car les pages ont changé de chemin. ```text
docs/runbooks/ENV-PUBLIC_SITE.md
```
# Procédure safe : ### 12.3 Contrat anchors en échec après migration dURL
Backup baseline : Procédure safe :
```bash
cp -a tests/anchors-baseline.json /tmp/anchors-baseline.json.bak.$(date +%F-%H%M%S) cp -a tests/anchors-baseline.json /tmp/anchors-baseline.json.bak.$(date +%F-%H%M%S)
Mettre à jour les clés (chemins) sans toucher aux IDs :
node - <<'NODE' node - <<'NODE'
import fs from 'fs'; import fs from 'fs';
const p='tests/anchors-baseline.json'; const p='tests/anchors-baseline.json';
@@ -161,16 +471,213 @@ fs.writeFileSync(p, JSON.stringify(out,null,2)+'\n');
console.log('updated keys:', Object.keys(j).length, '->', Object.keys(out).length); console.log('updated keys:', Object.keys(j).length, '->', Object.keys(out).length);
NODE NODE
Re-run :
npm run test:anchors npm run test:anchors
```
## 7) Ce que létape 9 doit faire (orientation) ### 12.4 “Le localhost auto-sync ne montre pas les dernières modifs”
Stabiliser le pipeline “tickets → YAML annotations” Commande réflexe :
Formaliser la spec YAML + merge + anti-doublon (voir docs/EDITORIAL-ANNOTATIONS-SPEC.md) ```bash
~/ops-local/archicratie/doctor-localhost.sh
```
Durcir lonboarding (ce START-HERE + runbooks) Puis :
Éviter les régressions par tests (anchors / annotations / smoke) ```bash
git -C ~/ops-local/archicratie/localhost-worktree rev-parse HEAD
git -C /Volumes/FunIA/dev/archicratie-edition/site ls-remote origin refs/heads/main
```
Si les SHA diffèrent :
- le sync na pas tourné
- ou lagent sync a un problème
### 12.5 “Le SHA est bon mais le contenu web est faux”
Vérifier quel Astro écoute réellement :
```bash
lsof -nP -iTCP:4321 -sTCP:LISTEN
PID="$(lsof -tiTCP:4321 -sTCP:LISTEN | head -n 1)"
ps -p "$PID" -o pid=,command=
lsof -a -p "$PID" -d cwd
```
Attendu :
- commande contenant `astro dev`
- cwd = `~/ops-local/archicratie/localhost-worktree`
### 12.6 Erreur `EBADENGINE`
Cause probable :
- Node 23 utilisé au lieu de Node 22
Résolution :
- forcer `node@22` dans les scripts et les LaunchAgents
### 12.7 Erreur `Operation not permitted`
Cause probable :
- scripts dops placés sous `/Volumes/...`
Résolution :
- garder les scripts sous :
```text
~/ops-local/archicratie
```
### 12.8 Erreur `EPERM` sur `astro.mjs`
Cause probable :
- ancien worktree sur volume externe
- ancien chemin résiduel
- Astro lancé depuis un mauvais emplacement
Résolution :
- worktree localhost sous :
```text
~/ops-local/archicratie/localhost-worktree
```
- scripts cohérents avec ce chemin
- réinstallation propre via :
```bash
~/ops-local/archicratie/install-localhost-sync.sh
```
---
## 13) Redémarrage machine
Après reboot, le comportement attendu est :
1. le LaunchAgent sync se recharge
2. le LaunchAgent Astro se recharge
3. le worktree localhost est réaligné
4. Astro redémarre sur `127.0.0.1:4321`
### Vérification rapide après reboot
```bash
~/ops-local/archicratie/doctor-localhost.sh
```
Si nécessaire :
```bash
~/ops-local/archicratie/install-localhost-sync.sh
```
---
## 14) Procédure de secours manuelle
### Forcer un sync
```bash
~/ops-local/archicratie/auto-sync-localhost.sh
```
### Réinstaller proprement le dispositif local
```bash
~/ops-local/archicratie/install-localhost-sync.sh
```
### Diagnostic complet
```bash
~/ops-local/archicratie/doctor-localhost.sh
```
---
## 15) Décision dexploitation finale
La politique retenue est la suivante :
- **repo canonique** = espace de développement
- **worktree localhost** = miroir automatique de `main`
- **ops sous HOME** = scripts, logs, automation
- **LaunchAgent sync** = réalignement Git
- **LaunchAgent Astro** = exécution stable du serveur local
- **Astro local** = lancé uniquement depuis le worktree localhost
Cette séparation rend le dispositif plus :
- lisible
- robuste
- opérable
- antifragile
---
## 16) Résumé opératoire
### Pour voir la vérité de `main`
Ouvrir :
```text
http://127.0.0.1:4321
```
Le serveur doit provenir de :
```text
/Users/s-funia/ops-local/archicratie/localhost-worktree
```
### Pour développer
Travailler dans :
```text
/Volumes/FunIA/dev/archicratie-edition/site
```
avec les commandes habituelles.
### Pour réparer vite
```bash
~/ops-local/archicratie/doctor-localhost.sh
~/ops-local/archicratie/auto-sync-localhost.sh
```
---
## 17) Mémoire courte
Si un jour plus rien nest clair, repartir de ces commandes :
```bash
~/ops-local/archicratie/doctor-localhost.sh
git -C ~/ops-local/archicratie/localhost-worktree rev-parse HEAD
git -C /Volumes/FunIA/dev/archicratie-edition/site ls-remote origin refs/heads/main
lsof -nP -iTCP:4321 -sTCP:LISTEN
```
Puis lire :
```bash
tail -n 120 ~/ops-local/archicratie/logs/auto-sync-localhost.log
tail -n 120 ~/ops-local/archicratie/logs/astro-localhost.log
```
---
## 18) Statut actuel visé
Quand tout fonctionne correctement :
- le worktree localhost pointe sur le même SHA que `origin/main`
- `astro dev` écoute sur `127.0.0.1:4321`
- son cwd est `~/ops-local/archicratie/localhost-worktree`
- le contenu servi correspond au contenu mergé sur `main`
Cest létat de référence à préserver.

View File

@@ -199,4 +199,348 @@ Ne jamais modifier dist/ “à la main” sur NAS.
Si un hotfix prod est indispensable : documenter et backporter via PR Gitea. Si un hotfix prod est indispensable : documenter et backporter via PR Gitea.
Le canonical dépend du build : PUBLIC_SITE doit être injecté (voir runbook ENV-PUBLIC_SITE). Le canonical dépend du build : PUBLIC_SITE doit être injecté (voir runbook ENV-PUBLIC_SITE).
## 10) CI Deploy (Gitea Actions) — Gate SKIP / HOTPATCH / FULL (merge-proof) + preuves
Cette section documente le comportement **canonique** du workflow :
- `.gitea/workflows/deploy-staging-live.yml`
Objectif : **zéro surprise**.
On ne veut plus “penser à force=1”.
Le gate doit décider automatiquement, y compris sur des **merge commits**.
### 10.1 — Principe (ce que fait réellement le gate)
Le job `deploy` calcule les fichiers modifiés entre :
- `BEFORE` = commit précédent (avant le push sur main)
- `AFTER` = commit actuel (après le push / merge sur main)
Puis il classe le déploiement dans un mode :
- **MODE=full**
- rebuild image + restart `archicratie-web-blue` (8081) + `archicratie-web-green` (8082)
- warmup endpoints (para-index, annotations-index, pagefind.js)
- vérification canonical staging + live
- **MODE=hotpatch**
- rebuild dun `annotations-index.json` consolidé depuis `src/annotations/**`
- patch direct dans les conteneurs en cours dexécution (blue+green)
- copie des médias modifiés `public/media/**` vers `/usr/share/nginx/html/media/**`
- smoke sur `/annotations-index.json` des deux ports
- **MODE=skip**
- pas de déploiement (on évite le bruit)
⚠️ Important : le mode “hotpatch” **ne rebuild pas** Astro.
Donc toute modification de contenu, routes, scripts, anchors, etc. doit déclencher **full**.
### 10.2 — Matrice de décision (règles officielles)
Le gate définit deux flags :
- `HAS_FULL=1` si changement “build-impacting”
- `HAS_HOTPATCH=1` si changement “annotations/media only”
Règle de priorité :
1) Si `HAS_FULL=1`**MODE=full**
2) Sinon si `HAS_HOTPATCH=1`**MODE=hotpatch**
3) Sinon → **MODE=skip**
#### 10.2.1 — Changements qui déclenchent FULL (build-impacting)
Exemples typiques (non exhaustif, mais on couvre le cœur) :
- `src/content/**` (contenu MD/MDX)
- `src/pages/**` (routes Astro)
- `src/anchors/**` (aliases dancres)
- `scripts/**` (tooling postbuild : injection, index, tests)
- `src/layouts/**`, `src/components/**`, `src/styles/**` (rendu et scripts inline)
- `astro.config.mjs`, `package.json`, `package-lock.json`
- `Dockerfile`, `docker-compose.yml`, `nginx.conf`
- `.gitea/workflows/**` (changement infra CI/CD)
=> On veut **full** pour garantir cohérence et éviter “site partiellement mis à jour”.
#### 10.2.2 — Changements qui déclenchent HOTPATCH (sans rebuild)
Uniquement :
- `src/annotations/**` (shards YAML)
- `public/media/**` (assets média)
=> On veut hotpatch pour vitesse et éviter rebuild NAS.
### 10.3 — “Merge-proof” : pourquoi on ne lit PAS seulement `git show $SHA`
Sur un merge commit, `git show --name-only $SHA` peut être trompeur selon le contexte.
La méthode robuste est :
- utiliser `event.json` (Gitea Actions) pour récupérer `before` et `after`
- calculer `git diff --name-only BEFORE AFTER`
Cest ce qui rend le gate **merge-proof**.
### 10.4 — Tests de preuve A/B (reproductibles)
Ces tests valident le gate sans ambiguïté.
But : vérifier que le mode choisi est EXACTEMENT celui attendu.
#### Test A — toucher `src/content/...` (FULL auto)
1) Créer une branche test
2) Modifier 1 fichier dans `src/content/` (ex : ajouter une ligne de commentaire non destructive)
3) PR → merge dans `main`
4) Vérifier dans `deploy-staging-live.yml` :
Attendus :
- `Gate flags: HAS_FULL=1 HAS_HOTPATCH=0`
- `✅ build-impacting change -> MODE=full (rebuild+restart)`
- Les étapes FULL (blue puis green) sexécutent réellement
#### Test B — toucher `src/annotations/...` uniquement (HOTPATCH auto)
1) Créer une branche test
2) Modifier 1 fichier sous `src/annotations/**` (ex: un champ comment, ts, etc.)
3) PR → merge dans `main`
4) Vérifier dans `deploy-staging-live.yml` :
Attendus :
- `Gate flags: HAS_FULL=0 HAS_HOTPATCH=1`
- `✅ annotations/media change -> MODE=hotpatch`
- Les étapes FULL sont “skip” (durée 0s)
- Létape HOTPATCH sexécute réellement
### 10.5 — Preuve opérationnelle côté NAS (2 URLs + 2 commandes)
But : prouver que staging+live servent bien les endpoints essentiels (et que le déploiement na pas “fait semblant”).
#### 10.5.1 — Deux URLs à vérifier (staging et live)
- Staging (blue) : `http://127.0.0.1:8081/`
- Live (green) : `http://127.0.0.1:8082/`
#### 10.5.2 — Deux commandes minimales (zéro débat)
```bash
curl -fsSI http://127.0.0.1:8081/ | head -n 1
curl -fsSI http://127.0.0.1:8082/ | head -n 1
```
---
## 10) CI Deploy (Gitea Actions) — Gate SKIP / HOTPATCH / FULL (merge-proof) + preuves
Cette section documente le comportement **canonique** du workflow :
- `.gitea/workflows/deploy-staging-live.yml`
Objectif : **zéro surprise**.
On ne veut plus “penser à force=1”.
Le gate doit décider automatiquement, y compris sur des **merge commits**.
### 10.1 — Principe (ce que fait réellement le gate)
Le job `deploy` calcule les fichiers modifiés entre :
- `BEFORE` = commit précédent (avant le push sur main)
- `AFTER` = commit actuel (après le push / merge sur main)
Puis il classe le déploiement dans un mode :
- **MODE=full**
- rebuild image + restart `archicratie-web-blue` (8081) + `archicratie-web-green` (8082)
- warmup endpoints (para-index, annotations-index, pagefind.js)
- vérification canonical staging + live
- **MODE=hotpatch**
- rebuild dun `annotations-index.json` consolidé depuis `src/annotations/**`
- patch direct dans les conteneurs en cours dexécution (blue+green)
- copie des médias modifiés `public/media/**` vers `/usr/share/nginx/html/media/**`
- smoke sur `/annotations-index.json` des deux ports
- **MODE=skip**
- pas de déploiement (on évite le bruit)
⚠️ Important : le mode “hotpatch” **ne rebuild pas** Astro.
Donc toute modification de contenu, routes, scripts, anchors, etc. doit déclencher **full**.
### 10.2 — Matrice de décision (règles officielles)
Le gate définit deux flags :
- `HAS_FULL=1` si changement “build-impacting”
- `HAS_HOTPATCH=1` si changement “annotations/media only”
Règle de priorité :
1) Si `HAS_FULL=1` → **MODE=full**
2) Sinon si `HAS_HOTPATCH=1` → **MODE=hotpatch**
3) Sinon → **MODE=skip**
#### 10.2.1 — Changements qui déclenchent FULL (build-impacting)
Exemples typiques (non exhaustif, mais on couvre le cœur) :
- `src/content/**` (contenu MD/MDX)
- `src/pages/**` (routes Astro)
- `src/anchors/**` (aliases dancres)
- `scripts/**` (tooling postbuild : injection, index, tests)
- `src/layouts/**`, `src/components/**`, `src/styles/**` (rendu et scripts inline)
- `astro.config.mjs`, `package.json`, `package-lock.json`
- `Dockerfile`, `docker-compose.yml`, `nginx.conf`
- `.gitea/workflows/**` (changement infra CI/CD)
=> On veut **full** pour garantir cohérence et éviter “site partiellement mis à jour”.
#### 10.2.2 — Changements qui déclenchent HOTPATCH (sans rebuild)
Uniquement :
- `src/annotations/**` (shards YAML)
- `public/media/**` (assets média)
=> On veut hotpatch pour vitesse et éviter rebuild NAS.
### 10.3 — “Merge-proof” : pourquoi on ne lit PAS seulement `git show $SHA`
Sur un merge commit, `git show --name-only $SHA` peut être trompeur selon le contexte.
La méthode robuste est :
- utiliser `event.json` (Gitea Actions) pour récupérer `before` et `after`
- calculer `git diff --name-only BEFORE AFTER`
Cest ce qui rend le gate **merge-proof**.
### 10.4 — Tests de preuve A/B (reproductibles)
Ces tests valident le gate sans ambiguïté.
But : vérifier que le mode choisi est EXACTEMENT celui attendu.
#### Test A — toucher `src/content/...` (FULL auto)
1) Créer une branche test
2) Modifier 1 fichier dans `src/content/` (ex : ajouter une ligne de commentaire non destructive)
3) PR → merge dans `main`
4) Vérifier dans `deploy-staging-live.yml` :
Attendus :
- `Gate flags: HAS_FULL=1 HAS_HOTPATCH=0`
- `✅ build-impacting change -> MODE=full (rebuild+restart)`
- Les étapes FULL (blue puis green) sexécutent réellement
#### Test B — toucher `src/annotations/...` uniquement (HOTPATCH auto)
1) Créer une branche test
2) Modifier 1 fichier sous `src/annotations/**` (ex: un champ comment, ts, etc.)
3) PR → merge dans `main`
4) Vérifier dans `deploy-staging-live.yml` :
Attendus :
- `Gate flags: HAS_FULL=0 HAS_HOTPATCH=1`
- `✅ annotations/media change -> MODE=hotpatch`
- Les étapes FULL sont “skip” (durée 0s)
- Létape HOTPATCH sexécute réellement
### 10.5 — Preuve opérationnelle côté NAS (2 URLs + 2 commandes)
But : prouver que staging+live servent bien les endpoints essentiels (et que le déploiement na pas “fait semblant”).
#### 10.5.1 — Deux URLs à vérifier (staging et live)
- Staging (blue) : `http://127.0.0.1:8081/`
- Live (green) : `http://127.0.0.1:8082/`
#### 10.5.2 — Deux commandes minimales (zéro débat)
en bash :
curl -fsSI http://127.0.0.1:8081/ | head -n 1
curl -fsSI http://127.0.0.1:8082/ | head -n 1
Attendu : HTTP/1.1 200 OK des deux côtés.
10.6 — Preuve “alias injection” (ancre ancienne → nouvelle) sur une page
Contexte : lorsquun paragraphe change (ex: ticket “Proposer” appliqué),
lID de paragraphe peut changer, mais on doit préserver les liens anciens via :
src/anchors/anchor-aliases.json
injection build-time dans dist (span .para-alias)
10.6.1 — Check rapide (staging + live)
Remplacer OLD/NEW par tes ids réels :
Attendu : HTTP/1.1 200 OK des deux côtés.
10.6 — Preuve “alias injection” (ancre ancienne → nouvelle) sur une page
Contexte : lorsquun paragraphe change (ex: ticket “Proposer” appliqué),
lID de paragraphe peut changer, mais on doit préserver les liens anciens via :
src/anchors/anchor-aliases.json
injection build-time dans dist (span .para-alias)
10.6.1 — Check rapide (staging + live)
Remplacer OLD/NEW par tes ids réels :
OLD="p-1-60c7ea48"
NEW="p-1-a21087b0"
for P in 8081 8082; do
echo "=== $P ==="
HTML="$(curl -fsS "http://127.0.0.1:${P}/archicrat-ia/chapitre-3/" | tr -d '\r')"
echo "OLD count: $(printf '%s' "$HTML" | grep -o "$OLD" | wc -l | tr -d ' ')"
echo "NEW count: $(printf '%s' "$HTML" | grep -o "$NEW" | wc -l | tr -d ' ')"
printf '%s\n' "$HTML" | grep -nE "$OLD|$NEW|class=\"para-alias\"" | head -n 40 || true
done
Attendu :
présence dun alias : <span id="$OLD" class="para-alias"...>
présence du nouveau paragraphe : <p id="$NEW">...
10.6.2 — Check “lien ancien ne casse pas” (HTTP 200)
for P in 8081 8082; do
curl -fsSI "http://127.0.0.1:${P}/archicrat-ia/chapitre-3/#${OLD}" | head -n 1
done
Attendu : HTTP/1.1 200 OK et navigation fonctionnelle côté navigateur.
10.7 — Troubleshooting gate (symptômes typiques)
Symptom 1 : job bloqué “Set up job” très longtemps
Causes fréquentes :
runner indisponible / capacity saturée
runner ne récupère pas les tâches (fetch_timeout trop court + réseau instable)
erreur dans “Gate — decide …” qui casse bash (et donne limpression dun hang)
Commandes NAS (diagnostic rapide) :
docker ps --format 'table {{.Names}}\t{{.Status}}\t{{.Image}}' | grep -E 'gitea-act-runner|registry|archicratie-web'
docker logs --since 30m --tail 400 gitea-act-runner | tail -n 200
Symptom 2 : conditional binary operator expected
Cause :
test bash du type [[ "$X" == "1" && "$Y" == "2" ]] mal formé
variable vide non quotée
usage dun opérateur non supporté dans la shell effective
Fix :
set -euo pipefail
toujours quoter : [[ "${VAR:-}" == "..." ]]
logguer BEFORE/AFTER/FORCE et sassurer quils ne sont pas vides
Symptom 3 : le gate liste “trop de fichiers” alors quon a changé 1 seul fichier
Cause :
comparaison faite sur le mauvais range (ex: git show sur merge, ou mauvais parent)
Fix :
toujours utiliser git diff --name-only "$BEFORE" "$AFTER" (merge-proof)
confirmer dans le log : Gate ctx: BEFORE=... AFTER=...

1330
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -4,35 +4,33 @@
"version": "0.0.1", "version": "0.0.1",
"private": true, "private": true,
"scripts": { "scripts": {
"dev": "astro dev", "dev": "node scripts/write-dev-whoami.mjs && astro dev",
"preview": "astro preview", "preview": "astro preview",
"astro": "astro", "astro": "astro",
"clean": "rm -rf dist", "clean": "rm -rf dist",
"build": "astro build", "build": "astro build",
"build:clean": "npm run clean && npm run build", "build:clean": "npm run clean && npm run build",
"build:search": "pagefind --site dist",
"postbuild": "node scripts/inject-anchor-aliases.mjs && node scripts/dedupe-ids-dist.mjs && npx pagefind --site dist", "postbuild": "node scripts/inject-anchor-aliases.mjs && node scripts/dedupe-ids-dist.mjs && node scripts/build-para-index.mjs && node scripts/build-annotations-index.mjs && node scripts/purge-dist-dev-whoami.mjs && npm run build:search",
"import": "node scripts/import-docx.mjs", "import": "node scripts/import-docx.mjs",
"apply:ticket": "node scripts/apply-ticket.mjs", "apply:ticket": "node scripts/apply-ticket.mjs",
"audit:dist": "node scripts/audit-dist.mjs", "audit:dist": "node scripts/audit-dist.mjs",
"build:para-index": "node scripts/build-para-index.mjs",
"build:annotations-index": "node scripts/build-annotations-index.mjs",
"test:aliases": "node scripts/check-anchor-aliases.mjs", "test:aliases": "node scripts/check-anchor-aliases.mjs",
"test:anchors": "node scripts/check-anchors.mjs", "test:anchors": "node scripts/check-anchors.mjs",
"test:anchors:update": "node scripts/check-anchors.mjs --update", "test:anchors:update": "node scripts/check-anchors.mjs --update",
"test:annotations": "node scripts/check-annotations.mjs",
"test": "npm run test:aliases && npm run build:clean && npm run audit:dist && node scripts/verify-anchor-aliases-in-dist.mjs && npm run test:anchors && node scripts/check-inline-js.mjs", "test:annotations:media": "node scripts/check-annotations-media.mjs",
"test": "npm run test:aliases && npm run build:clean && npm run audit:dist && node scripts/verify-anchor-aliases-in-dist.mjs && npm run test:anchors && npm run test:annotations && npm run test:annotations:media && node scripts/check-inline-js.mjs",
"ci": "CI=1 npm test" "ci": "CI=1 npm test"
}, },
"dependencies": { "dependencies": {
"@astrojs/mdx": "^4.3.13", "@astrojs/mdx": "^5.0.0",
"astro": "^5.16.11" "astro": "^6.0.2"
}, },
"devDependencies": { "devDependencies": {
"@astrojs/sitemap": "^3.7.0", "@astrojs/sitemap": "^3.7.1",
"mammoth": "^1.11.0", "mammoth": "^1.11.0",
"pagefind": "^1.4.0", "pagefind": "^1.4.0",
"rehype-autolink-headings": "^7.1.0", "rehype-autolink-headings": "^7.1.0",

View File

@@ -1 +1 @@
{"name":"","short_name":"","icons":[{"src":"/android-chrome-192x192.png","sizes":"192x192","type":"image/png"},{"src":"/android-chrome-512x512.png","sizes":"512x512","type":"image/png"}],"theme_color":"#ffffff","background_color":"#ffffff","display":"standalone"} {"name":"","short_name":"","icons":[{"src":"/android-chrome-192x192.png","sizes":"192x192","type":"image/png"},{"src":"/android-chrome-512x512.png","sizes":"512x512","type":"image/png"}],"theme_color":"#ffffff","background_color":"#ffffff","display":"standalone","orientation":"any"}

View File

@@ -0,0 +1,899 @@
#!/usr/bin/env node
// scripts/apply-annotation-ticket.mjs
//
// Applique un ticket Gitea "type/media | type/reference | type/comment" vers:
//
// ✅ src/annotations/<oeuvre>/<chapitre>/<paraId>.yml (sharding par paragraphe)
// ✅ public/media/<oeuvre>/<chapitre>/<paraId>/<file>
//
// Compat rétro : lit (si présent) l'ancien monolithe:
// src/annotations/<oeuvre>/<chapitre>.yml
// et deep-merge NON destructif dans le shard lors d'une nouvelle application,
// pour permettre une migration progressive sans perte.
//
// Robuste, idempotent, non destructif.
// DRY RUN si --dry-run
// Options: --dry-run --no-download --verify --strict --commit --close
//
// Env requis:
// FORGE_API = base API Gitea (LAN) ex: http://192.168.1.20:3000
// FORGE_TOKEN = PAT Gitea (repo + issues)
//
// Env optionnel:
// GITEA_OWNER / GITEA_REPO (sinon auto-détecté via git remote)
// ANNO_DIR (défaut: src/annotations)
// PUBLIC_DIR (défaut: public)
// MEDIA_ROOT (défaut URL: /media)
//
// Ticket attendu (body):
// Chemin: /archicrat-ia/chapitre-4/
// Ancre: #p-0-xxxxxxxx
// Type: type/media | type/reference | type/comment
//
// Exit codes:
// 0 ok
// 1 erreur fatale
// 2 refus (strict/verify/usage)
import fs from "node:fs/promises";
import path from "node:path";
import process from "node:process";
import { spawnSync } from "node:child_process";
import YAML from "yaml";
/* ---------------------------------- usage --------------------------------- */
// Print the CLI help text and terminate the process with `exitCode`.
// Called with 0 for --help/-h and 2 for argument errors (see args parsing below).
function usage(exitCode = 0) {
console.log(`
apply-annotation-ticket — applique un ticket SidePanel (media/ref/comment) vers src/annotations/ (shard par paragraphe)
Usage:
node scripts/apply-annotation-ticket.mjs <issue_number> [--dry-run] [--no-download] [--verify] [--strict] [--commit] [--close]
Flags:
--dry-run : n'écrit rien (affiche un aperçu)
--no-download : n'essaie pas de télécharger les pièces jointes (media)
--verify : vérifie que (page, ancre) existent (dist/para-index.json si dispo, sinon baseline)
--strict : refuse si URL ref invalide (http/https) OU caption media vide OU verify impossible
--commit : git add + git commit (commit dans la branche courante)
--close : ferme le ticket (nécessite --commit)
Env requis:
FORGE_API = base API Gitea (LAN) ex: http://192.168.1.20:3000
FORGE_TOKEN = PAT Gitea (repo + issues)
Env optionnel:
GITEA_OWNER / GITEA_REPO (sinon auto-détecté via git remote)
ANNO_DIR (défaut: src/annotations)
PUBLIC_DIR (défaut: public)
MEDIA_ROOT (défaut URL: /media)
Exit codes:
0 ok
1 erreur fatale
2 refus (strict/verify/close sans commit / incohérence)
`);
process.exit(exitCode);
}
/* ---------------------------------- args ---------------------------------- */
// CLI parsing: first positional argument is the Gitea issue number; everything
// else is a boolean flag. Invalid usage exits 2, missing runtime support exits 1.
const argv = process.argv.slice(2);
if (argv.length === 0 || argv.includes("--help") || argv.includes("-h")) usage(0);
// Issue number must be a strictly positive finite number.
const issueNum = Number(argv[0]);
if (!Number.isFinite(issueNum) || issueNum <= 0) {
console.error("❌ Numéro de ticket invalide.");
usage(2);
}
const DRY_RUN = argv.includes("--dry-run");
const NO_DOWNLOAD = argv.includes("--no-download");
const DO_VERIFY = argv.includes("--verify");
const STRICT = argv.includes("--strict");
const DO_COMMIT = argv.includes("--commit");
const DO_CLOSE = argv.includes("--close");
// Closing the ticket without committing would lose traceability, so --close
// is only allowed together with --commit.
if (DO_CLOSE && !DO_COMMIT) {
console.error("❌ --close nécessite --commit.");
process.exit(2);
}
// Global fetch() is required for the Gitea API calls (Node 18+).
if (typeof fetch !== "function") {
console.error("❌ fetch() indisponible. Utilise Node 18+.");
process.exit(1);
}
/* --------------------------------- config --------------------------------- */
// Resolved once at startup; all file I/O below is relative to these roots.
const CWD = process.cwd();
// ANNO_DIR is documented (in usage() above) as a full relative path with
// default "src/annotations". The previous code appended "annotations" to
// whatever ANNO_DIR held, so setting ANNO_DIR=src/annotations produced
// src/annotations/annotations. Honor the env var as the full path instead;
// the default is unchanged.
const ANNO_DIR = path.join(CWD, process.env.ANNO_DIR || path.join("src", "annotations"));
const PUBLIC_DIR = path.join(CWD, process.env.PUBLIC_DIR || "public");
// URL prefix under which media files are served; trailing slashes are stripped
// so path segments can be appended safely.
const MEDIA_URL_ROOT = String(process.env.MEDIA_ROOT || "/media").replace(/\/+$/, "");
/* --------------------------------- helpers -------------------------------- */
// Read an environment variable, substituting `fallback` when it is unset,
// and strip surrounding whitespace from the result.
function getEnv(name, fallback = "") {
  const raw = process.env[name];
  const value = raw == null ? fallback : raw;
  return value.trim();
}
// Run a command synchronously, inheriting stdio so its output streams through.
// Throws on spawn failure or on a non-zero exit status.
function run(cmd, args, opts = {}) {
  const result = spawnSync(cmd, args, { stdio: "inherit", ...opts });
  if (result.error) {
    throw result.error;
  }
  if (result.status !== 0) {
    throw new Error(`Command failed: ${cmd} ${args.join(" ")}`);
  }
}
// Run a command synchronously with captured stdio; return its stdout on
// success. On failure, throw with stdout+stderr appended for diagnostics.
function runQuiet(cmd, args, opts = {}) {
  const result = spawnSync(cmd, args, { encoding: "utf8", stdio: "pipe", ...opts });
  if (result.error) {
    throw result.error;
  }
  if (result.status !== 0) {
    const combined = (result.stdout || "") + (result.stderr || "");
    throw new Error(`Command failed: ${cmd} ${args.join(" ")}\n${combined}`);
  }
  return result.stdout || "";
}
// Return true when the path is accessible, false otherwise (never throws).
async function exists(p) {
  return fs.access(p).then(
    () => true,
    () => false,
  );
}
// Derive { owner, repo } from the `origin` remote URL of the current git repo.
// The [:/] separator in the regex matches both SSH (git@host:owner/repo.git)
// and HTTP(S) (https://host/owner/repo.git) remote forms; a trailing ".git"
// is stripped. Returns null when there is no origin or the URL is unparsable.
function inferOwnerRepoFromGit() {
const r = spawnSync("git", ["remote", "get-url", "origin"], { encoding: "utf-8" });
if (r.status !== 0) return null;
const u = (r.stdout || "").trim();
const m = u.match(/[:/](?<owner>[^/]+)\/(?<repo>[^/]+?)(?:\.git)?$/);
if (!m?.groups) return null;
return { owner: m.groups.owner, repo: m.groups.repo };
}
// True when the git index has staged changes: `git diff --cached --quiet`
// exits 1 if there are staged differences, 0 if the index is clean.
function gitHasStagedChanges() {
const r = spawnSync("git", ["diff", "--cached", "--quiet"]);
return r.status === 1;
}
// Escape every regex metacharacter so `s` can be embedded literally in a
// RegExp pattern built at runtime.
function escapeRegExp(s) {
  return String(s).replace(/[.*+?^${}()|[\]\\]/g, (ch) => `\\${ch}`);
}
// Extract the value of a "Key: value" line from the ticket body.
// Case-insensitive, multiline; `key` is regex-escaped so literal keys like
// "Chemin" or "Ancre" match safely. Returns "" when the key is absent.
function pickLine(body, key) {
const re = new RegExp(`^\\s*${escapeRegExp(key)}\\s*:\\s*([^\\n\\r]+)`, "mi");
const m = String(body || "").match(re);
return m ? m[1].trim() : "";
}
// Extract the text following the earliest matching marker (case-insensitive),
// truncated at the first known stop token ("## " heading, "---" rule, or one
// of the French section labels). Returns "" when no marker is present.
function pickSection(body, markers) {
  const text = String(body || "").replace(/\r\n/g, "\n");
  const lower = text.toLowerCase();
  // Locate the marker that occurs first in the text.
  let best = null;
  for (const marker of markers) {
    const at = lower.indexOf(marker.toLowerCase());
    if (at < 0) continue;
    if (best === null || at < best.at) best = { marker, at };
  }
  if (!best) return "";
  const tail = text.slice(best.at + best.marker.length);
  const tailLower = tail.toLowerCase();
  const stops = ["\n## ", "\n---", "\nJustification", "\nProposition", "\nSources"];
  let end = tail.length;
  for (const stop of stops) {
    const at = tailLower.indexOf(stop.toLowerCase());
    if (at >= 0 && at < end) end = at;
  }
  return tail.slice(0, end).trim();
}
// Normalize a page path: trim, guarantee exactly one leading and one trailing
// slash, and collapse duplicate slashes. An empty input stays "".
function normalizeChemin(chemin) {
  const trimmed = String(chemin || "").trim();
  if (trimmed === "") return "";
  let out = trimmed;
  if (!out.startsWith("/")) out = `/${out}`;
  if (!out.endsWith("/")) out = `${out}/`;
  return out.replace(/\/{2,}/g, "/");
}
// Turn a normalized path into the slash-less page key used throughout the
// annotation store.
function normalizePageKeyFromChemin(chemin) {
// ex: /archicrat-ia/chapitre-4/ => archicrat-ia/chapitre-4
return normalizeChemin(chemin).replace(/^\/+|\/+$/g, "");
}
// Normalize an anchor id: trim whitespace and drop a single leading "#".
function normalizeAnchorId(s) {
  const trimmed = String(s || "").trim();
  return trimmed.startsWith("#") ? trimmed.slice(1) : trimmed;
}
// Assertion helper: throw an Error carrying `__exitCode` when `cond` is falsy.
// NOTE(review): __exitCode is presumably read by a top-level catch in main()
// to pick the process exit status (0/1/2 per usage()) — confirm against the
// end of this file, which is not visible here.
function assert(cond, msg, code = 1) {
if (!cond) {
const e = new Error(msg);
e.__exitCode = code;
throw e;
}
}
// True for non-null object values that are not arrays (functions excluded,
// since typeof fn === "function").
function isPlainObject(x) {
  if (x == null) return false;
  if (Array.isArray(x)) return false;
  return typeof x === "object";
}
// Parse the numeric paragraph index out of an id shaped like "p-<n>-<hash>".
// Returns NaN when the id does not match that shape.
function paraIndexFromId(id) {
  const match = /^p-(\d+)-/i.exec(String(id));
  if (match === null) return Number.NaN;
  return Number(match[1]);
}
// True when `u` parses as an absolute URL with an http: or https: scheme.
function isHttpUrl(u) {
  let parsed;
  try {
    parsed = new URL(String(u));
  } catch {
    return false;
  }
  return parsed.protocol === "http:" || parsed.protocol === "https:";
}
// Sort an array of entries in place by their `ts` timestamp (ascending);
// unparsable/missing timestamps sort as epoch 0. Ties break on the JSON
// serialization so the ordering is deterministic. Non-arrays are ignored.
function stableSortByTs(arr) {
  if (!Array.isArray(arr)) return;
  const epoch = (item) => {
    const parsed = Date.parse(item?.ts || "");
    return Number.isNaN(parsed) ? 0 : parsed;
  };
  arr.sort((a, b) => {
    const diff = epoch(a) - epoch(b);
    if (diff !== 0) return diff;
    return JSON.stringify(a).localeCompare(JSON.stringify(b));
  });
}
// Reduce a URL or path to a bare page key: strip scheme+host (if a full URL),
// query string, hash, a trailing "index.html", and leading/trailing slashes.
function normPage(s) {
  let key = String(s || "").trim();
  if (!key) return "";
  key = key
    .replace(/^https?:\/\/[^/]+/i, "") // drop origin when given a full URL
    .split("#")[0]
    .split("?")[0]
    .replace(/index\.html$/i, "")
    .replace(/^\/+/, "")
    .replace(/\/+$/, "");
  return key;
}
/* ------------------------------ para-index (verify + order) ------------------------------ */
// Load the ordered list of paragraph ids for `pageKey` from dist/para-index.json.
// Tolerates three index shapes (A: items[], B: byId{}, C: pages{}) because the
// index format has varied across builds. Returns null when the file is absent,
// unparsable, or contains no ids for the page.
async function loadParaOrderFromDist(pageKey) {
const distIdx = path.join(CWD, "dist", "para-index.json");
if (!(await exists(distIdx))) return null;
let j;
try {
j = JSON.parse(await fs.readFile(distIdx, "utf8"));
} catch {
// Unreadable/corrupt index: treat as "no index" rather than failing.
return null;
}
const want = normPage(pageKey);
// Support A) { items:[{id,page,...}, ...] } (or variants nested under .index)
const items = Array.isArray(j?.items)
? j.items
: Array.isArray(j?.index?.items)
? j.index.items
: null;
if (items) {
const ids = [];
for (const it of items) {
// The page may live under many different keys depending on the generator.
const pageCand = normPage(
it?.page ??
it?.pageKey ??
it?.path ??
it?.route ??
it?.href ??
it?.url ??
""
);
// The id may also live under many different keys.
let id = String(it?.id ?? it?.paraId ?? it?.anchorId ?? it?.anchor ?? "");
if (id.startsWith("#")) id = id.slice(1);
if (pageCand === want && id) ids.push(id);
}
if (ids.length) return ids;
}
// Support B) { byId: { "p-...": { page:"...", ... }, ... } }
if (j?.byId && typeof j.byId === "object") {
const ids = Object.keys(j.byId)
.filter((id) => {
const meta = j.byId[id] || {};
const pageCand = normPage(meta.page ?? meta.pageKey ?? meta.path ?? meta.route ?? meta.url ?? "");
return pageCand === want;
});
if (ids.length) {
// byId carries no explicit ordering: sort by the numeric index embedded
// in the id, falling back to lexicographic comparison.
ids.sort((a, b) => {
const ia = paraIndexFromId(a);
const ib = paraIndexFromId(b);
if (Number.isFinite(ia) && Number.isFinite(ib) && ia !== ib) return ia - ib;
return String(a).localeCompare(String(b));
});
return ids;
}
}
// Support C) { pages: { "archicrat-ia/chapitre-4": { ids:[...] } } } (or variants)
if (j?.pages && typeof j.pages === "object") {
// Try to find the right key even when it is written as /.../ or .../index.html.
const keys = Object.keys(j.pages);
const hit = keys.find((k) => normPage(k) === want);
if (hit) {
const pg = j.pages[hit];
if (Array.isArray(pg?.ids)) return pg.ids.map(String);
if (Array.isArray(pg?.paras)) return pg.paras.map(String);
}
}
return null;
}
// Best-effort check that `anchorId` exists on `pageKey`.
// Returns true/false when a verification source is available, or null when
// neither dist/para-index.json nor tests/anchors-baseline.json can answer
// (callers decide what "cannot verify" means, e.g. under --strict).
async function tryVerifyAnchor(pageKey, anchorId) {
// 1) dist/para-index.json: full paragraph order when available.
const order = await loadParaOrderFromDist(pageKey);
if (order) return order.includes(anchorId);
// 1bis) dist/para-index.json: best-effort fallback — raw substring search
// (paragraph ids are near-unique, so a quoted hit is a strong signal).
const distIdx = path.join(CWD, "dist", "para-index.json");
if (await exists(distIdx)) {
try {
const raw = await fs.readFile(distIdx, "utf8");
if (raw.includes(`"${anchorId}"`) || raw.includes(`"#${anchorId}"`)) {
return true;
}
} catch {
// ignore
}
}
// 2) tests/anchors-baseline.json (fallback)
const base = path.join(CWD, "tests", "anchors-baseline.json");
if (await exists(base)) {
try {
const j = JSON.parse(await fs.readFile(base, "utf8"));
const candidates = [];
// Baseline shape A: { pages: { "<page>": [ids...] } }
if (j?.pages && typeof j.pages === "object") {
for (const [k, v] of Object.entries(j.pages)) {
if (!Array.isArray(v)) continue;
if (normPage(k).includes(normPage(pageKey))) candidates.push(...v);
}
}
// Baseline shape B: { entries: [{ page, ids }, ...] }
if (Array.isArray(j?.entries)) {
for (const it of j.entries) {
const p = String(it?.page || "");
const ids = it?.ids;
if (Array.isArray(ids) && normPage(p).includes(normPage(pageKey))) candidates.push(...ids);
}
}
if (candidates.length) return candidates.some((x) => String(x) === anchorId);
} catch {
// ignore
}
}
return null; // cannot verify
}
/* ----------------------------- deep merge helpers (non destructive) ----------------------------- */
// Dedupe key for a media entry: its source path (empty string when absent).
function keyMedia(x) {
  const src = x?.src || "";
  return String(src);
}
// Dedupe key for a reference entry: url/label/kind/citation joined by "||".
function keyRef(x) {
  const parts = [x?.url, x?.label, x?.kind, x?.citation];
  return parts.map((p) => p || "").join("||");
}
// Dedupe key for an editorial comment: its trimmed text.
function keyComment(x) {
  const text = x && x.text ? x.text : "";
  return String(text).trim();
}
// Union of two arrays keyed by `keyFn`: keep all of `dstArr` (unchanged
// order), then append items from `srcArr` whose key is non-empty and not
// already present. Neither input array is mutated.
function uniqUnion(dstArr, srcArr, keyFn) {
  const merged = Array.isArray(dstArr) ? dstArr.slice() : [];
  const seen = new Set();
  for (const existing of merged) seen.add(keyFn(existing));
  const incoming = Array.isArray(srcArr) ? srcArr : [];
  for (const item of incoming) {
    const key = keyFn(item);
    if (!key || seen.has(key)) continue; // skip keyless and duplicate items
    seen.add(key);
    merged.push(item);
  }
  return merged;
}
// Non-destructive deep merge of one annotation entry (`src`) into another
// (`dst`), mutating `dst` in place. Policy:
//  - media/refs/comments_editorial arrays: union with domain-specific keys
//  - nested objects: recursive merge
//  - other arrays: union by JSON serialization
//  - scalars: only fill in when the destination is missing/null/empty
// No-op when either argument is not a plain object.
function deepMergeEntry(dst, src) {
if (!isPlainObject(dst) || !isPlainObject(src)) return;
for (const [k, v] of Object.entries(src)) {
if (k === "media" && Array.isArray(v)) {
dst.media = uniqUnion(dst.media, v, keyMedia);
continue;
}
if (k === "refs" && Array.isArray(v)) {
dst.refs = uniqUnion(dst.refs, v, keyRef);
continue;
}
if (k === "comments_editorial" && Array.isArray(v)) {
dst.comments_editorial = uniqUnion(dst.comments_editorial, v, keyComment);
continue;
}
if (isPlainObject(v)) {
// Recurse into nested objects, creating the destination slot if needed.
if (!isPlainObject(dst[k])) dst[k] = {};
deepMergeEntry(dst[k], v);
continue;
}
if (Array.isArray(v)) {
// Generic arrays: append items whose JSON form is not already present.
const cur = Array.isArray(dst[k]) ? dst[k] : [];
const seen = new Set(cur.map((x) => JSON.stringify(x)));
const out = [...cur];
for (const it of v) {
const s = JSON.stringify(it);
if (!seen.has(s)) {
seen.add(s);
out.push(it);
}
}
dst[k] = out;
continue;
}
// scalar: set only if missing/empty
if (!(k in dst) || dst[k] == null || dst[k] === "") {
dst[k] = v;
}
}
}
/* ----------------------------- annotations I/O ----------------------------- */
// Load (or initialize) an annotation shard YAML document.
// Returns a fresh { schema:1, page, paras:{} } skeleton when the file does not
// exist; otherwise parses it and validates schema===1, a "paras" object, and —
// when a page is recorded — that it matches `pageKey` (exit code 2 on refusal).
async function loadAnnoDocYaml(fileAbs, pageKey) {
if (!(await exists(fileAbs))) {
return { schema: 1, page: pageKey, paras: {} };
}
const raw = await fs.readFile(fileAbs, "utf8");
let doc;
try {
doc = YAML.parse(raw);
} catch (e) {
throw new Error(`${path.relative(CWD, fileAbs)}: parse failed: ${String(e?.message ?? e)}`);
}
assert(isPlainObject(doc), `${path.relative(CWD, fileAbs)}: doc must be an object`, 2);
assert(doc.schema === 1, `${path.relative(CWD, fileAbs)}: schema must be 1`, 2);
assert(isPlainObject(doc.paras), `${path.relative(CWD, fileAbs)}: missing object key "paras"`, 2);
if (doc.page != null) {
// Compare page keys with edge slashes stripped, same normalization as
// normalizePageKeyFromChemin.
const got = String(doc.page).replace(/^\/+/, "").replace(/\/+$/, "");
assert(got === pageKey, `${path.relative(CWD, fileAbs)}: page mismatch (page="${doc.page}" vs path="${pageKey}")`, 2);
} else {
doc.page = pageKey;
}
return doc;
}
// Return a copy of `paras` whose keys are reordered: ids listed in `order`
// first (in that order), then remaining ids by their numeric paragraph index,
// then lexicographically. The input object is not mutated.
function sortParasObject(paras, order) {
  const rank = new Map();
  if (Array.isArray(order)) {
    order.forEach((id, i) => rank.set(String(id), i));
  }
  const keys = Object.keys(paras || {}).sort((a, b) => {
    const ra = rank.has(a);
    const rb = rank.has(b);
    if (ra && rb) return rank.get(a) - rank.get(b);
    if (ra) return -1;
    if (rb) return 1;
    const na = paraIndexFromId(a);
    const nb = paraIndexFromId(b);
    if (Number.isFinite(na) && Number.isFinite(nb) && na !== nb) return na - nb;
    return String(a).localeCompare(String(b));
  });
  const sorted = {};
  for (const key of keys) sorted[key] = paras[key];
  return sorted;
}
// Persist an annotation shard document as YAML, normalizing first for stable
// diffs: paragraph keys are reordered (via sortParasObject, optionally using
// the page's paragraph `order`) and each entry's media/refs/comments arrays
// are sorted by timestamp in place. Creates parent directories as needed.
async function saveAnnoDocYaml(fileAbs, doc, order = null) {
await fs.mkdir(path.dirname(fileAbs), { recursive: true });
doc.paras = sortParasObject(doc.paras, order);
for (const e of Object.values(doc.paras || {})) {
if (!isPlainObject(e)) continue;
stableSortByTs(e.media);
stableSortByTs(e.refs);
stableSortByTs(e.comments_editorial);
}
const out = YAML.stringify(doc);
await fs.writeFile(fileAbs, out, "utf8");
}
/* ------------------------------ gitea helpers ------------------------------ */
// Strip all trailing slashes from the API base URL so path segments can be
// appended with a single "/".
function apiBaseNorm(forgeApiBase) {
  let base = forgeApiBase;
  while (base.endsWith("/")) base = base.slice(0, -1);
  return base;
}
// Authenticated GET against the Gitea API; returns the parsed JSON body.
// Throws with status + response text on any non-2xx response.
async function giteaGET(url, token) {
const res = await fetch(url, {
headers: {
Authorization: `token ${token}`,
Accept: "application/json",
"User-Agent": "archicratie-apply-annotation/1.0",
},
});
if (!res.ok) {
// Best-effort body capture for the error message; ignore read failures.
const t = await res.text().catch(() => "");
throw new Error(`HTTP ${res.status} GET ${url}\n${t}`);
}
return await res.json();
}
// Fetch a single issue: GET /api/v1/repos/{owner}/{repo}/issues/{index}.
async function fetchIssue({ forgeApiBase, owner, repo, token, issueNum }) {
const url = `${apiBaseNorm(forgeApiBase)}/api/v1/repos/${owner}/${repo}/issues/${issueNum}`;
return await giteaGET(url, token);
}
// List an issue's attachments. Returns [] on any error or unexpected payload
// (older Gitea versions may not expose this endpoint).
async function fetchIssueAssets({ forgeApiBase, owner, repo, token, issueNum }) {
// Gitea: /issues/{index}/assets
const url = `${apiBaseNorm(forgeApiBase)}/api/v1/repos/${owner}/${repo}/issues/${issueNum}/assets`;
try {
const json = await giteaGET(url, token);
return Array.isArray(json) ? json : [];
} catch {
return [];
}
}
// Post a comment on an issue. Throws with status + response text on failure.
async function postIssueComment({ forgeApiBase, owner, repo, token, issueNum, comment }) {
const url = `${apiBaseNorm(forgeApiBase)}/api/v1/repos/${owner}/${repo}/issues/${issueNum}/comments`;
const res = await fetch(url, {
method: "POST",
headers: {
Authorization: `token ${token}`,
Accept: "application/json",
"Content-Type": "application/json",
"User-Agent": "archicratie-apply-annotation/1.0",
},
body: JSON.stringify({ body: comment }),
});
if (!res.ok) {
const t = await res.text().catch(() => "");
throw new Error(`HTTP ${res.status} POST comment ${url}\n${t}`);
}
}
// Close an issue, optionally posting a final comment first (the comment is
// sent before the PATCH, so a close failure still leaves the trace comment).
async function closeIssue({ forgeApiBase, owner, repo, token, issueNum, comment }) {
if (comment) await postIssueComment({ forgeApiBase, owner, repo, token, issueNum, comment });
const url = `${apiBaseNorm(forgeApiBase)}/api/v1/repos/${owner}/${repo}/issues/${issueNum}`;
const res = await fetch(url, {
method: "PATCH",
headers: {
Authorization: `token ${token}`,
Accept: "application/json",
"Content-Type": "application/json",
"User-Agent": "archicratie-apply-annotation/1.0",
},
body: JSON.stringify({ state: "closed" }),
});
if (!res.ok) {
const t = await res.text().catch(() => "");
throw new Error(`HTTP ${res.status} closing issue: ${url}\n${t}`);
}
}
/* ------------------------------ media helpers ------------------------------ */
// Classify a filename by extension into "image", "video", "audio", or the
// catch-all "link" (case-insensitive).
function inferMediaTypeFromFilename(name) {
  const lowered = String(name || "").toLowerCase();
  const rules = [
    ["image", /\.(png|jpe?g|webp|gif|svg)$/],
    ["video", /\.(mp4|webm|mov|m4v)$/],
    ["audio", /\.(mp3|wav|ogg|m4a)$/],
  ];
  for (const [type, re] of rules) {
    if (re.test(lowered)) return type;
  }
  return "link";
}
/**
 * Make a filename safe for disk + URL use:
 * path separators and non [A-Za-z0-9_.-] runs become "_", runs of "_"
 * collapse to one, and the result is capped at 180 characters.
 * Falls back to "file" for empty/nullish input.
 */
function sanitizeFilename(name) {
  const base = String(name || "file");
  const cleaned = base
    .replace(/[/\\]/g, "_")
    .replace(/[^\w.\-]+/g, "_")
    .replace(/_{2,}/g, "_");
  return cleaned.slice(0, 180);
}
/**
 * Download `url` (Gitea token auth, redirects followed) into the absolute
 * path `destAbs`, creating parent directories as needed.
 * @returns {Promise<number>} number of bytes written.
 * @throws {Error} "download failed HTTP <status>: <url>\n<body>" on a non-OK response.
 */
async function downloadToFile(url, token, destAbs) {
  const response = await fetch(url, {
    headers: {
      Authorization: `token ${token}`,
      "User-Agent": "archicratie-apply-annotation/1.0",
    },
    redirect: "follow",
  });
  if (!response.ok) {
    const detail = await response.text().catch(() => "");
    throw new Error(`download failed HTTP ${response.status}: ${url}\n${detail}`);
  }
  // Buffer the whole payload in memory (assets are expected to be small media files).
  const data = Buffer.from(await response.arrayBuffer());
  await fs.mkdir(path.dirname(destAbs), { recursive: true });
  await fs.writeFile(destAbs, data);
  return data.length;
}
/* ------------------------------ type parsers ------------------------------ */
/**
 * Parse the "Référence:" section of a ticket body into its fields.
 * Expects bullet lines of the form "- Key: value" (also "* Key: value").
 * Missing keys yield "". `rawBlock` keeps the raw section for diagnostics.
 */
function parseReferenceBlock(body) {
  const block =
    pickSection(body, ["Référence (à compléter):", "Reference (à compléter):"]) ||
    pickSection(body, ["Référence:", "Reference:"]) ||
    "";
  const lines = String(block).split(/\r?\n/).map((line) => line.trim());
  // First bullet line matching "- <key>: <value>" wins (case-insensitive).
  const field = (key) => {
    const re = new RegExp(`^[-*]\\s*${escapeRegExp(key)}\\s*:\\s*(.*)$`, "i");
    for (const line of lines) {
      const m = line.match(re);
      if (m) return m[1].trim();
    }
    return "";
  };
  return {
    url: field("URL") || "",
    label: field("Label") || "",
    kind: field("Kind") || "",
    citation: field("Citation") || field("Passage") || field("Extrait") || "",
    rawBlock: block,
  };
}
/* ----------------------------------- main ---------------------------------- */
/**
 * Entry point: apply an annotation ticket (Gitea issue) to the local repo.
 *
 * Flow:
 *   1. Read credentials/target from env (FORGE_TOKEN, FORGE_API/FORGE_BASE,
 *      GITEA_OWNER/GITEA_REPO with git-remote inference as fallback).
 *   2. Fetch the issue, refuse Pull Requests, parse Type/Chemin/Ancre.
 *   3. Optionally verify the anchor against the built site (DO_VERIFY/STRICT).
 *   4. Load the per-paragraph YAML shard (merging the legacy monolith
 *      read-only), then apply the ticket by type: comment / reference / media.
 *   5. Write the shard, optionally git-commit (DO_COMMIT) and close the
 *      ticket (DO_CLOSE). DRY_RUN prints the would-be result instead.
 *
 * Exits with code 2 (via assert / __exitCode) on malformed tickets or env.
 */
async function main() {
// --- env & target repo ---
const token = getEnv("FORGE_TOKEN");
assert(token, "❌ FORGE_TOKEN manquant.", 2);
const forgeApiBase = getEnv("FORGE_API") || getEnv("FORGE_BASE");
assert(forgeApiBase, "❌ FORGE_API (ou FORGE_BASE) manquant.", 2);
// Owner/repo: explicit env wins, else inferred from the local git remote.
const inferred = inferOwnerRepoFromGit() || {};
const owner = getEnv("GITEA_OWNER", inferred.owner || "");
const repo = getEnv("GITEA_REPO", inferred.repo || "");
assert(owner && repo, "❌ Impossible de déterminer owner/repo. Fix: export GITEA_OWNER=... GITEA_REPO=...", 2);
console.log(`🔎 Fetch ticket #${issueNum} from ${owner}/${repo}`);
const issue = await fetchIssue({ forgeApiBase, owner, repo, token, issueNum });
// PRs share the issue number namespace in Gitea; they are not annotation tickets.
if (issue?.pull_request) {
console.error(`❌ #${issueNum} est une Pull Request, pas un ticket annotations.`);
process.exit(2);
}
// --- parse ticket fields (normalized CRLF -> LF first) ---
const body = String(issue.body || "").replace(/\r\n/g, "\n");
const title = String(issue.title || "");
const type = pickLine(body, "Type").toLowerCase();
const chemin = normalizeChemin(pickLine(body, "Chemin"));
const ancre = normalizeAnchorId(pickLine(body, "Ancre"));
assert(chemin, "Ticket: Chemin manquant.", 2);
// Anchors look like "p-<n>-<suffix>".
assert(ancre && /^p-\d+-/i.test(ancre), `Ticket: Ancre invalide ("${ancre}")`, 2);
assert(type, "Ticket: Type manquant.", 2);
const pageKey = normalizePageKeyFromChemin(chemin);
assert(pageKey, "Ticket: impossible de dériver pageKey.", 2);
// Paragraph order from dist is only loaded to keep shard output stable on save.
const paraOrder = DO_VERIFY ? await loadParaOrderFromDist(pageKey) : null;
// --- optional anchor verification against the built site ---
if (DO_VERIFY) {
// tryVerifyAnchor: true = found, false = missing, null = cannot verify.
const ok = await tryVerifyAnchor(pageKey, ancre);
if (ok === false) {
throw Object.assign(new Error(`Ticket verify: ancre introuvable pour page "${pageKey}" => ${ancre}`), { __exitCode: 2 });
}
if (ok === null) {
// Unverifiable: hard failure only in strict mode, else warn and continue.
if (STRICT) {
throw Object.assign(
new Error(`Ticket verify (strict): impossible de vérifier (pas de dist/para-index.json ou baseline)`),
{ __exitCode: 2 }
);
}
console.warn("⚠️ verify: impossible de vérifier (pas de dist/para-index.json ou baseline) — on continue.");
}
}
// ✅ shard path: src/annotations/<pageKey>/<paraId>.yml
const shardAbs = path.join(ANNO_DIR, ...pageKey.split("/"), `${ancre}.yml`);
const shardRel = path.relative(CWD, shardAbs).replace(/\\/g, "/");
// legacy monolith: src/annotations/<pageKey>.yml (read-only, for migration)
const legacyAbs = path.join(ANNO_DIR, `${pageKey}.yml`);
console.log("✅ Parsed:", { type, chemin, ancre: `#${ancre}`, pageKey, annoFile: shardRel });
// load shard doc (created empty if absent); entry is the paragraph's record
const doc = await loadAnnoDocYaml(shardAbs, pageKey);
if (!isPlainObject(doc.paras[ancre])) doc.paras[ancre] = {};
const entry = doc.paras[ancre];
// merge legacy entry into shard in-memory (non destructive) to keep compat + enable progressive migration
if (await exists(legacyAbs)) {
try {
const legacy = await loadAnnoDocYaml(legacyAbs, pageKey);
const legacyEntry = legacy?.paras?.[ancre];
if (isPlainObject(legacyEntry)) {
deepMergeEntry(entry, legacyEntry);
}
} catch {
// ignore legacy parse issues; shard still applies new data
}
}
// --- apply the ticket by type ---
const touchedFiles = []; // files to git-add (shard + downloaded media)
const notes = []; // human-readable log of what happened
let changed = false;
const nowIso = new Date().toISOString();
if (type === "type/comment") {
// Editorial comment: appended to entry.comments_editorial, deduped by text.
const comment = pickSection(body, ["Commentaire:", "Comment:", "Commentaires:"]) || "";
const text = comment.trim();
assert(text.length >= 3, "Ticket comment: bloc 'Commentaire:' introuvable ou trop court.", 2);
if (!Array.isArray(entry.comments_editorial)) entry.comments_editorial = [];
const item = { text, status: "new", ts: nowIso, fromIssue: issueNum };
const before = entry.comments_editorial.length;
entry.comments_editorial = uniqUnion(entry.comments_editorial, [item], keyComment);
// Length change tells us whether the dedup union actually added the item.
if (entry.comments_editorial.length !== before) {
changed = true;
notes.push(`+ comment added (len=${text.length})`);
} else {
notes.push(`~ comment already present (dedup)`);
}
stableSortByTs(entry.comments_editorial);
}
else if (type === "type/reference") {
// External reference: needs at least a URL or a label.
const ref = parseReferenceBlock(body);
assert(ref.url || ref.label, "Ticket reference: renseigne au moins - URL: ou - Label: dans le ticket.", 2);
// Strict mode requires well-formed http(s) URLs.
if (STRICT && ref.url && !isHttpUrl(ref.url)) {
throw Object.assign(new Error(`Ticket reference (strict): URL invalide (http/https requis): "${ref.url}"`), { __exitCode: 2 });
}
if (!Array.isArray(entry.refs)) entry.refs = [];
const item = {
url: ref.url || "",
label: ref.label || (ref.url ? ref.url : "Référence"),
kind: ref.kind || "",
ts: nowIso,
fromIssue: issueNum,
};
if (ref.citation) item.citation = ref.citation;
const before = entry.refs.length;
entry.refs = uniqUnion(entry.refs, [item], keyRef);
if (entry.refs.length !== before) {
changed = true;
notes.push(`+ reference added (${item.url ? "url" : "label"})`);
} else {
notes.push(`~ reference already present (dedup)`);
}
stableSortByTs(entry.refs);
}
else if (type === "type/media") {
// Media: download the issue's attachments under public media root and
// record one entry.media item per asset (caption = issue title).
if (!Array.isArray(entry.media)) entry.media = [];
const caption = (title || "").trim();
if (STRICT && !caption) {
throw Object.assign(new Error("Ticket media (strict): caption vide (titre de ticket requis)."), { __exitCode: 2 });
}
// "." is the placeholder caption outside strict mode.
const captionFinal = caption || ".";
const atts = NO_DOWNLOAD ? [] : await fetchIssueAssets({ forgeApiBase, owner, repo, token, issueNum });
if (!atts.length) notes.push("! no assets found (nothing to download).");
for (const a of atts) {
const name = sanitizeFilename(a?.name || `asset-${a?.id || "x"}`);
const dl = a?.browser_download_url || a?.download_url || "";
if (!dl) { notes.push(`! asset missing download url: ${name}`); continue; }
// Destination on disk mirrors the public URL: media/<pageKey>/<ancre>/<name>
const mediaDirAbs = path.join(PUBLIC_DIR, "media", ...pageKey.split("/"), ancre);
const destAbs = path.join(mediaDirAbs, name);
const urlPath = `${MEDIA_URL_ROOT}/${pageKey}/${ancre}/${name}`.replace(/\/{2,}/g, "/");
if (await exists(destAbs)) {
// Existing file is kept as-is (idempotent re-runs).
notes.push(`~ media already exists: ${urlPath}`);
} else if (!DRY_RUN) {
const bytes = await downloadToFile(dl, token, destAbs);
notes.push(`+ downloaded ${name} (${bytes} bytes) -> ${urlPath}`);
touchedFiles.push(path.relative(CWD, destAbs).replace(/\\/g, "/"));
changed = true;
} else {
notes.push(`(dry) would download ${name} -> ${urlPath}`);
changed = true;
}
// Record the media entry regardless of download outcome (deduped by src).
const item = {
type: inferMediaTypeFromFilename(name),
src: urlPath,
caption: captionFinal,
credit: "",
ts: nowIso,
fromIssue: issueNum,
};
const before = entry.media.length;
entry.media = uniqUnion(entry.media, [item], keyMedia);
if (entry.media.length !== before) changed = true;
}
stableSortByTs(entry.media);
}
else {
throw Object.assign(new Error(`Type non supporté: "${type}"`), { __exitCode: 2 });
}
// --- persist / report ---
if (!changed) {
console.log(" No changes to apply.");
for (const n of notes) console.log(" ", n);
return;
}
if (DRY_RUN) {
// Dry-run: show the resulting YAML entry without touching the disk.
console.log("\n--- DRY RUN (no write) ---");
console.log(`Would update: ${shardRel}`);
for (const n of notes) console.log(" ", n);
console.log("\nExcerpt (resulting entry):");
console.log(YAML.stringify({ [ancre]: doc.paras[ancre] }).trimEnd());
console.log("\n✅ Dry-run terminé.");
return;
}
await saveAnnoDocYaml(shardAbs, doc, paraOrder);
// Shard goes first in the staged-file list, before any media files.
touchedFiles.unshift(shardRel);
console.log(`✅ Updated: ${shardRel}`);
for (const n of notes) console.log(" ", n);
if (DO_COMMIT) {
run("git", ["add", ...touchedFiles], { cwd: CWD });
if (!gitHasStagedChanges()) {
console.log(" Nothing to commit (aucun changement staged).");
return;
}
const msg = `anno: apply ticket #${issueNum} (${pageKey}#${ancre} ${type})`;
run("git", ["commit", "-m", msg], { cwd: CWD });
const sha = runQuiet("git", ["rev-parse", "--short", "HEAD"], { cwd: CWD }).trim();
console.log(`✅ Committed: ${msg} (${sha})`);
// Close the ticket only after a successful commit, citing the commit sha.
if (DO_CLOSE) {
const comment = `✅ Appliqué par apply-annotation-ticket.\nCommit: ${sha}`;
await closeIssue({ forgeApiBase, owner, repo, token, issueNum, comment });
console.log(`✅ Ticket #${issueNum} fermé.`);
}
} else {
// No auto-commit: print the manual git steps for the operator.
console.log("\nNext (manuel) :");
console.log(` git diff -- ${touchedFiles[0]}`);
console.log(` git add ${touchedFiles.join(" ")}`);
console.log(` git commit -m "anno: apply ticket #${issueNum} (${pageKey}#${ancre} ${type})"`);
}
}
// Top-level runner: convert any error thrown by main() into a process exit.
// Errors may carry a custom `__exitCode` (set on ticket/env validation
// failures); anything else exits with 1.
main().catch((err) => {
  console.error("💥", err?.message || err);
  process.exit(err?.__exitCode || 1);
});

View File

@@ -9,8 +9,9 @@ import { spawnSync } from "node:child_process";
* *
* Conçu pour: * Conçu pour:
* - prendre un ticket [Correction]/[Fact-check] (issue) avec Chemin + Ancre + Proposition * - prendre un ticket [Correction]/[Fact-check] (issue) avec Chemin + Ancre + Proposition
* - retrouver le bon paragraphe dans le .mdx * - retrouver le bon paragraphe dans le .mdx/.md
* - remplacer proprement * - remplacer proprement
* - ne JAMAIS toucher au frontmatter
* - optionnel: écrire un alias d'ancre old->new (build-time) dans src/anchors/anchor-aliases.json * - optionnel: écrire un alias d'ancre old->new (build-time) dans src/anchors/anchor-aliases.json
* - optionnel: committer automatiquement * - optionnel: committer automatiquement
* - optionnel: fermer le ticket (après commit) * - optionnel: fermer le ticket (après commit)
@@ -39,7 +40,7 @@ Env (recommandé):
Notes: Notes:
- Si dist/<chemin>/index.html est absent, le script lance "npm run build" sauf si --no-build. - Si dist/<chemin>/index.html est absent, le script lance "npm run build" sauf si --no-build.
- Sauvegarde automatique: <fichier>.bak.issue-<N> (uniquement si on écrit) - Sauvegarde automatique: .tmp/apply-ticket/<fichier>.bak.issue-<N> (uniquement si on écrit)
- Avec --alias : le script rebuild pour identifier le NOUVEL id, puis écrit l'alias old->new. - Avec --alias : le script rebuild pour identifier le NOUVEL id, puis écrit l'alias old->new.
- Refuse automatiquement les Pull Requests (PR) : ce ne sont pas des tickets éditoriaux. - Refuse automatiquement les Pull Requests (PR) : ce ne sont pas des tickets éditoriaux.
`); `);
@@ -89,6 +90,7 @@ const CWD = process.cwd();
const CONTENT_ROOT = path.join(CWD, "src", "content"); const CONTENT_ROOT = path.join(CWD, "src", "content");
const DIST_ROOT = path.join(CWD, "dist"); const DIST_ROOT = path.join(CWD, "dist");
const ALIASES_FILE = path.join(CWD, "src", "anchors", "anchor-aliases.json"); const ALIASES_FILE = path.join(CWD, "src", "anchors", "anchor-aliases.json");
const BACKUP_ROOT = path.join(CWD, ".tmp", "apply-ticket");
/* -------------------------- utils texte / matching -------------------------- */ /* -------------------------- utils texte / matching -------------------------- */
@@ -136,31 +138,26 @@ function scoreText(candidate, targetText) {
let hit = 0; let hit = 0;
for (const w of tgtSet) if (blkSet.has(w)) hit++; for (const w of tgtSet) if (blkSet.has(w)) hit++;
// Bonus si un long préfixe ressemble
const tgtNorm = normalizeText(stripMd(targetText)); const tgtNorm = normalizeText(stripMd(targetText));
const blkNorm = normalizeText(stripMd(candidate)); const blkNorm = normalizeText(stripMd(candidate));
const prefix = tgtNorm.slice(0, Math.min(180, tgtNorm.length)); const prefix = tgtNorm.slice(0, Math.min(180, tgtNorm.length));
const prefixBonus = prefix && blkNorm.includes(prefix) ? 1000 : 0; const prefixBonus = prefix && blkNorm.includes(prefix) ? 1000 : 0;
// Ratio bonus (0..100)
const ratio = hit / Math.max(1, tgtSet.size); const ratio = hit / Math.max(1, tgtSet.size);
const ratioBonus = Math.round(ratio * 100); const ratioBonus = Math.round(ratio * 100);
return prefixBonus + hit + ratioBonus; return prefixBonus + hit + ratioBonus;
} }
function bestBlockMatchIndex(blocks, targetText) { function rankedBlockMatches(blocks, targetText, limit = 5) {
let best = { i: -1, score: -1 }; return blocks
for (let i = 0; i < blocks.length; i++) { .map((b, i) => ({
const sc = scoreText(blocks[i], targetText); i,
if (sc > best.score) best = { i, score: sc }; score: scoreText(b, targetText),
} excerpt: stripMd(b).slice(0, 140),
return best; }))
} .sort((a, b) => b.score - a.score)
.slice(0, limit);
function splitParagraphBlocks(mdxText) {
const raw = String(mdxText ?? "").replace(/\r\n/g, "\n");
return raw.split(/\n{2,}/);
} }
function isLikelyExcerpt(s) { function isLikelyExcerpt(s) {
@@ -172,6 +169,89 @@ function isLikelyExcerpt(s) {
return false; return false;
} }
/* --------------------------- frontmatter / structure ------------------------ */
function normalizeNewlines(s) {
return String(s ?? "").replace(/^\uFEFF/, "").replace(/\r\n/g, "\n");
}
function splitMdxFrontmatter(src) {
const text = normalizeNewlines(src);
const m = text.match(/^---\n[\s\S]*?\n---\n?/);
if (!m) {
return {
hasFrontmatter: false,
frontmatter: "",
body: text,
};
}
const frontmatter = m[0];
const body = text.slice(frontmatter.length);
return {
hasFrontmatter: true,
frontmatter,
body,
};
}
function joinMdxFrontmatter(frontmatter, body) {
if (!frontmatter) return String(body ?? "");
return String(frontmatter) + String(body ?? "");
}
function assertFrontmatterIntegrity({ hadFrontmatter, originalFrontmatter, finalText, filePath }) {
if (!hadFrontmatter) return;
const text = normalizeNewlines(finalText);
if (!text.startsWith("---\n")) {
throw new Error(`Frontmatter perdu pendant la mise à jour de ${filePath}`);
}
if (!text.startsWith(originalFrontmatter)) {
throw new Error(`Frontmatter altéré pendant la mise à jour de ${filePath}`);
}
}
function splitParagraphBlocksPreserve(bodyText) {
const text = normalizeNewlines(bodyText);
if (!text) {
return { blocks: [], separators: [] };
}
const blocks = [];
const separators = [];
const re = /(\n{2,})/g;
let last = 0;
let m;
while ((m = re.exec(text))) {
blocks.push(text.slice(last, m.index));
separators.push(m[1]);
last = m.index + m[1].length;
}
blocks.push(text.slice(last));
return { blocks, separators };
}
function joinParagraphBlocksPreserve(blocks, separators) {
if (!Array.isArray(blocks) || blocks.length === 0) return "";
let out = "";
for (let i = 0; i < blocks.length; i++) {
out += blocks[i];
if (i < separators.length) out += separators[i];
}
return out;
}
/* ------------------------------ utils système ------------------------------ */ /* ------------------------------ utils système ------------------------------ */
function run(cmd, args, opts = {}) { function run(cmd, args, opts = {}) {
@@ -251,7 +331,9 @@ function pickSection(body, markers) {
.map((m) => ({ m, i: text.toLowerCase().indexOf(m.toLowerCase()) })) .map((m) => ({ m, i: text.toLowerCase().indexOf(m.toLowerCase()) }))
.filter((x) => x.i >= 0) .filter((x) => x.i >= 0)
.sort((a, b) => a.i - b.i)[0]; .sort((a, b) => a.i - b.i)[0];
if (!idx) return ""; if (!idx) return "";
const start = idx.i + idx.m.length; const start = idx.i + idx.m.length;
const tail = text.slice(start); const tail = text.slice(start);
@@ -266,11 +348,13 @@ function pickSection(body, markers) {
"\n## Proposition", "\n## Proposition",
"\n## Problème", "\n## Problème",
]; ];
let end = tail.length; let end = tail.length;
for (const s of stops) { for (const s of stops) {
const j = tail.toLowerCase().indexOf(s.toLowerCase()); const j = tail.toLowerCase().indexOf(s.toLowerCase());
if (j >= 0 && j < end) end = j; if (j >= 0 && j < end) end = j;
} }
return tail.slice(0, end).trim(); return tail.slice(0, end).trim();
} }
@@ -298,8 +382,6 @@ function extractAnchorIdAnywhere(text) {
function extractCheminFromAnyUrl(text) { function extractCheminFromAnyUrl(text) {
const s = String(text || ""); const s = String(text || "");
// Exemple: http://localhost:4321/archicratie/prologue/#p-3-xxxx
// ou: /archicratie/prologue/#p-3-xxxx
const m = s.match(/(\/[a-z0-9\-]+\/[a-z0-9\-\/]+\/)#p-\d+-[0-9a-f]{8}/i); const m = s.match(/(\/[a-z0-9\-]+\/[a-z0-9\-\/]+\/)#p-\d+-[0-9a-f]{8}/i);
return m ? m[1] : ""; return m ? m[1] : "";
} }
@@ -400,7 +482,7 @@ async function fetchIssue({ forgeApiBase, owner, repo, token, issueNum }) {
headers: { headers: {
Authorization: `token ${token}`, Authorization: `token ${token}`,
Accept: "application/json", Accept: "application/json",
"User-Agent": "archicratie-apply-ticket/2.0", "User-Agent": "archicratie-apply-ticket/2.1",
}, },
}); });
if (!res.ok) { if (!res.ok) {
@@ -416,7 +498,7 @@ async function closeIssue({ forgeApiBase, owner, repo, token, issueNum, comment
Authorization: `token ${token}`, Authorization: `token ${token}`,
Accept: "application/json", Accept: "application/json",
"Content-Type": "application/json", "Content-Type": "application/json",
"User-Agent": "archicratie-apply-ticket/2.0", "User-Agent": "archicratie-apply-ticket/2.1",
}; };
if (comment) { if (comment) {
@@ -425,7 +507,11 @@ async function closeIssue({ forgeApiBase, owner, repo, token, issueNum, comment
} }
const url = `${base}/api/v1/repos/${owner}/${repo}/issues/${issueNum}`; const url = `${base}/api/v1/repos/${owner}/${repo}/issues/${issueNum}`;
const res = await fetch(url, { method: "PATCH", headers, body: JSON.stringify({ state: "closed" }) }); const res = await fetch(url, {
method: "PATCH",
headers,
body: JSON.stringify({ state: "closed" }),
});
if (!res.ok) { if (!res.ok) {
const t = await res.text().catch(() => ""); const t = await res.text().catch(() => "");
@@ -529,10 +615,9 @@ async function main() {
console.log(`🔎 Fetch ticket #${issueNum} from ${owner}/${repo}`); console.log(`🔎 Fetch ticket #${issueNum} from ${owner}/${repo}`);
const issue = await fetchIssue({ forgeApiBase, owner, repo, token, issueNum }); const issue = await fetchIssue({ forgeApiBase, owner, repo, token, issueNum });
// Guard PR (Pull Request = "Demande d'ajout" = pas un ticket éditorial)
if (issue?.pull_request) { if (issue?.pull_request) {
console.error(`❌ #${issueNum} est une Pull Request (demande dajout), pas un ticket éditorial.`); console.error(`❌ #${issueNum} est une Pull Request (demande dajout), pas un ticket éditorial.`);
console.error(`➡️ Ouvre un ticket [Correction]/[Fact-check] depuis le site (Proposer), puis relance apply-ticket sur ce numéro.`); console.error("➡️ Ouvre un ticket [Correction]/[Fact-check] depuis le site (Proposer), puis relance apply-ticket sur ce numéro.");
process.exit(2); process.exit(2);
} }
@@ -553,7 +638,6 @@ async function main() {
ancre = (ancre || "").trim(); ancre = (ancre || "").trim();
if (ancre.startsWith("#")) ancre = ancre.slice(1); if (ancre.startsWith("#")) ancre = ancre.slice(1);
// fallback si ticket mal formé
if (!ancre) ancre = extractAnchorIdAnywhere(title) || extractAnchorIdAnywhere(body); if (!ancre) ancre = extractAnchorIdAnywhere(title) || extractAnchorIdAnywhere(body);
chemin = normalizeChemin(chemin); chemin = normalizeChemin(chemin);
@@ -592,7 +676,6 @@ async function main() {
const distHtmlPath = path.join(DIST_ROOT, chemin.replace(/^\/+|\/+$/g, ""), "index.html"); const distHtmlPath = path.join(DIST_ROOT, chemin.replace(/^\/+|\/+$/g, ""), "index.html");
await ensureBuildIfNeeded(distHtmlPath); await ensureBuildIfNeeded(distHtmlPath);
// Texte cible: préférence au texte complet (ticket), sinon dist si extrait probable
let targetText = texteActuel; let targetText = texteActuel;
let distText = ""; let distText = "";
@@ -609,21 +692,24 @@ async function main() {
throw new Error("Impossible de reconstruire le texte du paragraphe (ni texte actuel, ni dist html)."); throw new Error("Impossible de reconstruire le texte du paragraphe (ni texte actuel, ni dist html).");
} }
const original = await fs.readFile(contentFile, "utf-8"); const originalRaw = await fs.readFile(contentFile, "utf-8");
const blocks = splitParagraphBlocks(original); const { hasFrontmatter, frontmatter, body: originalBody } = splitMdxFrontmatter(originalRaw);
const best = bestBlockMatchIndex(blocks, targetText); const split = splitParagraphBlocksPreserve(originalBody);
const blocks = split.blocks;
const separators = split.separators;
if (!blocks.length) {
throw new Error(`Aucun bloc éditorial exploitable dans ${path.relative(CWD, contentFile)}`);
}
const ranked = rankedBlockMatches(blocks, targetText, 5);
const best = ranked[0] || { i: -1, score: -1, excerpt: "" };
const runnerUp = ranked[1] || null;
// seuil de sécurité
if (best.i < 0 || best.score < 40) { if (best.i < 0 || best.score < 40) {
console.error("❌ Match trop faible: je refuse de remplacer automatiquement."); console.error("❌ Match trop faible: je refuse de remplacer automatiquement.");
console.error(`➡️ Score=${best.score}. Recommandation: ticket avec 'Texte actuel (copie exacte du paragraphe)'.`); console.error(`➡️ Score=${best.score}. Recommandation: ticket avec 'Texte actuel (copie exacte du paragraphe)'.`);
const ranked = blocks
.map((b, i) => ({ i, score: scoreText(b, targetText), excerpt: stripMd(b).slice(0, 140) }))
.sort((a, b) => b.score - a.score)
.slice(0, 5);
console.error("Top candidates:"); console.error("Top candidates:");
for (const r of ranked) { for (const r of ranked) {
console.error(` #${r.i + 1} score=${r.score} ${r.excerpt}${r.excerpt.length >= 140 ? "…" : ""}`); console.error(` #${r.i + 1} score=${r.score} ${r.excerpt}${r.excerpt.length >= 140 ? "…" : ""}`);
@@ -631,12 +717,34 @@ async function main() {
process.exit(2); process.exit(2);
} }
if (runnerUp) {
const ambiguityGap = best.score - runnerUp.score;
if (ambiguityGap < 15) {
console.error("❌ Match ambigu: le meilleur candidat est trop proche du second.");
console.error(`➡️ best=${best.score} / second=${runnerUp.score} / gap=${ambiguityGap}`);
console.error("Top candidates:");
for (const r of ranked) {
console.error(` #${r.i + 1} score=${r.score} ${r.excerpt}${r.excerpt.length >= 140 ? "…" : ""}`);
}
process.exit(2);
}
}
const beforeBlock = blocks[best.i]; const beforeBlock = blocks[best.i];
const afterBlock = proposition.trim(); const afterBlock = proposition.trim();
const nextBlocks = blocks.slice(); const nextBlocks = blocks.slice();
nextBlocks[best.i] = afterBlock; nextBlocks[best.i] = afterBlock;
const updated = nextBlocks.join("\n\n");
const updatedBody = joinParagraphBlocksPreserve(nextBlocks, separators);
const updatedRaw = joinMdxFrontmatter(frontmatter, updatedBody);
assertFrontmatterIntegrity({
hadFrontmatter: hasFrontmatter,
originalFrontmatter: frontmatter,
finalText: updatedRaw,
filePath: path.relative(CWD, contentFile),
});
console.log(`🧩 Matched block #${best.i + 1}/${blocks.length} score=${best.score}`); console.log(`🧩 Matched block #${best.i + 1}/${blocks.length} score=${best.score}`);
@@ -650,13 +758,15 @@ async function main() {
return; return;
} }
// backup uniquement si on écrit const relContentFile = path.relative(CWD, contentFile);
const bakPath = `${contentFile}.bak.issue-${issueNum}`; const bakPath = path.join(BACKUP_ROOT, `${relContentFile}.bak.issue-${issueNum}`);
await fs.mkdir(path.dirname(bakPath), { recursive: true });
if (!(await fileExists(bakPath))) { if (!(await fileExists(bakPath))) {
await fs.writeFile(bakPath, original, "utf-8"); await fs.writeFile(bakPath, originalRaw, "utf-8");
} }
await fs.writeFile(contentFile, updated, "utf-8"); await fs.writeFile(contentFile, updatedRaw, "utf-8");
console.log("✅ Applied."); console.log("✅ Applied.");
let aliasChanged = false; let aliasChanged = false;
@@ -677,13 +787,13 @@ async function main() {
if (aliasChanged) { if (aliasChanged) {
console.log(`✅ Alias ajouté: ${chemin} ${ancre} -> ${newId}`); console.log(`✅ Alias ajouté: ${chemin} ${ancre} -> ${newId}`);
// MàJ dist sans rebuild complet (inject seulement)
run("node", ["scripts/inject-anchor-aliases.mjs"], { cwd: CWD }); run("node", ["scripts/inject-anchor-aliases.mjs"], { cwd: CWD });
} else { } else {
console.log(` Alias déjà présent ou inutile (${ancre} -> ${newId}).`); console.log(` Alias déjà présent ou inutile (${ancre} -> ${newId}).`);
} }
// garde-fous rapides run("node", ["scripts/check-anchor-aliases.mjs"], { cwd: CWD });
run("node", ["scripts/verify-anchor-aliases-in-dist.mjs"], { cwd: CWD });
run("npm", ["run", "test:anchors"], { cwd: CWD }); run("npm", ["run", "test:anchors"], { cwd: CWD });
run("node", ["scripts/check-inline-js.mjs"], { cwd: CWD }); run("node", ["scripts/check-inline-js.mjs"], { cwd: CWD });
} }
@@ -713,7 +823,6 @@ async function main() {
return; return;
} }
// mode manuel
console.log("Next (manuel) :"); console.log("Next (manuel) :");
console.log(` git diff -- ${path.relative(CWD, contentFile)}`); console.log(` git diff -- ${path.relative(CWD, contentFile)}`);
console.log( console.log(
@@ -730,4 +839,4 @@ async function main() {
main().catch((e) => { main().catch((e) => {
console.error("💥", e?.message || e); console.error("💥", e?.message || e);
process.exit(1); process.exit(1);
}); });

72
scripts/audit-docx-source.py Executable file
View File

@@ -0,0 +1,72 @@
#!/usr/bin/env python3
from __future__ import annotations
import argparse
import sys
import unicodedata
import xml.etree.ElementTree as ET
from zipfile import ZipFile
NS = {"w": "http://schemas.openxmlformats.org/wordprocessingml/2006/main"}
FORBIDDEN = [
"coviabilité",
"sacroinstitutionnelle",
"technologistique",
"scripturonormative",
"textesrepères",
"ellemême",
"opérateur de darchicration",
"systèmes plusieurs statuts",
"celle-ci se donne à voir",
"Pour autant il serait",
"Telles peuvent être le cas de",
"la co-viabilité devient ,",
]
def norm(s: str) -> str:
    """Return the NFC-normalized form of *s*, treating falsy input as ""."""
    text = s if s else ""
    return unicodedata.normalize("NFC", text)
def main() -> int:
    """CLI entry point: audit a DOCX for forbidden (mangled) word forms.

    Reads ``word/document.xml`` from the given ``.docx``, NFC-normalizes
    each paragraph's text, and reports every paragraph containing one of
    the FORBIDDEN needles.

    Returns:
        0 if the document is clean,
        1 if forbidden forms were found,
        2 if the file cannot be opened/read as a DOCX.
    """
    # Fix: apostrophes restored in user-facing messages ("d'un", "d'ouvrir");
    # they had been mangled to "dun"/"douvrir" — exactly the kind of
    # apostrophe loss this script exists to detect.
    parser = argparse.ArgumentParser(description="Audit simple d'un DOCX source officiel.")
    parser.add_argument("docx", help="Chemin du fichier .docx")
    args = parser.parse_args()
    try:
        with ZipFile(args.docx) as zf:
            data = zf.read("word/document.xml")
    except FileNotFoundError:
        print(f"ECHEC: fichier introuvable: {args.docx}", file=sys.stderr)
        return 2
    except KeyError:
        # ZipFile.read raises KeyError when the archive member is absent.
        print("ECHEC: word/document.xml introuvable dans le DOCX.", file=sys.stderr)
        return 2
    except Exception as e:  # BadZipFile, permission errors, ...
        print(f"ECHEC: impossible d'ouvrir le DOCX: {e}", file=sys.stderr)
        return 2
    root = ET.fromstring(data)
    found = False
    # Each w:p is a paragraph; its visible text is the join of its w:t runs.
    for i, p in enumerate(root.findall(".//w:p", NS), start=1):
        txt = "".join(t.text or "" for t in p.findall(".//w:t", NS))
        txt_n = norm(txt)
        hits = [needle for needle in FORBIDDEN if needle in txt_n]
        if hits:
            found = True
            print(f"\n[paragraphe {i}]")
            print("Hits :", ", ".join(hits))
            print(txt_n)
    if found:
        print("\nECHEC: formes interdites encore présentes dans le DOCX.")
        return 1
    print("OK: aucune forme interdite trouvée dans le DOCX.")
    return 0
if __name__ == "__main__":
raise SystemExit(main())

View File

@@ -1,28 +1,106 @@
#!/usr/bin/env node
// scripts/build-annotations-index.mjs // scripts/build-annotations-index.mjs
// Construit dist/annotations-index.json à partir de src/annotations/**/*.yml
// Supporte:
// - monolith : src/annotations/<pageKey>.yml
// - shard : src/annotations/<pageKey>/<paraId>.yml (paraId = p-<n>-...)
// Invariants:
// - doc.schema === 1
// - doc.page (si présent) == pageKey déduit du chemin
// - shard: doc.paras doit contenir EXACTEMENT la clé paraId (sinon fail)
//
// Deep-merge non destructif (media/refs/comments dédupliqués), tri stable.
import fs from "node:fs/promises"; import fs from "node:fs/promises";
import path from "node:path"; import path from "node:path";
import YAML from "yaml"; import YAML from "yaml";
function parseArgs(argv) { const ROOT = process.cwd();
const out = { const ANNO_ROOT = path.join(ROOT, "src", "annotations");
inDir: "src/annotations", const DIST_DIR = path.join(ROOT, "dist");
outFile: "dist/annotations-index.json", const OUT = path.join(DIST_DIR, "annotations-index.json");
};
for (let i = 0; i < argv.length; i++) { function assert(cond, msg) {
const a = argv[i]; if (!cond) throw new Error(msg);
}
if (a === "--in" && argv[i + 1]) out.inDir = argv[++i]; function isObj(x) {
else if (a.startsWith("--in=")) out.inDir = a.slice("--in=".length); return !!x && typeof x === "object" && !Array.isArray(x);
}
function isArr(x) {
return Array.isArray(x);
}
if (a === "--out" && argv[i + 1]) out.outFile = argv[++i]; function normPath(s) {
else if (a.startsWith("--out=")) out.outFile = a.slice("--out=".length); return String(s || "")
.replace(/\\/g, "/")
.replace(/^\/+|\/+$/g, "");
}
function paraNum(pid) {
const m = String(pid).match(/^p-(\d+)-/i);
return m ? Number(m[1]) : Number.POSITIVE_INFINITY;
}
function stableSortByTs(arr) {
if (!Array.isArray(arr)) return;
arr.sort((a, b) => {
const ta = Date.parse(a?.ts || "") || 0;
const tb = Date.parse(b?.ts || "") || 0;
if (ta !== tb) return ta - tb;
return JSON.stringify(a).localeCompare(JSON.stringify(b));
});
}
function keyMedia(x) { return String(x?.src || ""); }
function keyRef(x) {
return `${x?.url || ""}||${x?.label || ""}||${x?.kind || ""}||${x?.citation || ""}`;
}
function keyComment(x) { return String(x?.text || "").trim(); }
function uniqUnion(dst, src, keyFn) {
const out = isArr(dst) ? [...dst] : [];
const seen = new Set(out.map((x) => keyFn(x)));
for (const it of (isArr(src) ? src : [])) {
const k = keyFn(it);
if (!k) continue;
if (!seen.has(k)) {
seen.add(k);
out.push(it);
}
} }
return out; return out;
} }
async function exists(p) { function deepMergeEntry(dst, src) {
try { await fs.access(p); return true; } catch { return false; } if (!isObj(dst) || !isObj(src)) return;
for (const [k, v] of Object.entries(src)) {
if (k === "media" && isArr(v)) { dst.media = uniqUnion(dst.media, v, keyMedia); continue; }
if (k === "refs" && isArr(v)) { dst.refs = uniqUnion(dst.refs, v, keyRef); continue; }
if (k === "comments_editorial" && isArr(v)) { dst.comments_editorial = uniqUnion(dst.comments_editorial, v, keyComment); continue; }
if (isObj(v)) {
if (!isObj(dst[k])) dst[k] = {};
deepMergeEntry(dst[k], v);
continue;
}
if (isArr(v)) {
const cur = isArr(dst[k]) ? dst[k] : [];
const seen = new Set(cur.map((x) => JSON.stringify(x)));
const out = [...cur];
for (const it of v) {
const s = JSON.stringify(it);
if (!seen.has(s)) { seen.add(s); out.push(it); }
}
dst[k] = out;
continue;
}
// scalar: set only if missing/empty
if (!(k in dst) || dst[k] == null || dst[k] === "") dst[k] = v;
}
} }
async function walk(dir) { async function walk(dir) {
@@ -30,111 +108,116 @@ async function walk(dir) {
const ents = await fs.readdir(dir, { withFileTypes: true }); const ents = await fs.readdir(dir, { withFileTypes: true });
for (const e of ents) { for (const e of ents) {
const p = path.join(dir, e.name); const p = path.join(dir, e.name);
if (e.isDirectory()) out.push(...(await walk(p))); if (e.isDirectory()) out.push(...await walk(p));
else out.push(p); else if (e.isFile() && /\.ya?ml$/i.test(e.name)) out.push(p);
} }
return out; return out;
} }
function inferPageKeyFromFile(inDirAbs, fileAbs) { function inferExpectedFromRel(relNoExt) {
// src/annotations/<page>.yml -> "<page>" const parts = relNoExt.split("/").filter(Boolean);
const rel = path.relative(inDirAbs, fileAbs).replace(/\\/g, "/"); const last = parts.at(-1) || "";
return rel.replace(/\.(ya?ml|json)$/i, ""); const isShard = parts.length > 1 && /^p-\d+-/i.test(last); // ✅ durcissement
const pageKey = isShard ? parts.slice(0, -1).join("/") : relNoExt;
const paraId = isShard ? last : null;
return { isShard, pageKey, paraId };
} }
function assert(cond, msg) { function validateAndNormalizeDoc(doc, relFile, expectedPageKey, expectedParaId) {
if (!cond) throw new Error(msg); assert(isObj(doc), `${relFile}: doc must be an object`);
} assert(doc.schema === 1, `${relFile}: schema must be 1`);
assert(isObj(doc.paras), `${relFile}: missing object key "paras"`);
function isPlainObject(x) { const gotPage = doc.page != null ? normPath(doc.page) : "";
return !!x && typeof x === "object" && !Array.isArray(x); const expPage = normPath(expectedPageKey);
}
function normalizePageKey(s) { if (gotPage) {
// pas de / en tête/fin
return String(s || "").replace(/^\/+/, "").replace(/\/+$/, "");
}
function validateAndNormalizeDoc(doc, pageKey, fileRel) {
assert(isPlainObject(doc), `${fileRel}: document must be an object`);
assert(doc.schema === 1, `${fileRel}: schema must be 1`);
if (doc.page != null) {
assert( assert(
normalizePageKey(doc.page) === pageKey, gotPage === expPage,
`${fileRel}: page mismatch (page="${doc.page}" vs path="${pageKey}")` `${relFile}: page mismatch (page="${doc.page}" vs path="${expectedPageKey}")`
);
} else {
doc.page = expPage;
}
if (expectedParaId) {
const keys = Object.keys(doc.paras || {}).map(String);
assert(
keys.includes(expectedParaId),
`${relFile}: shard mismatch: must contain paras["${expectedParaId}"]`
);
assert(
keys.length === 1 && keys[0] === expectedParaId,
`${relFile}: shard invariant violated: shard file must contain ONLY paras["${expectedParaId}"] (got: ${keys.join(", ")})`
); );
} }
assert(isPlainObject(doc.paras), `${fileRel}: missing object key "paras"`);
const parasOut = Object.create(null); return doc;
for (const [paraId, entry] of Object.entries(doc.paras)) {
assert(/^p-\d+-/i.test(paraId), `${fileRel}: invalid para id "${paraId}"`);
// entry peut être vide, mais doit être un objet si présent
assert(entry == null || isPlainObject(entry), `${fileRel}: paras.${paraId} must be an object`);
const e = entry ? { ...entry } : {};
// Sanity checks (non destructifs : on nécrase pas, on vérifie juste les types)
if (e.refs != null) assert(Array.isArray(e.refs), `${fileRel}: paras.${paraId}.refs must be an array`);
if (e.authors != null) assert(Array.isArray(e.authors), `${fileRel}: paras.${paraId}.authors must be an array`);
if (e.quotes != null) assert(Array.isArray(e.quotes), `${fileRel}: paras.${paraId}.quotes must be an array`);
if (e.media != null) assert(Array.isArray(e.media), `${fileRel}: paras.${paraId}.media must be an array`);
if (e.comments_editorial != null) assert(Array.isArray(e.comments_editorial), `${fileRel}: paras.${paraId}.comments_editorial must be an array`);
parasOut[paraId] = e;
}
return parasOut;
}
async function readDoc(fileAbs) {
const raw = await fs.readFile(fileAbs, "utf8");
if (/\.json$/i.test(fileAbs)) return JSON.parse(raw);
return YAML.parse(raw);
} }
async function main() { async function main() {
const { inDir, outFile } = parseArgs(process.argv.slice(2)); const pages = {};
const CWD = process.cwd(); const errors = [];
const inDirAbs = path.isAbsolute(inDir) ? inDir : path.join(CWD, inDir); await fs.mkdir(DIST_DIR, { recursive: true });
const outAbs = path.isAbsolute(outFile) ? outFile : path.join(CWD, outFile);
// antifragile const files = await walk(ANNO_ROOT);
if (!(await exists(inDirAbs))) {
console.log(` annotations-index: skip (input missing): ${inDir}`);
process.exit(0);
}
const files = (await walk(inDirAbs)).filter((p) => /\.(ya?ml|json)$/i.test(p)); for (const fp of files) {
if (!files.length) { const rel = normPath(path.relative(ANNO_ROOT, fp));
console.log(` annotations-index: skip (no .yml/.yaml/.json found in): ${inDir}`); const relNoExt = rel.replace(/\.ya?ml$/i, "");
process.exit(0); const { isShard, pageKey, paraId } = inferExpectedFromRel(relNoExt);
}
const pages = Object.create(null);
let paraCount = 0;
for (const f of files) {
const fileRel = path.relative(CWD, f).replace(/\\/g, "/");
const pageKey = normalizePageKey(inferPageKeyFromFile(inDirAbs, f));
assert(pageKey, `${fileRel}: cannot infer page key`);
let doc;
try { try {
doc = await readDoc(f); const raw = await fs.readFile(fp, "utf8");
const doc = YAML.parse(raw) || {};
if (!isObj(doc) || doc.schema !== 1) continue;
validateAndNormalizeDoc(
doc,
`src/annotations/${rel}`,
pageKey,
isShard ? paraId : null
);
const pg = (pages[pageKey] ??= { paras: {} });
if (isShard) {
const entry = doc.paras[paraId];
if (!isObj(pg.paras[paraId])) pg.paras[paraId] = {};
if (isObj(entry)) deepMergeEntry(pg.paras[paraId], entry);
stableSortByTs(pg.paras[paraId].media);
stableSortByTs(pg.paras[paraId].refs);
stableSortByTs(pg.paras[paraId].comments_editorial);
} else {
for (const [pid, entry] of Object.entries(doc.paras || {})) {
const p = String(pid);
if (!isObj(pg.paras[p])) pg.paras[p] = {};
if (isObj(entry)) deepMergeEntry(pg.paras[p], entry);
stableSortByTs(pg.paras[p].media);
stableSortByTs(pg.paras[p].refs);
stableSortByTs(pg.paras[p].comments_editorial);
}
}
} catch (e) { } catch (e) {
throw new Error(`${fileRel}: parse failed: ${String(e?.message ?? e)}`); errors.push({ file: `src/annotations/${rel}`, error: String(e?.message || e) });
} }
}
const paras = validateAndNormalizeDoc(doc, pageKey, fileRel); for (const [pageKey, pg] of Object.entries(pages)) {
const keys = Object.keys(pg.paras || {});
// 1 fichier = 1 page (canon) keys.sort((a, b) => {
assert(!pages[pageKey], `${fileRel}: duplicate page "${pageKey}" (only one file per page)`); const ia = paraNum(a);
pages[pageKey] = { paras }; const ib = paraNum(b);
paraCount += Object.keys(paras).length; if (Number.isFinite(ia) && Number.isFinite(ib) && ia !== ib) return ia - ib;
return String(a).localeCompare(String(b));
});
const next = {};
for (const k of keys) next[k] = pg.paras[k];
pg.paras = next;
} }
const out = { const out = {
@@ -143,17 +226,21 @@ async function main() {
pages, pages,
stats: { stats: {
pages: Object.keys(pages).length, pages: Object.keys(pages).length,
paras: paraCount, paras: Object.values(pages).reduce((n, p) => n + Object.keys(p.paras || {}).length, 0),
errors: errors.length,
}, },
errors,
}; };
await fs.mkdir(path.dirname(outAbs), { recursive: true }); if (errors.length) {
await fs.writeFile(outAbs, JSON.stringify(out), "utf8"); throw new Error(`${errors[0].file}: ${errors[0].error}`);
}
console.log(`✅ annotations-index: pages=${out.stats.pages} paras=${out.stats.paras} -> ${path.relative(CWD, outAbs)}`); await fs.writeFile(OUT, JSON.stringify(out), "utf8");
console.log(`✅ annotations-index: pages=${out.stats.pages} paras=${out.stats.paras} -> dist/annotations-index.json`);
} }
main().catch((e) => { main().catch((e) => {
console.error("FAIL: build-annotations-index crashed:", e); console.error(`FAIL: build-annotations-index crashed: ${e?.stack || e?.message || e}`);
process.exit(1); process.exit(1);
}); });

View File

@@ -74,7 +74,24 @@ function loadAllowMissing() {
return new Set(arr.map(String)); return new Set(arr.map(String));
} }
function loadAcceptedResets() {
const p = path.resolve("config/anchor-churn-allowlist.json");
if (!fssync.existsSync(p)) return {};
const raw = fssync.readFileSync(p, "utf8").trim();
if (!raw) return {};
const data = JSON.parse(raw);
if (!data || typeof data !== "object" || Array.isArray(data)) {
throw new Error("anchor-churn-allowlist.json must be an object");
}
const accepted = data.accepted_resets || {};
if (!accepted || typeof accepted !== "object" || Array.isArray(accepted)) {
throw new Error("anchor-churn-allowlist.json: accepted_resets must be an object");
}
return accepted;
}
const ALLOW_MISSING = loadAllowMissing(); const ALLOW_MISSING = loadAllowMissing();
const ACCEPTED_RESETS = loadAcceptedResets();
async function buildSnapshot() { async function buildSnapshot() {
const absDist = path.resolve(DIST_DIR); const absDist = path.resolve(DIST_DIR);
@@ -139,6 +156,7 @@ function diffPage(prevIds, curIds) {
let failed = false; let failed = false;
let changedPages = 0; let changedPages = 0;
let acceptedPages = 0;
for (const p of pages) { for (const p of pages) {
const prevIds = base[p] || null; const prevIds = base[p] || null;
@@ -172,6 +190,7 @@ function diffPage(prevIds, curIds) {
const prevN = prevIds.length || 1; const prevN = prevIds.length || 1;
const churn = (added.length + removed.length) / prevN; const churn = (added.length + removed.length) / prevN;
const removedRatio = removed.length / prevN; const removedRatio = removed.length / prevN;
const acceptedReason = ACCEPTED_RESETS[p] || null;
console.log( console.log(
`~ ${p} prev=${prevIds.length} now=${curIds.length}` + `~ ${p} prev=${prevIds.length} now=${curIds.length}` +
@@ -182,11 +201,23 @@ function diffPage(prevIds, curIds) {
console.log(` removed: ${removed.slice(0, 20).join(", ")}${removed.length > 20 ? " …" : ""}`); console.log(` removed: ${removed.slice(0, 20).join(", ")}${removed.length > 20 ? " …" : ""}`);
} }
if (prevIds.length >= MIN_PREV && churn > THRESHOLD) failed = true; const exceeds =
if (prevIds.length >= MIN_PREV && removedRatio > THRESHOLD) failed = true; (prevIds.length >= MIN_PREV && churn > THRESHOLD) ||
(prevIds.length >= MIN_PREV && removedRatio > THRESHOLD);
if (exceeds && acceptedReason) {
acceptedPages += 1;
console.log(` ✅ accepted reset: ${acceptedReason}`);
continue;
}
if (exceeds) failed = true;
} }
console.log(`\nSummary: pages compared=${pages.length}, pages changed=${changedPages}`); console.log(
`\nSummary: pages compared=${pages.length}, pages changed=${changedPages}, accepted resets=${acceptedPages}`
);
if (failed) { if (failed) {
console.error(`FAIL: anchor churn above threshold (threshold=${pct(THRESHOLD)} minPrev=${MIN_PREV})`); console.error(`FAIL: anchor churn above threshold (threshold=${pct(THRESHOLD)} minPrev=${MIN_PREV})`);
process.exit(1); process.exit(1);

View File

@@ -48,6 +48,9 @@ async function main() {
let missing = 0; let missing = 0;
const notes = []; const notes = [];
// Optim: éviter de vérifier 100 fois le même fichier media
const seenMedia = new Set(); // src string
for (const f of files) { for (const f of files) {
const rel = path.relative(CWD, f).replace(/\\/g, "/"); const rel = path.relative(CWD, f).replace(/\\/g, "/");
const raw = await fs.readFile(f, "utf8"); const raw = await fs.readFile(f, "utf8");
@@ -70,6 +73,10 @@ async function main() {
const src = String(m?.src || ""); const src = String(m?.src || "");
if (!src.startsWith("/media/")) continue; // externes ok, ou autres conventions futures if (!src.startsWith("/media/")) continue; // externes ok, ou autres conventions futures
// dédupe
if (seenMedia.has(src)) continue;
seenMedia.add(src);
checked++; checked++;
const p = toPublicPathFromUrl(src); const p = toPublicPathFromUrl(src);
if (!p) continue; if (!p) continue;
@@ -94,4 +101,4 @@ async function main() {
main().catch((e) => { main().catch((e) => {
console.error("FAIL: check-annotations-media crashed:", e); console.error("FAIL: check-annotations-media crashed:", e);
process.exit(1); process.exit(1);
}); });

View File

@@ -27,11 +27,6 @@ function escRe(s) {
return String(s).replace(/[.*+?^${}()|[\]\\]/g, "\\$&"); return String(s).replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
} }
function inferPageKeyFromFile(fileAbs) {
const rel = path.relative(ANNO_DIR, fileAbs).replace(/\\/g, "/");
return rel.replace(/\.(ya?ml|json)$/i, "");
}
function normalizePageKey(s) { function normalizePageKey(s) {
return String(s || "").replace(/^\/+/, "").replace(/\/+$/, ""); return String(s || "").replace(/^\/+/, "").replace(/\/+$/, "");
} }
@@ -40,6 +35,31 @@ function isPlainObject(x) {
return !!x && typeof x === "object" && !Array.isArray(x); return !!x && typeof x === "object" && !Array.isArray(x);
} }
function isParaId(s) {
return /^p-\d+-/i.test(String(s || ""));
}
/**
* Supporte:
* - monolith: src/annotations/<pageKey>.yml -> pageKey = rel sans ext
* - shard : src/annotations/<pageKey>/<paraId>.yml -> pageKey = dirname(rel), paraId = basename
*
* shard seulement si le fichier est dans un sous-dossier (anti cas pathologique).
*/
function inferFromFile(fileAbs) {
const rel = path.relative(ANNO_DIR, fileAbs).replace(/\\/g, "/");
const relNoExt = rel.replace(/\.(ya?ml|json)$/i, "");
const parts = relNoExt.split("/").filter(Boolean);
const base = parts[parts.length - 1] || "";
const dirParts = parts.slice(0, -1);
const isShard = dirParts.length > 0 && isParaId(base);
const pageKey = isShard ? dirParts.join("/") : relNoExt;
const paraId = isShard ? base : "";
return { pageKey: normalizePageKey(pageKey), paraId };
}
async function loadAliases() { async function loadAliases() {
if (!(await exists(ALIASES_PATH))) return {}; if (!(await exists(ALIASES_PATH))) return {};
try { try {
@@ -60,10 +80,12 @@ function getAlias(aliases, pageKey, oldId) {
// supporte: // supporte:
// 1) { "<pageKey>": { "<old>": "<new>" } } // 1) { "<pageKey>": { "<old>": "<new>" } }
// 2) { "<old>": "<new>" } // 2) { "<old>": "<new>" }
const a1 = aliases?.[pageKey]?.[oldId]; const k1 = String(pageKey || "");
if (a1) return a1; const k2 = k1 ? ("/" + k1.replace(/^\/+|\/+$/g, "") + "/") : "";
const a1 = (aliases?.[k1]?.[oldId]) || (k2 ? aliases?.[k2]?.[oldId] : "");
if (a1) return String(a1);
const a2 = aliases?.[oldId]; const a2 = aliases?.[oldId];
if (a2) return a2; if (a2) return String(a2);
return ""; return "";
} }
@@ -81,7 +103,11 @@ async function main() {
const aliases = await loadAliases(); const aliases = await loadAliases();
const files = (await walk(ANNO_DIR)).filter((p) => /\.(ya?ml|json)$/i.test(p)); const files = (await walk(ANNO_DIR)).filter((p) => /\.(ya?ml|json)$/i.test(p));
let pages = 0; // perf: cache HTML par page (shards = beaucoup de fichiers pour 1 page)
const htmlCache = new Map(); // pageKey -> html
const missingDistPage = new Set(); // pageKey
let pagesSeen = new Set();
let checked = 0; let checked = 0;
let failures = 0; let failures = 0;
const notes = []; const notes = [];
@@ -105,7 +131,7 @@ async function main() {
continue; continue;
} }
const pageKey = normalizePageKey(inferPageKeyFromFile(f)); const { pageKey, paraId: shardParaId } = inferFromFile(f);
if (doc.page != null && normalizePageKey(doc.page) !== pageKey) { if (doc.page != null && normalizePageKey(doc.page) !== pageKey) {
failures++; failures++;
@@ -119,20 +145,44 @@ async function main() {
continue; continue;
} }
// shard invariant (fort) : doit contenir paras[paraId]
if (shardParaId) {
if (!Object.prototype.hasOwnProperty.call(doc.paras, shardParaId)) {
failures++;
notes.push(`- SHARD MISMATCH: ${rel} (expected paras["${shardParaId}"] present)`);
continue;
}
// si extras -> warning (non destructif)
const keys = Object.keys(doc.paras);
if (!(keys.length === 1 && keys[0] === shardParaId)) {
notes.push(`- WARN shard has extra paras: ${rel} (expected only "${shardParaId}", got ${keys.join(", ")})`);
}
}
pagesSeen.add(pageKey);
const distFile = path.join(DIST_DIR, pageKey, "index.html"); const distFile = path.join(DIST_DIR, pageKey, "index.html");
if (!(await exists(distFile))) { if (!(await exists(distFile))) {
failures++; if (!missingDistPage.has(pageKey)) {
notes.push(`- MISSING PAGE: dist/${pageKey}/index.html (from ${rel})`); missingDistPage.add(pageKey);
failures++;
notes.push(`- MISSING PAGE: dist/${pageKey}/index.html (from ${rel})`);
} else {
notes.push(`- WARN missing page already reported: dist/${pageKey}/index.html (from ${rel})`);
}
continue; continue;
} }
pages++; let html = htmlCache.get(pageKey);
const html = await fs.readFile(distFile, "utf8"); if (!html) {
html = await fs.readFile(distFile, "utf8");
htmlCache.set(pageKey, html);
}
for (const paraId of Object.keys(doc.paras)) { for (const paraId of Object.keys(doc.paras)) {
checked++; checked++;
if (!/^p-\d+-/i.test(paraId)) { if (!isParaId(paraId)) {
failures++; failures++;
notes.push(`- INVALID ID: ${rel} (${paraId})`); notes.push(`- INVALID ID: ${rel} (${paraId})`);
continue; continue;
@@ -156,6 +206,7 @@ async function main() {
} }
const warns = notes.filter((x) => x.startsWith("- WARN")); const warns = notes.filter((x) => x.startsWith("- WARN"));
const pages = pagesSeen.size;
if (failures > 0) { if (failures > 0) {
console.error(`FAIL: annotations invalid (pages=${pages} checked=${checked} failures=${failures})`); console.error(`FAIL: annotations invalid (pages=${pages} checked=${checked} failures=${failures})`);
@@ -170,4 +221,4 @@ async function main() {
main().catch((e) => { main().catch((e) => {
console.error("FAIL: annotations check crashed:", e); console.error("FAIL: annotations check crashed:", e);
process.exit(1); process.exit(1);
}); });

241
scripts/convert_docx_to_mdx.py Executable file
View File

@@ -0,0 +1,241 @@
#!/usr/bin/env python3
import argparse
import os
import re
import shutil
import subprocess
import sys
from pathlib import Path
try:
import yaml
except ImportError:
print("Erreur : PyYAML n'est pas installé. Lance : pip3 install pyyaml")
sys.exit(1)
EDITION = "archicrat-ia"
STATUS = "essai_these"
VERSION = "0.1.0"
ORDER_MAP = {
"prologue": 10,
"chapitre-1": 20,
"chapitre-2": 30,
"chapitre-3": 40,
"chapitre-4": 50,
"chapitre-5": 60,
"conclusion": 70,
}
TITLE_MAP = {
"prologue": "Prologue — Fondation, finalité sociopolitique et historique",
"chapitre-1": "Chapitre 1 — Fondements épistémologiques et modélisation",
"chapitre-2": "Chapitre 2 — Archéogenèse des régimes de co-viabilité",
"chapitre-3": "Chapitre 3 — Philosophies du pouvoir et archicration",
"chapitre-4": "Chapitre 4 — Histoire archicratique des révolutions industrielles",
"chapitre-5": "Chapitre 5 — Tensions, co-viabilités et régulations",
"conclusion": "Conclusion — ArchiCraT-IA",
}
def slugify_name(path: Path) -> str:
    """Derive a canonical chapter slug from a DOCX filename.

    Lowercases the stem, transliterates common French accented letters,
    maps separators/dashes to "-", collapses repeated dashes, then snaps
    the result onto one of the canonical slugs (prologue, chapitre-N,
    conclusion) when the stem contains it.
    """
    stem = path.stem.lower().strip()
    # NOTE(review): the original table contained mis-encoded keys — the
    # en/em dashes and the curly apostrophe had degraded to empty strings,
    # turning those rules into no-ops. Restored explicitly via escapes.
    replacements = {
        " ": "-",
        "_": "-",
        "\u2013": "-",  # en dash
        "\u2014": "-",  # em dash
        "é": "e",
        "è": "e",
        "ê": "e",
        "ë": "e",
        "à": "a",
        "â": "a",
        "ä": "a",
        "î": "i",
        "ï": "i",
        "ô": "o",
        "ö": "o",
        "ù": "u",
        "û": "u",
        "ü": "u",
        "ç": "c",
        "'": "",
        "\u2019": "",  # curly apostrophe
    }
    for old, new in replacements.items():
        stem = stem.replace(old, new)
    stem = re.sub(r"-+", "-", stem).strip("-")
    # Snap onto a canonical slug when the stem contains one; the former
    # long-form "chapitre-1-…-revise" rewrite was redundant with this check.
    if "prologue" in stem:
        return "prologue"
    for n in range(1, 6):
        if f"chapitre-{n}" in stem:
            return f"chapitre-{n}"
    if "conclusion" in stem:
        return "conclusion"
    return stem
def extract_title_from_markdown(md_text: str) -> str | None:
for line in md_text.splitlines():
line = line.strip()
if not line:
continue
if line.startswith("# "):
return line[2:].strip()
return None
def remove_first_h1(md_text: str) -> str:
    """Drop the first H1 line ("# ...") and left-strip the remaining text."""
    kept = []
    skipped = False
    for line in md_text.splitlines():
        if not skipped and line.strip().startswith("# "):
            skipped = True
        else:
            kept.append(line)
    return "\n".join(kept).lstrip()
def clean_markdown(md_text: str) -> str:
    """Normalize Pandoc Markdown output.

    Unifies newlines, collapses runs of 3+ newlines, drops empty internal
    anchors ("[](#...)") left by Pandoc, trims trailing whitespace on each
    line, and guarantees exactly one trailing newline.
    """
    text = md_text.replace("\r\n", "\n").replace("\r", "\n")
    passes = (
        (r"\n{3,}", "\n\n"),      # collapse blank-line runs
        (r"\[\]\(#.*?\)", ""),    # drop empty Pandoc anchors
        (r"[ \t]+$", ""),         # trim trailing spaces/tabs
    )
    for pattern, repl in passes:
        text = re.sub(pattern, repl, text, flags=re.MULTILINE)
    return text.strip() + "\n"
def compute_level(slug: str) -> int:
    """Return the navigation level for a top-level section slug.

    Every section handled here (prologue, chapitre-*, conclusion, or any
    other slug) sits at level 1; the original if/elif ladder returned 1 on
    every branch, so it is collapsed to a single return. Kept as a function
    so a future hierarchy can plug in without touching callers.
    """
    return 1
def convert_one_file(input_docx: Path, output_dir: Path, source_root: Path):
    """Convert one DOCX to an MDX file with YAML frontmatter.

    Runs pandoc (docx -> GitHub-flavored Markdown with smart typography),
    lifts the first H1 out of the body (the title lives in the frontmatter
    instead), and writes <slug>.mdx into output_dir.

    Raises subprocess.CalledProcessError if pandoc fails (check=True).
    """
    slug = slugify_name(input_docx)
    output_mdx = output_dir / f"{slug}.mdx"
    cmd = [
        "pandoc",
        str(input_docx),
        "-f",
        "docx",
        "-t",
        "gfm+smart",
    ]
    result = subprocess.run(cmd, check=True, capture_output=True, text=True)
    md_text = result.stdout
    detected_title = extract_title_from_markdown(md_text)
    md_body = remove_first_h1(md_text)
    md_body = clean_markdown(md_body)
    # Prefer the curated title; fall back to the H1 found in the document,
    # then to the raw filename stem.
    title = TITLE_MAP.get(slug) or detected_title or input_docx.stem
    order = ORDER_MAP.get(slug, 999)
    level = compute_level(slug)
    relative_source = input_docx
    try:
        relative_source = input_docx.relative_to(source_root)
    except ValueError:
        # Input lives outside source_root: record just the filename (str).
        relative_source = input_docx.name
    frontmatter = {
        "title": title,
        "edition": EDITION,
        "status": STATUS,
        "level": level,
        "version": VERSION,
        "concepts": [],
        "links": [],
        "order": order,
        "summary": "",
        "source": {
            "kind": "docx",
            "path": str(relative_source),
        },
    }
    yaml_block = yaml.safe_dump(
        frontmatter,
        allow_unicode=True,
        sort_keys=False,
        default_flow_style=False,
    ).strip()
    # chr(10) == "\n": guarantee exactly one newline between the closing
    # "---" fence and the body (clean_markdown never leaves a leading one).
    final_text = f"---\n{yaml_block}\n---\n{md_body if md_body.startswith(chr(10)) else chr(10) + md_body}"
    output_mdx.write_text(final_text, encoding="utf-8")
    print(f"{input_docx.name} -> {output_mdx.name}")
def main():
    """CLI entry point: batch-convert every *.docx in input_dir to MDX."""
    parser = argparse.ArgumentParser(description="Convertit un dossier DOCX en MDX avec frontmatter.")
    parser.add_argument("input_dir", help="Dossier source contenant les DOCX")
    parser.add_argument("output_dir", help="Dossier de sortie pour les MDX")
    args = parser.parse_args()

    input_dir = Path(args.input_dir).expanduser().resolve()
    output_dir = Path(args.output_dir).expanduser().resolve()

    # Fail fast on missing prerequisites.
    if not shutil.which("pandoc"):
        print("Erreur : pandoc n'est pas installé. Lance : brew install pandoc")
        sys.exit(1)
    if not (input_dir.exists() and input_dir.is_dir()):
        print(f"Erreur : dossier source introuvable : {input_dir}")
        sys.exit(1)

    output_dir.mkdir(parents=True, exist_ok=True)

    docx_files = sorted(input_dir.glob("*.docx"))
    if not docx_files:
        print(f"Aucun DOCX trouvé dans : {input_dir}")
        sys.exit(1)

    for docx_file in docx_files:
        convert_one_file(docx_file, output_dir, input_dir)

    print()
    print("Conversion DOCX -> MDX terminée.")


if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,304 @@
#!/usr/bin/env python3
import argparse
import re
import shutil
import subprocess
import sys
import tempfile
from pathlib import Path
import zipfile
try:
import yaml
except ImportError:
print("Erreur : PyYAML n'est pas installé. Lance : pip3 install pyyaml")
sys.exit(1)
try:
from docx import Document
except ImportError:
print("Erreur : python-docx n'est pas installé. Lance : pip3 install python-docx")
sys.exit(1)
def split_frontmatter(text: str):
    """Split "---\\n...\\n---\\n" YAML frontmatter from a document body.

    Returns (metadata, body). When frontmatter is absent or malformed the
    metadata is {} and the body is the whole input; an unparsable YAML
    block is reported and treated as empty metadata.
    """
    if not text.startswith("---\n"):
        return {}, text
    parsed = re.match(r"^---\n(.*?)\n---\n(.*)$", text, flags=re.DOTALL)
    if parsed is None:
        return {}, text
    header = parsed.group(1)
    body = parsed.group(2)
    try:
        metadata = yaml.safe_load(header) or {}
    except Exception as e:
        print(f"Avertissement : frontmatter YAML illisible : {e}")
        metadata = {}
    return metadata, body
def strip_mdx_artifacts(text: str):
    """Remove MDX-only constructs so the text becomes plain Markdown.

    Drops import/export lines, self-closing and paired JSX components
    (capitalized tag names), leftover "{}" lines, and collapses runs of
    blank lines. Always ends with exactly one trailing newline.
    """
    scrubbers = (
        (r"^\s*(import|export)\s+.+?$", "", re.MULTILINE),         # MDX imports/exports
        (r"<[A-Z][A-Za-z0-9._-]*\b[^>]*\/>", "", 0),               # <Component ... />
        (r"<([A-Z][A-Za-z0-9._-]*)\b[^>]*>.*?</\1>", "", re.DOTALL),  # <C>...</C>
        (r"^\s*{\s*}\s*$", "", re.MULTILINE),                      # stray "{}" lines
        (r"\n{3,}", "\n\n", 0),                                    # blank-line runs
    )
    for pattern, repl, flags in scrubbers:
        text = re.sub(pattern, repl, text, flags=flags)
    return text.strip() + "\n"
def inject_h1_from_title(metadata: dict, body: str):
    """Prepend "# <title>" from the frontmatter unless the body already opens with an H1."""
    title = metadata.get("title", "")
    already_has_h1 = bool(re.match(r"^\s*#\s+", body))
    if not title or already_has_h1:
        return body
    return f"# {title}\n\n{body.lstrip()}"
def find_style_by_candidates(doc, candidates):
    """Find a docx style matching one of the candidate display names.

    Pass 1 matches on the visible style name. Pass 2 falls back to Word's
    internal style_id (BodyText, Heading1..4), mapping each supported
    candidate display name onto its canonical id. Returns None when no
    style matches.
    """
    # Pass 1: exact display-name match.
    for style in doc.styles:
        if any(style.name == candidate for candidate in candidates):
            return style
    # Pass 2: match through the internal Word style id.
    id_for_candidate = {
        "Body Text": "BodyText", "Corps de texte": "BodyText",
        "Heading 1": "Heading1", "Titre 1": "Heading1",
        "Heading 2": "Heading2", "Titre 2": "Heading2",
        "Heading 3": "Heading3", "Titre 3": "Heading3",
        "Heading 4": "Heading4", "Titre 4": "Heading4",
    }
    known_ids = {"BodyText", "Heading1", "Heading2", "Heading3", "Heading4"}
    for style in doc.styles:
        style_id = getattr(style, "style_id", "")
        if style_id not in known_ids:
            continue
        for candidate in candidates:
            if id_for_candidate.get(candidate) == style_id:
                return style
    return None
def strip_leading_paragraph_numbers(text: str):
    """Remove "N. " paragraph numbers at line starts ("2. ", "11. ", "101. ").

    Markdown headings (#, ##, ###...) are kept verbatim. The result always
    ends with a newline.
    """
    out = []
    for line in text.splitlines():
        if line.lstrip().startswith("#"):
            out.append(line)  # never touch Markdown headings
        else:
            out.append(re.sub(r"^\s*\d+\.\s+", "", line))
    return "\n".join(out) + "\n"
def normalize_non_heading_paragraphs(docx_path: Path):
    """Force every non-empty, non-heading paragraph to the Body Text style.

    Heading 1-4 paragraphs (English and French "Titre" names, or their
    internal Heading1..4 ids) are left untouched. The document is saved
    back in place; a warning is printed when no body style exists.
    """
    doc = Document(str(docx_path))
    body_style = find_style_by_candidates(doc, ["Body Text", "Corps de texte"])
    if body_style is None:
        print(f"Avertissement : style 'Body Text / Corps de texte' introuvable dans {docx_path.name}")
        return

    heading_names = {
        "Heading 1", "Heading 2", "Heading 3", "Heading 4",
        "Titre 1", "Titre 2", "Titre 3", "Titre 4",
    }
    heading_ids = {"Heading1", "Heading2", "Heading3", "Heading4"}

    changed = 0
    for para in doc.paragraphs:
        if not para.text.strip():
            continue  # ignore empty paragraphs
        style = para.style
        name = style.name if style else ""
        style_id = getattr(style, "style_id", "") if style else ""
        if name in heading_names or style_id in heading_ids:
            continue  # keep headings as-is
        para.style = body_style
        changed += 1

    doc.save(str(docx_path))
    print(f" ↳ normalisation styles : {changed} paragraphe(s) mis en 'Body Text / Corps de texte'")
def remove_word_bookmarks(docx_path: Path):
    """Strip Word bookmark markers (w:bookmarkStart / w:bookmarkEnd) from a DOCX.

    Bookmarks render as gray brackets in Word/LibreOffice when bookmark
    display is enabled. The archive is unpacked to a temp dir, the relevant
    XML parts are rewritten, and the DOCX is rebuilt in place.
    """
    with tempfile.TemporaryDirectory() as td:
        root = Path(td)
        with zipfile.ZipFile(docx_path, "r") as zin:
            zin.extractall(root)

        removed = 0
        for part in ("document.xml", "footnotes.xml", "endnotes.xml", "comments.xml"):
            xml_file = root / "word" / part
            if not xml_file.exists():
                continue
            payload = xml_file.read_text(encoding="utf-8")
            for marker in (r"<w:bookmarkStart\b[^>]*/>", r"<w:bookmarkEnd\b[^>]*/>"):
                payload, count = re.subn(marker, "", payload)
                removed += count
            xml_file.write_text(payload, encoding="utf-8")

        # Rebuild the archive next to the original, then swap it in.
        tmp_output = docx_path.with_suffix(".cleaned.docx")
        with zipfile.ZipFile(tmp_output, "w", zipfile.ZIP_DEFLATED) as zout:
            for file in root.rglob("*"):
                if file.is_file():
                    zout.write(file, file.relative_to(root))
        tmp_output.replace(docx_path)

    print(f" ↳ suppression signets : {removed} balise(s) supprimée(s)")
def convert_one_file(input_path: Path, output_path: Path, reference_doc: Path | None):
    """Convert one MDX file to DOCX.

    Pipeline: strip frontmatter -> remove MDX/JSX artifacts -> drop leading
    paragraph numbers -> re-inject the frontmatter title as H1 -> pandoc
    (markdown -> docx, optionally with --reference-doc) -> normalize
    paragraph styles -> strip Word bookmarks. Writes output_path in place.

    Raises subprocess.CalledProcessError if pandoc fails (check=True).
    """
    raw = input_path.read_text(encoding="utf-8")
    metadata, body = split_frontmatter(raw)
    body = strip_mdx_artifacts(body)
    body = strip_leading_paragraph_numbers(body)
    body = inject_h1_from_title(metadata, body)
    # Pandoc needs a real file; delete=False so it survives the handle close
    # (required on Windows), removed manually in the finally block below.
    with tempfile.NamedTemporaryFile("w", suffix=".md", delete=False, encoding="utf-8") as tmp:
        tmp.write(body)
        tmp_md = Path(tmp.name)
    cmd = [
        "pandoc",
        str(tmp_md),
        "-f",
        "markdown",
        "-o",
        str(output_path),
    ]
    if reference_doc:
        cmd.extend(["--reference-doc", str(reference_doc)])
    try:
        subprocess.run(cmd, check=True)
    finally:
        # Always clean up the temp Markdown file, even if pandoc failed.
        try:
            tmp_md.unlink()
        except FileNotFoundError:
            pass
    # Post-process the DOCX that pandoc just produced.
    normalize_non_heading_paragraphs(output_path)
    remove_word_bookmarks(output_path)
def main():
    """CLI entry point: batch-convert every *.mdx in input_dir to DOCX."""
    parser = argparse.ArgumentParser(
        description="Convertit des fichiers MDX en DOCX en conservant H1/H2/H3/H4 et en forçant le corps en Body Text."
    )
    parser.add_argument("input_dir", help="Dossier contenant les .mdx")
    parser.add_argument(
        "--output-dir",
        default=str(Path.home() / "Desktop" / "archicrat-ia-docx"),
        help="Dossier de sortie DOCX"
    )
    parser.add_argument(
        "--reference-doc",
        default=None,
        help="DOCX modèle Word à utiliser comme reference-doc"
    )
    args = parser.parse_args()

    input_dir = Path(args.input_dir)
    output_dir = Path(args.output_dir)
    reference_doc = Path(args.reference_doc) if args.reference_doc else None

    # Preflight: pandoc installed, inputs present.
    if not shutil.which("pandoc"):
        print("Erreur : pandoc n'est pas installé. Installe-le avec : brew install pandoc")
        sys.exit(1)
    if not (input_dir.exists() and input_dir.is_dir()):
        print(f"Erreur : dossier introuvable : {input_dir}")
        sys.exit(1)
    if reference_doc and not reference_doc.exists():
        print(f"Erreur : reference-doc introuvable : {reference_doc}")
        sys.exit(1)

    output_dir.mkdir(parents=True, exist_ok=True)

    mdx_files = sorted(input_dir.glob("*.mdx"))
    if not mdx_files:
        print(f"Aucun fichier .mdx trouvé dans : {input_dir}")
        sys.exit(1)

    print(f"Conversion de {len(mdx_files)} fichier(s)...")
    print(f"Entrée : {input_dir}")
    print(f"Sortie : {output_dir}")
    if reference_doc:
        print(f"Modèle : {reference_doc}")
    print()

    for mdx_file in mdx_files:
        docx_name = mdx_file.with_suffix(".docx").name
        print(f"{mdx_file.name} -> {docx_name}")
        convert_one_file(mdx_file, output_dir / docx_name, reference_doc)

    print()
    print("✅ Conversion terminée.")


if __name__ == "__main__":
    main()

132
scripts/fix-docx-source.py Executable file
View File

@@ -0,0 +1,132 @@
#!/usr/bin/env python3
from __future__ import annotations
import argparse
import shutil
import tempfile
import unicodedata
import xml.etree.ElementTree as ET
from pathlib import Path
from zipfile import ZIP_DEFLATED, ZipFile
W_NS = "http://schemas.openxmlformats.org/wordprocessingml/2006/main"
XML_NS = "http://www.w3.org/XML/1998/namespace"
NS = {"w": W_NS}
ET.register_namespace("w", W_NS)
# Mechanical text repairs applied to every paragraph (search -> replacement).
# Mostly restores hyphens lost in the source ("coviabilité" ->
# "co-viabilité") plus a handful of one-off editorial fixes.
# NOTE(review): several keys look like they lost an apostrophe or dash in an
# earlier encoding pass (e.g. "opérateur de darchicration", "ellemême",
# "textesrepères") — confirm against the canonical DOCX that the intended
# typographic characters survive here before relying on these rules.
REPLACEMENTS = {
    "coviabilité": "co-viabilité",
    "sacroinstitutionnelle": "sacro-institutionnelle",
    "technologistique": "techno-logistique",
    "scripturonormative": "scripturo-normative",
    "textesrepères": "textes-repères",
    "ellemême": "elle-même",
    "opérateur de darchicration": "opérateur darchicration",
    "systèmes plusieurs statuts": "systèmes à plusieurs statuts",
    "celle-ci se donne à voir": "Celle-ci se donne à voir",
    "Pour autant il serait": "Pour autant, il serait",
    "Telles peuvent être le cas de": "Tels peuvent être les cas de",
}

# Deliberately NOT auto-corrected: "la co-viabilité devient ,"
# — that case requires a human editorial decision.
def qn(tag: str) -> str:
    """Expand a "w:<local>" tag into Clark notation "{W_NS}<local>".

    Only the "w" prefix is supported; any other prefix (or a tag without
    exactly one colon) raises ValueError.
    """
    prefix, local = tag.split(":")
    if prefix != "w":
        raise ValueError(tag)
    return "{" + W_NS + "}" + local
def norm(s: str) -> str:
    """NFC-normalize s; a falsy input (None, "") becomes ""."""
    return unicodedata.normalize("NFC", s if s else "")
def paragraph_text(p: ET.Element) -> str:
    """Concatenate the text of every w:t run found under paragraph p."""
    pieces = []
    for t in p.findall(".//w:t", NS):
        pieces.append(t.text or "")
    return "".join(pieces)
def replaced_text(s: str) -> str:
    """Apply every REPLACEMENTS rule, in order, to the NFC-normalized input."""
    result = norm(s)
    for needle, substitute in REPLACEMENTS.items():
        result = result.replace(needle, substitute)
    return result
def rewrite_paragraph_text(p: ET.Element, new_text: str) -> None:
    """Replace the runs of paragraph p with a single run holding new_text.

    The paragraph-properties element (w:pPr), when present, is preserved;
    every other child (runs, bookmarks, ...) is removed, so per-run
    formatting is intentionally lost.
    """
    ppr = p.find("w:pPr", NS)
    for child in list(p):
        if ppr is not None and child is ppr:
            continue
        p.remove(child)
    run = ET.Element(qn("w:r"))
    text_el = ET.SubElement(run, qn("w:t"))
    # xml:space="preserve" keeps leading/trailing whitespace intact in Word.
    text_el.set(f"{{{XML_NS}}}space", "preserve")
    text_el.text = new_text
    p.append(run)
def process_document_xml(xml_path: Path) -> int:
    """Rewrite the given document XML in place, applying REPLACEMENTS per paragraph.

    Returns the number of paragraphs whose text actually changed.
    """
    tree = ET.parse(xml_path)
    changed = 0
    for p in tree.getroot().findall(".//w:p", NS):
        before = paragraph_text(p)
        after = replaced_text(before)
        if after != before:
            rewrite_paragraph_text(p, after)
            changed += 1
    tree.write(xml_path, encoding="utf-8", xml_declaration=True)
    return changed
def repack_docx(tmpdir: Path, out_docx: Path) -> None:
    """Zip the extracted DOCX tree in tmpdir back into out_docx.

    Writes a ".tmp" sibling first, then moves it over out_docx, so a
    failure mid-write cannot leave a truncated destination file.
    """
    staging = out_docx.with_suffix(out_docx.suffix + ".tmp")
    with ZipFile(staging, "w", ZIP_DEFLATED) as archive:
        for entry in sorted(tmpdir.rglob("*")):
            if not entry.is_file():
                continue
            archive.write(entry, entry.relative_to(tmpdir))
    shutil.move(staging, out_docx)
def main() -> int:
    """CLI entry point: mechanically repair known DOCX extraction artefacts.

    Returns a process exit code: 0 on success, 2 when the input file or its
    word/document.xml part is missing.
    """
    # Imported here (not only in the __main__ guard) so main() also works
    # when this module is imported and invoked programmatically; previously
    # the error paths raised NameError on `sys` in that situation.
    import sys

    parser = argparse.ArgumentParser(description="Répare mécaniquement certaines scories DOCX.")
    parser.add_argument("docx", help="Chemin du DOCX")
    parser.add_argument("--in-place", action="store_true", help="Réécrit le DOCX en place")
    args = parser.parse_args()
    src = Path(args.docx)
    if not src.exists():
        print(f"ECHEC: fichier introuvable: {src}", file=sys.stderr)
        return 2
    # Default output is "<name>.fixed.docx" next to the source unless --in-place.
    out = src if args.in_place else src.with_name(src.stem + ".fixed.docx")
    with tempfile.TemporaryDirectory(prefix="docx-fix-") as td:
        td_path = Path(td)
        with ZipFile(src) as zf:
            zf.extractall(td_path)
        document_xml = td_path / "word" / "document.xml"
        if not document_xml.exists():
            print("ECHEC: word/document.xml absent.", file=sys.stderr)
            return 2
        changed = process_document_xml(document_xml)
        # Repack while the extracted tree still exists.
        repack_docx(td_path, out)
    print(f"OK: DOCX réparé par réécriture paragraphe/XML. Paragraphes modifiés: {changed}")
    return 0
if __name__ == "__main__":
    # sys is used by main() for stderr output; imported before the call.
    import sys
    # SystemExit propagates main()'s integer return as the process exit code.
    raise SystemExit(main())

View File

@@ -114,7 +114,6 @@ async function runMammoth(docxPath, assetsOutDirWebRoot) {
); );
let html = result.value || ""; let html = result.value || "";
// Mammoth gives relative src="image-xx.png" ; we will prefix later // Mammoth gives relative src="image-xx.png" ; we will prefix later
return html; return html;
} }
@@ -182,17 +181,52 @@ async function exists(p) {
try { await fs.access(p); return true; } catch { return false; } try { await fs.access(p); return true; } catch { return false; }
} }
/**
* ✅ compat:
* - ancien : collection="archicratie" + slug="archicrat-ia/chapitre-3"
* - nouveau : collection="archicrat-ia" + slug="chapitre-3"
*
* But : toujours écrire dans src/content/archicrat-ia/<slugSansPrefix>.mdx
*/
function normalizeDest(collection, slug) {
let outCollection = String(collection || "").trim();
let outSlug = String(slug || "").trim().replace(/^\/+|\/+$/g, "");
if (outCollection === "archicratie" && outSlug.startsWith("archicrat-ia/")) {
outCollection = "archicrat-ia";
outSlug = outSlug.replace(/^archicrat-ia\//, "");
}
return { outCollection, outSlug };
}
async function main() { async function main() {
const args = parseArgs(process.argv); const args = parseArgs(process.argv);
const manifestPath = path.resolve(args.manifest); const manifestPath = path.resolve(args.manifest);
const items = await readManifest(manifestPath); const items = await readManifest(manifestPath);
const selected = args.all ? items : items.filter(it => args.only.includes(it.slug)); const selected = args.all
? items
: items.filter((it) => {
const rawSlug = String(it.slug || "").trim();
const rawCollection = String(it.collection || "").trim();
const qualified = `${rawCollection}/${rawSlug}`;
return args.only.includes(rawSlug) || args.only.includes(qualified);
});
if (!args.all && selected.length !== args.only.length) { if (!args.all) {
const found = new Set(selected.map(s => s.slug)); const found = new Set(
const missing = args.only.filter(s => !found.has(s)); selected.flatMap((s) => {
throw new Error(`Some --only slugs not found in manifest: ${missing.join(", ")}`); const rawSlug = String(s.slug || "").trim();
const rawCollection = String(s.collection || "").trim();
return [rawSlug, `${rawCollection}/${rawSlug}`];
})
);
const missing = args.only.filter((s) => !found.has(s));
if (missing.length > 0) {
throw new Error(`Some --only slugs not found in manifest: ${missing.join(", ")}`);
}
} }
const pandocOk = havePandoc(); const pandocOk = havePandoc();
@@ -203,11 +237,14 @@ async function main() {
for (const it of selected) { for (const it of selected) {
const docxPath = path.resolve(it.source); const docxPath = path.resolve(it.source);
const outFile = path.resolve("src/content", it.collection, `${it.slug}.mdx`);
const { outCollection, outSlug } = normalizeDest(it.collection, it.slug);
const outFile = path.resolve("src/content", outCollection, `${outSlug}.mdx`);
const outDir = path.dirname(outFile); const outDir = path.dirname(outFile);
const assetsPublicDir = path.posix.join("/imported", it.collection, it.slug); const assetsPublicDir = path.posix.join("/imported", outCollection, outSlug);
const assetsDiskDir = path.resolve("public", "imported", it.collection, it.slug); const assetsDiskDir = path.resolve("public", "imported", outCollection, outSlug);
if (!(await exists(docxPath))) { if (!(await exists(docxPath))) {
throw new Error(`Missing source docx: ${docxPath}`); throw new Error(`Missing source docx: ${docxPath}`);
@@ -241,18 +278,35 @@ async function main() {
html = rewriteLocalImageLinks(html, assetsPublicDir); html = rewriteLocalImageLinks(html, assetsPublicDir);
body = html.trim() ? html : "<p>(Import vide)</p>"; body = html.trim() ? html : "<p>(Import vide)</p>";
} }
const defaultVersion = process.env.PUBLIC_RELEASE || "0.1.0"; const defaultVersion = process.env.PUBLIC_RELEASE || "0.1.0";
// ✅ IMPORTANT: archicrat-ia partage edition/status avec archicratie (pas de migration frontmatter)
const schemaDefaultsByCollection = { const schemaDefaultsByCollection = {
archicratie: { edition: "archicratie", status: "modele_sociopolitique", level: 1 }, archicratie: { edition: "archicratie", status: "modele_sociopolitique", level: 1 },
ia: { edition: "ia", status: "cas_pratique", level: 1 }, "archicrat-ia": { edition: "archicrat-ia", status: "essai_these", level: 1 },
traite: { edition: "traite", status: "ontodynamique", level: 1 }, "cas-ia": { edition: "cas-ia", status: "application", level: 1 },
glossaire: { edition: "glossaire", status: "lexique", level: 1 }, traite: { edition: "traite", status: "ontodynamique", level: 1 },
atlas: { edition: "atlas", status: "atlas", level: 1 }, glossaire: { edition: "glossaire", status: "lexique", level: 1 },
atlas: { edition: "atlas", status: "atlas", level: 1 },
}; };
const defaults = schemaDefaultsByCollection[it.collection] || { edition: it.collection, status: "draft", level: 1 }; // Compat legacy :
// manifest collection="archicratie" + slug="archicrat-ia/..."
// => on écrit bien dans src/content/archicrat-ia/...
// => mais on conserve edition/status historiques de type archicratie/modele_sociopolitique
const defaultsKey =
String(it.collection || "").trim() === "archicratie" &&
String(it.slug || "").trim().startsWith("archicrat-ia/")
? "archicratie"
: outCollection;
const defaults =
schemaDefaultsByCollection[defaultsKey] || {
edition: defaultsKey,
status: "draft",
level: 1,
};
const fm = [ const fm = [
"---", "---",
@@ -282,4 +336,4 @@ async function main() {
main().catch((e) => { main().catch((e) => {
console.error("\nERROR:", e?.message || e); console.error("\nERROR:", e?.message || e);
process.exit(1); process.exit(1);
}); });

View File

@@ -14,6 +14,24 @@ const STRICT = argv.includes("--strict") || process.env.CI === "1" || process.en
function escRe(s) { function escRe(s) {
return String(s).replace(/[.*+?^${}()|[\]\\]/g, "\\$&"); return String(s).replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
} }
async function exists(p) {
try {
await fs.access(p);
return true;
} catch {
return false;
}
}
function normalizeRoute(route) {
let r = String(route || "").trim();
if (!r.startsWith("/")) r = "/" + r;
if (!r.endsWith("/")) r = r + "/";
r = r.replace(/\/{2,}/g, "/");
return r;
}
function countIdAttr(html, id) { function countIdAttr(html, id) {
const re = new RegExp(`\\bid=(["'])${escRe(id)}\\1`, "gi"); const re = new RegExp(`\\bid=(["'])${escRe(id)}\\1`, "gi");
let c = 0; let c = 0;
@@ -22,7 +40,6 @@ function countIdAttr(html, id) {
} }
function findStartTagWithId(html, id) { function findStartTagWithId(html, id) {
// 1er élément qui porte id="..."
const re = new RegExp( const re = new RegExp(
`<([a-zA-Z0-9:-]+)\\b[^>]*\\bid=(["'])${escRe(id)}\\2[^>]*>`, `<([a-zA-Z0-9:-]+)\\b[^>]*\\bid=(["'])${escRe(id)}\\2[^>]*>`,
"i" "i"
@@ -36,34 +53,10 @@ function isInjectedAliasSpan(html, id) {
const found = findStartTagWithId(html, id); const found = findStartTagWithId(html, id);
if (!found) return false; if (!found) return false;
if (found.tagName !== "span") return false; if (found.tagName !== "span") return false;
// class="... para-alias ..."
return /\bclass=(["'])(?:(?!\1).)*\bpara-alias\b(?:(?!\1).)*\1/i.test(found.tag); return /\bclass=(["'])(?:(?!\1).)*\bpara-alias\b(?:(?!\1).)*\1/i.test(found.tag);
} }
function normalizeRoute(route) {
let r = String(route || "").trim();
if (!r.startsWith("/")) r = "/" + r;
if (!r.endsWith("/")) r = r + "/";
r = r.replace(/\/{2,}/g, "/");
return r;
}
async function exists(p) {
try {
await fs.access(p);
return true;
} catch {
return false;
}
}
function hasId(html, id) {
const re = new RegExp(`\\bid=(["'])${escRe(id)}\\1`, "i");
return re.test(html);
}
function injectBeforeId(html, newId, injectHtml) { function injectBeforeId(html, newId, injectHtml) {
// insère juste avant la balise qui porte id="newId"
const re = new RegExp( const re = new RegExp(
`(<[^>]+\\bid=(["'])${escRe(newId)}\\2[^>]*>)`, `(<[^>]+\\bid=(["'])${escRe(newId)}\\2[^>]*>)`,
"i" "i"
@@ -82,6 +75,7 @@ async function main() {
} }
const raw = await fs.readFile(ALIASES_PATH, "utf-8"); const raw = await fs.readFile(ALIASES_PATH, "utf-8");
/** @type {Record<string, Record<string,string>>} */ /** @type {Record<string, Record<string,string>>} */
let aliases; let aliases;
try { try {
@@ -89,6 +83,7 @@ async function main() {
} catch (e) { } catch (e) {
throw new Error(`JSON invalide: ${ALIASES_PATH} (${e?.message || e})`); throw new Error(`JSON invalide: ${ALIASES_PATH} (${e?.message || e})`);
} }
if (!aliases || typeof aliases !== "object" || Array.isArray(aliases)) { if (!aliases || typeof aliases !== "object" || Array.isArray(aliases)) {
throw new Error(`Format invalide: attendu { route: { oldId: newId } } dans ${ALIASES_PATH}`); throw new Error(`Format invalide: attendu { route: { oldId: newId } } dans ${ALIASES_PATH}`);
} }
@@ -114,10 +109,10 @@ async function main() {
console.log(msg); console.log(msg);
warnCount++; warnCount++;
} }
if (entries.length === 0) continue; if (entries.length === 0) continue;
const rel = route.replace(/^\/+|\/+$/g, ""); // sans slash const rel = route.replace(/^\/+|\/+$/g, "");
const htmlPath = path.join(DIST_ROOT, rel, "index.html"); const htmlPath = path.join(DIST_ROOT, rel, "index.html");
if (!(await exists(htmlPath))) { if (!(await exists(htmlPath))) {
@@ -135,24 +130,8 @@ async function main() {
if (!oldId || !newId) continue; if (!oldId || !newId) continue;
const oldCount = countIdAttr(html, oldId); const oldCount = countIdAttr(html, oldId);
if (oldCount > 0) {
// ✅ déjà injecté (idempotent)
if (isInjectedAliasSpan(html, oldId)) continue;
// ⛔️ oldId existe déjà "en vrai" (ex: <p id="oldId">) // ✅ déjà injecté => idempotent
// => alias inutile / inversé / obsolète
const found = findStartTagWithId(html, oldId);
const where = found ? `<${found.tagName} … id="${oldId}" …>` : `id="${oldId}"`;
const msg =
`⚠️ alias inutile/inversé: oldId déjà présent dans la page (${where}). ` +
`Supprime l'alias ${oldId} -> ${newId} (ou corrige le sens) pour route=${route}`;
if (STRICT) throw new Error(msg);
console.log(msg);
warnCount++;
continue;
}
// juste après avoir calculé oldCount
if (oldCount > 0 && isInjectedAliasSpan(html, oldId)) { if (oldCount > 0 && isInjectedAliasSpan(html, oldId)) {
if (STRICT && oldCount !== 1) { if (STRICT && oldCount !== 1) {
throw new Error(`oldId dupliqué (${oldCount}) alors qu'il est censé être unique: ${route} id=${oldId}`); throw new Error(`oldId dupliqué (${oldCount}) alors qu'il est censé être unique: ${route} id=${oldId}`);
@@ -160,18 +139,23 @@ async function main() {
continue; continue;
} }
// avant l'injection, après hasId(newId) // ⛔️ oldId existe déjà "en vrai" => alias inutile/inversé
const newCount = countIdAttr(html, newId); if (oldCount > 0) {
if (newCount !== 1) { const found = findStartTagWithId(html, oldId);
const msg = `⚠️ newId non-unique (${newCount}) : ${route} new=${newId} (injection ambiguë)`; const where = found ? `<${found.tagName} … id="${oldId}" …>` : `id="${oldId}"`;
const msg =
`⚠️ alias inutile/inversé: oldId déjà présent (${where}). ` +
`Supprime ${oldId} -> ${newId} (ou corrige le sens) pour route=${route}`;
if (STRICT) throw new Error(msg); if (STRICT) throw new Error(msg);
console.log(msg); console.log(msg);
warnCount++; warnCount++;
continue; continue;
} }
if (!hasId(html, newId)) { // newId doit exister UNE fois (sinon injection ambiguë)
const msg = `⚠️ newId introuvable: ${route} old=${oldId} -> new=${newId}`; const newCount = countIdAttr(html, newId);
if (newCount !== 1) {
const msg = `⚠️ newId non-unique (${newCount}) : ${route} new=${newId} (injection ambiguë)`;
if (STRICT) throw new Error(msg); if (STRICT) throw new Error(msg);
console.log(msg); console.log(msg);
warnCount++; warnCount++;

View File

@@ -0,0 +1,241 @@
#!/usr/bin/env node
import process from "node:process";
/**
 * Read an environment variable and trim it.
 * @param {string} name - Variable name.
 * @param {string} [fallback=""] - Value used when the variable is unset.
 * @returns {string} Trimmed string value.
 */
function getEnv(name, fallback = "") {
  const raw = process.env[name] ?? fallback;
  return String(raw).trim();
}
/**
 * Quote a value as a JSON string literal (safe for KEY=value output lines).
 * @param {*} value - Any value; null/undefined become "".
 * @returns {string} JSON-quoted string.
 */
function sh(value) {
  const text = String(value ?? "");
  return JSON.stringify(text);
}
/**
 * Escape regex metacharacters so `s` can be embedded literally in a RegExp.
 * @param {*} s - Value to escape (coerced to string).
 * @returns {string} Escaped pattern fragment.
 */
function escapeRegExp(s) {
  const text = String(s);
  return text.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
}
/**
 * Extract the value of a "Key: value" line from an issue body.
 * Matching is case-insensitive and tolerates leading whitespace.
 * @param {string} body - Full issue body text.
 * @param {string} key - Line key to look for (e.g. "Type", "Chemin").
 * @returns {string} Trimmed value, or "" when the key is absent.
 */
function pickLine(body, key) {
  const pattern = new RegExp(`^\\s*${escapeRegExp(key)}\\s*:\\s*([^\\n\\r]+)`, "mi");
  const match = String(body || "").match(pattern);
  if (!match) return "";
  return match[1].trim();
}
/**
 * Extract the first meaningful line under a markdown "## <headingKey>" section.
 * Blank lines and lines starting with an HTML comment are skipped; the
 * returned value is normalised to start with a single "/".
 * @param {string} body - Full issue body (markdown).
 * @param {string} headingKey - Heading text to search for.
 * @returns {string} First useful line of the section, or "".
 */
function pickHeadingValue(body, headingKey) {
  const sectionRe = new RegExp(
    `^##\\s*${escapeRegExp(headingKey)}[^\\n]*\\n([\\s\\S]*?)(?=\\n##\\s|\\n\\s*$)`,
    "mi"
  );
  const match = String(body || "").match(sectionRe);
  if (!match) return "";
  for (const rawLine of match[1].split(/\r?\n/)) {
    const line = rawLine.trim();
    if (!line) continue;
    if (line.startsWith("<!--")) continue;
    return line.replace(/^\/?/, "/").trim();
  }
  return "";
}
/**
 * Normalise a content path to the canonical "/segment/segment/" shape.
 * @param {string} chemin - Raw path value.
 * @returns {string} Path with leading and trailing slash, or "" when empty.
 */
function normalizeChemin(chemin) {
  let value = String(chemin || "").trim();
  if (!value) return "";
  if (!value.startsWith("/")) value = `/${value}`;
  return value.endsWith("/") ? value : `${value}/`;
}
/**
 * Pull the page path out of any text containing an anchored paragraph URL
 * of the form ".../#p-<n>-<8 hex chars>".
 * @param {string} text - Text that may contain such a URL.
 * @returns {string} The "/…/" path portion, or "" when none is found.
 */
function extractCheminFromAnyUrl(text) {
  const anchored = String(text || "").match(
    /(\/[a-z0-9\-]+\/[a-z0-9\-\/]+\/)#p-\d+-[0-9a-f]{8}/i
  );
  return anchored ? anchored[1] : "";
}
/**
 * Determine the proposer type of an issue.
 * Priority: an explicit "Type:" line in the body, then the title prefix.
 * @param {object} issue - Gitea issue payload.
 * @returns {string} Lower-cased body-declared type, "type/correction",
 *   "type/fact-check", or "" when undetermined.
 */
function inferType(issue) {
  const title = String(issue?.title || "");
  const body = String(issue?.body || "").replace(/\r\n/g, "\n");
  const declared = String(pickLine(body, "Type") || "").trim().toLowerCase();
  if (declared) return declared;
  if (title.startsWith("[Correction]")) return "type/correction";
  const isFactCheck =
    title.startsWith("[Fact-check]") || title.startsWith("[Vérification]");
  return isFactCheck ? "type/fact-check" : "";
}
/**
 * Resolve the content path ("chemin") an issue targets.
 * Tries, in order: a "Chemin:" body line, a "## Chemin" section, then any
 * anchored paragraph URL found in the body or in the title.
 * @param {object} issue - Gitea issue payload.
 * @returns {string} Normalised "/…/" path, or "".
 */
function inferChemin(issue) {
  const title = String(issue?.title || "");
  const body = String(issue?.body || "").replace(/\r\n/g, "\n");
  const raw =
    pickLine(body, "Chemin") ||
    pickHeadingValue(body, "Chemin") ||
    extractCheminFromAnyUrl(body) ||
    extractCheminFromAnyUrl(title);
  return normalizeChemin(raw);
}
/**
 * Collect the label names attached to an issue.
 * @param {object} issue - Gitea issue payload.
 * @returns {string[]} Non-empty label names (empty array when none).
 */
function labelsOf(issue) {
  if (!Array.isArray(issue?.labels)) return [];
  const names = [];
  for (const label of issue.labels) {
    const name = String(label?.name || "");
    if (name) names.push(name);
  }
  return names;
}
/**
 * Numeric identifier of an issue (Gitea payloads expose `number`, some
 * endpoints use `index`).
 * @param {object} issue - Gitea issue payload.
 * @returns {number} Issue number, 0 when unavailable.
 */
function issueNumber(issue) {
  const n = issue?.number || issue?.index || 0;
  return Number(n);
}
/**
 * Build the eligibility record for an issue.
 * An issue is eligible for the apply pipeline when it is an open, non-PR
 * proposer issue (type/correction or type/fact-check) labelled
 * "state/approved", not "state/rejected", and resolving to a content path.
 * @param {object} issue - Gitea issue payload.
 * @returns {{issue: object, number: number, type: string, chemin: string,
 *   labels: string[], hasApproved: boolean, hasRejected: boolean,
 *   eligible: boolean}}
 */
function parseMeta(issue) {
  const labels = labelsOf(issue);
  const type = inferType(issue);
  const chemin = inferChemin(issue);
  const number = issueNumber(issue);
  const hasApproved = labels.includes("state/approved");
  const hasRejected = labels.includes("state/rejected");
  const isProposerType = type === "type/correction" || type === "type/fact-check";
  // Pull requests share the issue API shape; exclude them explicitly.
  const isOpenIssue =
    String(issue?.state || "open") === "open" && !issue?.pull_request;
  const eligible =
    number > 0 &&
    isOpenIssue &&
    hasApproved &&
    !hasRejected &&
    isProposerType &&
    Boolean(chemin);
  return { issue, number, type, chemin, labels, hasApproved, hasRejected, eligible };
}
/**
 * GET a Gitea API URL and parse the JSON response.
 * @param {string} url - Absolute API URL.
 * @param {string} token - Gitea access token.
 * @returns {Promise<*>} Parsed JSON body.
 * @throws {Error} With status, URL and response text on any non-2xx reply.
 */
async function fetchJson(url, token) {
  const headers = {
    Authorization: `token ${token}`,
    Accept: "application/json",
    "User-Agent": "archicratie-pick-proposer-issue/1.0",
  };
  const res = await fetch(url, { headers });
  if (res.ok) return await res.json();
  // Best-effort body capture for the error message.
  const detail = await res.text().catch(() => "");
  throw new Error(`HTTP ${res.status} ${url}\n${detail}`);
}
/**
 * Fetch a single issue by number.
 * @returns {Promise<object>} Issue payload.
 */
async function fetchIssue(apiBase, owner, repo, token, n) {
  return await fetchJson(
    `${apiBase}/api/v1/repos/${owner}/${repo}/issues/${n}`,
    token
  );
}
/**
 * List every open issue of the repository, following pagination.
 * @returns {Promise<object[]>} All open issues (PRs included; callers filter).
 */
async function listOpenIssues(apiBase, owner, repo, token) {
  const issues = [];
  const limit = 100;
  for (let page = 1; ; page += 1) {
    const url = `${apiBase}/api/v1/repos/${owner}/${repo}/issues?state=open&page=${page}&limit=${limit}`;
    const batch = await fetchJson(url, token);
    if (!Array.isArray(batch) || batch.length === 0) break;
    issues.push(...batch);
    // A short page means we have reached the last one.
    if (batch.length < limit) break;
  }
  return issues;
}
/**
 * Print the "no target selected" result block; stdout is consumed as
 * KEY="value" lines by the CI workflow.
 * @param {string} reason - Machine-readable reason code.
 */
function emitNone(reason) {
  const lines = [
    `TARGET_FOUND="0"`,
    `TARGET_REASON=${sh(reason)}`,
    `TARGET_PRIMARY_ISSUE=""`,
    `TARGET_ISSUES=""`,
    `TARGET_COUNT="0"`,
    `TARGET_CHEMIN=""`,
  ];
  process.stdout.write(`${lines.join("\n")}\n`);
}
/**
 * Entry point: select the batch of approved proposer issues to apply.
 *
 * Selection modes:
 * - explicit (argv[2] = issue number): validate that one issue, then batch
 *   it with every other eligible open issue targeting the same chemin;
 * - automatic: take the lowest-numbered eligible open issue and batch it
 *   with all eligible issues sharing its chemin.
 *
 * Output: KEY="value" lines on stdout (TARGET_FOUND, TARGET_REASON,
 * TARGET_PRIMARY_ISSUE, TARGET_ISSUES, TARGET_COUNT, TARGET_CHEMIN),
 * parsed by the CI workflow.
 * @throws {Error} When required env configuration is missing.
 */
async function main() {
  // Connection parameters come from the workflow environment.
  const token = getEnv("FORGE_TOKEN");
  const owner = getEnv("GITEA_OWNER");
  const repo = getEnv("GITEA_REPO");
  const apiBase = (getEnv("FORGE_API") || getEnv("FORGE_BASE")).replace(/\/+$/, "");
  // Optional explicit issue number; 0 or NaN falls through to automatic mode.
  const explicit = Number(process.argv[2] || 0);
  if (!token) throw new Error("Missing FORGE_TOKEN");
  if (!owner || !repo) throw new Error("Missing GITEA_OWNER / GITEA_REPO");
  if (!apiBase) throw new Error("Missing FORGE_API / FORGE_BASE");
  let metas = [];
  if (explicit > 0) {
    const issue = await fetchIssue(apiBase, owner, repo, token, explicit);
    const meta = parseMeta(issue);
    if (!meta.eligible) {
      // Report the most specific rejection reason we can determine.
      emitNone(
        !meta.hasApproved
          ? "explicit_issue_not_approved"
          : meta.hasRejected
          ? "explicit_issue_rejected"
          : !meta.type
          ? "explicit_issue_missing_type"
          : !meta.chemin
          ? "explicit_issue_missing_chemin"
          : "explicit_issue_not_eligible"
      );
      return;
    }
    // Batch every eligible open issue targeting the same chemin.
    const openIssues = await listOpenIssues(apiBase, owner, repo, token);
    metas = openIssues.map(parseMeta).filter((m) => m.eligible && m.chemin === meta.chemin);
  } else {
    const openIssues = await listOpenIssues(apiBase, owner, repo, token);
    metas = openIssues.map(parseMeta).filter((m) => m.eligible);
    if (metas.length === 0) {
      emitNone("no_open_approved_proposer_issue");
      return;
    }
    // The oldest eligible issue decides which chemin the batch targets.
    metas.sort((a, b) => a.number - b.number);
    const first = metas[0];
    metas = metas.filter((m) => m.chemin === first.chemin);
  }
  metas.sort((a, b) => a.number - b.number);
  if (metas.length === 0) {
    // NOTE(review): in explicit mode this branch can trigger if the explicit
    // issue is not returned by the open-issues listing — confirm intended.
    emitNone("no_batch_for_path");
    return;
  }
  const primary = metas[0];
  const issues = metas.map((m) => String(m.number));
  process.stdout.write(
    [
      `TARGET_FOUND="1"`,
      `TARGET_REASON="ok"`,
      `TARGET_PRIMARY_ISSUE=${sh(primary.number)}`,
      `TARGET_ISSUES=${sh(issues.join(" "))}`,
      `TARGET_COUNT=${sh(issues.length)}`,
      `TARGET_CHEMIN=${sh(primary.chemin)}`,
    ].join("\n") + "\n"
  );
}
// Top-level runner: any failure is logged and mapped to exit code 1 so the
// CI step fails visibly.
main().catch((e) => {
  console.error("💥 pick-proposer-issue:", e?.message || e);
  process.exit(1);
});

View File

@@ -0,0 +1,31 @@
// scripts/purge-dist-dev-whoami.mjs
import fs from "node:fs/promises";
import path from "node:path";
// Working directory of the build; dist/ paths are resolved against it.
const CWD = process.cwd();
// Dev-only auth stub directory (and its index page) that must never ship
// in a production dist/. The index path is derived from the directory to
// keep the two in sync.
const targetDir = path.join(CWD, "dist", "_auth", "whoami");
const targetIndex = path.join(targetDir, "index.html");
/**
 * Remove a file or directory without ever throwing.
 * `force: true` makes a missing path a no-op and `recursive: true` handles
 * directories; any other failure (e.g. permissions) resolves to `false`.
 * @param {string} p - Path to remove.
 * @returns {Promise<boolean>} true when the rm call completed.
 */
function rmSafe(p) {
  return fs
    .rm(p, { recursive: true, force: true })
    .then(() => true)
    .catch(() => false);
}
/**
 * Remove the dev-only whoami stub from dist/ (idempotent).
 * The directory removal also covers index.html, but both are targeted
 * explicitly so either leftover shape gets cleaned up.
 */
async function main() {
  const removedIndex = await rmSafe(targetIndex);
  const removedDir = await rmSafe(targetDir);
  const didAnything = removedIndex || removedDir;
  // If dist/_auth ends up empty we deliberately leave it in place.
  console.log(`✅ purge-dist-dev-whoami: ${didAnything ? "purged" : "nothing to purge"}`);
}
// Fail the build step loudly if the purge itself errors out.
main().catch((e) => {
  console.error("❌ purge-dist-dev-whoami failed:", e);
  process.exit(1);
});

29
scripts/refresh-chapter2.sh Executable file
View File

@@ -0,0 +1,29 @@
#!/usr/bin/env bash
# Re-import chapter 2 of archicrat-ia from its canonical DOCX source:
# audit the DOCX, apply mechanical fixes only when the audit fails, then
# re-import it, rebuild the site, and run the test suite.
set -euo pipefail

# Canonical inputs for this chapter.
DOCX="sources/docx/archicrat-ia/Chapitre_2Archeogenese_des_regimes_de_co-viabilite-version_officielle.docx"
MANIFEST="sources/manifest.yml"
ONLY="archicrat-ia/chapitre-2"

echo "== Audit source avant fix =="
# Fix the source only when the audit exits non-zero.
if ! python3 scripts/audit-docx-source.py "$DOCX"; then
  echo
  echo "== Fix source =="
  python3 scripts/fix-docx-source.py --in-place "$DOCX"
  echo
  echo "== Audit source après fix =="
  # The second audit must now pass; set -e aborts the script otherwise.
  python3 scripts/audit-docx-source.py "$DOCX"
fi

echo
echo "== Réimport =="
node scripts/import-docx.mjs --manifest "$MANIFEST" --only "$ONLY" --force

echo
echo "== Build =="
npm run build

echo
echo "== Tests =="
npm test

View File

@@ -205,7 +205,7 @@ for (const [route, mapping] of Object.entries(data)) {
newId, newId,
htmlPath, htmlPath,
msg: msg:
`oldId present but is NOT an injected alias span (<span class="para-alias">).</n` + `oldId present but is NOT an injected alias span (<span class="para-alias">).\n` +
`Saw: ${seen}`, `Saw: ${seen}`,
}); });
continue; continue;

View File

@@ -0,0 +1,26 @@
// Dev-only helper: writes public/_auth/whoami so local development can
// emulate the identity headers normally injected by the auth layer.
import fs from "node:fs/promises";
import path from "node:path";

// Output file served verbatim at /_auth/whoami during local dev.
const OUT = path.join(process.cwd(), "public", "_auth", "whoami");

// Identity is configurable via PUBLIC_WHOAMI_* env vars, with local defaults.
const groupsRaw = process.env.PUBLIC_WHOAMI_GROUPS ?? "editors";
const user = process.env.PUBLIC_WHOAMI_USER ?? "dev";
const name = process.env.PUBLIC_WHOAMI_NAME ?? "Dev Local";
const email = process.env.PUBLIC_WHOAMI_EMAIL ?? "area.technik@proton.me";

// Groups may be separated by "," or ";"; normalise to a comma-joined list
// with empty entries dropped.
const groups = groupsRaw
  .split(/[;,]/)
  .map((s) => s.trim())
  .filter(Boolean)
  .join(",");

// "Remote-*" header-style lines — presumably mirroring what the reverse
// proxy emits in production; confirm against the proxy configuration.
const body =
  `Remote-User: ${user}\n` +
  `Remote-Name: ${name}\n` +
  `Remote-Email: ${email}\n` +
  `Remote-Groups: ${groups}\n`;

await fs.mkdir(path.dirname(OUT), { recursive: true });
await fs.writeFile(OUT, body, "utf8");
console.log(`✅ dev whoami written: ${path.relative(process.cwd(), OUT)} (${groups})`);

View File

@@ -0,0 +1,20 @@
// Build-time helper: writes public/__ops/health.json, a static health
// descriptor exposed by the deployed site.
import fs from "node:fs";
import path from "node:path";

const root = process.cwd();
const outDir = path.join(root, "public", "__ops");
const outFile = path.join(outDir, "health.json");

// All fields come from PUBLIC_* build env vars; "unknown" marks a build
// where CI did not inject them. builtAt falls back to the build-time clock.
const payload = {
  service: "archicratie-site",
  env: process.env.PUBLIC_OPS_ENV || "unknown",
  upstream: process.env.PUBLIC_OPS_UPSTREAM || "unknown",
  buildSha: process.env.PUBLIC_BUILD_SHA || "unknown",
  builtAt: process.env.PUBLIC_BUILD_TIME || new Date().toISOString(),
};

fs.mkdirSync(outDir, { recursive: true });
// Pretty-printed with a trailing newline so committed diffs stay clean.
fs.writeFileSync(outFile, `${JSON.stringify(payload, null, 2)}\n`, "utf8");
console.log(`✅ ops health written: ${outFile}`);
console.log(payload);

Binary file not shown.

View File

@@ -1,161 +1,123 @@
version: 1 version: 1
docs: docs:
# =========================
# Document dentrée
# =========================
- source: sources/docx/commencer/document-de-presentation.docx
collection: commencer
slug: document-de-presentation
title: "Document de présentation"
order: 0
# ========================= # =========================
# Archicratie — Essai-thèse "ArchiCraT-IA" # Archicratie — Essai-thèse "ArchiCraT-IA"
# ========================= # =========================
- source: sources/docx/archicrat-ia/Prologue—Archicratie-fondation_et_finalite_sociopolitique_et_historique-version_officielle.docx - source: sources/docx/archicrat-ia/Prologue—Archicratie-fondation_et_finalite_sociopolitique_et_historique-version_officielle.docx
collection: archicratie collection: archicrat-ia
slug: archicrat-ia/prologue slug: prologue
title: "Prologue — Fondation et finalité sociopolitique et historique" title: "Prologue — Fondation, finalité sociopolitique et historique"
order: 10 order: 10
- source: sources/docx/archicrat-ia/Chapitre_1—Fondements_epistemologiques_et_modelisation_Archicratie-version_officielle.docx - source: sources/docx/archicrat-ia/Chapitre_1—Fondements_epistemologiques_et_modelisation_Archicratie-version_officielle.docx
collection: archicratie collection: archicrat-ia
slug: archicrat-ia/chapitre-1 slug: chapitre-1
title: "Chapitre 1 — Fondements épistémologiques et modélisation" title: "Chapitre 1 — Fondements épistémologiques et modélisation"
order: 20 order: 20
- source: sources/docx/archicrat-ia/Chapitre_2Archeogenese_des_regimes_de_co-viabilite-version_officielle.docx - source: sources/docx/archicrat-ia/Chapitre_2Archeogenese_des_regimes_de_co-viabilite-version_officielle.docx
collection: archicratie collection: archicrat-ia
slug: archicrat-ia/chapitre-2 slug: chapitre-2
title: "Chapitre 2 — Archéogenèse des régimes de co-viabilité" title: "Chapitre 2 — Archéogenèse des régimes de co-viabilité"
order: 30 order: 30
- source: sources/docx/archicrat-ia/Chapitre_3—Philosophies_du_pouvoir_et_Archicration-pour_une_topologie_differenciee_des_regimes_regulateurs-version_officielle.docx - source: sources/docx/archicrat-ia/Chapitre_3—Philosophies_du_pouvoir_et_Archicration-pour_une_topologie_differenciee_des_regimes_regulateurs-version_officielle.docx
collection: archicratie collection: archicrat-ia
slug: archicrat-ia/chapitre-3 slug: chapitre-3
title: "Chapitre 3 — Philosophies du pouvoir et archicration" title: "Chapitre 3 — Philosophies du pouvoir et archicration"
order: 40 order: 40
- source: sources/docx/archicrat-ia/Chapitre_4—Vers_une_histoire_archicratique_des_revolutions_industrielles-version_officielle.docx - source: sources/docx/archicrat-ia/Chapitre_4—Vers_une_histoire_archicratique_des_revolutions_industrielles-version_officielle.docx
collection: archicratie collection: archicrat-ia
slug: archicrat-ia/chapitre-4 slug: chapitre-4
title: "Chapitre 4 — Histoire archicratique des révolutions industrielles" title: "Chapitre 4 — Histoire archicratique des révolutions industrielles"
order: 50 order: 50
- source: sources/docx/archicrat-ia/Chapitre_5—Problematiques_des_tensions_des_co-viabilites_et_des_regulations_archicratiques-version_officielle.docx - source: sources/docx/archicrat-ia/Chapitre_5—Problematiques_des_tensions_des_co-viabilites_et_des_regulations_archicratiques-version_officielle.docx
collection: archicratie collection: archicrat-ia
slug: archicrat-ia/chapitre-5 slug: chapitre-5
title: "Chapitre 5 — Tensions, co-viabilités et régulations" title: "Chapitre 5 — Tensions, co-viabilités et régulations"
order: 60 order: 60
- source: sources/docx/archicrat-ia/Conclusion-Archicrat-IA-version_officielle.docx - source: sources/docx/archicrat-ia/Conclusion-Archicrat-IA-version_officielle.docx
collection: archicratie collection: archicrat-ia
slug: archicrat-ia/conclusion slug: conclusion
title: "Conclusion — ArchiCraT-IA" title: "Conclusion — ArchiCraT-IA"
order: 70 order: 70
# ========================= # =========================
# IA — Cas pratique (1 page = 1 chapitre) # Cas pratique — Gouvernance des systèmes IA
# NOTE: on n'inclut PAS le monolithe "Cas_IA-... .docx" dans le manifeste.
# ========================= # =========================
- source: sources/docx/cas-ia/Cas_IA-Archicratie_et_gouvernance_des_systemes_IA-Introduction_generale—Mettre_en_scene_un_systeme_IA.docx - source: sources/docx/cas-ia/Cas_Pratique-Archicratie_et_gouvernance_des_systemes_IA-Introduction.docx
collection: ia collection: cas-ia
slug: cas-pratique/introduction slug: introduction
title: "Cas pratique — Introduction générale : Mettre en scène un système IA" title: "Introduction générale Mettre un système dIA en scène"
order: 110 order: 110
- source: sources/docx/cas-ia/Cas_IA-Archicratie_et_gouvernance_des_systemes_IA-Chapitre_I—Epreuve_de_detectabilite.docx - source: sources/docx/cas-ia/Cas_Pratique-Archicratie_et_gouvernance_des_systemes_IA-Chapitre_1_Epreuve_de_detectabilite.docx
collection: ia collection: cas-ia
slug: cas-pratique/chapitre-1 slug: chapitre-1
title: "Cas pratique — Chapitre I : Épreuve de détectabilité" title: "Chapitre I Épreuve de détectabilité"
order: 120 order: 120
- source: sources/docx/cas-ia/Cas_IA-Archicratie_et_gouvernance_des_systemes_IA-Chapitre_II—Epreuve_topologique.docx - source: sources/docx/cas-ia/Cas_Pratique-Archicratie_et_gouvernance_des_systemes_IA-Chapitre_2_Epreuve_Topologique.docx
collection: ia collection: cas-ia
slug: cas-pratique/chapitre-2 slug: chapitre-2
title: "Cas pratique — Chapitre II : Épreuve topologique" title: "Chapitre II Épreuve topologique"
order: 130 order: 130
- source: sources/docx/cas-ia/Cas_IA-Archicratie_et_gouvernance_des_systemes_IA-Chapitre_III—Epreuve_archeogenetique.docx - source: sources/docx/cas-ia/Cas_Pratique-Archicratie_et_gouvernance_des_systemes_IA-Chapitre_3_Epreuve_archeogenetique.docx
collection: ia collection: cas-ia
slug: cas-pratique/chapitre-3 slug: chapitre-3
title: "Cas pratique — Chapitre III : Épreuve archéogénétique" title: "Chapitre III Épreuve archéogénétique"
order: 140 order: 140
- source: sources/docx/cas-ia/Cas_IA-Archicratie_et_gouvernance_des_systemes_IA-Chapitre_IV—Epreuve_morphologique.docx - source: sources/docx/cas-ia/Cas_Pratique-Archicratie_et_gouvernance_des_systemes_IA-Chapitre_4_Epreuve_Morphologique.docx
collection: ia collection: cas-ia
slug: cas-pratique/chapitre-4 slug: chapitre-4
title: "Cas pratique — Chapitre IV : Épreuve morphologique" title: "Chapitre IV Épreuve morphologique"
order: 150 order: 150
- source: sources/docx/cas-ia/Cas_IA-Archicratie_et_gouvernance_des_systemes_IA-Chapitre_V—Epreuve_historique.docx - source: sources/docx/cas-ia/Cas_Pratique-Archicratie_et_gouvernance_des_systemes_IA-Chapitre_5_Epreuve_Historique.docx
collection: ia collection: cas-ia
slug: cas-pratique/chapitre-5 slug: chapitre-5
title: "Cas pratique — Chapitre V : Épreuve historique" title: "Chapitre V Épreuve historique"
order: 160 order: 160
- source: sources/docx/cas-ia/Cas_IA-Archicratie_et_gouvernance_des_systemes_IA-Chapitre_VI—Epreuve_de_co-viabilite.docx - source: sources/docx/cas-ia/Cas_Pratique-Archicratie_et_gouvernance_des_systemes_IA-Chapitre_6_Epreuve_de_Co-viabilite.docx
collection: ia collection: cas-ia
slug: cas-pratique/chapitre-6 slug: chapitre-6
title: "Cas pratique — Chapitre VI : Épreuve de co-viabilité" title: "Chapitre VI Épreuve de co-viabilité"
order: 170 order: 170
- source: sources/docx/cas-ia/Cas_IA-Archicratie_et_gouvernance_des_systemes_IA-Chapitre_VII—Gestes_archicratiques_concrets_pour_un_systeme_IA.docx - source: sources/docx/cas-ia/Cas_Pratique-Archicratie_et_gouvernance_des_systemes_IA-Chapitre_7_Gestes_archicratiques_concrets_pour_un_systeme_IA.docx
collection: ia collection: cas-ia
slug: cas-pratique/chapitre-7 slug: chapitre-7
title: "Cas pratique — Chapitre VII : Gestes archicratiques concrets" title: "Chapitre VII Gestes archicratiques concrets pour un système dIA"
order: 180 order: 180
- source: sources/docx/cas-ia/Cas_IA-Archicratie_et_gouvernance_des_systemes_IA-Conclusion.docx - source: sources/docx/cas-ia/Cas_Pratique-Archicratie_et_gouvernance_des_systemes_IA-Conclusion.docx
collection: ia collection: cas-ia
slug: cas-pratique/conclusion slug: conclusion
title: "Cas pratique — Conclusion" title: "Conclusion"
order: 190 order: 190
- source: sources/docx/cas-ia/Cas_IA-Archicratie_et_gouvernance_des_systemes_IA-AnnexeGlossaire_archicratique_pour_audit_des_systemes_IA.docx - source: sources/docx/cas-ia/Cas_Pratique-Archicratie_et_gouvernance_des_systemes_IA-Annexe_Glossaire_Archicratique_Cas_IA.docx
collection: ia collection: cas-ia
slug: cas-pratique/annexe-glossaire-audit slug: annexe-glossaire-audit
title: "Cas pratique — Annexe : Glossaire archicratique pour audit des systèmes IA" title: "Annexe Glossaire archicratique pour laudit des systèmes dIA"
order: 195 order: 195
# =========================
# Traité — Ontodynamique générative (1 page = 1 chapitre)
# NOTE: on n'inclut PAS le monolithe "Traite-...-version_officielle.docx" dans le manifeste.
# =========================
- source: sources/docx/traite/Traite-Ontodynamique_Generative-Fondements_Archicratie-Introduction-version_officielle.docx
collection: traite
slug: ontodynamique/introduction
title: "Traité — Introduction"
order: 210
- source: sources/docx/traite/Traite-Ontodynamique_Generative-Fondements_Archicratie-Chapitre_1—Le_flux_ontogenetique-version_officielle.docx
collection: traite
slug: ontodynamique/chapitre-1
title: "Traité — Chapitre 1 : Le flux ontogénétique"
order: 220
- source: sources/docx/traite/Traite-Ontodynamique_Generative-Fondements_Archicratie-Chapitre_2—economie_du_reel-version_officielle.docx
collection: traite
slug: ontodynamique/chapitre-2
title: "Traité — Chapitre 2 : Économie du réel"
order: 230
- source: sources/docx/traite/Traite-Ontodynamique_Generative-Fondements_Archicratie-Chapitre_3—Le_reel_comme_systeme_regulateur-version_officielle.docx
collection: traite
slug: ontodynamique/chapitre-3
title: "Traité — Chapitre 3 : Le réel comme système régulateur"
order: 240
- source: sources/docx/traite/Traite-Ontodynamique_Generative-Fondements_Archicratie-Chapitre_4—Arcalite-structures_formes_invariants-version_officielle.docx
collection: traite
slug: ontodynamique/chapitre-4
title: "Traité — Chapitre 4 : Arcalité — structures, formes, invariants"
order: 250
- source: sources/docx/traite/Traite-Ontodynamique_Generative-Fondements_Archicratie-Chapitre_5-Cratialite-forces_flux_gradients-version_officielle.docx
collection: traite
slug: ontodynamique/chapitre-5
title: "Traité — Chapitre 5 : Cratialité — forces, flux, gradients"
order: 260
- source: sources/docx/traite/Traite-Ontodynamique_Generative-Fondements_Archicratie-Chapitre_6—Archicration-version_officielle.docx
collection: traite
slug: ontodynamique/chapitre-6
title: "Traité — Chapitre 6 : Archicration"
order: 270
# ========================= # =========================
# Glossaire / Lexique # Glossaire / Lexique
# ========================= # =========================
@@ -169,4 +131,4 @@ docs:
collection: glossaire collection: glossaire
slug: mini-glossaire-verbes slug: mini-glossaire-verbes
title: "Mini-glossaire des verbes de la scène archicratique" title: "Mini-glossaire des verbes de la scène archicratique"
order: 910 order: 910

View File

@@ -1,2 +1 @@
{} {}

0
src/annotations/.gitkeep Normal file
View File

View File

@@ -1,59 +0,0 @@
schema: 1
# optionnel (si présent, doit matcher le chemin du fichier)
page: archicratie/archicrat-ia/prologue
paras:
p-0-d7974f88:
refs:
- label: "Happycratie — (Cabanas & Illouz) via Cairn"
url: "https://shs.cairn.info/revue-ethnologie-francaise-2019-4-page-813?lang=fr"
kind: "article"
- label: "Techno-féodalisme — Variations (OpenEdition)"
url: "https://journals.openedition.org/variations/2290"
kind: "article"
authors:
- "Eva Illouz"
- "Yanis Varoufakis"
quotes:
- text: "Dans Happycratie, Edgar Cabanas et Eva Illouz..."
source: "Happycratie, p.1"
- text: "En eux-mêmes, les actifs ne sont ni féodaux ni capitalistes..."
source: "Entretien Morozov/Varoufakis — techno-féodalisme"
media:
- type: "image"
src: "/public/media/archicratie/archicrat-ia/prologue/p-0-d7974f88/schema-1.svg"
caption: "Tableau explicatif"
credit: "ChatGPT"
- type: "image"
src: "/public/media/archicratie/archicrat-ia/prologue/p-0-d7974f88/schema-2.svg"
caption: "Diagramme d’évolution"
credit: "Yanis Varoufakis"
comments_editorial:
- text: "TODO: nuancer / préciser — commentaire éditorial versionné (pas public)."
status: "draft"
p-1-2ef25f29:
refs:
- label: "Kafka et le pouvoir — Bernard Lahire (Cairn)"
url: "https://shs.cairn.info/franz-kafka--9782707159410-page-475?lang=fr"
kind: "book"
authors:
- "Bernard Lahire"
quotes:
- text: "Si l’on voulait chercher quelque chose comme une vision du monde chez Kafka..."
source: "Bernard Lahire, Franz Kafka, p.475+"
media:
- type: "video"
src: "/media/prologue/p-1-2ef25f29/bien_commun.mp4"
caption: "Entretien avec Bernard Lahire"
credit: "Cairn.info"
comments_editorial: []

View File

@@ -1,51 +1,79 @@
--- ---
import { getCollection } from "astro:content"; import { getCollection } from "astro:content";
const { currentSlug } = Astro.props; const {
currentSlug,
collection = "archicrat-ia",
basePath = "/archicrat-ia",
label = "Table des matières"
} = Astro.props;
const entries = (await getCollection("archicratie")) const slugOf = (entry) => String(entry.id).replace(/\.(md|mdx)$/i, "");
.filter((e) => e.slug.startsWith("archicrat-ia/")) const hrefOf = (entry) => `${basePath}/${slugOf(entry)}/`;
.sort((a, b) => (a.data.order ?? 0) - (b.data.order ?? 0));
// ✅ On route lEssai-thèse sur /archicrat-ia/<slug-sans-prefix>/ const collator = new Intl.Collator("fr", { sensitivity: "base", numeric: true });
// (Astro trailingSlash = always → on garde le "/" final)
const strip = (s) => String(s || "").replace(/^archicrat-ia\//, ""); const entries = [...await getCollection(collection)].sort((a, b) => {
const href = (slug) => `/archicrat-ia/${strip(slug)}/`; const ao = Number(a.data.order ?? 9999);
const bo = Number(b.data.order ?? 9999);
if (ao !== bo) return ao - bo;
const at = String(a.data.title ?? a.data.term ?? slugOf(a));
const bt = String(b.data.title ?? b.data.term ?? slugOf(b));
return collator.compare(at, bt);
});
const tocId = `toc-global-${collection}-${String(basePath).replace(/[^\w-]+/g, "-")}`;
--- ---
<nav class="toc-global" aria-label="Table des matières — ArchiCraT-IA"> <nav
<div class="toc-global__head"> class="toc-global"
<div class="toc-global__title">Table des matières</div> aria-label={label}
</div> data-toc-global
data-toc-key={`global:${collection}:${basePath}`}
>
<button
class="toc-global__head toc-global__toggle"
type="button"
aria-expanded="true"
aria-controls={tocId}
>
<span class="toc-global__title">{label}</span>
<span class="toc-global__chevron" aria-hidden="true">▾</span>
</button>
<ol class="toc-global__list"> <div class="toc-global__body-clip" id={tocId}>
{entries.map((e) => { <div class="toc-global__body">
const active = e.slug === currentSlug; <ol class="toc-global__list">
return ( {entries.map((e) => {
<li class={`toc-item ${active ? "is-active" : ""}`}> const slug = slugOf(e);
<a class="toc-link" href={href(e.slug)} aria-current={active ? "page" : undefined}> const active = slug === currentSlug;
<span class="toc-link__row">
{active ? (
<span class="toc-active-indicator" aria-hidden="true">👉</span>
) : (
<span class="toc-active-spacer" aria-hidden="true"></span>
)}
<span class="toc-link__title">{e.data.title}</span> return (
<li class={`toc-item ${active ? "is-active" : ""}`}>
<a class="toc-link" href={hrefOf(e)} aria-current={active ? "page" : undefined}>
<span class="toc-link__row">
<span class={`toc-active-mark ${active ? "is-on" : ""}`} aria-hidden="true">
<span class="toc-active-mark__dot"></span>
</span>
{active && ( <span class="toc-link__title">{e.data.title}</span>
<span class="toc-badge" aria-label="Chapitre en cours">
En cours {active && (
<span class="toc-badge" aria-label="Chapitre en cours">
En cours
</span>
)}
</span> </span>
)}
</span>
{active && <span class="toc-underline" aria-hidden="true"></span>} {active && <span class="toc-underline" aria-hidden="true"></span>}
</a> </a>
</li> </li>
); );
})} })}
</ol> </ol>
</div>
</div>
</nav> </nav>
<style> <style>
@@ -56,7 +84,22 @@ const href = (slug) => `/archicrat-ia/${strip(slug)}/`;
background: rgba(127,127,127,0.06); background: rgba(127,127,127,0.06);
} }
.toc-global__toggle{
width: 100%;
appearance: none;
border: 0;
background: transparent;
color: inherit;
text-align: left;
padding: 0;
cursor: pointer;
}
.toc-global__head{ .toc-global__head{
display: flex;
align-items: center;
justify-content: space-between;
gap: 10px;
margin-bottom: 10px; margin-bottom: 10px;
padding-bottom: 10px; padding-bottom: 10px;
border-bottom: 1px dashed rgba(127,127,127,0.25); border-bottom: 1px dashed rgba(127,127,127,0.25);
@@ -69,11 +112,36 @@ const href = (slug) => `/archicrat-ia/${strip(slug)}/`;
opacity: .88; opacity: .88;
} }
.toc-global__chevron{
font-size: 12px;
opacity: .7;
transition: transform 180ms ease;
}
.toc-global__body-clip{
display: grid;
grid-template-rows: 1fr;
transition:
grid-template-rows 220ms ease,
opacity 160ms ease,
margin-top 220ms ease;
}
.toc-global__body{
min-height: 0;
overflow: hidden;
}
.toc-global__list{ .toc-global__list{
list-style: none; list-style: none;
margin: 0; margin: 0;
padding: 0; padding: 0;
max-height: 44vh;
overflow: auto;
padding-right: 8px;
scrollbar-gutter: stable;
} }
.toc-global__list li::marker{ content: ""; } .toc-global__list li::marker{ content: ""; }
.toc-item{ margin: 6px 0; } .toc-item{ margin: 6px 0; }
@@ -99,13 +167,33 @@ const href = (slug) => `/archicrat-ia/${strip(slug)}/`;
align-items: center; align-items: center;
} }
.toc-active-indicator{ .toc-active-mark{
font-size: 14px; width: 14px;
line-height: 1; height: 14px;
display: inline-grid;
place-items: center;
border-radius: 999px;
border: 1px solid transparent;
opacity: .55;
} }
.toc-active-spacer{ .toc-active-mark__dot{
width: 14px; width: 5px;
height: 5px;
border-radius: 999px;
background: currentColor;
opacity: .65;
}
.toc-active-mark.is-on{
border-color: rgba(127,127,127,0.34);
opacity: 1;
}
.toc-active-mark.is-on .toc-active-mark__dot{
width: 6px;
height: 6px;
opacity: 1;
} }
.toc-link__title{ .toc-link__title{
@@ -143,11 +231,66 @@ const href = (slug) => `/archicrat-ia/${strip(slug)}/`;
border-radius: 999px; border-radius: 999px;
} }
.toc-global__list{ @media (max-width: 980px){
max-height: 44vh; .toc-global{
overflow: auto; padding: 10px 12px;
padding-right: 8px; border-radius: 14px;
scrollbar-gutter: stable; }
.toc-global__head{
margin-bottom: 0;
padding-bottom: 0;
border-bottom: 0;
min-height: 28px;
}
.toc-global__title{
font-size: 13px;
}
.toc-global__body-clip{
margin-top: 10px;
}
.toc-global.is-collapsed .toc-global__body-clip{
grid-template-rows: 0fr;
opacity: 0;
margin-top: 0;
}
.toc-global__body{
min-height: 0;
overflow: hidden;
transition: opacity 180ms ease;
}
.toc-global.is-collapsed .toc-global__body{
opacity: 0;
}
.toc-global.is-collapsed .toc-global__chevron{
transform: rotate(-90deg);
}
.toc-link{
padding: 7px 9px;
border-radius: 12px;
}
.toc-link__title{
font-size: 12.5px;
line-height: 1.22;
}
.toc-badge{
font-size: 10px;
padding: 2px 7px;
}
.toc-global__list{
max-height: min(42vh, 360px);
padding-right: 4px;
}
} }
@media (prefers-color-scheme: dark){ @media (prefers-color-scheme: dark){
@@ -155,12 +298,88 @@ const href = (slug) => `/archicrat-ia/${strip(slug)}/`;
.toc-link:hover{ background: rgba(255,255,255,0.06); } .toc-link:hover{ background: rgba(255,255,255,0.06); }
.toc-item.is-active .toc-link{ background: rgba(255,255,255,0.06); } .toc-item.is-active .toc-link{ background: rgba(255,255,255,0.06); }
.toc-badge{ background: rgba(255,255,255,0.06); } .toc-badge{ background: rgba(255,255,255,0.06); }
.toc-active-mark.is-on{ border-color: rgba(255,255,255,0.22); }
} }
</style> </style>
<script is:inline> <script is:inline>
(() => { (() => {
const active = document.querySelector(".toc-global .toc-item.is-active"); function init() {
if (active) active.scrollIntoView({ block: "nearest" }); document.querySelectorAll("[data-toc-global]").forEach((nav) => {
if (nav.dataset.tocReady === "1") return;
nav.dataset.tocReady = "1";
const toggle = nav.querySelector(".toc-global__toggle");
const bodyClip = nav.querySelector(".toc-global__body-clip");
const active = nav.querySelector(".toc-item.is-active");
const mq = window.matchMedia("(max-width: 980px)");
const key = `archicratie:${nav.dataset.tocKey || "toc-global"}`;
if (!toggle || !bodyClip) return;
const read = () => {
try {
const v = localStorage.getItem(key);
if (v === "open") return true;
if (v === "closed") return false;
} catch {}
return null;
};
const write = (open) => {
try { localStorage.setItem(key, open ? "open" : "closed"); } catch {}
};
const setOpen = (open, { persist = true } = {}) => {
const isMobile = mq.matches;
nav.classList.toggle("is-collapsed", isMobile && !open);
toggle.setAttribute("aria-expanded", open ? "true" : "false");
if (persist && isMobile) write(open);
};
const initState = () => {
if (!mq.matches) {
setOpen(true, { persist: false });
if (active) active.scrollIntoView({ block: "nearest" });
return;
}
const stored = read();
const open = stored == null ? false : stored;
setOpen(open, { persist: false });
if (open && active) active.scrollIntoView({ block: "nearest" });
};
toggle.addEventListener("click", () => {
const open = toggle.getAttribute("aria-expanded") !== "true";
setOpen(open);
if (open && active) active.scrollIntoView({ block: "nearest" });
if (open) {
window.dispatchEvent(new CustomEvent("archicratie:tocGlobalOpen"));
}
});
window.addEventListener("archicratie:tocLocalOpen", () => {
if (!mq.matches) return;
setOpen(false);
});
if (mq.addEventListener) {
mq.addEventListener("change", initState);
} else if (mq.addListener) {
mq.addListener(initState);
}
initState();
});
}
if (document.readyState === "loading") {
window.addEventListener("DOMContentLoaded", init, { once: true });
} else {
init();
}
})(); })();
</script> </script>

View File

@@ -0,0 +1,505 @@
---
import {
getGlossaryEntryAsideData,
getGlossaryPortalLinks,
hrefOfGlossaryEntry,
slugOfGlossaryEntry,
} from "../lib/glossary";
const {
currentEntry,
allEntries = [],
} = Astro.props;
const currentSlug = slugOfGlossaryEntry(currentEntry);
const {
displayFamily,
displayDomain,
displayLevel,
showNoyau,
showSameFamily,
fondamentaux,
sameFamilyTitle,
sameFamilyEntries,
relationSections,
contextualTheory,
} = getGlossaryEntryAsideData(currentEntry, allEntries);
const portalLinks = getGlossaryPortalLinks();
---
<nav class="glossary-aside" aria-label="Navigation du glossaire">
<div class="glossary-aside__block glossary-aside__block--intro">
<a class="glossary-aside__back" href="/glossaire/">← Retour au glossaire</a>
<div class="glossary-aside__title">Glossaire archicratique</div>
<div class="glossary-aside__pills" aria-label="Repères de lecture">
<span class="glossary-aside__pill glossary-aside__pill--family">
{displayFamily}
</span>
{displayDomain && (
<span class="glossary-aside__pill">{displayDomain}</span>
)}
{displayLevel && (
<span class="glossary-aside__pill">{displayLevel}</span>
)}
</div>
</div>
<details class="glossary-aside__block glossary-aside__disclosure" open>
<summary class="glossary-aside__summary">
<span class="glossary-aside__heading">Portails</span>
<span class="glossary-aside__chevron" aria-hidden="true">▾</span>
</summary>
<div class="glossary-aside__panel">
<ul class="glossary-aside__list">
{portalLinks.map((item) => (
<li><a href={item.href}>{item.label}</a></li>
))}
</ul>
</div>
</details>
{showNoyau && (
<details class="glossary-aside__block glossary-aside__disclosure" open>
<summary class="glossary-aside__summary">
<span class="glossary-aside__heading">Noyau archicratique</span>
<span class="glossary-aside__chevron" aria-hidden="true">▾</span>
</summary>
<div class="glossary-aside__panel">
<ul class="glossary-aside__list">
{fondamentaux.map((entry) => {
const active = slugOfGlossaryEntry(entry) === currentSlug;
return (
<li>
<a
href={hrefOfGlossaryEntry(entry)}
aria-current={active ? "page" : undefined}
class={active ? "is-active" : undefined}
>
{entry.data.term}
</a>
</li>
);
})}
</ul>
</div>
</details>
)}
{showSameFamily && (
<details class="glossary-aside__block glossary-aside__disclosure" open>
<summary class="glossary-aside__summary">
<span class="glossary-aside__heading">{sameFamilyTitle}</span>
<span class="glossary-aside__chevron" aria-hidden="true">▾</span>
</summary>
<div class="glossary-aside__panel">
<ul class="glossary-aside__list">
{sameFamilyEntries.map((entry) => {
const active = slugOfGlossaryEntry(entry) === currentSlug;
return (
<li>
<a
href={hrefOfGlossaryEntry(entry)}
aria-current={active ? "page" : undefined}
class={active ? "is-active" : undefined}
>
{entry.data.term}
</a>
</li>
);
})}
</ul>
</div>
</details>
)}
{relationSections.length > 0 && (
<details class="glossary-aside__block glossary-aside__disclosure" open>
<summary class="glossary-aside__summary">
<span class="glossary-aside__heading">Autour de cette fiche</span>
<span class="glossary-aside__chevron" aria-hidden="true">▾</span>
</summary>
<div class="glossary-aside__panel">
{relationSections.map((section) => (
<>
<h3 class="glossary-aside__subheading">{section.title}</h3>
<ul class="glossary-aside__list">
{section.items.map((entry) => (
<li><a href={hrefOfGlossaryEntry(entry)}>{entry.data.term}</a></li>
))}
</ul>
</>
))}
</div>
</details>
)}
{contextualTheory.length > 0 && (
<details class="glossary-aside__block glossary-aside__disclosure" open>
<summary class="glossary-aside__summary">
<span class="glossary-aside__heading">Paysage théorique</span>
<span class="glossary-aside__chevron" aria-hidden="true">▾</span>
</summary>
<div class="glossary-aside__panel">
<ul class="glossary-aside__list">
{contextualTheory.map((entry) => (
<li><a href={hrefOfGlossaryEntry(entry)}>{entry.data.term}</a></li>
))}
</ul>
</div>
</details>
)}
</nav>
<style>
.glossary-aside{
display: flex;
flex-direction: column;
gap: 14px;
min-width: 0;
}
.glossary-aside__block{
border: 1px solid rgba(127,127,127,0.22);
border-radius: 16px;
padding: 14px;
background: rgba(127,127,127,0.05);
min-width: 0;
}
.glossary-aside__block--intro{
padding-top: 13px;
padding-bottom: 13px;
}
.glossary-aside__back{
display: inline-block;
margin-bottom: 10px;
font-size: 14px;
font-weight: 700;
line-height: 1.35;
text-decoration: none;
}
.glossary-aside__title{
font-size: 18px;
font-weight: 850;
letter-spacing: .1px;
line-height: 1.22;
}
.glossary-aside__pills{
display: flex;
flex-wrap: wrap;
gap: 7px;
margin-top: 10px;
}
.glossary-aside__pill{
display: inline-flex;
align-items: center;
padding: 5px 10px;
border: 1px solid rgba(127,127,127,0.24);
border-radius: 999px;
background: rgba(127,127,127,0.04);
font-size: 13px;
line-height: 1.35;
opacity: .92;
min-width: 0;
}
.glossary-aside__pill--family{
border-color: rgba(127,127,127,0.38);
font-weight: 800;
}
.glossary-aside__disclosure{
padding: 0;
overflow: hidden;
}
.glossary-aside__summary{
list-style: none;
display: flex;
align-items: center;
justify-content: space-between;
gap: 12px;
padding: 14px;
cursor: pointer;
user-select: none;
}
.glossary-aside__summary::-webkit-details-marker{
display: none;
}
.glossary-aside__summary:hover{
background: rgba(127,127,127,0.035);
}
.glossary-aside__heading{
margin: 0;
font-size: 16px;
font-weight: 850;
line-height: 1.28;
opacity: .97;
}
.glossary-aside__chevron{
flex: 0 0 auto;
font-size: 14px;
line-height: 1;
opacity: .72;
transform: rotate(0deg);
transition: transform 160ms ease, opacity 160ms ease;
}
.glossary-aside__disclosure[open] .glossary-aside__chevron{
transform: rotate(180deg);
opacity: .96;
}
.glossary-aside__panel{
padding: 0 14px 14px;
}
.glossary-aside__subheading{
margin: 13px 0 8px;
font-size: 12.5px;
font-weight: 800;
line-height: 1.35;
opacity: .82;
text-transform: uppercase;
letter-spacing: .04em;
}
.glossary-aside__list{
list-style: none;
margin: 0;
padding: 0;
}
.glossary-aside__list li{
margin: 7px 0;
}
.glossary-aside__list a{
text-decoration: none;
font-size: 14px;
line-height: 1.4;
word-break: break-word;
}
.glossary-aside__list a.is-active{
font-weight: 800;
}
@media (max-width: 860px){
.glossary-aside{
gap: 10px;
}
.glossary-aside__block{
border-radius: 14px;
}
.glossary-aside__block--intro{
padding: 12px;
}
.glossary-aside__back{
margin-bottom: 8px;
font-size: 13px;
line-height: 1.28;
}
.glossary-aside__title{
font-size: 19px;
line-height: 1.18;
}
.glossary-aside__pills{
gap: 6px;
margin-top: 8px;
}
.glossary-aside__pill{
padding: 4px 9px;
font-size: 12px;
line-height: 1.26;
}
.glossary-aside__summary{
padding: 12px;
}
.glossary-aside__heading{
font-size: 17px;
line-height: 1.2;
}
.glossary-aside__panel{
padding: 0 12px 12px;
}
.glossary-aside__subheading{
margin: 10px 0 6px;
font-size: 11.5px;
line-height: 1.26;
}
.glossary-aside__list li{
margin: 5px 0;
}
.glossary-aside__list a{
font-size: 14px;
line-height: 1.34;
}
.glossary-aside__disclosure:not([open]) .glossary-aside__panel{
display: none;
}
}
@media (max-width: 860px){
.glossary-aside__disclosure{
background: rgba(127,127,127,0.045);
}
.glossary-aside__disclosure[open] .glossary-aside__summary{
border-bottom: 1px solid rgba(127,127,127,0.12);
}
}
@media (orientation: landscape) and (max-width: 920px) and (max-height: 520px){
.glossary-aside{
gap: 8px;
}
.glossary-aside__block{
border-radius: 12px;
}
.glossary-aside__block--intro{
padding: 10px 11px;
}
.glossary-aside__back{
margin-bottom: 6px;
font-size: 12px;
line-height: 1.2;
}
.glossary-aside__title{
font-size: 16px;
line-height: 1.14;
}
.glossary-aside__pills{
gap: 5px;
margin-top: 7px;
}
.glossary-aside__pill{
padding: 3px 8px;
font-size: 11px;
line-height: 1.2;
}
.glossary-aside__summary{
padding: 10px 11px;
}
.glossary-aside__heading{
font-size: 15px;
line-height: 1.16;
}
.glossary-aside__panel{
padding: 0 11px 10px;
}
.glossary-aside__subheading{
margin: 8px 0 5px;
font-size: 11px;
line-height: 1.18;
}
.glossary-aside__list li{
margin: 4px 0;
}
.glossary-aside__list a{
font-size: 13px;
line-height: 1.28;
}
}
@media (min-width: 861px){
.glossary-aside__summary{
cursor: default;
}
.glossary-aside__chevron{
display: none;
}
}
@media (prefers-color-scheme: dark){
.glossary-aside__block,
.glossary-aside__pill{
background: rgba(255,255,255,0.04);
}
.glossary-aside__summary:hover{
background: rgba(255,255,255,0.03);
}
}
</style>
<script is:inline>
(() => {
const syncMobileDisclosure = () => {
const mobile = window.matchMedia("(max-width: 860px)").matches;
const smallLandscape = window.matchMedia(
"(orientation: landscape) and (max-width: 920px) and (max-height: 520px)"
).matches;
const compact = mobile || smallLandscape;
document
.querySelectorAll(".glossary-aside__disclosure")
.forEach((el, index) => {
if (!(el instanceof HTMLDetailsElement)) return;
if (compact) {
if (!el.dataset.mobileInit) {
el.open = index === 0;
el.dataset.mobileInit = "true";
}
} else {
el.open = true;
}
});
};
if (document.readyState === "loading") {
document.addEventListener("DOMContentLoaded", syncMobileDisclosure, { once: true });
} else {
syncMobileDisclosure();
}
window.addEventListener("resize", syncMobileDisclosure);
window.addEventListener("pageshow", syncMobileDisclosure);
})();
</script>

View File

@@ -0,0 +1,110 @@
---
import { hrefOfGlossaryEntry, type GlossaryEntry } from "../lib/glossary";
export interface Props {
entries?: GlossaryEntry[];
wide?: boolean;
}
const {
entries = [],
wide = false,
} = Astro.props;
---
<div class="glossary-cards">
{entries.map((entry) => (
<a
class:list={[
"glossary-card",
wide && "glossary-card--wide",
]}
href={hrefOfGlossaryEntry(entry)}
>
<strong>{entry.data.term}</strong>
<span>{entry.data.definitionShort}</span>
</a>
))}
</div>
<style>
.glossary-cards{
display: grid;
grid-template-columns: repeat(auto-fit, minmax(220px, 1fr));
gap: 12px;
margin-top: 12px;
}
.glossary-card{
display: flex;
flex-direction: column;
gap: 7px;
padding: 13px 14px;
border: 1px solid var(--glossary-border);
border-radius: 16px;
background: var(--glossary-bg-soft);
text-decoration: none;
transition: transform 120ms ease, background 120ms ease, border-color 120ms ease;
}
.glossary-card:hover{
transform: translateY(-1px);
background: var(--glossary-bg-soft-strong);
border-color: rgba(0,217,255,0.16);
text-decoration: none;
}
.glossary-card--wide{
grid-column: 1 / -1;
}
.glossary-card strong{
color: var(--glossary-accent);
font-size: 1.02rem;
line-height: 1.24;
}
.glossary-card span{
color: inherit;
font-size: .98rem;
line-height: 1.46;
opacity: .94;
}
@media (max-width: 760px){
.glossary-cards{
grid-template-columns: 1fr;
gap: 10px;
margin-top: 10px;
}
.glossary-card{
gap: 6px;
padding: 12px 12px;
border-radius: 14px;
}
.glossary-card strong{
font-size: .98rem;
}
.glossary-card span{
font-size: .94rem;
line-height: 1.42;
}
.glossary-card--wide{
grid-column: auto;
}
}
@media (prefers-color-scheme: dark){
.glossary-card{
background: rgba(255,255,255,0.04);
}
.glossary-card:hover{
background: rgba(255,255,255,0.07);
}
}
</style>

View File

@@ -0,0 +1,16 @@
<div class="glossary-entry-body">
<slot />
</div>
<style>
.glossary-entry-body{
margin-bottom: 28px;
}
:global(.glossary-entry-body h2),
:global(.glossary-entry-body h3),
:global(.glossary-relations h2),
:global(.glossary-relations h3){
scroll-margin-top: calc(var(--sticky-offset-px, 96px) + 18px);
}
</style>

View File

@@ -0,0 +1,260 @@
---
interface Props {
term: string;
definitionShort: string;
displayFamily: string;
displayDomain?: string;
displayLevel?: string;
mobilizedAuthors?: string[];
comparisonTraditions?: string[];
}
const {
term,
definitionShort,
displayFamily,
displayDomain = "",
displayLevel = "",
mobilizedAuthors = [],
comparisonTraditions = [],
} = Astro.props;
const hasScholarlyMeta =
mobilizedAuthors.length > 0 ||
comparisonTraditions.length > 0;
---
<header class="glossary-entry-head" data-ge-hero>
<div class="glossary-entry-head__title">
<h1>{term}</h1>
</div>
<div class="glossary-entry-summary">
<p class="glossary-entry-dek">
<em>{definitionShort}</em>
</p>
<div class="glossary-entry-signals" aria-label="Repères de lecture">
<span class="glossary-pill glossary-pill--family">
<strong>Famille :</strong> {displayFamily}
</span>
{displayDomain && (
<span class="glossary-pill">
<strong>Domaine :</strong> {displayDomain}
</span>
)}
{displayLevel && (
<span class="glossary-pill">
<strong>Niveau :</strong> {displayLevel}
</span>
)}
</div>
{hasScholarlyMeta && (
<div class="glossary-entry-meta">
{mobilizedAuthors.length > 0 && (
<p>
<strong>Auteurs mobilisés :</strong> {mobilizedAuthors.join(" / ")}
</p>
)}
{comparisonTraditions.length > 0 && (
<p>
<strong>Traditions de comparaison :</strong> {comparisonTraditions.join(" / ")}
</p>
)}
</div>
)}
</div>
</header>
<style>
.glossary-entry-head{
position: sticky;
top: calc(var(--sticky-header-h, 0px) + var(--page-gap, 12px));
z-index: 11;
margin: 0 0 22px;
border: 1px solid rgba(127,127,127,0.18);
border-radius: 24px;
background:
linear-gradient(180deg, rgba(0,0,0,0.60), rgba(0,0,0,0.92)),
radial-gradient(900px 240px at 20% 0%, rgba(0,217,255,0.08), transparent 60%);
backdrop-filter: blur(10px);
-webkit-backdrop-filter: blur(10px);
overflow: hidden;
transition:
border-radius 180ms ease,
box-shadow 180ms ease,
border-color 180ms ease;
}
.glossary-entry-head__title{
padding:
var(--entry-hero-pad-top, 18px)
var(--entry-hero-pad-x, 18px)
calc(var(--entry-hero-pad-top, 18px) - 2px);
transition: padding 180ms ease;
}
.glossary-entry-head h1{
margin: 0;
font-size: var(--entry-hero-h1-size, clamp(2.2rem, 4vw, 3.15rem));
line-height: 1.02;
letter-spacing: -.04em;
font-weight: 850;
transition: font-size 180ms ease;
}
.glossary-entry-summary{
display: grid;
gap: var(--entry-hero-gap, 14px);
padding:
calc(var(--entry-hero-pad-bottom, 18px) - 2px)
var(--entry-hero-pad-x, 18px)
var(--entry-hero-pad-bottom, 18px);
border-top: 1px solid rgba(127,127,127,0.14);
background: rgba(255,255,255,0.02);
transition: gap 180ms ease, padding 180ms ease;
}
.glossary-entry-dek{
margin: 0;
max-width: var(--entry-hero-dek-maxw, 76ch);
font-size: var(--entry-hero-dek-size, 1.04rem);
line-height: var(--entry-hero-dek-lh, 1.55);
opacity: .94;
transition:
max-width 180ms ease,
font-size 180ms ease,
line-height 180ms ease;
display: -webkit-box;
-webkit-box-orient: vertical;
-webkit-line-clamp: 4;
overflow: hidden;
}
.glossary-entry-signals{
display: flex;
flex-wrap: wrap;
gap: 7px;
margin: 0;
transition: gap 180ms ease;
}
.glossary-pill{
display: inline-flex;
align-items: center;
gap: 5px;
padding: 5px 9px;
border: 1px solid rgba(127,127,127,0.24);
border-radius: 999px;
background: rgba(127,127,127,0.05);
font-size: 12.5px;
line-height: 1.28;
transition:
padding 180ms ease,
font-size 180ms ease,
background 120ms ease,
border-color 120ms ease;
}
.glossary-pill--family{
border-color: rgba(127,127,127,0.36);
font-weight: 700;
}
.glossary-entry-meta{
margin: 0;
padding: 10px 12px;
border: 1px solid rgba(127,127,127,0.18);
border-radius: 12px;
background: rgba(127,127,127,0.04);
max-height: var(--entry-hero-meta-max-h, 12rem);
opacity: var(--entry-hero-meta-opacity, 1);
overflow: hidden;
transition:
max-height 180ms ease,
opacity 140ms ease,
padding 180ms ease,
border-color 180ms ease;
}
.glossary-entry-meta p{
margin: 0;
font-size: 13.5px;
line-height: 1.45;
}
.glossary-entry-meta p + p{
margin-top: 6px;
}
@media (max-width: 860px){
.glossary-entry-head{
position: static;
border-radius: 18px;
margin-bottom: 16px;
}
.glossary-entry-head__title{
padding: 12px 12px 10px;
}
.glossary-entry-summary{
gap: 9px;
padding: 10px 12px 12px;
}
.glossary-entry-dek{
max-width: none;
-webkit-line-clamp: 3;
}
.glossary-entry-signals{
gap: 6px;
}
.glossary-pill{
font-size: 12px;
padding: 4px 8px;
}
}
@media (max-width: 520px){
.glossary-entry-head{
border-radius: 16px;
margin-bottom: 14px;
}
.glossary-entry-head__title{
padding: 10px 10px 9px;
}
.glossary-entry-summary{
gap: 8px;
padding: 9px 10px 10px;
}
.glossary-entry-dek{
-webkit-line-clamp: 2;
}
.glossary-pill{
font-size: 11.5px;
padding: 3px 7px;
}
}
@media (prefers-color-scheme: dark){
.glossary-entry-meta{
background: rgba(255,255,255,0.03);
}
.glossary-pill{
background: rgba(255,255,255,0.04);
}
}
</style>

View File

@@ -0,0 +1,31 @@
---
interface Props {
canonicalHref: string;
term: string;
}
const { canonicalHref, term } = Astro.props;
---
<p class="glossary-legacy-note">
Cette entrée a été renommée. L’intitulé canonique est :
<a href={canonicalHref}>{term}</a>.
</p>
<style>
.glossary-legacy-note{
padding: 10px 12px;
border: 1px solid rgba(127,127,127,0.22);
border-radius: 12px;
background: rgba(127,127,127,0.05);
font-size: 14px;
line-height: 1.45;
margin-bottom: 18px;
}
@media (prefers-color-scheme: dark){
.glossary-legacy-note{
background: rgba(255,255,255,0.04);
}
}
</style>

View File

@@ -0,0 +1,289 @@
<script is:inline>
(() => {
// Synchronizes the glossary-entry hero header with the global "reading follow"
// bar: hides the follow bar on compact viewports, mirrors its on/off state as
// a body class, disables per-heading local sticky behavior, and publishes the
// hero's height for layout offsets. All work is coalesced through one rAF.
const boot = () => {
const body = document.body;
const root = document.documentElement;
const hero = document.querySelector("[data-ge-hero]");
const follow = document.getElementById("reading-follow");
const mqMobile = window.matchMedia("(max-width: 860px)");
const mqSmallLandscape = window.matchMedia(
"(orientation: landscape) and (max-width: 920px) and (max-height: 520px)"
);
// Bail out entirely if any required element is missing (e.g. a page
// without the follow bar or the hero header).
if (!body || !root || !hero || !follow) return;
const BODY_CLASS = "is-glossary-entry-page";
const FOLLOW_ON_CLASS = "glossary-entry-follow-on";
// Caches to avoid redundant writes on every sync pass.
let lastHeight = -1;
let lastFollowOn = null;
let raf = 0;
body.classList.add(BODY_CLASS);
const isCompactViewport = () =>
mqMobile.matches || mqSmallLandscape.matches;
// Current rendered hero height in whole pixels (never negative).
const heroHeight = () =>
Math.max(0, Math.round(hero.getBoundingClientRect().height || 0));
// On compact viewports the global follow bar is fully suppressed; on
// desktop its inline display override is cleared again.
const neutralizeGlobalFollowIfCompact = () => {
if (!isCompactViewport()) {
follow.style.display = "";
return;
}
follow.classList.remove("is-on");
follow.setAttribute("aria-hidden", "true");
follow.style.display = "none";
root.style.setProperty("--followbar-h", "0px");
};
// The follow bar counts as "on" only on non-compact viewports and when
// it is both visible and not aria-hidden.
const computeFollowOn = () =>
!isCompactViewport() &&
follow.classList.contains("is-on") &&
follow.style.display !== "none" &&
follow.getAttribute("aria-hidden") !== "true";
// Remove local sticky markers from section headings — the sticky hero
// replaces per-heading stickiness on glossary entry pages.
const stripLocalSticky = () => {
document
.querySelectorAll(
".glossary-entry-body h2, .glossary-entry-body h3, .glossary-relations h2, .glossary-relations h3"
)
.forEach((el) => {
el.classList.remove("is-sticky");
el.removeAttribute("data-sticky-active");
});
};
// Publish the effective sticky-hero height (0 on compact viewports),
// preferring a page-provided hook over the raw CSS custom property.
// NOTE(review): window.__archiSetLocalStickyHeight is defined elsewhere
// in the site — presumably it forwards to the same CSS variable; verify.
const applyLocalStickyHeight = () => {
const h = isCompactViewport() ? 0 : heroHeight();
if (h === lastHeight) return;
lastHeight = h;
if (typeof window.__archiSetLocalStickyHeight === "function") {
window.__archiSetLocalStickyHeight(h);
} else {
root.style.setProperty("--glossary-local-sticky-h", `${h}px`);
}
};
// Mirror the follow bar's state onto the body class, only when it changed.
const syncFollowState = () => {
const on = computeFollowOn();
if (on === lastFollowOn) return;
lastFollowOn = on;
body.classList.toggle(FOLLOW_ON_CLASS, on);
};
const syncAll = () => {
neutralizeGlobalFollowIfCompact();
stripLocalSticky();
syncFollowState();
applyLocalStickyHeight();
};
// Coalesce bursts of events into at most one syncAll per animation frame.
const schedule = () => {
if (raf) return;
raf = requestAnimationFrame(() => {
raf = 0;
syncAll();
});
};
// Re-sync whenever the follow bar's class/style/aria-hidden changes.
const followObserver = new MutationObserver(schedule);
followObserver.observe(follow, {
attributes: true,
attributeFilter: ["class", "style", "aria-hidden"],
subtree: false,
});
// Track hero size changes (ResizeObserver may be absent on old engines).
const heroResizeObserver =
typeof ResizeObserver !== "undefined"
? new ResizeObserver(schedule)
: null;
heroResizeObserver?.observe(hero);
window.addEventListener("resize", schedule);
window.addEventListener("pageshow", schedule);
// Font loading can change the hero height; errors are deliberately ignored.
if (document.fonts?.ready) {
document.fonts.ready.then(schedule).catch(() => {});
}
// matchMedia listeners, with the legacy addListener fallback for old Safari.
if (mqMobile.addEventListener) {
mqMobile.addEventListener("change", schedule);
} else if (mqMobile.addListener) {
mqMobile.addListener(schedule);
}
if (mqSmallLandscape.addEventListener) {
mqSmallLandscape.addEventListener("change", schedule);
} else if (mqSmallLandscape.addListener) {
mqSmallLandscape.addListener(schedule);
}
schedule();
};
if (document.readyState === "loading") {
document.addEventListener("DOMContentLoaded", boot, { once: true });
} else {
boot();
}
})();
</script>
<style>
:global(body.is-glossary-entry-page #reading-follow){
z-index: 10;
}
:global(body.is-glossary-entry-page.glossary-entry-follow-on .glossary-entry-head){
margin-bottom: 0;
border-bottom-left-radius: 0;
border-bottom-right-radius: 0;
box-shadow: 0 8px 20px rgba(0,0,0,0.10);
}
:global(body.is-glossary-entry-page.glossary-entry-follow-on .glossary-entry-head h1){
letter-spacing: -.03em;
}
:global(body.is-glossary-entry-page.glossary-entry-follow-on .glossary-entry-summary){
gap: 8px;
padding-top: 10px;
padding-bottom: 8px;
border-top-color: rgba(127,127,127,0.10);
}
:global(body.is-glossary-entry-page.glossary-entry-follow-on .glossary-entry-dek){
display: -webkit-box;
-webkit-box-orient: vertical;
-webkit-line-clamp: 1;
overflow: hidden;
}
:global(body.is-glossary-entry-page.glossary-entry-follow-on .glossary-entry-signals){
gap: 5px;
}
:global(body.is-glossary-entry-page.glossary-entry-follow-on .glossary-pill){
gap: 4px;
padding: 3px 7px;
font-size: 11px;
}
:global(body.is-glossary-entry-page.glossary-entry-follow-on .glossary-entry-meta){
padding: 0;
border-color: transparent;
max-height: 0;
opacity: 0;
overflow: hidden;
}
:global(body.is-glossary-entry-page.glossary-entry-follow-on #reading-follow){
transform: translateY(-1px);
}
:global(body.is-glossary-entry-page.glossary-entry-follow-on #reading-follow .reading-follow__inner){
margin-top: -1px;
border-top-left-radius: 0;
border-top-right-radius: 0;
}
:global(body.is-glossary-entry-page .glossary-entry-body h2.is-sticky),
:global(body.is-glossary-entry-page .glossary-entry-body h2[data-sticky-active="true"]),
:global(body.is-glossary-entry-page .glossary-entry-body h3.is-sticky),
:global(body.is-glossary-entry-page .glossary-entry-body h3[data-sticky-active="true"]),
:global(body.is-glossary-entry-page .glossary-relations h2.is-sticky),
:global(body.is-glossary-entry-page .glossary-relations h2[data-sticky-active="true"]),
:global(body.is-glossary-entry-page .glossary-relations h3.is-sticky),
:global(body.is-glossary-entry-page .glossary-relations h3[data-sticky-active="true"]){
position: static !important;
top: auto !important;
z-index: auto !important;
padding: 0 !important;
border: 0 !important;
background: transparent !important;
box-shadow: none !important;
backdrop-filter: none !important;
-webkit-backdrop-filter: none !important;
}
@media (max-width: 860px){
:global(body.is-glossary-entry-page #reading-follow),
:global(body.is-glossary-entry-page #reading-follow .reading-follow__inner){
display: none !important;
opacity: 0 !important;
pointer-events: none !important;
visibility: hidden !important;
}
:global(body.is-glossary-entry-page){
--followbar-h: 0px !important;
--sticky-offset-px: calc(var(--sticky-header-h, 0px) + var(--page-gap, 12px)) !important;
}
:global(body.is-glossary-entry-page.glossary-entry-follow-on .glossary-entry-head){
margin-bottom: 18px;
border-radius: 20px;
box-shadow: none;
}
:global(body.is-glossary-entry-page.glossary-entry-follow-on .glossary-entry-summary){
gap: 6px;
padding-top: 8px;
padding-bottom: 8px;
}
:global(body.is-glossary-entry-page.glossary-entry-follow-on .glossary-entry-dek){
display: none;
}
:global(body.is-glossary-entry-page.glossary-entry-follow-on .glossary-entry-signals){
gap: 5px;
}
:global(body.is-glossary-entry-page.glossary-entry-follow-on .glossary-pill){
padding: 3px 6px;
font-size: 10.5px;
}
}
@media (orientation: landscape) and (max-width: 920px) and (max-height: 520px){
:global(body.is-glossary-entry-page #reading-follow),
:global(body.is-glossary-entry-page #reading-follow .reading-follow__inner){
display: none !important;
opacity: 0 !important;
pointer-events: none !important;
visibility: hidden !important;
}
:global(body.is-glossary-entry-page){
--followbar-h: 0px !important;
--sticky-offset-px: calc(var(--sticky-header-h, 0px) + var(--page-gap, 12px)) !important;
}
:global(body.is-glossary-entry-page.glossary-entry-follow-on .glossary-entry-head){
margin-bottom: 14px;
border-radius: 16px;
box-shadow: none;
}
:global(body.is-glossary-entry-page.glossary-entry-follow-on .glossary-entry-summary){
gap: 5px;
padding-top: 6px;
padding-bottom: 6px;
}
:global(body.is-glossary-entry-page.glossary-entry-follow-on .glossary-entry-dek){
display: none;
}
:global(body.is-glossary-entry-page.glossary-entry-follow-on .glossary-pill){
padding: 2px 6px;
font-size: 10px;
}
}
</style>

View File

@@ -0,0 +1,382 @@
---
// Data helpers for the glossary home sidebar (project-local module).
import {
getFondamentaux,
getGlossaryHomeStats,
getGlossaryPortalLinks,
hrefOfGlossaryEntry,
} from "../lib/glossary";
// Props: `allEntries` — the full list of glossary entries; defaults to empty.
const {
allEntries = [],
} = Astro.props;
// Core entries rendered in the "Noyau archicratique" disclosure panel below.
const fondamentaux = getFondamentaux(allEntries);
// Links rendered in the "Parcours du glossaire" disclosure panel below.
const portalLinks = getGlossaryPortalLinks();
// Aggregate counts shown as pills in the intro block (entries, meta-regimes,
// doctrines, paradigmes). Semantics of each count live in ../lib/glossary.
const {
totalEntries,
paradigmesCount,
doctrinesCount,
metaRegimesCount,
} = getGlossaryHomeStats(allEntries);
---
<nav class="glossary-home-aside" aria-label="Navigation du portail du glossaire">
<div class="glossary-home-aside__block glossary-home-aside__block--intro">
<div class="glossary-home-aside__title">Glossaire archicratique</div>
<div class="glossary-home-aside__meta">
portail de lecture · cartographie conceptuelle
</div>
<div class="glossary-home-aside__pills" aria-label="Repères de navigation">
<span class="glossary-home-aside__pill">{totalEntries} entrées</span>
<span class="glossary-home-aside__pill">{metaRegimesCount} méta-régimes</span>
<span class="glossary-home-aside__pill">
{doctrinesCount} doctrine{doctrinesCount > 1 ? "s" : ""} · {paradigmesCount} paradigme{paradigmesCount > 1 ? "s" : ""}
</span>
</div>
</div>
<details class="glossary-home-aside__block glossary-home-aside__disclosure" open>
<summary class="glossary-home-aside__summary">
<span class="glossary-home-aside__heading">Parcours du glossaire</span>
<span class="glossary-home-aside__chevron" aria-hidden="true">▾</span>
</summary>
<div class="glossary-home-aside__panel">
<ul class="glossary-home-aside__list">
{portalLinks.map((item) => (
<li><a href={item.href}>{item.label}</a></li>
))}
</ul>
</div>
</details>
{fondamentaux.length > 0 && (
<details class="glossary-home-aside__block glossary-home-aside__disclosure" open>
<summary class="glossary-home-aside__summary">
<span class="glossary-home-aside__heading">Noyau archicratique</span>
<span class="glossary-home-aside__chevron" aria-hidden="true">▾</span>
</summary>
<div class="glossary-home-aside__panel">
<ul class="glossary-home-aside__list">
{fondamentaux.map((entry) => (
<li><a href={hrefOfGlossaryEntry(entry)}>{entry.data.term}</a></li>
))}
</ul>
</div>
</details>
)}
</nav>
<style>
.glossary-home-aside{
display: flex;
flex-direction: column;
gap: 14px;
min-width: 0;
}
.glossary-home-aside__block{
border: 1px solid rgba(127,127,127,0.22);
border-radius: 16px;
padding: 14px;
background: rgba(127,127,127,0.05);
min-width: 0;
}
.glossary-home-aside__block--intro{
padding-top: 13px;
padding-bottom: 13px;
}
.glossary-home-aside__title{
font-size: 18px;
font-weight: 850;
letter-spacing: .1px;
line-height: 1.22;
}
.glossary-home-aside__meta{
margin-top: 8px;
font-size: 13px;
line-height: 1.4;
opacity: .8;
}
.glossary-home-aside__pills{
display: flex;
flex-wrap: wrap;
gap: 7px;
margin-top: 11px;
}
.glossary-home-aside__pill{
display: inline-flex;
align-items: center;
padding: 5px 10px;
border: 1px solid rgba(127,127,127,0.24);
border-radius: 999px;
background: rgba(127,127,127,0.04);
font-size: 13px;
line-height: 1.35;
opacity: .92;
min-width: 0;
}
.glossary-home-aside__disclosure{
padding: 0;
overflow: hidden;
}
.glossary-home-aside__summary{
list-style: none;
display: flex;
align-items: center;
justify-content: space-between;
gap: 12px;
padding: 14px;
cursor: pointer;
user-select: none;
}
.glossary-home-aside__summary::-webkit-details-marker{
display: none;
}
.glossary-home-aside__summary:hover{
background: rgba(127,127,127,0.035);
}
.glossary-home-aside__heading{
margin: 0;
font-size: 16px;
font-weight: 850;
line-height: 1.28;
opacity: .97;
}
.glossary-home-aside__chevron{
flex: 0 0 auto;
font-size: 14px;
line-height: 1;
opacity: .72;
transform: rotate(0deg);
transition: transform 160ms ease, opacity 160ms ease;
}
.glossary-home-aside__disclosure[open] .glossary-home-aside__chevron{
transform: rotate(180deg);
opacity: .96;
}
.glossary-home-aside__panel{
padding: 0 14px 14px;
}
.glossary-home-aside__list{
list-style: none;
margin: 0;
padding: 0;
}
.glossary-home-aside__list li{
margin: 7px 0;
}
.glossary-home-aside__list a{
text-decoration: none;
font-size: 14px;
line-height: 1.42;
word-break: break-word;
}
@media (max-width: 860px){
.glossary-home-aside{
gap: 10px;
}
.glossary-home-aside__block{
border-radius: 14px;
}
.glossary-home-aside__block--intro{
padding: 12px;
}
.glossary-home-aside__title{
font-size: 19px;
line-height: 1.18;
}
.glossary-home-aside__meta{
margin-top: 6px;
font-size: 12px;
line-height: 1.32;
}
.glossary-home-aside__pills{
gap: 6px;
margin-top: 9px;
}
.glossary-home-aside__pill{
padding: 4px 9px;
font-size: 12px;
line-height: 1.28;
}
.glossary-home-aside__summary{
padding: 12px;
}
.glossary-home-aside__heading{
font-size: 17px;
line-height: 1.2;
}
.glossary-home-aside__panel{
padding: 0 12px 12px;
}
.glossary-home-aside__list li{
margin: 5px 0;
}
.glossary-home-aside__list a{
font-size: 14px;
line-height: 1.34;
}
.glossary-home-aside__disclosure:not([open]) .glossary-home-aside__panel{
display: none;
}
}
@media (max-width: 860px){
.glossary-home-aside__disclosure{
background: rgba(127,127,127,0.045);
}
.glossary-home-aside__disclosure[open] .glossary-home-aside__summary{
border-bottom: 1px solid rgba(127,127,127,0.12);
}
}
@media (orientation: landscape) and (max-width: 920px) and (max-height: 520px){
.glossary-home-aside{
gap: 8px;
}
.glossary-home-aside__block{
border-radius: 12px;
}
.glossary-home-aside__block--intro{
padding: 10px 11px;
}
.glossary-home-aside__title{
font-size: 16px;
line-height: 1.14;
}
.glossary-home-aside__meta{
font-size: 11px;
line-height: 1.26;
margin-top: 5px;
}
.glossary-home-aside__pills{
gap: 5px;
margin-top: 8px;
}
.glossary-home-aside__pill{
padding: 3px 8px;
font-size: 11px;
line-height: 1.2;
}
.glossary-home-aside__summary{
padding: 10px 11px;
}
.glossary-home-aside__heading{
font-size: 15px;
line-height: 1.16;
}
.glossary-home-aside__panel{
padding: 0 11px 10px;
}
.glossary-home-aside__list li{
margin: 4px 0;
}
.glossary-home-aside__list a{
font-size: 13px;
line-height: 1.28;
}
}
@media (min-width: 861px){
.glossary-home-aside__summary{
cursor: default;
}
.glossary-home-aside__chevron{
display: none;
}
}
@media (prefers-color-scheme: dark){
.glossary-home-aside__block,
.glossary-home-aside__pill{
background: rgba(255,255,255,0.04);
}
.glossary-home-aside__summary:hover{
background: rgba(255,255,255,0.03);
}
}
</style>
<script is:inline>
(() => {
// Viewports treated as "compact": narrow portrait, or short landscape phones.
const COMPACT_QUERIES = [
"(max-width: 860px)",
"(orientation: landscape) and (max-width: 920px) and (max-height: 520px)",
];

// On compact viewports, collapse every disclosure except the first — but only
// the first time compact mode is seen (tracked via data-mobile-init), so the
// user's own toggling is not clobbered by later resize events. On wider
// viewports, force every panel open (the summary is inert there per CSS).
function syncMobileDisclosure() {
const isCompact = COMPACT_QUERIES.some(
(query) => window.matchMedia(query).matches
);
const disclosures = document.querySelectorAll(
".glossary-home-aside__disclosure"
);
let position = 0;
for (const node of disclosures) {
const currentIndex = position;
position += 1;
if (!(node instanceof HTMLDetailsElement)) continue;
if (!isCompact) {
node.open = true;
continue;
}
if (node.dataset.mobileInit) continue;
node.open = currentIndex === 0;
node.dataset.mobileInit = "true";
}
}

// Run once the DOM is ready, then keep state in sync on resize and on
// back/forward-cache restores (pageshow).
if (document.readyState === "loading") {
document.addEventListener("DOMContentLoaded", syncMobileDisclosure, { once: true });
} else {
syncMobileDisclosure();
}
window.addEventListener("resize", syncMobileDisclosure);
window.addEventListener("pageshow", syncMobileDisclosure);
})();
</script>

View File

@@ -0,0 +1,364 @@
---
// Hero header props. All optional; French defaults below.
export interface Props {
kicker?: string;
title?: string;
intro?: string;
}
// Fix: the default `intro` had lost its apostrophes to an encoding/paste
// mangle ("nest" → "n’est", "dentrée" → "d’entrée"); restored here so the
// rendered French copy is correct.
const {
kicker = "Référentiel terminologique",
title = "Glossaire archicratique",
intro = "Ce glossaire n’est pas seulement un index de définitions. Il constitue une porte d’entrée dans la pensée archicratique : une cartographie raisonnée des concepts fondamentaux, des scènes, des dynamiques et des méta-régimes à partir desquels une société peut être décrite comme organisation de tensions et recherche de co-viabilité.",
} = Astro.props;
---
<header class="glossary-hero" id="glossary-hero">
<p class="glossary-kicker">{kicker}</p>
<h1>{title}</h1>
<div class="glossary-hero__collapsible">
<p
class="glossary-intro"
id="glossary-hero-intro"
aria-hidden="false"
>
{intro}
</p>
<button
class="glossary-hero__toggle"
id="glossary-hero-toggle"
type="button"
aria-controls="glossary-hero-intro"
aria-expanded="false"
hidden
>
lire la suite
</button>
</div>
<h2
class="glossary-hero-follow"
id="glossary-hero-follow"
aria-hidden="true"
></h2>
</header>
<style>
.glossary-hero{
position: sticky;
top: var(--glossary-sticky-top);
z-index: 12;
margin-bottom: 28px;
padding: 14px 16px 18px;
border: 1px solid rgba(127,127,127,0.18);
border-radius: 28px;
background:
linear-gradient(180deg, rgba(0,0,0,0.60), rgba(0,0,0,0.90)),
radial-gradient(900px 240px at 20% 0%, rgba(0,217,255,0.08), transparent 60%);
transition:
padding 220ms cubic-bezier(.22,.8,.22,1),
border-radius 220ms cubic-bezier(.22,.8,.22,1),
background 300ms cubic-bezier(.22,.8,.22,1),
border-color 300ms cubic-bezier(.22,.8,.22,1),
box-shadow 300ms cubic-bezier(.22,.8,.22,1);
backdrop-filter: blur(10px);
-webkit-backdrop-filter: blur(10px);
display: grid;
row-gap: 12px;
min-width: 0;
overflow: clip;
}
.glossary-kicker{
margin: 0;
font-size: 12px;
letter-spacing: .12em;
text-transform: uppercase;
opacity: .72;
}
.glossary-hero h1{
margin: 0;
font-size: clamp(2.2rem, 4vw, 3.15rem);
line-height: 1.02;
letter-spacing: -.04em;
font-weight: 850;
transition:
font-size 220ms cubic-bezier(.22,.8,.22,1),
line-height 220ms cubic-bezier(.22,.8,.22,1);
min-width: 0;
}
.glossary-hero__collapsible{
display: grid;
row-gap: 6px;
min-width: 0;
}
.glossary-intro{
margin: 0;
max-width: 72ch;
font-size: 1.05rem;
line-height: 1.55;
opacity: .94;
min-width: 0;
transition:
font-size 220ms cubic-bezier(.22,.8,.22,1),
line-height 220ms cubic-bezier(.22,.8,.22,1),
max-height 220ms cubic-bezier(.22,.8,.22,1),
opacity 180ms ease;
}
.glossary-hero__toggle{
display: inline-flex;
align-items: center;
justify-content: center;
width: fit-content;
min-height: 30px;
padding: 3px 0;
border: 0;
border-radius: 0;
background: transparent;
color: inherit;
font-size: 12px;
line-height: 1.2;
letter-spacing: .01em;
opacity: .72;
cursor: pointer;
text-decoration: underline;
text-decoration-thickness: 1px;
text-underline-offset: 2px;
transition:
opacity 120ms ease,
transform 120ms ease;
}
.glossary-hero__toggle:hover{
opacity: .92;
transform: translateY(-1px);
}
.glossary-hero__toggle:focus-visible{
outline: 2px solid rgba(0,217,255,0.24);
outline-offset: 4px;
border-radius: 4px;
}
.glossary-hero__toggle[hidden]{
display: none !important;
}
.glossary-hero-follow{
margin: 2px 0 0;
min-height: var(--glossary-follow-height);
display: block;
max-width: min(100%, 22ch);
opacity: 0;
transform: translateY(10px) scale(.985);
filter: blur(6px);
transition:
opacity 220ms cubic-bezier(.22,1,.36,1),
transform 320ms cubic-bezier(.22,1,.36,1),
filter 320ms cubic-bezier(.22,1,.36,1);
pointer-events: none;
white-space: nowrap;
overflow: hidden;
text-overflow: ellipsis;
will-change: opacity, transform, filter;
min-width: 0;
}
.glossary-hero-follow.is-visible{
opacity: 1;
transform: translateY(0) scale(1);
filter: blur(0);
}
:global(body.glossary-home-follow-on) .glossary-hero{
padding: 12px 14px 14px;
border-bottom-left-radius: 18px;
border-bottom-right-radius: 18px;
}
:global(body.glossary-home-follow-on) .glossary-hero h1{
font-size: clamp(1.7rem, 3.2vw, 2.2rem);
line-height: 1.02;
}
:global(body.glossary-home-follow-on:not(.glossary-home-hero-expanded)) .glossary-intro{
font-size: .94rem;
line-height: 1.34;
max-height: 2.7em;
overflow: hidden;
display: -webkit-box;
-webkit-box-orient: vertical;
-webkit-line-clamp: 2;
}
:global(body.glossary-home-follow-on:not(.glossary-home-hero-expanded)) .glossary-hero__toggle{
display: inline-flex;
}
@media (max-width: 760px){
.glossary-hero{
top: calc(var(--glossary-sticky-top) - 2px);
padding: 12px 14px 16px;
border-radius: 22px;
row-gap: 10px;
}
.glossary-hero h1{
font-size: clamp(1.9rem, 8vw, 2.45rem);
line-height: 1.02;
letter-spacing: -.03em;
}
.glossary-hero__collapsible{
row-gap: 7px;
}
.glossary-intro{
max-width: 100%;
width: 100%;
font-size: .98rem;
line-height: 1.44;
}
:global(body.glossary-home-follow-on) .glossary-hero{
padding: 10px 13px 12px;
border-radius: 18px;
}
:global(body.glossary-home-follow-on) .glossary-hero h1{
font-size: clamp(1.45rem, 6vw, 1.8rem);
}
:global(body.glossary-home-follow-on:not(.glossary-home-hero-expanded)) .glossary-intro{
max-width: 100%;
width: 100%;
font-size: .86rem;
line-height: 1.24;
max-height: 2.48em;
-webkit-line-clamp: 2;
opacity: .9;
}
.glossary-hero__toggle{
min-height: 28px;
font-size: 11.5px;
}
.glossary-hero-follow{
max-width: min(100%, 24ch);
}
}
@media (max-width: 520px){
.glossary-hero{
padding: 11px 12px 14px;
border-radius: 20px;
}
.glossary-intro{
max-width: 100%;
width: 100%;
font-size: .94rem;
line-height: 1.4;
}
:global(body.glossary-home-follow-on) .glossary-hero{
padding: 9px 11px 11px;
}
:global(body.glossary-home-follow-on:not(.glossary-home-hero-expanded)) .glossary-intro{
max-width: 100%;
width: 100%;
font-size: .84rem;
line-height: 1.22;
}
}
@media (orientation: landscape) and (max-width: 920px) and (max-height: 520px){
.glossary-hero{
padding: 10px 12px 12px;
border-radius: 16px;
row-gap: 8px;
}
.glossary-kicker{
font-size: 10px;
letter-spacing: .1em;
}
.glossary-hero h1{
font-size: clamp(1.35rem, 4vw, 1.8rem);
line-height: 1;
}
.glossary-intro{
font-size: .84rem;
line-height: 1.24;
}
:global(body.glossary-home-follow-on) .glossary-hero{
padding: 9px 11px 10px;
border-radius: 16px;
}
:global(body.glossary-home-follow-on) .glossary-hero h1{
font-size: clamp(1.1rem, 3vw, 1.35rem);
}
:global(body.glossary-home-follow-on:not(.glossary-home-hero-expanded)) .glossary-intro{
font-size: .8rem;
line-height: 1.18;
max-height: 2.36em;
-webkit-line-clamp: 2;
opacity: .88;
}
.glossary-hero__toggle{
min-height: 24px;
font-size: 11px;
}
.glossary-hero-follow{
max-width: min(100%, 26ch);
}
}
@media (max-width: 860px){
.glossary-hero{
position: static !important;
top: auto !important;
z-index: auto !important;
margin-bottom: 18px !important;
}
.glossary-hero-follow{
display: none !important;
min-height: 0 !important;
opacity: 0 !important;
transform: none !important;
filter: none !important;
}
}
@media (orientation: landscape) and (max-width: 920px) and (max-height: 520px){
.glossary-hero{
position: static !important;
top: auto !important;
z-index: auto !important;
margin-bottom: 14px !important;
}
.glossary-hero-follow{
display: none !important;
min-height: 0 !important;
opacity: 0 !important;
transform: none !important;
filter: none !important;
}
}
</style>

View File

@@ -0,0 +1,133 @@
---
// Section wrapper props: optional anchor id, required heading title, optional
// intro paragraph, optional follow-bar label override, optional CTA link.
export interface Props {
id?: string;
title: string;
intro?: string;
followSection?: string;
ctaHref?: string;
ctaLabel?: string;
}
const {
id,
title,
intro,
followSection,
ctaHref,
ctaLabel,
} = Astro.props;
// Label exposed via data-follow-section on the h2; falls back to the title.
const resolvedFollowSection = (followSection || title || "").trim();
// The CTA renders only when both its href and its label are provided.
const showCta = Boolean(ctaHref && ctaLabel);
---
<section id={id} class="glossary-section">
<div class="glossary-section__head">
<div>
<h2 data-follow-section={resolvedFollowSection}>{title}</h2>
{intro && (
<p class="glossary-intro">{intro}</p>
)}
</div>
{showCta && (
<a class="glossary-cta" href={ctaHref}>
{ctaLabel}
</a>
)}
</div>
<slot />
</section>
<style>
.glossary-section{
margin-top: 34px;
scroll-margin-top: calc(var(--glossary-sticky-top) + 150px);
}
.glossary-section__head{
display: flex;
justify-content: space-between;
align-items: start;
gap: 14px;
flex-wrap: wrap;
margin-bottom: 12px;
}
.glossary-section h2{
margin: 0;
font-size: clamp(1.8rem, 3vw, 2.55rem);
line-height: 1.06;
letter-spacing: -.03em;
font-weight: 800;
}
.glossary-intro{
margin: 0;
max-width: 72ch;
font-size: 1rem;
line-height: 1.52;
opacity: .94;
}
.glossary-section__head .glossary-intro{
margin-top: 8px;
}
.glossary-cta{
display: inline-flex;
align-items: center;
justify-content: center;
min-height: 38px;
border: 1px solid var(--glossary-border-strong);
border-radius: 999px;
padding: 6px 13px;
color: var(--glossary-accent);
text-decoration: none;
white-space: nowrap;
transition: transform 120ms ease, background 120ms ease;
}
.glossary-cta:hover{
background: var(--glossary-bg-soft-strong);
text-decoration: none;
transform: translateY(-1px);
}
@media (max-width: 760px){
.glossary-section{
margin-top: 24px;
scroll-margin-top: calc(var(--glossary-sticky-top) + 110px);
}
.glossary-section__head{
flex-direction: column;
align-items: stretch;
gap: 10px;
margin-bottom: 10px;
}
.glossary-section h2{
font-size: clamp(1.45rem, 6vw, 1.95rem);
line-height: 1.05;
}
.glossary-intro{
font-size: .95rem;
line-height: 1.42;
}
.glossary-section__head .glossary-intro{
margin-top: 6px;
}
.glossary-cta{
width: fit-content;
min-height: 35px;
padding: 5px 12px;
font-size: .95rem;
}
}
</style>

View File

@@ -0,0 +1,219 @@
---
// Link shape shared by the "Dans cette page" and "Renvois utiles" lists.
interface LinkItem {
href: string;
label: string;
}
// Sidebar props: required nav aria-label and title; optional meta line,
// back-link override, in-page anchors, and cross-reference links.
interface Props {
ariaLabel: string;
title: string;
meta?: string;
backHref?: string;
backLabel?: string;
pageItems?: LinkItem[];
usefulLinks?: LinkItem[];
}
const {
ariaLabel,
title,
meta,
backHref = "/glossaire/",
backLabel = "← Retour au glossaire",
pageItems = [],
usefulLinks = [],
} = Astro.props;
---
<nav class="glossary-portal-aside" aria-label={ariaLabel}>
<div class="glossary-portal-aside__block">
<a class="glossary-portal-aside__back" href={backHref}>{backLabel}</a>
<div class="glossary-portal-aside__title">{title}</div>
{meta && <div class="glossary-portal-aside__meta">{meta}</div>}
</div>
{pageItems.length > 0 && (
<div class="glossary-portal-aside__block">
<h2 class="glossary-portal-aside__heading">Dans cette page</h2>
<ul class="glossary-portal-aside__list">
{pageItems.map((item) => (
<li><a href={item.href}>{item.label}</a></li>
))}
</ul>
</div>
)}
{usefulLinks.length > 0 && (
<div class="glossary-portal-aside__block">
<h2 class="glossary-portal-aside__heading">Renvois utiles</h2>
<ul class="glossary-portal-aside__list">
{usefulLinks.map((item) => (
<li><a href={item.href}>{item.label}</a></li>
))}
</ul>
</div>
)}
</nav>
<style>
.glossary-portal-aside{
display: flex;
flex-direction: column;
gap: 14px;
}
.glossary-portal-aside__block{
border: 1px solid rgba(127,127,127,0.22);
border-radius: 16px;
padding: 14px;
background: rgba(127,127,127,0.05);
}
.glossary-portal-aside__back{
display: inline-block;
margin-bottom: 10px;
font-size: 14px;
font-weight: 700;
line-height: 1.35;
text-decoration: none;
}
.glossary-portal-aside__title{
font-size: 16px;
font-weight: 800;
letter-spacing: .2px;
line-height: 1.3;
}
.glossary-portal-aside__meta{
margin-top: 8px;
font-size: 13px;
line-height: 1.4;
opacity: .8;
}
.glossary-portal-aside__heading{
margin: 0 0 11px;
font-size: 14px;
font-weight: 800;
line-height: 1.35;
opacity: .94;
}
.glossary-portal-aside__list{
list-style: none;
margin: 0;
padding: 0;
}
.glossary-portal-aside__list li{
margin: 7px 0;
}
.glossary-portal-aside__list a{
text-decoration: none;
font-size: 14px;
line-height: 1.4;
}
@media (max-width: 980px){
.glossary-portal-aside{
gap: 12px;
}
.glossary-portal-aside__block{
padding: 12px;
border-radius: 14px;
}
}
@media (max-width: 760px){
.glossary-portal-aside{
gap: 10px;
}
.glossary-portal-aside__block{
padding: 11px 12px;
border-radius: 14px;
}
.glossary-portal-aside__back{
margin-bottom: 8px;
font-size: 13px;
}
.glossary-portal-aside__title{
font-size: 15px;
line-height: 1.22;
}
.glossary-portal-aside__meta{
margin-top: 6px;
font-size: 12px;
line-height: 1.32;
}
.glossary-portal-aside__heading{
margin-bottom: 8px;
font-size: 13px;
line-height: 1.22;
}
.glossary-portal-aside__list li{
margin: 5px 0;
}
.glossary-portal-aside__list a{
font-size: 12.5px;
line-height: 1.3;
}
}
@media (orientation: landscape) and (max-width: 920px) and (max-height: 520px){
.glossary-portal-aside{
gap: 8px;
}
.glossary-portal-aside__block{
padding: 9px 10px;
border-radius: 12px;
}
.glossary-portal-aside__back{
margin-bottom: 6px;
font-size: 12px;
}
.glossary-portal-aside__title{
font-size: 14px;
line-height: 1.18;
}
.glossary-portal-aside__meta{
margin-top: 4px;
font-size: 11px;
line-height: 1.24;
}
.glossary-portal-aside__heading{
margin-bottom: 6px;
font-size: 12px;
line-height: 1.18;
}
.glossary-portal-aside__list li{
margin: 4px 0;
}
.glossary-portal-aside__list a{
font-size: 11.5px;
line-height: 1.22;
}
}
@media (prefers-color-scheme: dark){
.glossary-portal-aside__block{
background: rgba(255,255,255,0.04);
}
}
</style>

View File

@@ -0,0 +1,67 @@
---
// Pill-shaped CTA link props: target href, visible label, optional trailing
// icon glyph (decorative, aria-hidden in the template), optional extra class.
export interface Props {
href: string;
label: string;
icon?: string;
className?: string;
}
const {
href,
label,
icon = "↗",
className,
} = Astro.props;
---
<a class:list={["glossary-portal-cta", className]} href={href}>
<span>{label}</span>
<span aria-hidden="true">{icon}</span>
</a>
<style>
.glossary-portal-cta{
display: inline-flex;
align-items: center;
justify-content: center;
min-height: 40px;
padding: 7px 14px;
border: 1px solid rgba(127,127,127,0.24);
border-radius: 999px;
background: rgba(127,127,127,0.05);
text-decoration: none;
line-height: 1.2;
transition:
transform 120ms ease,
background 120ms ease,
border-color 120ms ease;
}
.glossary-portal-cta:hover{
transform: translateY(-1px);
background: rgba(127,127,127,0.08);
border-color: rgba(0,217,255,0.18);
text-decoration: none;
}
.glossary-portal-cta:focus-visible{
outline: 2px solid rgba(0,217,255,0.28);
outline-offset: 3px;
}
@media (max-width: 760px){
.glossary-portal-cta{
min-height: 36px;
padding: 6px 12px;
font-size: 12px;
}
}
@media (orientation: landscape) and (max-width: 920px) and (max-height: 520px){
.glossary-portal-cta{
min-height: 32px;
padding: 5px 10px;
font-size: 11px;
}
}
</style>

View File

@@ -0,0 +1,118 @@
---
// One card in the portal grid: link target, bold title, description line, and
// a small accent-colored meta line.
export type GlossaryPortalGridItem = {
href: string;
title: string;
description: string;
meta: string;
};
// `secondary` adds the --secondary modifier class on the grid container.
export interface Props {
items?: GlossaryPortalGridItem[];
secondary?: boolean;
}
const {
items = [],
secondary = false,
} = Astro.props;
---
<div
class:list={[
"glossary-portals",
secondary && "glossary-portals--secondary",
]}
>
{items.map((item) => (
<a class="glossary-portal-card" href={item.href}>
<strong>{item.title}</strong>
<span>{item.description}</span>
<small>{item.meta}</small>
</a>
))}
</div>
<style>
.glossary-portals{
display: grid;
grid-template-columns: repeat(auto-fit, minmax(240px, 1fr));
gap: 12px;
margin-top: 12px;
}
.glossary-portal-card{
display: flex;
flex-direction: column;
gap: 7px;
padding: 14px 15px;
border: 1px solid var(--glossary-border);
border-radius: 16px;
background: var(--glossary-bg-soft);
text-decoration: none;
transition: transform 120ms ease, background 120ms ease, border-color 120ms ease;
}
.glossary-portal-card:hover{
transform: translateY(-1px);
background: var(--glossary-bg-soft-strong);
border-color: rgba(0,217,255,0.16);
text-decoration: none;
}
.glossary-portal-card strong{
color: var(--glossary-accent);
font-size: 1.04rem;
line-height: 1.24;
}
.glossary-portal-card span{
color: inherit;
font-size: .98rem;
line-height: 1.46;
opacity: .94;
}
.glossary-portal-card small{
color: var(--glossary-accent);
font-size: .9rem;
line-height: 1.28;
opacity: .9;
}
@media (max-width: 760px){
.glossary-portals{
grid-template-columns: 1fr;
gap: 10px;
margin-top: 10px;
}
.glossary-portal-card{
padding: 12px 12px;
border-radius: 14px;
gap: 6px;
}
.glossary-portal-card strong{
font-size: .98rem;
}
.glossary-portal-card span{
font-size: .94rem;
line-height: 1.42;
}
.glossary-portal-card small{
font-size: .85rem;
}
}
@media (prefers-color-scheme: dark){
.glossary-portal-card{
background: rgba(255,255,255,0.04);
}
.glossary-portal-card:hover{
background: rgba(255,255,255,0.07);
}
}
</style>

View File

@@ -0,0 +1,260 @@
---
// Portal hero props. `prefix` namespaces the generated DOM ids
// (`${prefix}-hero-more`, `${prefix}-hero-toggle`). The *MaxWidth/*MaxHeight
// strings feed CSS custom properties via the inline style attribute.
interface Props {
prefix: string;
kicker: string;
title: string;
intro: string;
moreParagraphs?: string[];
introMaxWidth?: string;
followIntroMaxWidth?: string;
moreMaxHeight?: string;
}
const {
prefix,
kicker,
title,
intro,
moreParagraphs = [],
introMaxWidth = "70ch",
followIntroMaxWidth = "62ch",
moreMaxHeight = "18rem",
} = Astro.props;
---
<div
class="glossary-portal-hero glossary-page-hero"
data-glossary-portal-hero
style={`--portal-hero-intro-max-w:${introMaxWidth}; --portal-hero-follow-intro-max-w:${followIntroMaxWidth}; --portal-hero-secondary-max-h:${moreMaxHeight};`}
>
<p class="glossary-portal-hero__kicker">{kicker}</p>
<h1>{title}</h1>
<p class="glossary-portal-hero__intro glossary-portal-hero__intro--lead">
{intro}
</p>
{moreParagraphs.length > 0 && (
<div class="glossary-portal-hero__collapsible">
<div
class="glossary-portal-hero__more"
id={`${prefix}-hero-more`}
data-glossary-portal-more
aria-hidden="false"
>
{moreParagraphs.map((paragraph) => (
<p class="glossary-portal-hero__intro glossary-portal-hero__intro--more">
{paragraph}
</p>
))}
</div>
<button
class="glossary-portal-hero__toggle"
id={`${prefix}-hero-toggle`}
data-glossary-portal-toggle
type="button"
aria-controls={`${prefix}-hero-more`}
aria-expanded="false"
hidden
>
lire la suite
</button>
</div>
)}
</div>
<style>
.glossary-portal-hero{
position: sticky;
top: var(--glossary-sticky-top);
z-index: 12;
margin-bottom: var(--portal-hero-margin-bottom, 28px);
padding:
var(--portal-hero-pad-top, 20px)
var(--portal-hero-pad-x, 18px)
var(--portal-hero-pad-bottom, 22px);
border: 1px solid rgba(127,127,127,0.18);
border-radius: 28px;
background:
linear-gradient(180deg, rgba(0,0,0,0.60), rgba(0,0,0,0.92)),
radial-gradient(980px 260px at 18% 0%, rgba(0,217,255,0.08), transparent 60%);
backdrop-filter: blur(10px);
-webkit-backdrop-filter: blur(10px);
display: grid;
row-gap: var(--portal-hero-gap, 16px);
min-width: 0;
overflow: hidden;
transition:
background 280ms cubic-bezier(.22,.8,.22,1),
border-color 220ms cubic-bezier(.22,.8,.22,1),
box-shadow 220ms cubic-bezier(.22,.8,.22,1),
border-radius 220ms ease,
padding 220ms ease,
row-gap 220ms ease,
margin-bottom 220ms ease;
box-shadow:
inset 0 1px 0 rgba(255,255,255,0.02),
0 10px 26px rgba(0,0,0,0.08);
}
.glossary-portal-hero__kicker{
margin: 0;
font-size: var(--portal-kicker-size, 12px);
line-height: var(--portal-kicker-lh, 1.2);
letter-spacing: var(--portal-kicker-spacing, .14em);
text-transform: uppercase;
font-weight: 650;
opacity: .74;
}
.glossary-portal-hero h1{
margin: 0;
font-size: var(--portal-hero-h1-size, clamp(3rem, 4.8vw, 4.15rem));
line-height: var(--portal-hero-h1-lh, .98);
letter-spacing: var(--portal-hero-h1-spacing, -.045em);
font-weight: 850;
text-wrap: balance;
transition:
font-size 180ms ease,
line-height 180ms ease,
letter-spacing 180ms ease;
}
.glossary-portal-hero__intro{
margin: 0;
max-width: var(--portal-hero-intro-max-w, 70ch);
font-size: var(--portal-hero-intro-size, 1.06rem);
line-height: var(--portal-hero-intro-lh, 1.6);
text-wrap: pretty;
transition:
font-size 180ms ease,
line-height 180ms ease,
max-width 180ms ease,
opacity 180ms ease;
}
.glossary-portal-hero__intro--lead{ opacity: .95; }
.glossary-portal-hero__intro--more{ opacity: .89; }
.glossary-portal-hero__collapsible{
display: grid;
row-gap: 8px;
min-width: 0;
}
.glossary-portal-hero__more{
display: grid;
gap: 12px;
max-height: var(--portal-hero-secondary-max-h, 20em);
overflow: hidden;
opacity: var(--portal-hero-secondary-opacity, .92);
min-width: 0;
transition:
max-height 220ms ease,
opacity 180ms ease;
}
.glossary-portal-hero__toggle{
display: inline-flex;
align-items: center;
justify-content: center;
width: fit-content;
min-height: 34px;
padding: 5px 0;
border: 0;
border-radius: 0;
background: transparent;
color: inherit;
font-size: 12.5px;
line-height: 1.2;
letter-spacing: .01em;
opacity: .72;
cursor: pointer;
text-decoration: underline;
text-decoration-thickness: 1px;
text-underline-offset: 2px;
transition:
opacity 120ms ease,
transform 120ms ease;
}
.glossary-portal-hero__toggle:hover{
opacity: .92;
transform: translateY(-1px);
}
.glossary-portal-hero__toggle:focus-visible{
outline: 2px solid rgba(0,217,255,0.24);
outline-offset: 4px;
border-radius: 4px;
}
.glossary-portal-hero__toggle[hidden]{
display: none !important;
}
@media (max-width: 980px){
.glossary-portal-hero{
border-radius: 24px;
}
.glossary-portal-hero h1{
text-wrap: pretty;
}
.glossary-portal-hero__more{
gap: 10px;
}
}
@media (max-width: 860px){
.glossary-portal-hero{
position: static !important;
top: auto !important;
z-index: auto !important;
margin-bottom: 18px !important;
width: 100% !important;
max-width: 100% !important;
min-width: 0 !important;
}
.glossary-portal-hero h1,
.glossary-portal-hero__intro,
.glossary-portal-hero__more,
.glossary-portal-hero__collapsible{
min-width: 0 !important;
max-width: 100% !important;
}
}
@media (orientation: landscape) and (max-width: 920px) and (max-height: 520px){
.glossary-portal-hero{
position: static !important;
top: auto !important;
z-index: auto !important;
margin-bottom: 14px !important;
width: 100% !important;
max-width: 100% !important;
min-width: 0 !important;
border-radius: 16px !important;
}
.glossary-portal-hero h1,
.glossary-portal-hero__intro,
.glossary-portal-hero__more,
.glossary-portal-hero__collapsible{
min-width: 0 !important;
max-width: 100% !important;
}
}
@media (prefers-color-scheme: dark){
.glossary-portal-hero{
box-shadow:
inset 0 1px 0 rgba(255,255,255,0.02),
0 14px 34px rgba(0,0,0,0.16);
}
}
</style>

View File

@@ -0,0 +1,127 @@
---
// Panel props: optional h3 anchor id, required title, optional count badge
// text and intro paragraph. `surface` adds the bordered --surface variant;
// `className` appends an extra class on the root.
export interface Props {
id?: string;
title: string;
count?: string;
intro?: string;
surface?: boolean;
className?: string;
}
const {
id,
title,
count,
intro,
surface = false,
className,
} = Astro.props;
---
<div
class:list={[
"glossary-portal-panel",
surface && "glossary-portal-panel--surface",
className,
]}
>
<div class="glossary-portal-panel__head">
<h3 id={id}>{title}</h3>
{count && <span class="glossary-portal-panel__count">{count}</span>}
</div>
{intro && <p class="glossary-portal-panel__intro">{intro}</p>}
<slot />
</div>
<style>
.glossary-portal-panel{
display: grid;
gap: 10px;
}
.glossary-portal-panel--surface{
padding:
var(--portal-panel-pad-y, 16px)
var(--portal-panel-pad-x, 16px);
border: 1px solid var(--glossary-border, rgba(127,127,127,0.18));
border-radius: var(--portal-panel-radius, 18px);
background:
var(--glossary-bg-soft, rgba(127,127,127,0.035));
}
.glossary-portal-panel__head{
display: flex;
align-items: flex-start;
justify-content: space-between;
gap: 10px;
flex-wrap: wrap;
}
.glossary-portal-panel__head h3{
margin: 0;
font-size: var(--portal-local-h3-size, clamp(1.35rem, 2vw, 1.7rem));
line-height: var(--portal-local-h3-lh, 1.15);
letter-spacing: -.02em;
}
.glossary-portal-panel__count{
display: inline-flex;
align-items: center;
min-height: 26px;
padding: 0 9px;
border: 1px solid rgba(127,127,127,0.20);
border-radius: 999px;
background: rgba(127,127,127,0.04);
font-size: 11.5px;
line-height: 1.2;
opacity: .8;
white-space: nowrap;
}
.glossary-portal-panel__intro{
margin: 0;
font-size: var(--portal-card-text-size, 14px);
line-height: var(--portal-card-text-lh, 1.45);
opacity: .92;
}
@media (max-width: 760px){
.glossary-portal-panel{
gap: 8px;
}
.glossary-portal-panel__head{
gap: 8px;
}
.glossary-portal-panel__count{
min-height: 23px;
padding: 0 8px;
font-size: 10.5px;
}
}
@media (orientation: landscape) and (max-width: 920px) and (max-height: 520px){
.glossary-portal-panel{
gap: 6px;
}
.glossary-portal-panel__head{
gap: 6px;
}
.glossary-portal-panel__count{
min-height: 21px;
padding: 0 7px;
font-size: 10px;
}
}
@media (prefers-color-scheme: dark){
.glossary-portal-panel--surface{
background: rgba(255,255,255,0.04);
}
}
</style>

View File

@@ -0,0 +1,143 @@
---
// Frontmatter for a top-level glossary portal section: an <h2> heading with
// an optional count badge and intro, plus a default slot for section content.
interface Props {
// Anchor id applied to the <h2> heading (required — used for navigation).
id: string;
// Section heading text (required).
title: string;
// Optional badge text rendered next to the heading.
count?: string;
// Optional introductory paragraph shown under the heading row.
intro?: string;
// When true, applies the "--final" variant (larger top margin).
final?: boolean;
// Extra class name(s) appended to the root <section>.
className?: string;
}
// Destructure props; `final` defaults to a regular (non-final) section.
const {
id,
title,
count,
intro,
final = false,
className,
} = Astro.props;
---
<section class:list={["glossary-portal-section", final && "glossary-portal-section--final", className]}>
<div class="glossary-portal-section__head">
<h2 id={id}>{title}</h2>
{count && <span class="glossary-portal-section__count">{count}</span>}
</div>
{intro && <p class="glossary-portal-section__intro">{intro}</p>}
<slot />
</section>
<style>
.glossary-portal-section{
margin-top: 30px;
}
.glossary-portal-section h2{
margin: 0;
font-size: clamp(1.8rem, 3vw, 2.35rem);
line-height: 1.05;
letter-spacing: -.03em;
font-weight: 800;
}
.glossary-portal-section__head{
display: flex;
align-items: flex-start;
justify-content: space-between;
gap: 12px;
flex-wrap: wrap;
margin-bottom: 10px;
}
.glossary-portal-section__count{
display: inline-flex;
align-items: center;
min-height: 28px;
padding: 0 10px;
border: 1px solid rgba(127,127,127,0.20);
border-radius: 999px;
background: rgba(127,127,127,0.04);
font-size: 12px;
line-height: 1.2;
opacity: .8;
white-space: nowrap;
}
.glossary-portal-section__intro{
margin: 0;
max-width: 76ch;
font-size: var(--portal-body-size, 1rem);
line-height: var(--portal-body-lh, 1.55);
opacity: .94;
}
.glossary-portal-section--final{
margin-top: 34px;
}
@media (max-width: 980px){
.glossary-portal-section{
margin-top: 24px;
}
.glossary-portal-section h2{
font-size: clamp(1.6rem, 4.4vw, 2rem);
line-height: 1.04;
}
}
@media (max-width: 760px){
.glossary-portal-section{
margin-top: 20px;
}
.glossary-portal-section__head{
gap: 8px;
margin-bottom: 8px;
}
.glossary-portal-section h2{
font-size: clamp(1.34rem, 6.5vw, 1.72rem);
line-height: 1.04;
letter-spacing: -.022em;
}
.glossary-portal-section__count{
min-height: 24px;
padding: 0 8px;
font-size: 11px;
}
.glossary-portal-section--final{
margin-top: 24px;
}
}
@media (orientation: landscape) and (max-width: 920px) and (max-height: 520px){
.glossary-portal-section{
margin-top: 16px;
}
.glossary-portal-section__head{
gap: 6px;
margin-bottom: 6px;
}
.glossary-portal-section h2{
font-size: clamp(1.12rem, 4.2vw, 1.34rem);
line-height: 1.02;
}
.glossary-portal-section__count{
min-height: 22px;
padding: 0 7px;
font-size: 10px;
}
.glossary-portal-section--final{
margin-top: 18px;
}
}
</style>

View File

@@ -0,0 +1,487 @@
---
// Frontmatter for the glossary portal hero behavior component. It renders an
// inline script (below) that drives the hero's condensed/expanded states and
// its coordination with the shared reading-follow bar.
interface Props {
// DOM id of the hero's collapsible "more" region.
heroMoreId: string;
// DOM id of the button that re-expands the collapsed "more" region.
heroToggleId: string;
// Selector for per-section sticky heads to neutralise on this page.
sectionHeadSelector?: string;
// Viewport width (px) at or below which the compact behavior applies.
mobileBreakpoint?: number;
// Downward scroll distance (px) after expansion that auto-collapses the hero.
autoCollapseDelta?: number;
}
const {
heroMoreId,
heroToggleId,
sectionHeadSelector = ".glossary-portal-section__head",
mobileBreakpoint = 860,
autoCollapseDelta = 160,
} = Astro.props;
---
<script
is:inline
define:vars={{ heroMoreId, heroToggleId, sectionHeadSelector, mobileBreakpoint, autoCollapseDelta }}
>
(() => {
// Boots once the DOM is ready. Drives the glossary portal hero:
// - toggles a body-level "condensed" class when the hero reaches its
//   sticky anchor line;
// - manages the expand/collapse of the hero's "more" region (with
//   auto-collapse after scrolling down past `autoCollapseDelta` px);
// - mirrors the reading-follow bar's visibility onto a body class;
// - publishes the pinned hero height so other sticky UI can offset itself.
const boot = () => {
const body = document.body;
const root = document.documentElement;
// The hero is located by data attribute; the rendering component must
// set data-glossary-portal-hero on it.
const hero = document.querySelector("[data-glossary-portal-hero]");
const follow = document.getElementById("reading-follow");
// Optional collapsible region and its toggle button (ids from props).
const heroMore = document.getElementById(heroMoreId);
const heroToggle = document.getElementById(heroToggleId);
// Hard requirements; heroMore/heroToggle stay optional and are
// null-checked at every use below.
if (!body || !root || !hero || !follow) return;
// Body-level state classes this script owns (styled in the CSS below).
const BODY_CLASS = "is-glossary-portal-page";
const FOLLOW_ON_CLASS = "glossary-portal-follow-on";
const EXPANDED_CLASS = "glossary-portal-hero-expanded";
const CONDENSED_CLASS = "glossary-portal-hero-condensed";
// Compact layouts: narrow viewports, plus small landscape phones.
const mqMobile = window.matchMedia(`(max-width: ${mobileBreakpoint}px)`);
const mqSmallLandscape = window.matchMedia(
"(orientation: landscape) and (max-width: 920px) and (max-height: 520px)"
);
// Scroll Y at the moment the hero was expanded (null = not expanded).
let expandedAtY = null;
let lastScrollY = window.scrollY || 0;
// Pending requestAnimationFrame id for schedule() (0 = nothing pending).
let raf = 0;
// Caches of the last applied states, to avoid redundant DOM writes.
let lastFollowOn = null;
let lastCondensed = null;
let lastHeroHeight = -1;
body.classList.add(BODY_CLASS);
// The condensed/expanded machinery is disabled on compact viewports.
const isCompactViewport = () =>
mqMobile.matches || mqSmallLandscape.matches;
// Neutralise per-section sticky heads: this page uses the hero rail
// instead of locally sticky section headings.
const stripLocalSticky = () => {
document.querySelectorAll(sectionHeadSelector).forEach((el) => {
el.classList.remove("is-sticky");
el.removeAttribute("data-sticky-active");
});
};
// Read the sticky offset in px from the --glossary-sticky-top CSS var;
// falls back to 64 when the variable is missing or unparsable.
const readStickyTop = () => {
const raw = getComputedStyle(document.documentElement)
.getPropertyValue("--glossary-sticky-top")
.trim();
const n = Number.parseFloat(raw);
return Number.isFinite(n) ? n : 64;
};
// The follow bar counts as "on" only on large viewports and when it is
// visibly active. NOTE(review): this checks the *inline* style
// (follow.style.display), not the computed display — presumably the
// follow bar's own controller toggles it inline; confirm against it.
const computeFollowOn = () =>
!isCompactViewport() &&
follow.classList.contains("is-on") &&
follow.style.display !== "none" &&
follow.getAttribute("aria-hidden") !== "true";
// Condensed = the hero has reached (or passed) its sticky anchor line.
// The +2px slack absorbs sub-pixel rounding.
const computeCondensed = () => {
if (isCompactViewport()) return false;
const heroRect = hero.getBoundingClientRect();
const stickyTop = readStickyTop();
return heroRect.top <= stickyTop + 2;
};
const measureHeroHeight = () =>
Math.max(0, Math.round(hero.getBoundingClientRect().height || 0));
// Tolerance (px) for deciding the hero is pinned exactly on the rail.
const PIN_EPS = 3;
// Pinned = position:sticky hero sitting on the sticky line and still
// meaningfully visible (more than 24px of it below the line).
const isHeroPinned = () => {
if (isCompactViewport()) return false;
const rect = hero.getBoundingClientRect();
const stickyTop = readStickyTop();
const cs = getComputedStyle(hero);
if (cs.position !== "sticky") return false;
const pinnedOnRail = Math.abs(rect.top - stickyTop) <= PIN_EPS;
const stillVisible = rect.bottom > stickyTop + 24;
return pinnedOnRail && stillVisible;
};
// Publish the pinned hero height (0 when unpinned) so other sticky UI
// can offset below it. Prefers the page-level helper when available,
// otherwise writes the --glossary-local-sticky-h CSS variable directly.
const applyLocalStickyHeight = () => {
const h = isHeroPinned() ? measureHeroHeight() : 0;
if (h === lastHeroHeight) return;
lastHeroHeight = h;
if (typeof window.__archiSetLocalStickyHeight === "function") {
window.__archiSetLocalStickyHeight(h);
} else {
root.style.setProperty("--glossary-local-sticky-h", `${h}px`);
}
};
// Mirror follow-bar visibility onto the body class (change-detected).
const syncFollowState = () => {
const on = computeFollowOn();
if (on !== lastFollowOn) {
lastFollowOn = on;
body.classList.toggle(FOLLOW_ON_CLASS, on);
}
return on;
};
// Mirror the condensed state onto the body class (change-detected).
const syncCondensedState = () => {
const condensed = computeCondensed();
if (condensed !== lastCondensed) {
lastCondensed = condensed;
body.classList.toggle(CONDENSED_CLASS, condensed);
}
return condensed;
};
// Collapse the expanded "more" region and bring the toggle back.
// No-op when the hero is not currently expanded.
const collapseHero = () => {
if (!body.classList.contains(EXPANDED_CLASS)) return;
body.classList.remove(EXPANDED_CLASS);
expandedAtY = null;
if (heroMore) {
heroMore.setAttribute("aria-hidden", "true");
}
if (heroToggle) {
heroToggle.hidden = false;
heroToggle.setAttribute("aria-expanded", "false");
}
// Best effort: ask the global follow controller to re-layout.
try {
window.__archiUpdateFollow?.();
} catch {}
// schedule() is defined below; by the time this runs it exists.
schedule();
};
// Expand the "more" region; records the scroll position so the
// auto-collapse-on-scroll logic can measure how far the user moved.
const expandHero = () => {
body.classList.add(EXPANDED_CLASS);
expandedAtY = window.scrollY || 0;
if (heroMore) {
heroMore.setAttribute("aria-hidden", "false");
}
if (heroToggle) {
heroToggle.hidden = true;
heroToggle.setAttribute("aria-expanded", "true");
}
try {
window.__archiUpdateFollow?.();
} catch {}
schedule();
};
// Reconcile the more-region visibility + toggle ARIA with the current
// condensed/expanded state. On compact viewports (or when the hero is
// not condensed) the hero is always fully shown and expansion is reset.
const syncHeroState = (condensed) => {
const expanded = body.classList.contains(EXPANDED_CLASS);
const collapsed = condensed && !expanded;
if (isCompactViewport() || !condensed) {
body.classList.remove(EXPANDED_CLASS);
expandedAtY = null;
if (heroMore) {
heroMore.setAttribute("aria-hidden", "false");
}
if (heroToggle) {
heroToggle.hidden = true;
heroToggle.setAttribute("aria-expanded", "false");
}
return;
}
if (heroMore) {
heroMore.setAttribute("aria-hidden", collapsed ? "true" : "false");
}
if (heroToggle) {
heroToggle.hidden = !collapsed;
heroToggle.setAttribute("aria-expanded", expanded ? "true" : "false");
}
};
// While expanded, scrolling *down* by autoCollapseDelta px past the
// expansion point collapses the hero; scrolling up never collapses it.
// Every early-return path still refreshes lastScrollY so direction
// detection stays accurate.
const maybeAutoCollapseOnScroll = () => {
if (isCompactViewport()) {
lastScrollY = window.scrollY || 0;
return;
}
if (!body.classList.contains(EXPANDED_CLASS)) {
lastScrollY = window.scrollY || 0;
return;
}
if (expandedAtY == null) {
lastScrollY = window.scrollY || 0;
return;
}
const currentY = window.scrollY || 0;
const scrollingDown = currentY > lastScrollY;
const delta = currentY - expandedAtY;
if (scrollingDown && delta >= autoCollapseDelta) {
collapseHero();
}
lastScrollY = currentY;
};
// Full resync of all state. On compact viewports every desktop-only
// class is removed and the hero is forced fully visible. The
// requestAnimationFrame wrappers let layout settle before measuring;
// the second rAF pass re-measures after the first pass's own writes.
const syncAll = () => {
stripLocalSticky();
if (isCompactViewport()) {
body.classList.remove(FOLLOW_ON_CLASS);
body.classList.remove(CONDENSED_CLASS);
body.classList.remove(EXPANDED_CLASS);
lastFollowOn = false;
lastCondensed = false;
expandedAtY = null;
if (heroMore) {
heroMore.setAttribute("aria-hidden", "false");
}
if (heroToggle) {
heroToggle.hidden = true;
heroToggle.setAttribute("aria-expanded", "false");
}
requestAnimationFrame(() => {
applyLocalStickyHeight();
try {
window.__archiUpdateFollow?.();
} catch {}
});
return;
}
const condensed = syncCondensedState();
syncHeroState(condensed);
requestAnimationFrame(() => {
applyLocalStickyHeight();
syncFollowState();
try {
window.__archiUpdateFollow?.();
} catch {}
});
requestAnimationFrame(() => {
applyLocalStickyHeight();
try {
window.__archiUpdateFollow?.();
} catch {}
});
};
// Coalesce bursts of events into at most one syncAll() per frame.
const schedule = () => {
if (raf) return;
raf = requestAnimationFrame(() => {
raf = 0;
syncAll();
});
};
heroToggle?.addEventListener("click", expandHero);
const onScroll = () => {
maybeAutoCollapseOnScroll();
schedule();
};
// Re-sync whenever the follow bar's visibility attributes change
// (class / inline style / aria-hidden are exactly what computeFollowOn
// reads).
const followObserver = new MutationObserver(schedule);
followObserver.observe(follow, {
attributes: true,
attributeFilter: ["class", "style", "aria-hidden"],
subtree: false,
});
// Re-measure when the hero itself resizes (ResizeObserver is optional
// for older engines).
const heroResizeObserver =
typeof ResizeObserver !== "undefined"
? new ResizeObserver(schedule)
: null;
heroResizeObserver?.observe(hero);
window.addEventListener("scroll", onScroll, { passive: true });
window.addEventListener("resize", schedule);
window.addEventListener("pageshow", schedule);
// Fonts change the hero's height; re-sync once they finish loading.
if (document.fonts?.ready) {
document.fonts.ready.then(schedule).catch(() => {});
}
// addEventListener with addListener fallback for older Safari.
if (mqMobile.addEventListener) {
mqMobile.addEventListener("change", schedule);
} else if (mqMobile.addListener) {
mqMobile.addListener(schedule);
}
if (mqSmallLandscape.addEventListener) {
mqSmallLandscape.addEventListener("change", schedule);
} else if (mqSmallLandscape.addListener) {
mqSmallLandscape.addListener(schedule);
}
schedule();
};
// Defer boot until the DOM is parsed; run immediately if it already is.
if (document.readyState === "loading") {
document.addEventListener("DOMContentLoaded", boot, { once: true });
} else {
boot();
}
})();
</script>
<style>
:global(body.is-glossary-portal-page #reading-follow){
z-index: 10;
}
/* Le hero se condense dès qu'il devient sticky */
:global(body.is-glossary-portal-page.glossary-portal-hero-condensed .glossary-portal-hero){
padding:
var(--portal-hero-pad-top-condensed, 14px)
var(--portal-hero-pad-x-condensed, 16px)
var(--portal-hero-pad-bottom-condensed, 16px);
row-gap: var(--portal-hero-gap-condensed, 10px);
box-shadow:
inset 0 1px 0 rgba(255,255,255,0.02),
0 8px 20px rgba(0,0,0,0.12);
}
:global(body.is-glossary-portal-page.glossary-portal-hero-condensed .glossary-portal-hero h1){
font-size: var(--portal-hero-h1-size-condensed, clamp(2.05rem, 3.15vw, 2.7rem));
line-height: var(--portal-hero-h1-lh-condensed, 1);
letter-spacing: var(--portal-hero-h1-spacing-condensed, -.04em);
}
:global(body.is-glossary-portal-page.glossary-portal-hero-condensed .glossary-portal-hero__intro){
max-width: var(--portal-hero-follow-intro-max-w, 62ch);
font-size: var(--portal-hero-intro-size-condensed, .98rem);
line-height: var(--portal-hero-intro-lh-condensed, 1.46);
}
:global(body.is-glossary-portal-page.glossary-portal-hero-condensed .glossary-portal-hero__kicker){
opacity: .68;
}
/* Le more se replie dès l'état condensé */
:global(body.is-glossary-portal-page.glossary-portal-hero-condensed:not(.glossary-portal-hero-expanded) .glossary-portal-hero__more){
max-height: 0;
opacity: 0;
overflow: hidden;
pointer-events: none;
}
:global(body.is-glossary-portal-page.glossary-portal-hero-condensed:not(.glossary-portal-hero-expanded) .glossary-portal-hero__toggle){
display: inline-flex;
}
/* L'accolage hero + follow n'arrive que quand le follow est actif */
:global(body.is-glossary-portal-page.glossary-portal-hero-condensed.glossary-portal-follow-on .glossary-portal-hero){
margin-bottom: 0;
border-bottom-left-radius: 0;
border-bottom-right-radius: 0;
}
:global(body.is-glossary-portal-page.glossary-portal-follow-on #reading-follow .reading-follow__inner){
margin-top: -1px;
border-top-left-radius: 0;
border-top-right-radius: 0;
}
:global(body.is-glossary-portal-page.glossary-portal-follow-on #reading-follow .rf-h2){
letter-spacing: -.02em;
}
:global(body.is-glossary-portal-page .glossary-portal-section__head.is-sticky),
:global(body.is-glossary-portal-page .glossary-portal-section__head[data-sticky-active="true"]){
position: static !important;
top: auto !important;
z-index: auto !important;
padding: 0 !important;
border: 0 !important;
background: transparent !important;
box-shadow: none !important;
backdrop-filter: none !important;
-webkit-backdrop-filter: none !important;
}
@media (max-width: 860px){
:global(body.is-glossary-portal-page #reading-follow),
:global(body.is-glossary-portal-page #reading-follow .reading-follow__inner){
display: none !important;
opacity: 0 !important;
visibility: hidden !important;
pointer-events: none !important;
}
:global(body.is-glossary-portal-page){
--followbar-h: 0px !important;
--sticky-offset-px: calc(var(--sticky-header-h, 0px) + var(--page-gap, 12px)) !important;
}
:global(body.is-glossary-portal-page .glossary-portal-hero){
margin-bottom: var(--portal-hero-margin-bottom, 18px);
border-radius: 20px !important;
box-shadow: none !important;
}
:global(body.is-glossary-portal-page .glossary-portal-hero__more){
max-height: none !important;
opacity: 1 !important;
overflow: visible !important;
pointer-events: auto !important;
}
:global(body.is-glossary-portal-page .glossary-portal-hero__toggle){
display: none !important;
}
}
@media (orientation: landscape) and (max-width: 920px) and (max-height: 520px){
:global(body.is-glossary-portal-page #reading-follow),
:global(body.is-glossary-portal-page #reading-follow .reading-follow__inner){
display: none !important;
opacity: 0 !important;
visibility: hidden !important;
pointer-events: none !important;
}
:global(body.is-glossary-portal-page){
--followbar-h: 0px !important;
--sticky-offset-px: calc(var(--sticky-header-h, 0px) + var(--page-gap, 12px)) !important;
}
:global(body.is-glossary-portal-page .glossary-portal-hero){
margin-bottom: var(--portal-hero-margin-bottom, 12px);
border-radius: 16px !important;
box-shadow: none !important;
}
:global(body.is-glossary-portal-page .glossary-portal-hero__more){
max-height: none !important;
opacity: 1 !important;
overflow: visible !important;
pointer-events: auto !important;
}
:global(body.is-glossary-portal-page .glossary-portal-hero__toggle){
display: none !important;
}
}
</style>

View File

@@ -0,0 +1,125 @@
---
// Frontmatter for the "Relations conceptuelles" component: renders one card
// per relation block, each listing linked glossary entries with their short
// definitions. Renders nothing when relationBlocks is empty.
import type { GlossaryRelationBlock } from "../lib/glossary";
import { hrefOfGlossaryEntry } from "../lib/glossary";
interface Props {
// Relation groups to render; each block carries a title, a CSS class
// modifier, and its list of glossary entries.
relationBlocks: GlossaryRelationBlock[];
}
// Default to an empty list so the component can be used without props.
const { relationBlocks = [] } = Astro.props;
// Stable heading id used by aria-labelledby and as an anchor target.
const relationsHeadingId = "relations-conceptuelles";
---
{relationBlocks.length > 0 && (
<section
class="glossary-relations"
aria-labelledby={relationsHeadingId}
>
<h2 id={relationsHeadingId}>Relations conceptuelles</h2>
<div class="glossary-relations-grid">
{relationBlocks.map((block) => (
<section class={`glossary-relations-card ${block.className}`}>
<h3>{block.title}</h3>
<ul>
{block.items.map((item) => (
<li>
<a href={hrefOfGlossaryEntry(item)}>{item.data.term}</a>
<span> — {item.data.definitionShort}</span>
</li>
))}
</ul>
</section>
))}
</div>
</section>
)}
<style>
.glossary-relations{
margin-top: 22px;
padding-top: 14px;
border-top: 1px solid rgba(127,127,127,0.18);
}
.glossary-relations h2{
margin: 0 0 12px;
font-size: clamp(1.35rem, 3vw, 1.8rem);
line-height: 1.08;
}
.glossary-relations-grid{
display: grid;
grid-template-columns: repeat(auto-fit, minmax(220px, 1fr));
gap: 10px;
}
.glossary-relations-card{
border: 1px solid rgba(127,127,127,0.20);
border-radius: 14px;
padding: 12px 13px;
background: rgba(127,127,127,0.05);
}
.glossary-relations-card h3{
margin: 0 0 8px;
font-size: 14px;
line-height: 1.3;
}
.glossary-relations-card ul{
margin: 0;
padding-left: 16px;
}
.glossary-relations-card li{
margin-bottom: 7px;
font-size: 13.5px;
line-height: 1.42;
}
.glossary-relations-card li:last-child{
margin-bottom: 0;
}
.glossary-relations-card span{
opacity: .88;
}
@media (max-width: 760px){
.glossary-relations{
margin-top: 18px;
padding-top: 12px;
}
.glossary-relations h2{
margin-bottom: 10px;
font-size: 1.3rem;
}
.glossary-relations-grid{
grid-template-columns: 1fr;
gap: 8px;
}
.glossary-relations-card{
padding: 11px 11px;
border-radius: 13px;
}
.glossary-relations-card h3{
font-size: 13px;
margin-bottom: 7px;
}
.glossary-relations-card li{
font-size: 13px;
line-height: 1.38;
}
}
@media (prefers-color-scheme: dark){
.glossary-relations-card{
background: rgba(255,255,255,0.04);
}
}
</style>

View File

@@ -68,7 +68,6 @@ const { initialLevel = 1 } = Astro.props;
} catch {} } catch {}
} }
// init : storage > initialLevel
let start = clampLevel(initialLevel); let start = clampLevel(initialLevel);
try { try {
const stored = localStorage.getItem(KEY); const stored = localStorage.getItem(KEY);
@@ -77,13 +76,11 @@ const { initialLevel = 1 } = Astro.props;
applyLevel(start, { persist: false }); applyLevel(start, { persist: false });
// clicks
wrap.addEventListener("click", (ev) => { wrap.addEventListener("click", (ev) => {
const btn = ev.target?.closest?.("button[data-level]"); const btn = ev.target?.closest?.("button[data-level]");
if (!btn) return; if (!btn) return;
ev.preventDefault(); ev.preventDefault();
// ✅ crucial : on capture la position AVANT le reflow lié au changement de niveau
captureBeforeLevelSwitch(); captureBeforeLevelSwitch();
applyLevel(btn.dataset.level); applyLevel(btn.dataset.level);
}); });
@@ -95,6 +92,8 @@ const { initialLevel = 1 } = Astro.props;
display: inline-flex; display: inline-flex;
gap: 8px; gap: 8px;
align-items: center; align-items: center;
flex-wrap: wrap;
max-width: 100%;
} }
.level-btn{ .level-btn{
@@ -106,6 +105,7 @@ const { initialLevel = 1 } = Astro.props;
cursor: pointer; cursor: pointer;
user-select: none; user-select: none;
transition: filter .12s ease, transform .12s ease, background .12s ease, border-color .12s ease; transition: filter .12s ease, transform .12s ease, background .12s ease, border-color .12s ease;
white-space: nowrap;
} }
.level-btn:hover{ .level-btn:hover{
@@ -125,4 +125,21 @@ const { initialLevel = 1 } = Astro.props;
.level-btn:active{ .level-btn:active{
transform: translateY(1px); transform: translateY(1px);
} }
</style>
@media (max-width: 980px){
.level-toggle{
gap: 6px;
}
.level-btn{
padding: 5px 9px;
font-size: 12px;
}
}
@media (max-width: 760px){
.level-toggle{
display: none;
}
}
</style>

View File

@@ -3,49 +3,122 @@ const { headings } = Astro.props;
// H2/H3 seulement // H2/H3 seulement
const items = (headings || []).filter((h) => h.depth >= 2 && h.depth <= 3); const items = (headings || []).filter((h) => h.depth >= 2 && h.depth <= 3);
const tocId = `toc-local-${Math.random().toString(36).slice(2, 9)}`;
--- ---
{items.length > 0 && ( {items.length > 0 && (
<nav class="toc-local" aria-label="Dans ce chapitre"> <nav class="toc-local" aria-label="Dans ce chapitre" data-toc-local>
<div class="toc-local__title">Dans ce chapitre</div> <button
class="toc-local__head toc-local__toggle"
type="button"
aria-expanded="true"
aria-controls={tocId}
>
<span class="toc-local__title">Dans ce chapitre</span>
<span class="toc-local__chevron" aria-hidden="true">▾</span>
</button>
<ol class="toc-local__list"> <div class="toc-local__body-clip" id={tocId}>
{items.map((h) => ( <div class="toc-local__body">
<li <ol class="toc-local__list">
class={`toc-local__item d${h.depth}`} {items.map((h) => (
data-toc-item <li
data-depth={h.depth} class={`toc-local__item d${h.depth}`}
data-id={h.slug} data-toc-item
> data-depth={h.depth}
<a href={`#${h.slug}`} data-toc-link data-slug={h.slug}> data-id={h.slug}
{h.text} >
</a> <a href={`#${h.slug}`} data-toc-link data-slug={h.slug}>
</li> <span class="toc-local__mark" aria-hidden="true"></span>
))} <span class="toc-local__text">{h.text}</span>
</ol> </a>
</li>
))}
</ol>
</div>
</div>
</nav> </nav>
)} )}
<script is:inline> <script is:inline>
(() => { (() => {
function init() { function init() {
const toc = document.querySelector(".toc-local"); const toc = document.querySelector(".toc-local[data-toc-local]");
if (!toc) return; if (!toc || toc.dataset.tocReady === "1") return;
toc.dataset.tocReady = "1";
const toggle = toc.querySelector(".toc-local__toggle");
const bodyClip = toc.querySelector(".toc-local__body-clip");
const mq = window.matchMedia("(max-width: 980px)");
const KEY = `archicratie:toc-local:${window.location.pathname}`;
if (!toggle || !bodyClip) return;
const readState = () => {
try {
const v = localStorage.getItem(KEY);
if (v === "open") return true;
if (v === "closed") return false;
} catch {}
return null;
};
const writeState = (open) => {
try { localStorage.setItem(KEY, open ? "open" : "closed"); } catch {}
};
const setOpen = (open, { persist = true, emit = true } = {}) => {
const isMobile = mq.matches;
toc.classList.toggle("is-collapsed", isMobile && !open);
toggle.setAttribute("aria-expanded", open ? "true" : "false");
if (persist && isMobile) writeState(open);
if (emit && open && isMobile) {
window.dispatchEvent(new CustomEvent("archicratie:tocLocalOpen"));
}
};
const initAccordion = () => {
if (!mq.matches) {
setOpen(true, { persist: false, emit: false });
return;
}
const stored = readState();
setOpen(stored == null ? true : stored, { persist: false, emit: false });
};
toggle.addEventListener("click", () => {
const next = toggle.getAttribute("aria-expanded") !== "true";
setOpen(next);
});
if (mq.addEventListener) {
mq.addEventListener("change", initAccordion);
} else if (mq.addListener) {
mq.addListener(initAccordion);
}
const itemEls = Array.from(toc.querySelectorAll("[data-toc-item]")); const itemEls = Array.from(toc.querySelectorAll("[data-toc-item]"));
if (!itemEls.length) return; if (!itemEls.length) {
initAccordion();
return;
}
const ordered = itemEls const ordered = itemEls
.map((li) => { .map((li) => {
const a = li.querySelector("a[data-toc-link]"); const a = li.querySelector("a[data-toc-link]");
const id = li.getAttribute("data-id") || a?.dataset.slug || ""; const id = li.getAttribute("data-id") || a?.dataset.slug || "";
const depth = Number(li.getAttribute("data-depth") || "0"); const depth = Number(li.getAttribute("data-depth") || "0");
const el = id ? document.getElementById(id) : null; // span.details-anchor OU h3[id] const el = id ? document.getElementById(id) : null;
return (a && id && el) ? { id, depth, li, a, el } : null; return (a && id && el) ? { id, depth, li, a, el } : null;
}) })
.filter(Boolean); .filter(Boolean);
if (!ordered.length) return; if (!ordered.length) {
initAccordion();
return;
}
const clear = () => { const clear = () => {
for (const t of ordered) { for (const t of ordered) {
@@ -55,14 +128,29 @@ const items = (headings || []).filter((h) => h.depth >= 2 && h.depth <= 3);
}; };
const openDetailsIfNeeded = (el) => { const openDetailsIfNeeded = (el) => {
const d = el?.closest?.("details"); try {
if (d && !d.open) d.open = true; if (!el) return;
let d = el.closest?.("details") || null;
if (!d && el.classList?.contains("details-anchor")) {
const n = el.nextElementSibling;
if (n && n.tagName === "DETAILS") d = n;
}
if (!d) {
const s = el.closest?.("summary");
if (s && s.parentElement && s.parentElement.tagName === "DETAILS") d = s.parentElement;
}
if (d && d.tagName === "DETAILS" && !d.open) d.open = true;
} catch {}
}; };
let current = ""; let current = "";
const setCurrent = (id) => { const setCurrent = (id, { autoOpen = true } = {}) => {
if (!id || id === current) return; if (!id) return;
const t = ordered.find((x) => x.id === id); const t = ordered.find((x) => x.id === id);
if (!t) return; if (!t) return;
@@ -74,17 +162,21 @@ const items = (headings || []).filter((h) => h.depth >= 2 && h.depth <= 3);
t.a.setAttribute("aria-current", "true"); t.a.setAttribute("aria-current", "true");
t.li.classList.add("is-current"); t.li.classList.add("is-current");
// ✅ IMPORTANT: plus de scrollIntoView ici if (mq.matches && autoOpen && toc.classList.contains("is-collapsed")) {
// sinon ça scroll l'aside pendant le scroll du reading => TOC global “disparaît”. setOpen(true);
}
window.dispatchEvent(
new CustomEvent("archicratie:tocLocalActive", { detail: { id } })
);
}; };
const computeActive = () => { const computeActive = () => {
const visible = ordered.filter((t) => { const visible = ordered.filter((t) => {
const d = t.el.closest?.("details"); const d = t.el.closest?.("details");
if (d && !d.open) { if (d && !d.open) {
// Si l'élément est dans <summary>, il reste visible même details fermé
const inSummary = !!t.el.closest?.("summary"); const inSummary = !!t.el.closest?.("summary");
if (!inSummary) return false; if (!inSummary && !t.el.classList?.contains("details-anchor")) return false;
} }
return true; return true;
}); });
@@ -102,7 +194,7 @@ const items = (headings || []).filter((h) => h.depth >= 2 && h.depth <= 3);
} }
if (!best) best = visible[0]; if (!best) best = visible[0];
setCurrent(best.id); if (best && best.id !== current) setCurrent(best.id, { autoOpen: true });
}; };
let ticking = false; let ticking = false;
@@ -117,11 +209,14 @@ const items = (headings || []).filter((h) => h.depth >= 2 && h.depth <= 3);
const syncFromHash = () => { const syncFromHash = () => {
const id = (location.hash || "").slice(1); const id = (location.hash || "").slice(1);
if (!id) { computeActive(); return; } if (!id) {
computeActive();
return;
}
const el = document.getElementById(id); const el = document.getElementById(id);
if (el) openDetailsIfNeeded(el); if (el) openDetailsIfNeeded(el);
setCurrent(id); setCurrent(id, { autoOpen: true });
}; };
toc.addEventListener("click", (ev) => { toc.addEventListener("click", (ev) => {
@@ -133,13 +228,14 @@ const items = (headings || []).filter((h) => h.depth >= 2 && h.depth <= 3);
const el = document.getElementById(id); const el = document.getElementById(id);
if (el) openDetailsIfNeeded(el); if (el) openDetailsIfNeeded(el);
setCurrent(id); setCurrent(id, { autoOpen: true });
}); });
window.addEventListener("scroll", onScroll, { passive: true }); window.addEventListener("scroll", onScroll, { passive: true });
window.addEventListener("resize", onScroll); window.addEventListener("resize", onScroll);
window.addEventListener("hashchange", syncFromHash); window.addEventListener("hashchange", syncFromHash);
initAccordion();
syncFromHash(); syncFromHash();
onScroll(); onScroll();
} }
@@ -153,30 +249,183 @@ const items = (headings || []).filter((h) => h.depth >= 2 && h.depth <= 3);
</script> </script>
<style> <style>
.toc-local{margin-top:12px;border:1px solid rgba(127,127,127,.25);border-radius:16px;padding:12px} .toc-local{
.toc-local__title{font-size:13px;opacity:.85;margin-bottom:8px} margin-top: 12px;
border: 1px solid rgba(127,127,127,.25);
.toc-local__list{list-style:none;margin:0;padding:0} border-radius: 16px;
.toc-local__item::marker{content:""} padding: 12px;
.toc-local__item{margin:6px 0} background: rgba(127,127,127,0.03);
.toc-local__item.d3{margin-left:12px;opacity:.9}
.toc-local__item.is-current > a{
font-weight: 750;
text-decoration: underline;
} }
.toc-local a{ .toc-local__toggle{
display:inline-block; width: 100%;
max-width:100%; appearance: none;
text-decoration:none; border: 0;
background: transparent;
color: inherit;
text-align: left;
padding: 0;
cursor: pointer;
} }
.toc-local a:hover{ text-decoration: underline; }
.toc-local__head{
display: flex;
align-items: center;
justify-content: space-between;
gap: 10px;
margin-bottom: 8px;
}
.toc-local__title{
font-size: 13px;
opacity: .85;
}
.toc-local__chevron{
font-size: 12px;
opacity: .72;
transition: transform 180ms ease;
}
.toc-local__body-clip{
display: grid;
grid-template-rows: 1fr;
transition:
grid-template-rows 220ms ease,
opacity 160ms ease,
margin-top 220ms ease;
}
.toc-local__body{
min-height: 0;
overflow: hidden;
}
.toc-local__list{ .toc-local__list{
list-style: none;
margin: 0;
padding: 0;
max-height: 44vh; max-height: 44vh;
overflow: auto; overflow: auto;
padding-right: 8px; padding-right: 8px;
scrollbar-gutter: stable; scrollbar-gutter: stable;
} }
</style> .toc-local__item::marker{ content:""; }
.toc-local__item{ margin: 6px 0; }
.toc-local__item.d3{
margin-left: 14px;
opacity: .94;
}
.toc-local a{
display: grid;
grid-template-columns: auto 1fr;
gap: 8px;
align-items: start;
max-width: 100%;
text-decoration: none;
}
.toc-local a:hover{
text-decoration: none;
}
.toc-local__mark{
width: 10px;
height: 10px;
margin-top: .36em;
border-radius: 999px;
border: 1px solid rgba(127,127,127,.34);
background: transparent;
opacity: .68;
}
.toc-local__text{
line-height: 1.28;
}
.toc-local__item.is-current > a{
font-weight: 760;
}
.toc-local__item.is-current > a .toc-local__mark{
background: currentColor;
border-color: currentColor;
box-shadow: 0 0 0 3px rgba(127,127,127,.10);
opacity: 1;
}
@media (max-width: 980px){
.toc-local{
padding: 10px 12px;
border-radius: 14px;
}
.toc-local__head{
margin-bottom: 0;
min-height: 28px;
}
.toc-local__body-clip{
margin-top: 10px;
}
.toc-local.is-collapsed .toc-local__body-clip{
grid-template-rows: 0fr;
opacity: 0;
margin-top: 0;
}
.toc-local__body{
min-height: 0;
overflow: hidden;
transition: opacity 180ms ease;
}
.toc-local.is-collapsed .toc-local__body{
opacity: 0;
}
.toc-local.is-collapsed .toc-local__chevron{
transform: rotate(-90deg);
}
.toc-local__title{
font-size: 13px;
}
.toc-local__list{
max-height: min(42vh, 360px);
padding-right: 4px;
}
.toc-local__item{
margin: 5px 0;
}
.toc-local__item.d2 > a .toc-local__text{
font-size: 12.9px;
line-height: 1.24;
font-weight: 680;
}
.toc-local__item.d3{
margin-left: 12px;
}
.toc-local__item.d3 > a .toc-local__text{
font-size: 12.1px;
line-height: 1.22;
opacity: .95;
}
.toc-local__item.d3 > a .toc-local__mark{
width: 8px;
height: 8px;
margin-top: .42em;
opacity: .55;
}
}
</style>

View File

@@ -25,12 +25,12 @@
{/* ✅ actions références en haut (niveau 2 uniquement) */} {/* ✅ actions références en haut (niveau 2 uniquement) */}
<div class="panel-top-actions level-2" aria-label="Actions références"> <div class="panel-top-actions level-2" aria-label="Actions références">
<div class="panel-actions"> <div class="panel-actions">
<button class="panel-btn panel-btn--primary" id="panel-ref-submit" type="button"> <button class="panel-btn panel-btn--primary" id="panel-ref-submit" type="button">
Soumettre une référence (Gitea) Soumettre une référence (Gitea)
</button> </button>
</div> </div>
<div class="panel-msg" id="panel-ref-msg" hidden></div> <div class="panel-msg" id="panel-ref-msg" hidden></div>
</div> </div>
<section class="panel-block level-2" aria-label="Références et auteurs"> <section class="panel-block level-2" aria-label="Références et auteurs">
@@ -60,7 +60,7 @@
</section> </section>
</div> </div>
{/* ✅ Lightbox media (pop-up au-dessus du panel) */} {/* ✅ Lightbox media (plein écran) */}
<div class="panel-lightbox" id="panel-lightbox" hidden aria-hidden="true"> <div class="panel-lightbox" id="panel-lightbox" hidden aria-hidden="true">
<div class="panel-lightbox__overlay" data-close="1"></div> <div class="panel-lightbox__overlay" data-close="1"></div>
<div class="panel-lightbox__dialog" role="dialog" aria-modal="true" aria-label="Aperçu du média"> <div class="panel-lightbox__dialog" role="dialog" aria-modal="true" aria-label="Aperçu du média">
@@ -93,6 +93,9 @@
const btnMediaSubmit = root.querySelector("#panel-media-submit"); const btnMediaSubmit = root.querySelector("#panel-media-submit");
const msgMedia = root.querySelector("#panel-media-msg"); const msgMedia = root.querySelector("#panel-media-msg");
const btnRefSubmit = root.querySelector("#panel-ref-submit");
const msgRef = root.querySelector("#panel-ref-msg");
const taComment = root.querySelector("#panel-comment-text"); const taComment = root.querySelector("#panel-comment-text");
const btnSend = root.querySelector("#panel-comment-send"); const btnSend = root.querySelector("#panel-comment-send");
const msgComment = root.querySelector("#panel-comment-msg"); const msgComment = root.querySelector("#panel-comment-msg");
@@ -101,9 +104,6 @@
const lbContent = root.querySelector("#panel-lightbox-content"); const lbContent = root.querySelector("#panel-lightbox-content");
const lbCaption = root.querySelector("#panel-lightbox-caption"); const lbCaption = root.querySelector("#panel-lightbox-caption");
const btnRefSubmit = root.querySelector("#panel-ref-submit");
const msgRef = root.querySelector("#panel-ref-msg");
const docTitle = document.body?.dataset?.docTitle || document.title || "Archicratie"; const docTitle = document.body?.dataset?.docTitle || document.title || "Archicratie";
const docVersion = document.body?.dataset?.docVersion || ""; const docVersion = document.body?.dataset?.docVersion || "";
@@ -114,6 +114,16 @@
let currentParaId = ""; let currentParaId = "";
let mediaShowAll = (localStorage.getItem("archicratie:panel:mediaAll") === "1"); let mediaShowAll = (localStorage.getItem("archicratie:panel:mediaAll") === "1");
// ===== cosmetics: micro flash “update” =====
let _flashT = 0;
function flashUpdate(){
try {
root.classList.add("is-updating");
if (_flashT) clearTimeout(_flashT);
_flashT = setTimeout(() => root.classList.remove("is-updating"), 180);
} catch {}
}
// ===== globals ===== // ===== globals =====
function getG() { function getG() {
return window.__archiGitea || { ready: false, base: "", owner: "", repo: "" }; return window.__archiGitea || { ready: false, base: "", owner: "", repo: "" };
@@ -121,9 +131,6 @@
function getAuthInfoP() { function getAuthInfoP() {
return window.__archiAuthInfoP || Promise.resolve({ ok: false, groups: [] }); return window.__archiAuthInfoP || Promise.resolve({ ok: false, groups: [] });
} }
function isDev() {
return Boolean((window.__archiFlags && window.__archiFlags.dev) || /^(localhost|127\.0\.0\.1|\[::1\])$/i.test(location.hostname));
}
const access = { ready: false, canUsers: false }; const access = { ready: false, canUsers: false };
@@ -137,23 +144,20 @@
return Array.isArray(groups) && groups.some((x) => String(x).toLowerCase() === gg); return Array.isArray(groups) && groups.some((x) => String(x).toLowerCase() === gg);
} }
// ✅ règle mission : readers + editors peuvent soumettre médias + commentaires // ✅ readers + editors peuvent soumettre médias + commentaires + refs
// ✅ dev fallback : si /_auth/whoami nexiste pas, on autorise pour tester
getAuthInfoP().then((info) => { getAuthInfoP().then((info) => {
const groups = Array.isArray(info?.groups) ? info.groups : []; const groups = Array.isArray(info?.groups) ? info.groups : [];
const canReaders = inGroup(groups, "readers"); const canReaders = inGroup(groups, "readers");
const canEditors = inGroup(groups, "editors"); const canEditors = inGroup(groups, "editors");
access.canUsers = Boolean((info?.ok && (canReaders || canEditors)) || (isDev() && !info?.ok)); const whoamiSkipped = Boolean(window.__archiFlags && window.__archiFlags.whoamiSkipped);
access.canUsers = Boolean((info?.ok && (canReaders || canEditors)) || whoamiSkipped);
access.ready = true; access.ready = true;
if (btnMediaSubmit) btnMediaSubmit.disabled = !access.canUsers; if (btnMediaSubmit) btnMediaSubmit.disabled = !access.canUsers;
if (btnSend) btnSend.disabled = !access.canUsers; if (btnSend) btnSend.disabled = !access.canUsers;
if (btnRefSubmit) btnRefSubmit.disabled = !access.canUsers; if (btnRefSubmit) btnRefSubmit.disabled = !access.canUsers;
// si pas d'accès, on informe (soft)
if (!access.canUsers) { if (!access.canUsers) {
if (msgHead) { if (msgHead) {
msgHead.hidden = false; msgHead.hidden = false;
@@ -162,12 +166,12 @@
} }
} }
}).catch(() => { }).catch(() => {
// fallback dev
access.ready = true; access.ready = true;
if (isDev()) { if (Boolean(window.__archiFlags && window.__archiFlags.whoamiSkipped)) {
access.canUsers = true; access.canUsers = true;
if (btnMediaSubmit) btnMediaSubmit.disabled = false; if (btnMediaSubmit) btnMediaSubmit.disabled = false;
if (btnSend) btnSend.disabled = false; if (btnSend) btnSend.disabled = false;
if (btnRefSubmit) btnRefSubmit.disabled = false;
} }
}); });
@@ -209,8 +213,11 @@
async function loadIndex() { async function loadIndex() {
if (_idxP) return _idxP; if (_idxP) return _idxP;
_idxP = (async () => { _idxP = (async () => {
const res = await fetch("/annotations-index.json?_=" + Date.now(), { cache: "no-store" }).catch(() => null); try {
if (res && res.ok) return await res.json(); const res = await fetch("/annotations-index.json?_=" + Date.now(), { cache: "no-store" });
if (res && res.ok) return await res.json();
} catch {}
_idxP = null;
return null; return null;
})(); })();
return _idxP; return _idxP;
@@ -251,24 +258,22 @@
return issue.toString(); return issue.toString();
} }
// Ouvre un nouvel onglet UNE SEULE FOIS (évite le double-open Safari/Firefox + noopener). function openNewTab(url) {
function openNewTab(url) { try {
try { const a = document.createElement("a");
const a = document.createElement("a"); a.href = url;
a.href = url; a.target = "_blank";
a.target = "_blank"; a.rel = "noopener noreferrer";
a.rel = "noopener noreferrer"; a.style.display = "none";
a.style.display = "none"; document.body.appendChild(a);
document.body.appendChild(a); a.click();
a.click(); a.remove();
a.remove(); return true;
return true; // on ne peut pas détecter proprement un blocage sans retomber dans le double-open } catch {
} catch { return false;
return false;
}
} }
}
// ====== GARDES ANTI-DOUBLONS ======
const _openStamp = new Map(); const _openStamp = new Map();
function openOnce(key, fn) { function openOnce(key, fn) {
const now = Date.now(); const now = Date.now();
@@ -297,13 +302,21 @@
} }
// ===== Lightbox ===== // ===== Lightbox =====
function lockScroll(on) {
try {
document.documentElement.classList.toggle("archi-lb-open", !!on);
} catch {}
}
function closeLightbox() { function closeLightbox() {
if (!lb) return; if (!lb) return;
lb.hidden = true; lb.hidden = true;
lb.setAttribute("aria-hidden", "true"); lb.setAttribute("aria-hidden", "true");
if (lbContent) clear(lbContent); if (lbContent) clear(lbContent);
if (lbCaption) { lbCaption.hidden = true; lbCaption.textContent = ""; } if (lbCaption) { lbCaption.hidden = true; lbCaption.textContent = ""; }
lockScroll(false);
} }
function openLightbox({ type, src, caption }) { function openLightbox({ type, src, caption }) {
if (!lb || !lbContent) return; if (!lb || !lbContent) return;
clear(lbContent); clear(lbContent);
@@ -342,6 +355,7 @@
else { lbCaption.hidden = true; lbCaption.textContent = ""; } else { lbCaption.hidden = true; lbCaption.textContent = ""; }
} }
lockScroll(true);
lb.hidden = false; lb.hidden = false;
lb.setAttribute("aria-hidden", "false"); lb.setAttribute("aria-hidden", "false");
} }
@@ -359,7 +373,6 @@
}); });
} }
// ===== Renders =====
function renderLevel2(data) { function renderLevel2(data) {
clear(elL2); clear(elL2);
if (!elL2) return; if (!elL2) return;
@@ -369,7 +382,7 @@
return; return;
} }
if (Array.isArray(data.authors) && data.authors.length) { if (Array.isArray(data.mobilizedAuthors) && data.mobilizedAuthors.length) {
const h = document.createElement("h3"); const h = document.createElement("h3");
h.className = "panel-subtitle"; h.className = "panel-subtitle";
h.textContent = "Auteurs"; h.textContent = "Auteurs";
@@ -377,7 +390,7 @@
const ul = document.createElement("ul"); const ul = document.createElement("ul");
ul.className = "panel-list"; ul.className = "panel-list";
for (const a of data.authors) { for (const a of data.mobilizedAuthors) {
const li = document.createElement("li"); const li = document.createElement("li");
li.textContent = esc(a); li.textContent = esc(a);
ul.appendChild(li); ul.appendChild(li);
@@ -559,11 +572,22 @@
async function updatePanel(paraId) { async function updatePanel(paraId) {
currentParaId = paraId || currentParaId || ""; currentParaId = paraId || currentParaId || "";
if (elId) elId.textContent = currentParaId || "—"; if (elId) elId.textContent = currentParaId || "—";
flashUpdate();
hideMsg(msgHead); hideMsg(msgHead);
hideMsg(msgMedia); hideMsg(msgMedia);
hideMsg(msgComment); hideMsg(msgComment);
hideMsg(msgRef);
const idx = await loadIndex(); const idx = await loadIndex();
if (!idx && msgHead && msgHead.hidden) {
msgHead.hidden = false;
msgHead.textContent = "Index annotations indisponible (annotations-index.json).";
msgHead.dataset.kind = "info";
}
const data = idx?.pages?.[pageKey]?.paras?.[currentParaId] || null; const data = idx?.pages?.[pageKey]?.paras?.[currentParaId] || null;
renderLevel2(data); renderLevel2(data);
@@ -571,7 +595,6 @@
renderLevel4(data); renderLevel4(data);
} }
// ===== media "voir tous" =====
if (btnMediaAll) { if (btnMediaAll) {
bindClickOnce(btnMediaAll, (ev) => { bindClickOnce(btnMediaAll, (ev) => {
ev.preventDefault(); ev.preventDefault();
@@ -583,7 +606,6 @@
btnMediaAll.textContent = mediaShowAll ? "Réduire la liste" : "Voir tous les éléments"; btnMediaAll.textContent = mediaShowAll ? "Réduire la liste" : "Voir tous les éléments";
} }
// ===== media submit (readers + editors) =====
if (btnMediaSubmit) { if (btnMediaSubmit) {
bindClickOnce(btnMediaSubmit, (ev) => { bindClickOnce(btnMediaSubmit, (ev) => {
ev.preventDefault(); ev.preventDefault();
@@ -626,27 +648,26 @@
}); });
} }
// ===== référence submit (readers + editors) ===== if (btnRefSubmit) {
if (btnRefSubmit) {
bindClickOnce(btnRefSubmit, (ev) => { bindClickOnce(btnRefSubmit, (ev) => {
ev.preventDefault(); ev.preventDefault();
hideMsg(msgRef); hideMsg(msgRef);
if (guardEventOnce(ev, "gitea_open_ref")) return; if (guardEventOnce(ev, "gitea_open_ref")) return;
if (!currentParaId) return showMsg(msgRef, "Choisis dabord un paragraphe (scroll / survol).", "warn"); if (!currentParaId) return showMsg(msgRef, "Choisis dabord un paragraphe (scroll / survol).", "warn");
if (!getG().ready) return showMsg(msgRef, "Gitea non configuré (PUBLIC_GITEA_*).", "error"); if (!getG().ready) return showMsg(msgRef, "Gitea non configuré (PUBLIC_GITEA_*).", "error");
if (btnRefSubmit.disabled) return showMsg(msgRef, "Connexion requise (readers/editors).", "error"); if (btnRefSubmit.disabled) return showMsg(msgRef, "Connexion requise (readers/editors).", "error");
const pageUrl = new URL(location.href); const pageUrl = new URL(location.href);
pageUrl.search = ""; pageUrl.search = "";
pageUrl.hash = currentParaId; pageUrl.hash = currentParaId;
const paraTxt = getParaText(currentParaId); const paraTxt = getParaText(currentParaId);
const excerpt = paraTxt.length > FULL_TEXT_SOFT_LIMIT ? (paraTxt.slice(0, FULL_TEXT_SOFT_LIMIT) + "…") : paraTxt; const excerpt = paraTxt.length > FULL_TEXT_SOFT_LIMIT ? (paraTxt.slice(0, FULL_TEXT_SOFT_LIMIT) + "…") : paraTxt;
const title = `[Reference] ${currentParaId} — ${docTitle}`; const title = `[Reference] ${currentParaId} — ${docTitle}`;
const body = [ const body = [
`Chemin: ${location.pathname}`, `Chemin: ${location.pathname}`,
`URL: ${pageUrl.toString()}`, `URL: ${pageUrl.toString()}`,
`Ancre: #${currentParaId}`, `Ancre: #${currentParaId}`,
@@ -664,18 +685,16 @@
``, ``,
`---`, `---`,
`Note: issue générée depuis le site (pré-remplissage).`, `Note: issue générée depuis le site (pré-remplissage).`,
].join("\n"); ].join("\n");
const url = buildIssueURL({ title, body }); const url = buildIssueURL({ title, body });
if (!url) return showMsg(msgRef, "Impossible de générer lissue.", "error"); if (!url) return showMsg(msgRef, "Impossible de générer lissue.", "error");
const ok = openOnce(`ref:${currentParaId}`, () => openNewTab(url)); const ok = openOnce(`ref:${currentParaId}`, () => openNewTab(url));
if (!ok) showMsg(msgRef, "Si rien ne souvre : autorise les popups pour ce site.", "error"); if (!ok) showMsg(msgRef, "Si rien ne souvre : autorise les popups pour ce site.", "error");
}); });
} }
// ===== commentaire (readers + editors) =====
if (btnSend) { if (btnSend) {
bindClickOnce(btnSend, (ev) => { bindClickOnce(btnSend, (ev) => {
ev.preventDefault(); ev.preventDefault();
@@ -727,60 +746,31 @@
}); });
} }
// ===== wiring: para courant (aligné sur le paragraphe sous le reading-follow) ===== // ===== wiring: para courant (SOURCE OF TRUTH = EditionLayout) =====
function isPara(el) { function onCurrentPara(ev) {
return Boolean(el && el.nodeType === 1 && el.matches && el.matches('.reading p[id^="p-"]')); try {
const id = ev?.detail?.id ? String(ev.detail.id) : "";
if (!id || !/^p-\d+-/i.test(id)) return;
if (id === currentParaId) return;
updatePanel(id);
} catch {}
} }
window.addEventListener("archicratie:currentPara", onCurrentPara);
function pickParaAtY(y) { const initial = String(location.hash || "").replace(/^#/, "").trim();
const x = Math.max(0, Math.round(window.innerWidth * 0.5));
const candidates = [
document.elementFromPoint(x, y),
document.elementFromPoint(Math.min(window.innerWidth - 1, x + 60), y),
document.elementFromPoint(Math.max(0, x - 60), y),
].filter(Boolean);
for (const c of candidates) { if (/^p-\d+-/i.test(initial)) {
if (isPara(c)) return c; updatePanel(initial);
const p = c.closest ? c.closest('.reading p[id^="p-"]') : null; } else if (window.__archiCurrentParaId && /^p-\d+-/i.test(String(window.__archiCurrentParaId))) {
if (isPara(p)) return p; updatePanel(String(window.__archiCurrentParaId));
} } else {
return null; setTimeout(() => {
try {
const id = String(window.__archiCurrentParaId || "").trim();
if (/^p-\d+-/i.test(id)) updatePanel(id);
} catch {}
}, 0);
} }
let _lastPicked = "";
function syncFromFollowLine() {
const off = Number(document.documentElement.style.getPropertyValue("--sticky-offset-px")) || 0;
const y = Math.round(off + 8);
const p = pickParaAtY(y);
if (!p || !p.id) return;
if (p.id === _lastPicked) return;
_lastPicked = p.id;
// met à jour l'app global (EditionLayout écoute déjà currentPara)
try { window.dispatchEvent(new CustomEvent("archicratie:currentPara", { detail: { id: p.id } })); } catch {}
// et met à jour le panel immédiatement (sans attendre)
updatePanel(p.id);
}
let ticking = false;
function onScroll() {
if (ticking) return;
ticking = true;
requestAnimationFrame(() => {
ticking = false;
syncFromFollowLine();
});
}
window.addEventListener("scroll", onScroll, { passive: true });
window.addEventListener("resize", onScroll);
// Initial: hash > sinon calc
const initial = String(location.hash || "").replace(/^#/, "");
if (/^p-\d+-/i.test(initial)) updatePanel(initial);
else setTimeout(() => { try { syncFromFollowLine(); } catch {} }, 0);
})(); })();
</script> </script>
@@ -793,6 +783,8 @@
position: sticky; position: sticky;
top: calc(var(--sticky-header-h) + var(--page-gap)); top: calc(var(--sticky-header-h) + var(--page-gap));
align-self: start; align-self: start;
--thumb: 92px; /* ✅ taille des vignettes (80110 selon goût) */
} }
:global(body[data-reading-level="3"]) .page-panel{ :global(body[data-reading-level="3"]) .page-panel{
@@ -910,28 +902,33 @@
/* actions médias en haut */ /* actions médias en haut */
.panel-top-actions{ margin-top: 8px; } .panel-top-actions{ margin-top: 8px; }
/* ===== media thumbnails (150x150) ===== */ /* ===== media thumbnails (plus petits + plus denses) ===== */
.panel-media-grid{ .panel-media-grid{
display: grid; display: grid;
grid-template-columns: repeat(auto-fill, minmax(150px, 1fr)); grid-template-columns: repeat(auto-fill, minmax(var(--thumb), 1fr));
gap: 10px; gap: 10px;
} }
.panel-media-tile{ .panel-media-tile{
width: 150px; width: 100%;
max-width: 100%;
border: 1px solid rgba(127,127,127,.20); border: 1px solid rgba(127,127,127,.20);
border-radius: 14px; border-radius: 14px;
padding: 8px; padding: 8px;
background: rgba(127,127,127,0.04); background: rgba(127,127,127,0.04);
cursor: pointer; cursor: pointer;
text-align: left; text-align: left;
transition: transform 120ms ease, background 120ms ease, border-color 120ms ease;
}
.panel-media-tile:hover{
transform: translateY(-1px);
background: rgba(127,127,127,0.07);
border-color: rgba(127,127,127,.32);
} }
.panel-media-tile img{ .panel-media-tile img{
width: 150px; width: 100%;
height: 150px; height: var(--thumb);
max-width: 100%;
object-fit: cover; object-fit: cover;
display: block; display: block;
border-radius: 10px; border-radius: 10px;
@@ -939,8 +936,8 @@
} }
.panel-media-ph{ .panel-media-ph{
width: 150px; width: 100%;
height: 150px; height: var(--thumb);
border-radius: 10px; border-radius: 10px;
display: grid; display: grid;
place-items: center; place-items: center;
@@ -983,7 +980,11 @@
resize: vertical; resize: vertical;
} }
/* ===== Lightbox ===== */ /* ===== Lightbox (plein écran “cinéma”) ===== */
:global(html.archi-lb-open){
overflow: hidden; /* ✅ empêche le scroll derrière */
}
.panel-lightbox{ .panel-lightbox{
position: fixed; position: fixed;
inset: 0; inset: 0;
@@ -993,58 +994,66 @@
.panel-lightbox__overlay{ .panel-lightbox__overlay{
position: absolute; position: absolute;
inset: 0; inset: 0;
background: rgba(0,0,0,0.80); background: rgba(0,0,0,0.84);
backdrop-filter: blur(6px); backdrop-filter: blur(10px);
-webkit-backdrop-filter: blur(6px); -webkit-backdrop-filter: blur(10px);
} }
.panel-lightbox__dialog{ .panel-lightbox__dialog{
position: absolute; position: absolute;
right: 24px; left: 50%;
top: calc(var(--sticky-header-h) + 16px); top: 50%;
width: min(520px, calc(100vw - 48px)); transform: translate(-50%, -50%);
max-height: calc(100vh - (var(--sticky-header-h) + 32px));
width: min(1100px, 92vw);
max-height: 92vh;
overflow: auto; overflow: auto;
border: 1px solid rgba(127,127,127,0.22); border: 1px solid rgba(255,255,255,0.14);
border-radius: 16px; border-radius: 18px;
background: rgba(255,255,255,0.10);
backdrop-filter: blur(10px);
-webkit-backdrop-filter: blur(10px);
padding: 12px;
}
@media (prefers-color-scheme: dark){ background: rgba(20,20,20,0.55);
.panel-lightbox__dialog{ backdrop-filter: blur(14px);
background: rgba(0,0,0,0.28); -webkit-backdrop-filter: blur(14px);
}
padding: 16px;
box-shadow: 0 24px 70px rgba(0,0,0,0.55);
} }
.panel-lightbox__close{ .panel-lightbox__close{
position: sticky; position: absolute;
top: 0; top: 12px;
margin-left: auto; right: 12px;
display: inline-flex; display: inline-flex;
align-items: center; align-items: center;
justify-content: center; justify-content: center;
width: 34px;
height: 30px; width: 44px;
border-radius: 10px; height: 40px;
border: 1px solid rgba(127,127,127,0.35);
background: rgba(127,127,127,0.10); border-radius: 14px;
border: 1px solid rgba(255,255,255,0.22);
background: rgba(255,255,255,0.10);
cursor: pointer; cursor: pointer;
font-size: 18px; font-size: 22px;
font-weight: 900; font-weight: 900;
} }
.panel-lightbox__content{
margin-top: 36px;
}
.panel-lightbox__content img, .panel-lightbox__content img,
.panel-lightbox__content video{ .panel-lightbox__content video{
display: block; display: block;
width: 100%; width: 100%;
height: auto; max-height: calc(92vh - 160px);
max-width: 1400px; object-fit: contain;
margin: 0 auto;
border-radius: 12px; background: rgba(0,0,0,0.22);
border-radius: 14px;
} }
.panel-lightbox__content audio{ .panel-lightbox__content audio{
@@ -1052,13 +1061,14 @@
} }
.panel-lightbox__caption{ .panel-lightbox__caption{
margin-top: 10px; margin-top: 12px;
font-size: 12px; font-size: 12px;
font-weight: 800; font-weight: 800;
opacity: .92; opacity: .92;
color: rgba(255,255,255,0.92);
} }
@media (max-width: 1100px){ @media (max-width: 1100px){
.page-panel{ display: none; } .page-panel{ display: none; }
} }
</style> </style>

View File

@@ -1,11 +1,17 @@
<nav class="site-nav" aria-label="Navigation principale"> <nav class="site-nav" aria-label="Navigation principale">
<a href="/">Accueil</a><span aria-hidden="true"> · </span>
<a href="/editions/">Carte des œuvres</a><span aria-hidden="true"> · </span> <a href="/">Accueil</a>
<a href="/methode/">Méthode</a><span aria-hidden="true"> · </span> <span aria-hidden="true"> · </span>
<a href="/recherche/">Recherche</a><span aria-hidden="true"> · </span>
<a href="/archicrat-ia/">Essai-thèse</a><span aria-hidden="true"> · </span> <a href="/archicrat-ia/">Essai-thèse</a>
<a href="/traite/">Traité</a><span aria-hidden="true"> · </span> <span aria-hidden="true"> · </span>
<a href="/ia/">Cas IA</a><span aria-hidden="true"> · </span>
<a href="/glossaire/">Glossaire</a><span aria-hidden="true"> · </span> <a href="/cas-ia/">Cas IA</a>
<a href="/atlas/">Atlas</a> <span aria-hidden="true"> · </span>
</nav>
<a href="/glossaire/">Glossaire</a>
<span aria-hidden="true"> · </span>
<a href="/recherche/">Recherche</a>
</nav>

112
src/content.config.ts Normal file
View File

@@ -0,0 +1,112 @@
// Astro content-collection configuration.
// One collection per edition; `edition` and `status` are locked down per
// collection via zod literal types so entries cannot be misfiled.
import { defineCollection, z } from "astro:content";

// Typed cross-reference from one text to another entry.
const linkSchema = z.object({
  type: z.enum(["definition", "appui", "transposition"]),
  target: z.string().min(1),
  note: z.string().optional()
});

// Fields shared by every textual entry: title, reading level (1-3,
// defaults to 1), version string, concept tags, cross-links, optional
// ordering index and summary.
const baseTextSchema = z.object({
  title: z.string().min(1),
  level: z.union([z.literal(1), z.literal(2), z.literal(3)]).default(1),
  version: z.string().min(1),
  concepts: z.array(z.string().min(1)).default([]),
  links: z.array(linkSchema).default([]),
  order: z.number().int().nonnegative().optional(),
  summary: z.string().optional()
});

// Editions (strict separation: edition + status locked per collection)
const casIa = defineCollection({
  type: "content",
  schema: baseTextSchema.extend({
    edition: z.literal("cas-ia"),
    status: z.literal("application")
  })
});

const commencer = defineCollection({
  type: "content",
  schema: baseTextSchema.extend({
    edition: z.literal("commencer"),
    status: z.union([z.literal("presentation"), z.literal("draft")])
  })
});

// ✅ NEW collection: archicrat-ia (thesis essay).
// NOTE: edition/status "archicratie"/"modele_sociopolitique" are accepted
// temporarily, in case the MDX files have not been normalised yet.
// Once we switch to "strict" mode, narrow this to edition="archicrat-ia"
// status="essai_these" + update the frontmatter of the 7 files.
const archicratIa = defineCollection({
  type: "content",
  schema: baseTextSchema.extend({
    edition: z.union([z.literal("archicrat-ia"), z.literal("archicratie")]),
    status: z.union([z.literal("essai_these"), z.literal("modele_sociopolitique")])
  })
});

// Glossary (terminology reference). Does not extend baseTextSchema:
// it has its own required fields (term, definitionShort) and uses a
// string-enum `level` rather than the numeric one above.
const glossaire = defineCollection({
  type: "content",
  schema: z.object({
    title: z.string().min(1),
    term: z.string().min(1),
    aliases: z.array(z.string().min(1)).default([]),
    // URL aliases must be lowercase kebab-case slugs.
    urlAliases: z
      .array(z.string().regex(/^[a-z0-9]+(?:-[a-z0-9]+)*$/))
      .default([]),
    mobilizedAuthors: z.array(z.string().min(1)).default([]),
    comparisonTraditions: z.array(z.string().min(1)).default([]),
    edition: z.literal("glossaire"),
    status: z.literal("referentiel"),
    version: z.string().min(1),
    definitionShort: z.string().min(1),
    concepts: z.array(z.string().min(1)).default([]),
    links: z.array(linkSchema).default([]),
    // Grammatical/semantic category of the glossary entry.
    kind: z.enum([
      "concept",
      "topologie",
      "diagnostic",
      "verbe",
      "paradigme",
      "doctrine",
      "dispositif",
      "figure",
      "qualification",
      "epistemologie",
    ]),
    // Broader thematic family; optional, unlike `kind`.
    family: z.enum([
      "concept-fondamental",
      "scene",
      "dynamique",
      "pathologie",
      "topologie",
      "meta-regime",
      "paradigme",
      "doctrine",
      "verbe",
      "dispositif-ia",
      "tension-irreductible",
      "figure",
      "qualification",
      "epistemologie",
    ]
    )
    .optional(),
    domain: z.enum(["transversal", "theorie", "cas-ia"]),
    level: z.enum(["fondamental", "intermediaire", "avance"]),
    related: z.array(z.string().min(1)).default([]),
    opposedTo: z.array(z.string().min(1)).default([]),
    seeAlso: z.array(z.string().min(1)).default([])
  })
});

// Collection map consumed by Astro; keys are the on-disk folder names
// under src/content/ (hyphenated names must be quoted).
export const collections = {
  commencer,
  "archicrat-ia": archicratIa,
  "cas-ia": casIa,
  glossaire,
};

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

Some files were not shown because too many files have changed in this diff Show More