Sprint 1 : scaffolding complet de Glibredecision

Plateforme de décisions collectives pour Duniter/G1.
Backend FastAPI async + PostgreSQL (14 tables, 8 routers, 6 services,
moteur de vote avec formule d'inertie WoT/Smith/TechComm).
Frontend Nuxt 4 + Nuxt UI v3 + Pinia (9 pages, 5 stores).
Infrastructure Docker + Woodpecker CI + Traefik.
Documentation technique et utilisateur (15 fichiers).
Seed : Licence G1, Engagement Forgeron v2.0.0, 4 protocoles de vote.
30 tests unitaires (formules, mode params, vote nuancé) -- tous verts.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Yvv
2026-02-28 12:46:11 +01:00
commit 25437f24e3
100 changed files with 10236 additions and 0 deletions

View File

@@ -0,0 +1,123 @@
"""Sanctuary service: immutable archival to IPFS + on-chain hash.
The sanctuary is the immutable layer of Glibredecision. Every adopted
document version, decision result, or vote tally is hashed (SHA-256),
stored on IPFS, and anchored on-chain via system.remark.
"""
from __future__ import annotations
import hashlib
import json
import uuid
from datetime import datetime, timezone
from sqlalchemy.ext.asyncio import AsyncSession
from app.models.sanctuary import SanctuaryEntry
async def archive_to_sanctuary(
    entry_type: str,
    reference_id: uuid.UUID,
    content: str,
    title: str,
    db: AsyncSession,
) -> SanctuaryEntry:
    """Create and persist an immutable sanctuary entry for *content*.

    The content is fingerprinted with SHA-256 and stored alongside a small
    JSON metadata blob; IPFS pinning and on-chain anchoring are deferred to
    the (not yet implemented) helpers below.

    Parameters
    ----------
    entry_type:
        Kind of archived entity (``"document"``, ``"decision"``,
        ``"vote_result"``).
    reference_id:
        UUID of the source entity (document, decision, or vote session).
    content:
        Full text content to archive and hash.
    title:
        Human-readable title for the archive entry.
    db:
        Async database session.

    Returns
    -------
    SanctuaryEntry
        The freshly committed entry, refreshed from the database, with
        ``content_hash`` populated.
    """
    # SHA-256 fingerprint of the UTF-8 encoded payload.
    hasher = hashlib.sha256()
    hasher.update(content.encode("utf-8"))
    digest = hasher.hexdigest()

    # Minimal provenance metadata, serialized as JSON on the row.
    archive_meta = dict(
        archived_at=datetime.now(timezone.utc).isoformat(),
        entry_type=entry_type,
        content_length=len(content),
    )

    entry = SanctuaryEntry(
        entry_type=entry_type,
        reference_id=reference_id,
        title=title,
        content_hash=digest,
        metadata_json=json.dumps(archive_meta, ensure_ascii=False),
    )

    # TODO: pin `content` on IPFS (kubo HTTP API) and set entry.ipfs_cid.
    # TODO: anchor `digest` on-chain via system.remark and set
    #       entry.chain_tx_hash / entry.chain_block.

    db.add(entry)
    await db.commit()
    await db.refresh(entry)
    return entry
async def _upload_to_ipfs(content: str) -> str:
    """Pin *content* on IPFS and return its CID.

    Not implemented yet. The intended implementation POSTs the payload to
    the kubo HTTP API at ``settings.IPFS_API_URL``::

        import httpx
        from app.config import settings

        async with httpx.AsyncClient() as client:
            response = await client.post(
                f"{settings.IPFS_API_URL}/api/v0/add",
                files={"file": ("content.txt", content.encode("utf-8"))},
            )
            response.raise_for_status()
            return response.json()["Hash"]

    Raises
    ------
    NotImplementedError
        Always, until the IPFS integration lands.
    """
    raise NotImplementedError("IPFS upload pas encore implemente")
async def _anchor_on_chain(content_hash: str) -> tuple[str, int]:
    """Record *content_hash* on-chain through a ``system.remark`` extrinsic.

    Not implemented yet. Sketch of the intended substrate-interface code::

        from substrateinterface import SubstrateInterface
        from app.config import settings

        substrate = SubstrateInterface(url=settings.DUNITER_RPC_URL)
        call = substrate.compose_call(
            call_module="System",
            call_function="remark",
            call_params={"remark": f"glibredecision:sanctuary:{content_hash}"},
        )
        extrinsic = substrate.create_signed_extrinsic(call=call, keypair=keypair)
        receipt = substrate.submit_extrinsic(extrinsic, wait_for_inclusion=True)
        return receipt.extrinsic_hash, receipt.block_number

    Returns
    -------
    tuple[str, int]
        ``(extrinsic_hash, block_number)`` once implemented.

    Raises
    ------
    NotImplementedError
        Always, until on-chain anchoring lands.
    """
    raise NotImplementedError("Ancrage on-chain pas encore implemente")