Sprint 2 : moteur de documents + sanctuaire

Backend:
- CRUD complet documents/items/versions (update, delete, accept, reject, reorder)
- Service IPFS (upload/retrieve/pin via kubo HTTP API)
- Service sanctuaire : pipeline SHA-256 + IPFS + on-chain (system.remark)
- Verification integrite des entrees sanctuaire
- Recherche par reference (document -> entrees sanctuaire)
- Serialisation deterministe des documents pour archivage
- 14 tests unitaires supplementaires (document service)

Frontend:
- 9 composants : StatusBadge, MarkdownRenderer, DiffView, ItemCard,
  ItemVersionDiff, DocumentList, SanctuaryEntry, IPFSLink, ChainAnchor
- Page detail item avec historique des versions et diff
- Page detail sanctuaire avec verification integrite
- Modal de creation de document + proposition de version
- Archivage document vers sanctuaire depuis la page detail

Documentation:
- API reference mise a jour (9 nouveaux endpoints)
- Guides utilisateur documents et sanctuaire enrichis

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Yvv
2026-02-28 13:08:48 +01:00
parent 25437f24e3
commit 2bdc731639
26 changed files with 3452 additions and 397 deletions

View File

@@ -3,27 +3,33 @@
from __future__ import annotations
import difflib
import logging
import uuid
from fastapi import APIRouter, Depends, HTTPException, Query, status
from sqlalchemy import func, select
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.orm import selectinload
from app.database import get_db
from app.models.document import Document, DocumentItem, ItemVersion
from app.models.user import DuniterIdentity
from app.schemas.document import (
DocumentCreate,
DocumentFullOut,
DocumentItemCreate,
DocumentItemOut,
DocumentItemUpdate,
DocumentOut,
DocumentUpdate,
ItemReorderRequest,
ItemVersionCreate,
ItemVersionOut,
)
from app.services import document_service
from app.services.auth_service import get_current_identity
logger = logging.getLogger(__name__)
router = APIRouter()
@@ -208,6 +214,29 @@ async def list_items(
return [DocumentItemOut.model_validate(item) for item in items]
# NOTE: reorder must be declared BEFORE /{slug}/items/{item_id} routes
# to avoid "reorder" being parsed as a UUID path parameter.
@router.put("/{slug}/items/reorder", response_model=list[DocumentItemOut])
async def reorder_items(
    slug: str,
    payload: ItemReorderRequest,
    db: AsyncSession = Depends(get_db),
    identity: DuniterIdentity = Depends(get_current_identity),
) -> list[DocumentItemOut]:
    """Persist new sort_order values for the items of a document.

    Delegates validation and persistence to the document service and
    returns the items in their new order.
    """
    doc = await _get_document_by_slug(db, slug)
    new_order = [(entry.item_id, entry.sort_order) for entry in payload.items]
    items = await document_service.reorder_items(doc.id, new_order, db)
    return [DocumentItemOut.model_validate(item) for item in items]
@router.get("/{slug}/items/{item_id}", response_model=DocumentItemOut)
async def get_item(
slug: str,
@@ -260,3 +289,179 @@ async def propose_version(
await db.refresh(version)
return ItemVersionOut.model_validate(version)
# ── Item update & delete ───────────────────────────────────────────────────
@router.put("/{slug}/items/{item_id}", response_model=DocumentItemOut)
async def update_item(
    slug: str,
    item_id: uuid.UUID,
    payload: DocumentItemUpdate,
    db: AsyncSession = Depends(get_db),
    identity: DuniterIdentity = Depends(get_current_identity),
) -> DocumentItemOut:
    """Apply a partial update (title, text, position or item_type) to an item."""
    doc = await _get_document_by_slug(db, slug)
    item = await _get_item(db, doc.id, item_id)
    # Only fields explicitly present in the request body are written.
    for field_name, new_value in payload.model_dump(exclude_unset=True).items():
        setattr(item, field_name, new_value)
    await db.commit()
    await db.refresh(item)
    return DocumentItemOut.model_validate(item)
@router.delete("/{slug}/items/{item_id}", status_code=status.HTTP_204_NO_CONTENT)
async def delete_item(
    slug: str,
    item_id: uuid.UUID,
    db: AsyncSession = Depends(get_db),
    identity: DuniterIdentity = Depends(get_current_identity),
) -> None:
    """Delete a document item.

    Answers 409 Conflict when any version of the item is still in the
    'voting' state, so an ongoing vote can never be orphaned.
    """
    doc = await _get_document_by_slug(db, slug)
    item = await _get_item(db, doc.id, item_id)
    # Count versions of this item that are currently being voted on.
    voting_query = (
        select(func.count())
        .select_from(ItemVersion)
        .where(ItemVersion.item_id == item.id, ItemVersion.status == "voting")
    )
    voting_count = (await db.execute(voting_query)).scalar() or 0
    if voting_count:
        raise HTTPException(
            status_code=status.HTTP_409_CONFLICT,
            detail="Impossible de supprimer un element avec des votes en cours",
        )
    await db.delete(item)
    await db.commit()
# ── Version accept & reject ────────────────────────────────────────────────
@router.put(
    "/{slug}/items/{item_id}/versions/{version_id}/accept",
    response_model=DocumentItemOut,
)
async def accept_version(
    slug: str,
    item_id: uuid.UUID,
    version_id: uuid.UUID,
    db: AsyncSession = Depends(get_db),
    identity: DuniterIdentity = Depends(get_current_identity),
) -> DocumentItemOut:
    """Accept a proposed version and apply it to the document item.

    Replaces the item's current_text with the version's proposed_text
    and rejects all other pending/voting versions for this item.

    Raises
    ------
    HTTPException (404) when the document, item, or version is unknown.
    """
    doc = await _get_document_by_slug(db, slug)
    # Verify the item belongs to the document addressed by the slug.
    await _get_item(db, doc.id, item_id)
    try:
        updated_item = await document_service.apply_version(item_id, version_id, db)
    except ValueError as exc:
        # Chain the cause so tracebacks keep the service-level error (PEP 3134).
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(exc)) from exc
    return DocumentItemOut.model_validate(updated_item)
@router.put(
    "/{slug}/items/{item_id}/versions/{version_id}/reject",
    response_model=ItemVersionOut,
)
async def reject_version(
    slug: str,
    item_id: uuid.UUID,
    version_id: uuid.UUID,
    db: AsyncSession = Depends(get_db),
    identity: DuniterIdentity = Depends(get_current_identity),
) -> ItemVersionOut:
    """Reject a proposed version without touching the item's current text.

    Raises
    ------
    HTTPException (404) when the document, item, or version is unknown.
    """
    doc = await _get_document_by_slug(db, slug)
    # Verify the item belongs to the document addressed by the slug.
    await _get_item(db, doc.id, item_id)
    try:
        version = await document_service.reject_version(item_id, version_id, db)
    except ValueError as exc:
        # Chain the cause so tracebacks keep the service-level error (PEP 3134).
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(exc)) from exc
    return ItemVersionOut.model_validate(version)
# ── Version listing ────────────────────────────────────────────────────────
@router.get(
    "/{slug}/items/{item_id}/versions",
    response_model=list[ItemVersionOut],
)
async def list_versions(
    slug: str,
    item_id: uuid.UUID,
    db: AsyncSession = Depends(get_db),
) -> list[ItemVersionOut]:
    """Return every version of a document item, newest first."""
    doc = await _get_document_by_slug(db, slug)
    # 404s when the item does not belong to this document.
    await _get_item(db, doc.id, item_id)
    query = (
        select(ItemVersion)
        .where(ItemVersion.item_id == item_id)
        .order_by(ItemVersion.created_at.desc())
    )
    versions = (await db.execute(query)).scalars().all()
    return [ItemVersionOut.model_validate(v) for v in versions]
# ── Document full view ─────────────────────────────────────────────────────
@router.get("/{slug}/full", response_model=DocumentFullOut)
async def get_document_full(
    slug: str,
    db: AsyncSession = Depends(get_db),
) -> DocumentFullOut:
    """Return a document together with its complete items list (not just a count)."""
    if (doc := await document_service.get_document_with_items(slug, db)) is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Document introuvable")
    return DocumentFullOut.model_validate(doc)
# ── Document archive to sanctuary ──────────────────────────────────────────
@router.post("/{slug}/archive", status_code=status.HTTP_201_CREATED)
async def archive_document(
    slug: str,
    db: AsyncSession = Depends(get_db),
    identity: DuniterIdentity = Depends(get_current_identity),
) -> dict:
    """Archive a document to the sanctuary (IPFS + on-chain hash).

    Serializes the full document text and sends it through the sanctuary
    pipeline.

    Raises
    ------
    HTTPException (404) when the document slug is unknown.
    """
    try:
        entry = await document_service.archive_document(slug, db)
    except ValueError as exc:
        # The service raises ValueError for an unknown slug; without this
        # mapping the client would get an unhandled 500 instead of a 404.
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(exc)) from exc
    return {
        "message": "Document archive avec succes",
        "sanctuary_entry_id": str(entry.id),
        "content_hash": entry.content_hash,
        "ipfs_cid": entry.ipfs_cid,
        "chain_tx_hash": entry.chain_tx_hash,
    }

View File

@@ -12,6 +12,7 @@ from app.database import get_db
from app.models.sanctuary import SanctuaryEntry
from app.models.user import DuniterIdentity
from app.schemas.sanctuary import SanctuaryEntryCreate, SanctuaryEntryOut
from app.services import sanctuary_service
from app.services.auth_service import get_current_identity
router = APIRouter()
@@ -37,19 +38,6 @@ async def list_entries(
return [SanctuaryEntryOut.model_validate(e) for e in entries]
@router.get("/{id}", response_model=SanctuaryEntryOut)
async def get_entry(
id: uuid.UUID,
db: AsyncSession = Depends(get_db),
) -> SanctuaryEntryOut:
"""Get a single sanctuary entry by ID."""
result = await db.execute(select(SanctuaryEntry).where(SanctuaryEntry.id == id))
entry = result.scalar_one_or_none()
if entry is None:
raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Entree sanctuaire introuvable")
return SanctuaryEntryOut.model_validate(entry)
@router.post("/", response_model=SanctuaryEntryOut, status_code=status.HTTP_201_CREATED)
async def create_entry(
payload: SanctuaryEntryCreate,
@@ -71,3 +59,47 @@ async def create_entry(
await db.refresh(entry)
return SanctuaryEntryOut.model_validate(entry)
@router.get("/by-reference/{reference_id}", response_model=list[SanctuaryEntryOut])
async def get_entries_by_reference(
    reference_id: uuid.UUID,
    db: AsyncSession = Depends(get_db),
) -> list[SanctuaryEntryOut]:
    """List every sanctuary entry attached to a given reference ID.

    Useful for finding all sanctuary entries associated with a document,
    decision, or vote result.
    """
    entries = await sanctuary_service.get_entries_by_reference(reference_id, db)
    return [SanctuaryEntryOut.model_validate(entry) for entry in entries]
@router.get("/{id}/verify")
async def verify_entry(
    id: uuid.UUID,
    db: AsyncSession = Depends(get_db),
) -> dict:
    """Verify integrity of a sanctuary entry.

    Re-fetches the content (from IPFS if available), re-hashes it,
    and compares with the stored content_hash.

    Raises
    ------
    HTTPException (404) when the entry does not exist.
    """
    try:
        result = await sanctuary_service.verify_entry(id, db)
    except ValueError as exc:
        # Chain the cause so logs keep the original traceback (PEP 3134).
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=str(exc)) from exc
    return result
@router.get("/{id}", response_model=SanctuaryEntryOut)
async def get_entry(
    id: uuid.UUID,
    db: AsyncSession = Depends(get_db),
) -> SanctuaryEntryOut:
    """Fetch one sanctuary entry by its UUID, or answer 404."""
    row = await db.execute(select(SanctuaryEntry).where(SanctuaryEntry.id == id))
    entry = row.scalar_one_or_none()
    if entry is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Entree sanctuaire introuvable")
    return SanctuaryEntryOut.model_validate(entry)

View File

@@ -60,6 +60,15 @@ class DocumentItemCreate(BaseModel):
voting_protocol_id: UUID | None = None
class DocumentItemUpdate(BaseModel):
    """Partial update for a document item.

    Every field is optional; only fields present in the request are
    applied (PATCH-like semantics behind a PUT endpoint).
    """

    title: str | None = Field(default=None, max_length=256)
    # When provided, the new text must be non-empty.
    current_text: str | None = Field(default=None, min_length=1)
    position: str | None = Field(default=None, max_length=16)
    item_type: str | None = Field(default=None, max_length=32)
class DocumentItemOut(BaseModel):
"""Full document item representation."""
@@ -77,6 +86,59 @@ class DocumentItemOut(BaseModel):
updated_at: datetime
class DocumentItemFullOut(BaseModel):
    """Document item with its full version history.

    Same shape as DocumentItemOut plus the ``versions`` list; the list is
    populated only when versions were eagerly loaded on the ORM object.
    """

    model_config = ConfigDict(from_attributes=True)

    id: UUID
    document_id: UUID
    position: str
    item_type: str
    title: str | None = None
    current_text: str
    voting_protocol_id: UUID | None = None
    sort_order: int
    created_at: datetime
    updated_at: datetime
    versions: list[ItemVersionOut] = Field(default_factory=list)
class DocumentFullOut(BaseModel):
    """Document with full items list (not just count).

    Returned by the ``/{slug}/full`` endpoint; ``items`` carries the
    complete DocumentItemOut objects.
    """

    model_config = ConfigDict(from_attributes=True)

    id: UUID
    slug: str
    title: str
    doc_type: str
    version: str
    status: str
    description: str | None = None
    # Sanctuary archival references, set once the document is archived.
    ipfs_cid: str | None = None
    chain_anchor: str | None = None
    created_at: datetime
    updated_at: datetime
    items: list[DocumentItemOut] = Field(default_factory=list)
# ── Item Reorder ─────────────────────────────────────────────────
class ItemReorderEntry(BaseModel):
    """A single item reorder entry: one (item, new position) pair."""

    item_id: UUID
    # New zero-based sort position for the item.
    sort_order: int = Field(..., ge=0)
class ItemReorderRequest(BaseModel):
    """Payload for reordering items in a document (list of id/order pairs)."""

    items: list[ItemReorderEntry]
# ── Item Version ─────────────────────────────────────────────────
@@ -101,3 +163,11 @@ class ItemVersionOut(BaseModel):
decision_id: UUID | None = None
proposed_by_id: UUID | None = None
created_at: datetime
# ── Forward reference resolution ─────────────────────────────────
# DocumentItemFullOut references ItemVersionOut which is defined after it.
# With `from __future__ import annotations`, Pydantic needs explicit rebuild.
DocumentItemFullOut.model_rebuild()
DocumentFullOut.model_rebuild()

View File

@@ -2,6 +2,7 @@
from __future__ import annotations
import logging
import uuid
from sqlalchemy import select
@@ -10,6 +11,8 @@ from sqlalchemy.orm import selectinload
from app.models.document import Document, DocumentItem, ItemVersion
logger = logging.getLogger(__name__)
async def get_document_with_items(slug: str, db: AsyncSession) -> Document | None:
"""Load a document with all its items and their versions, eagerly.
@@ -106,3 +109,221 @@ async def apply_version(
await db.refresh(item)
return item
async def reject_version(
    item_id: uuid.UUID,
    version_id: uuid.UUID,
    db: AsyncSession,
) -> ItemVersion:
    """Mark a version as rejected without modifying the item's text.

    Parameters
    ----------
    item_id:
        UUID of the DocumentItem the version must belong to.
    version_id:
        UUID of the ItemVersion to reject.
    db:
        Async database session.

    Returns
    -------
    ItemVersion
        The version, with status set to "rejected".

    Raises
    ------
    ValueError
        If the item or version is not found, or the version does not
        belong to the item.
    """
    # The item lookup only guards existence/ownership of the version.
    item = (
        await db.execute(select(DocumentItem).where(DocumentItem.id == item_id))
    ).scalar_one_or_none()
    if item is None:
        raise ValueError(f"Element de document introuvable : {item_id}")

    version = (
        await db.execute(select(ItemVersion).where(ItemVersion.id == version_id))
    ).scalar_one_or_none()
    if version is None:
        raise ValueError(f"Version introuvable : {version_id}")
    if version.item_id != item.id:
        raise ValueError(
            f"La version {version_id} n'appartient pas a l'element {item_id}"
        )

    version.status = "rejected"
    await db.commit()
    await db.refresh(version)
    return version
async def get_item_with_versions(
    item_id: uuid.UUID,
    db: AsyncSession,
) -> DocumentItem | None:
    """Load one document item together with its full version history.

    Uses ``selectinload`` so that accessing ``item.versions`` afterwards
    does not trigger lazy loading.

    Parameters
    ----------
    item_id:
        UUID of the DocumentItem.
    db:
        Async database session.

    Returns
    -------
    DocumentItem | None
        The item with versions loaded, or None when no item matches.
    """
    query = (
        select(DocumentItem)
        .options(selectinload(DocumentItem.versions))
        .where(DocumentItem.id == item_id)
    )
    return (await db.execute(query)).scalar_one_or_none()
async def reorder_items(
    document_id: uuid.UUID,
    items_order: list[tuple[uuid.UUID, int]],
    db: AsyncSession,
) -> list[DocumentItem]:
    """Update sort_order for multiple items in a document.

    The original implementation issued one SELECT per item (N+1 pattern);
    this version fetches all targeted items in a single batched query.

    Parameters
    ----------
    document_id:
        UUID of the document.
    items_order:
        List of (item_id, sort_order) tuples.
    db:
        Async database session.

    Returns
    -------
    list[DocumentItem]
        All items of the document, ordered by their (new) sort_order.

    Raises
    ------
    ValueError
        If any item is not found or does not belong to the document.
    """
    wanted_ids = [item_id for item_id, _ in items_order]
    if wanted_ids:
        result = await db.execute(
            select(DocumentItem).where(
                DocumentItem.document_id == document_id,
                DocumentItem.id.in_(wanted_ids),
            )
        )
        items_by_id = {item.id: item for item in result.scalars()}
        # Reject the whole batch if any referenced item is missing.
        for item_id in wanted_ids:
            if item_id not in items_by_id:
                raise ValueError(
                    f"Element {item_id} introuvable dans le document {document_id}"
                )
        for item_id, sort_order in items_order:
            items_by_id[item_id].sort_order = sort_order
    await db.commit()
    # Return all items of the document in their new order.
    result = await db.execute(
        select(DocumentItem)
        .where(DocumentItem.document_id == document_id)
        .order_by(DocumentItem.sort_order)
    )
    return list(result.scalars().all())
def serialize_document_to_text(doc: Document) -> str:
    """Render a document and its items as plain text for hashing/archival.

    The items must be eagerly loaded on ``doc`` before calling this —
    no database access happens here.

    Parameters
    ----------
    doc:
        Document with items loaded.

    Returns
    -------
    str
        Plain-text serialization suitable for hashing and archival.
    """
    out: list[str] = [
        f"# {doc.title}",
        f"Version: {doc.version}",
        f"Type: {doc.doc_type}",
        f"Statut: {doc.status}",
    ]
    if doc.description:
        out.append(f"Description: {doc.description}")
    out.append("")
    # Emit items in their display order.
    for item in sorted(doc.items, key=lambda entry: entry.sort_order):
        title_part = f" - {item.title}" if item.title else ""
        out.append(f"## {item.position}{title_part} [{item.item_type}]")
        out.append(item.current_text)
        out.append("")
    return "\n".join(out)
async def archive_document(slug: str, db: AsyncSession):
    """Serialize a document to text and archive it to the sanctuary.

    Parameters
    ----------
    slug:
        Slug of the document to archive.
    db:
        Async database session.

    Returns
    -------
    SanctuaryEntry
        The newly created sanctuary entry.

    Raises
    ------
    ValueError
        If the document is not found.
    """
    # Local import avoids a circular dependency between services.
    from app.services import sanctuary_service

    doc = await get_document_with_items(slug, db)
    if doc is None:
        raise ValueError(f"Document introuvable : {slug}")

    entry = await sanctuary_service.archive_to_sanctuary(
        entry_type="document",
        reference_id=doc.id,
        content=serialize_document_to_text(doc),
        title=f"{doc.title} v{doc.version}",
        db=db,
    )

    # Mirror whatever the pipeline managed to produce onto the document.
    if entry.ipfs_cid:
        doc.ipfs_cid = entry.ipfs_cid
    if entry.chain_tx_hash:
        doc.chain_anchor = entry.chain_tx_hash
    await db.commit()
    await db.refresh(doc)
    return entry

View File

@@ -0,0 +1,125 @@
"""IPFS service: upload, retrieve, and pin content via kubo HTTP API.
Uses httpx async client to communicate with the local kubo node.
All operations handle connection errors gracefully: they log a warning
and return None instead of crashing the caller.
"""
from __future__ import annotations
import logging
import httpx
from app.config import settings
logger = logging.getLogger(__name__)
# Timeout for IPFS operations (seconds)
_IPFS_TIMEOUT = 30.0
async def upload_to_ipfs(content: str | bytes) -> str | None:
    """Push *content* to the kubo node (POST /api/v0/add) and return its CID.

    Strings are UTF-8 encoded first. Every failure mode is logged and
    mapped to ``None`` so callers can degrade gracefully.

    Parameters
    ----------
    content:
        The content to upload.

    Returns
    -------
    str | None
        The IPFS CID of the uploaded content, or None on failure.
    """
    payload = content.encode("utf-8") if isinstance(content, str) else content
    try:
        async with httpx.AsyncClient(timeout=_IPFS_TIMEOUT) as client:
            response = await client.post(
                f"{settings.IPFS_API_URL}/api/v0/add",
                files={"file": ("content.txt", payload, "application/octet-stream")},
            )
            response.raise_for_status()
            cid = response.json().get("Hash")
    except httpx.ConnectError:
        logger.warning("Impossible de se connecter au noeud IPFS (%s)", settings.IPFS_API_URL)
        return None
    except httpx.HTTPStatusError as exc:
        logger.warning("Erreur HTTP IPFS lors de l'upload: %s", exc.response.status_code)
        return None
    except Exception:
        # Deliberate best-effort: never let an IPFS problem crash the caller.
        logger.warning("Erreur inattendue lors de l'upload IPFS", exc_info=True)
        return None
    if cid:
        logger.info("Contenu uploade sur IPFS: CID=%s", cid)
        return cid
    return None
async def get_from_ipfs(cid: str) -> bytes | None:
    """Fetch the raw bytes for *cid* from the kubo node (POST /api/v0/cat).

    Parameters
    ----------
    cid:
        The IPFS Content Identifier to retrieve.

    Returns
    -------
    bytes | None
        The raw content bytes, or None when retrieval fails for any reason
        (connection, HTTP error, or unexpected exception — all logged).
    """
    try:
        async with httpx.AsyncClient(timeout=_IPFS_TIMEOUT) as client:
            response = await client.post(
                f"{settings.IPFS_API_URL}/api/v0/cat",
                params={"arg": cid},
            )
            response.raise_for_status()
    except httpx.ConnectError:
        logger.warning("Impossible de se connecter au noeud IPFS (%s)", settings.IPFS_API_URL)
        return None
    except httpx.HTTPStatusError as exc:
        logger.warning("Erreur HTTP IPFS lors de la recuperation (CID=%s): %s", cid, exc.response.status_code)
        return None
    except Exception:
        # Deliberate best-effort: never let an IPFS problem crash the caller.
        logger.warning("Erreur inattendue lors de la recuperation IPFS (CID=%s)", cid, exc_info=True)
        return None
    logger.info("Contenu recupere depuis IPFS: CID=%s", cid)
    return response.content
async def pin(cid: str) -> bool:
    """Pin *cid* on the local IPFS node so it survives garbage collection.

    Parameters
    ----------
    cid:
        The IPFS Content Identifier to pin.

    Returns
    -------
    bool
        True when pinning succeeded, False on any failure (all logged).
    """
    try:
        async with httpx.AsyncClient(timeout=_IPFS_TIMEOUT) as client:
            response = await client.post(
                f"{settings.IPFS_API_URL}/api/v0/pin/add",
                params={"arg": cid},
            )
            response.raise_for_status()
    except httpx.ConnectError:
        logger.warning("Impossible de se connecter au noeud IPFS pour l'epinglage (%s)", settings.IPFS_API_URL)
        return False
    except httpx.HTTPStatusError as exc:
        logger.warning("Erreur HTTP IPFS lors de l'epinglage (CID=%s): %s", cid, exc.response.status_code)
        return False
    except Exception:
        # Deliberate best-effort: never let an IPFS problem crash the caller.
        logger.warning("Erreur inattendue lors de l'epinglage IPFS (CID=%s)", cid, exc_info=True)
        return False
    logger.info("Contenu epingle sur IPFS: CID=%s", cid)
    return True

View File

@@ -9,12 +9,17 @@ from __future__ import annotations
import hashlib
import json
import logging
import uuid
from datetime import datetime, timezone
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
from app.models.sanctuary import SanctuaryEntry
from app.services import ipfs_service
logger = logging.getLogger(__name__)
async def archive_to_sanctuary(
@@ -26,6 +31,12 @@ async def archive_to_sanctuary(
) -> SanctuaryEntry:
"""Hash content and create a sanctuary entry.
Pipeline:
1. Hash content (SHA-256)
2. Try to upload to IPFS via ipfs_service (catch errors, log, continue)
3. Try to anchor on-chain via blockchain_service (catch errors, log, continue)
4. Create SanctuaryEntry with whatever succeeded
Parameters
----------
entry_type:
@@ -45,33 +56,65 @@ async def archive_to_sanctuary(
SanctuaryEntry
The newly created sanctuary entry with content_hash set.
"""
# Compute SHA-256 hash of the content
# 1. Compute SHA-256 hash of the content
content_hash = hashlib.sha256(content.encode("utf-8")).hexdigest()
# Build metadata
metadata = {
metadata: dict = {
"archived_at": datetime.now(timezone.utc).isoformat(),
"entry_type": entry_type,
"content_length": len(content),
}
ipfs_cid: str | None = None
chain_tx_hash: str | None = None
chain_block: int | None = None
# 2. Try to upload to IPFS
try:
ipfs_cid = await ipfs_service.upload_to_ipfs(content)
if ipfs_cid:
# Pin the content to keep it available
await ipfs_service.pin(ipfs_cid)
metadata["ipfs_cid"] = ipfs_cid
logger.info("Contenu archive sur IPFS: CID=%s", ipfs_cid)
else:
logger.warning("Upload IPFS echoue (retour None) pour %s:%s", entry_type, reference_id)
except Exception:
logger.warning(
"Erreur lors de l'upload IPFS pour %s:%s",
entry_type, reference_id,
exc_info=True,
)
# 3. Try to anchor on-chain (still a structured stub)
try:
chain_tx_hash, chain_block = await _anchor_on_chain(content_hash)
if chain_tx_hash:
metadata["chain_tx_hash"] = chain_tx_hash
metadata["chain_block"] = chain_block
logger.info("Hash ancre on-chain: tx=%s block=%s", chain_tx_hash, chain_block)
except NotImplementedError:
logger.info("Ancrage on-chain pas encore implemente, etape ignoree")
except Exception:
logger.warning(
"Erreur lors de l'ancrage on-chain pour %s:%s",
entry_type, reference_id,
exc_info=True,
)
# 4. Create SanctuaryEntry with whatever succeeded
entry = SanctuaryEntry(
entry_type=entry_type,
reference_id=reference_id,
title=title,
content_hash=content_hash,
ipfs_cid=ipfs_cid,
chain_tx_hash=chain_tx_hash,
chain_block=chain_block,
metadata_json=json.dumps(metadata, ensure_ascii=False),
)
# TODO: Upload content to IPFS via kubo HTTP API
# ipfs_cid = await _upload_to_ipfs(content)
# entry.ipfs_cid = ipfs_cid
# TODO: Anchor hash on-chain via system.remark
# tx_hash, block_number = await _anchor_on_chain(content_hash)
# entry.chain_tx_hash = tx_hash
# entry.chain_block = block_number
db.add(entry)
await db.commit()
await db.refresh(entry)
@@ -79,31 +122,115 @@ async def archive_to_sanctuary(
return entry
async def verify_entry(
    entry_id: uuid.UUID,
    db: AsyncSession,
) -> dict:
    """Verify the integrity of a sanctuary entry.

    Re-fetches the content (from IPFS if available) and re-hashes it
    to compare with the stored content_hash.

    Parameters
    ----------
    entry_id:
        UUID of the SanctuaryEntry to verify.
    db:
        Async database session.

    Returns
    -------
    dict
        Verification report with keys ``entry_id``, ``valid``,
        ``stored_hash``, ``computed_hash`` (None if content unavailable),
        ``source`` (``"ipfs"`` or ``"unavailable"``), and ``detail``.

    Raises
    ------
    ValueError
        If the entry is not found.
    """
    result = await db.execute(
        select(SanctuaryEntry).where(SanctuaryEntry.id == entry_id)
    )
    entry = result.scalar_one_or_none()
    if entry is None:
        raise ValueError(f"Entree sanctuaire introuvable : {entry_id}")

    stored_hash = entry.content_hash
    computed_hash: str | None = None
    source = "unavailable"

    # Try to re-fetch the archived bytes from IPFS when a CID is recorded.
    if entry.ipfs_cid:
        try:
            content_bytes = await ipfs_service.get_from_ipfs(entry.ipfs_cid)
        except Exception:
            logger.warning(
                "Impossible de recuperer le contenu IPFS pour verification (CID=%s)",
                entry.ipfs_cid,
                exc_info=True,
            )
        else:
            if content_bytes is not None:
                computed_hash = hashlib.sha256(content_bytes).hexdigest()
                source = "ipfs"

    if computed_hash is None:
        is_valid = False
        detail = "Contenu indisponible pour la verification"
    else:
        is_valid = computed_hash == stored_hash
        detail = (
            "Integrite verifiee"
            if is_valid
            else "Hash different - contenu potentiellement altere"
        )

    return {
        "entry_id": entry.id,
        "valid": is_valid,
        "stored_hash": stored_hash,
        "computed_hash": computed_hash,
        "source": source,
        "detail": detail,
    }
async def _anchor_on_chain(content_hash: str) -> tuple[str, int]:
async def get_entries_by_reference(
    reference_id: uuid.UUID,
    db: AsyncSession,
) -> list[SanctuaryEntry]:
    """Query all sanctuary entries for a given reference_id.

    Parameters
    ----------
    reference_id:
        UUID of the referenced entity (document, decision, etc.).
    db:
        Async database session.

    Returns
    -------
    list[SanctuaryEntry]
        All matching entries, newest first.
    """
    query = (
        select(SanctuaryEntry)
        .where(SanctuaryEntry.reference_id == reference_id)
        .order_by(SanctuaryEntry.created_at.desc())
    )
    return list((await db.execute(query)).scalars().all())
async def _anchor_on_chain(content_hash: str) -> tuple[str | None, int | None]:
"""Anchor a content hash on-chain via system.remark.
TODO: Implement using substrate-interface.
Currently a stub. When implemented, this will use substrate-interface
to submit a system.remark extrinsic containing the content hash.
Example::
@@ -119,5 +246,15 @@ async def _anchor_on_chain(content_hash: str) -> tuple[str, int]:
extrinsic = substrate.create_signed_extrinsic(call=call, keypair=keypair)
receipt = substrate.submit_extrinsic(extrinsic, wait_for_inclusion=True)
return receipt.extrinsic_hash, receipt.block_number
Parameters
----------
content_hash:
The SHA-256 hash to anchor.
Returns
-------
tuple[str | None, int | None]
(tx_hash, block_number) or (None, None) if not implemented.
"""
raise NotImplementedError("Ancrage on-chain pas encore implemente")

View File

@@ -0,0 +1,418 @@
"""Tests for document service: apply_version, reject_version, and serialization.
These are pure unit tests that mock the database layer to test
the service logic in isolation.
"""
from __future__ import annotations
import hashlib
import uuid
from datetime import datetime, timezone
from unittest.mock import AsyncMock, MagicMock
import pytest
sqlalchemy = pytest.importorskip("sqlalchemy", reason="sqlalchemy required for document service tests")
from app.services.document_service import ( # noqa: E402
apply_version,
reject_version,
serialize_document_to_text,
)
# ---------------------------------------------------------------------------
# Helpers: mock objects that behave like SQLAlchemy models
# ---------------------------------------------------------------------------
def _make_item(
    item_id: uuid.UUID | None = None,
    document_id: uuid.UUID | None = None,
    current_text: str = "Texte original",
    position: str = "1",
    item_type: str = "clause",
    title: str | None = None,
    sort_order: int = 0,
) -> MagicMock:
    """Build a MagicMock shaped like a DocumentItem with sensible defaults."""
    attrs = {
        "id": item_id or uuid.uuid4(),
        "document_id": document_id or uuid.uuid4(),
        "current_text": current_text,
        "position": position,
        "item_type": item_type,
        "title": title,
        "sort_order": sort_order,
        "created_at": datetime.now(timezone.utc),
        "updated_at": datetime.now(timezone.utc),
    }
    item = MagicMock()
    for name, value in attrs.items():
        setattr(item, name, value)
    return item
def _make_version(
    version_id: uuid.UUID | None = None,
    item_id: uuid.UUID | None = None,
    proposed_text: str = "Texte propose",
    status: str = "proposed",
) -> MagicMock:
    """Build a MagicMock shaped like an ItemVersion; optional fields are None."""
    version = MagicMock()
    version.id = version_id or uuid.uuid4()
    version.item_id = item_id or uuid.uuid4()
    version.proposed_text = proposed_text
    version.status = status
    # Optional relations and metadata default to empty.
    for attr in ("diff_text", "rationale", "decision_id", "proposed_by_id"):
        setattr(version, attr, None)
    version.created_at = datetime.now(timezone.utc)
    return version
def _make_document(
    doc_id: uuid.UUID | None = None,
    slug: str = "test-doc",
    title: str = "Document de test",
    doc_type: str = "licence",
    version: str = "1.0.0",
    status: str = "active",
    description: str | None = "Description de test",
    items: list | None = None,
) -> MagicMock:
    """Build a MagicMock shaped like a Document carrying the given metadata."""
    now = datetime.now(timezone.utc)
    doc = MagicMock()
    doc.id = doc_id or uuid.uuid4()
    doc.slug = slug
    doc.title = title
    doc.doc_type = doc_type
    doc.version = version
    doc.status = status
    doc.description = description
    # Sanctuary references start unset, as for a freshly created document.
    doc.ipfs_cid = None
    doc.chain_anchor = None
    doc.items = items or []
    doc.created_at = now
    doc.updated_at = now
    return doc
def _make_async_db(
    item: MagicMock | None = None,
    version: MagicMock | None = None,
    other_versions: list[MagicMock] | None = None,
) -> AsyncMock:
    """Build a mock async session whose execute() yields canned results.

    Sequential execute() calls resolve in order:
    1st -> item lookup, 2nd -> version lookup,
    3rd (only when ``other_versions`` is given) -> sibling versions.
    """

    def _scalar_result(value):
        # Mimics a Result whose scalar_one_or_none() returns `value`.
        res = MagicMock()
        res.scalar_one_or_none.return_value = value
        return res

    queued = [_scalar_result(item), _scalar_result(version)]
    if other_versions is not None:
        siblings = MagicMock()
        iterable = MagicMock()
        iterable.__iter__ = MagicMock(return_value=iter(other_versions))
        siblings.scalars.return_value = iterable
        queued.append(siblings)

    db = AsyncMock()
    db.execute = AsyncMock(side_effect=queued)
    db.commit = AsyncMock()
    db.refresh = AsyncMock()
    return db
# ---------------------------------------------------------------------------
# Tests: apply_version
# ---------------------------------------------------------------------------
class TestApplyVersion:
    """Unit tests for document_service.apply_version."""

    @pytest.mark.asyncio
    async def test_apply_version_updates_text(self):
        """The accepted version's proposed_text becomes the item's current_text."""
        target_item_id = uuid.uuid4()
        target_version_id = uuid.uuid4()
        mock_version = _make_version(
            version_id=target_version_id,
            item_id=target_item_id,
            proposed_text="Nouveau texte",
        )
        mock_item = _make_item(item_id=target_item_id, current_text="Ancien texte")
        session = _make_async_db(
            item=mock_item, version=mock_version, other_versions=[]
        )

        updated = await apply_version(target_item_id, target_version_id, session)

        assert updated.current_text == "Nouveau texte"
        assert mock_version.status == "accepted"
        session.commit.assert_awaited_once()

    @pytest.mark.asyncio
    async def test_apply_version_rejects_other_pending(self):
        """Every other pending/voting version is flipped to 'rejected'."""
        target_item_id = uuid.uuid4()
        target_version_id = uuid.uuid4()
        pending = _make_version(item_id=target_item_id, status="proposed")
        in_vote = _make_version(item_id=target_item_id, status="voting")
        session = _make_async_db(
            item=_make_item(item_id=target_item_id),
            version=_make_version(version_id=target_version_id, item_id=target_item_id),
            other_versions=[pending, in_vote],
        )

        await apply_version(target_item_id, target_version_id, session)

        assert pending.status == "rejected"
        assert in_vote.status == "rejected"

    @pytest.mark.asyncio
    async def test_apply_version_item_not_found(self):
        """A missing item raises ValueError."""
        session = _make_async_db(item=None, version=None)
        with pytest.raises(ValueError, match="Element de document introuvable"):
            await apply_version(uuid.uuid4(), uuid.uuid4(), session)

    @pytest.mark.asyncio
    async def test_apply_version_version_not_found(self):
        """A missing version raises ValueError."""
        existing_item = _make_item()
        session = _make_async_db(item=existing_item, version=None)
        with pytest.raises(ValueError, match="Version introuvable"):
            await apply_version(existing_item.id, uuid.uuid4(), session)

    @pytest.mark.asyncio
    async def test_apply_version_wrong_item(self):
        """A version attached to another item raises ValueError."""
        target_item_id = uuid.uuid4()
        target_version_id = uuid.uuid4()
        foreign_version = _make_version(
            version_id=target_version_id, item_id=uuid.uuid4()
        )
        session = _make_async_db(
            item=_make_item(item_id=target_item_id), version=foreign_version
        )
        with pytest.raises(ValueError, match="n'appartient pas"):
            await apply_version(target_item_id, target_version_id, session)
# ---------------------------------------------------------------------------
# Tests: reject_version
# ---------------------------------------------------------------------------
class TestRejectVersion:
    """Unit tests for document_service.reject_version."""

    @pytest.mark.asyncio
    async def test_reject_version_sets_status(self):
        """A proposed version ends up with status 'rejected' after the call."""
        target_item_id = uuid.uuid4()
        target_version_id = uuid.uuid4()
        proposed_version = _make_version(
            version_id=target_version_id,
            item_id=target_item_id,
            status="proposed",
        )
        session = _make_async_db(
            item=_make_item(item_id=target_item_id), version=proposed_version
        )

        rejected = await reject_version(target_item_id, target_version_id, session)

        assert rejected.status == "rejected"
        session.commit.assert_awaited_once()

    @pytest.mark.asyncio
    async def test_reject_version_item_not_found(self):
        """A missing item raises ValueError."""
        session = _make_async_db(item=None, version=None)
        with pytest.raises(ValueError, match="Element de document introuvable"):
            await reject_version(uuid.uuid4(), uuid.uuid4(), session)

    @pytest.mark.asyncio
    async def test_reject_version_version_not_found(self):
        """A missing version raises ValueError."""
        existing_item = _make_item()
        session = _make_async_db(item=existing_item, version=None)
        with pytest.raises(ValueError, match="Version introuvable"):
            await reject_version(existing_item.id, uuid.uuid4(), session)

    @pytest.mark.asyncio
    async def test_reject_version_wrong_item(self):
        """A version attached to another item raises ValueError."""
        target_item_id = uuid.uuid4()
        target_version_id = uuid.uuid4()
        foreign_version = _make_version(
            version_id=target_version_id, item_id=uuid.uuid4()
        )
        session = _make_async_db(
            item=_make_item(item_id=target_item_id), version=foreign_version
        )
        with pytest.raises(ValueError, match="n'appartient pas"):
            await reject_version(target_item_id, target_version_id, session)
# ---------------------------------------------------------------------------
# Tests: serialize_document_to_text
# ---------------------------------------------------------------------------
class TestSerializeDocumentToText:
    """Unit tests for the archival text serialization of documents."""

    def test_basic_serialization(self):
        """Header lines and every item section appear in the output."""
        document_id = uuid.uuid4()
        children = [
            _make_item(
                document_id=document_id,
                position="1",
                title="Preambule",
                item_type="preamble",
                current_text="Le texte du preambule.",
                sort_order=0,
            ),
            _make_item(
                document_id=document_id,
                position="2",
                title="Article premier",
                item_type="clause",
                current_text="Le texte de l'article premier.",
                sort_order=1,
            ),
            _make_item(
                document_id=document_id,
                position="2.1",
                title=None,
                item_type="rule",
                current_text="Sous-article sans titre.",
                sort_order=2,
            ),
        ]
        doc = _make_document(
            doc_id=document_id,
            title="Licence G1",
            version="2.0.0",
            doc_type="licence",
            status="active",
            description="La licence monetaire de la G1",
            items=children,
        )

        rendered = serialize_document_to_text(doc)

        # Header block.
        for fragment in (
            "# Licence G1",
            "Version: 2.0.0",
            "Type: licence",
            "Statut: active",
            "Description: La licence monetaire de la G1",
        ):
            assert fragment in rendered
        # Item sections: titled items render "position - title [type]",
        # untitled ones only "position [type]".
        for fragment in (
            "## 1 - Preambule [preamble]",
            "Le texte du preambule.",
            "## 2 - Article premier [clause]",
            "Le texte de l'article premier.",
            "## 2.1 [rule]",
            "Sous-article sans titre.",
        ):
            assert fragment in rendered

    def test_serialization_ordering(self):
        """Serialization follows sort_order even when items arrive shuffled."""
        document_id = uuid.uuid4()
        second = _make_item(
            document_id=document_id,
            position="2",
            title="Second",
            current_text="Texte B",
            sort_order=1,
        )
        first = _make_item(
            document_id=document_id,
            position="1",
            title="Premier",
            current_text="Texte A",
            sort_order=0,
        )
        # Deliberately pass the items out of order.
        doc = _make_document(doc_id=document_id, items=[second, first])

        rendered = serialize_document_to_text(doc)

        assert rendered.index("Texte A") < rendered.index("Texte B"), (
            "Items should be ordered by sort_order"
        )

    def test_serialization_without_description(self):
        """No 'Description:' line is emitted when description is None."""
        rendered = serialize_document_to_text(
            _make_document(description=None, items=[])
        )
        assert "Description:" not in rendered

    def test_serialization_hash_is_deterministic(self):
        """Two documents with identical content hash to the same digest."""
        document_id = uuid.uuid4()
        shared_item = _make_item(
            document_id=document_id,
            position="1",
            title="Test",
            current_text="Contenu identique",
            sort_order=0,
        )
        digests = []
        for _ in range(2):
            doc = _make_document(
                doc_id=document_id, title="Doc", version="1.0", items=[shared_item]
            )
            rendered = serialize_document_to_text(doc)
            digests.append(hashlib.sha256(rendered.encode("utf-8")).hexdigest())
        assert digests[0] == digests[1]

    def test_empty_document(self):
        """Only the header block is produced for an item-less document."""
        rendered = serialize_document_to_text(_make_document(items=[]))
        assert "# Document de test" in rendered
        assert "Version: 1.0.0" in rendered
        # Header alone: at least title, version, type and status lines.
        assert len(rendered.strip().split("\n")) >= 4