From 25437f24e31545d5b318433c0c92d252be67d3b3 Mon Sep 17 00:00:00 2001 From: Yvv Date: Sat, 28 Feb 2026 12:46:11 +0100 Subject: [PATCH] Sprint 1 : scaffolding complet de Glibredecision Plateforme de decisions collectives pour Duniter/G1. Backend FastAPI async + PostgreSQL (14 tables, 8 routers, 6 services, moteur de vote avec formule d'inertie WoT/Smith/TechComm). Frontend Nuxt 4 + Nuxt UI v3 + Pinia (9 pages, 5 stores). Infrastructure Docker + Woodpecker CI + Traefik. Documentation technique et utilisateur (15 fichiers). Seed : Licence G1, Engagement Forgeron v2.0.0, 4 protocoles de vote. 30 tests unitaires (formules, mode params, vote nuance) -- tous verts. Co-Authored-By: Claude Opus 4.6 --- .env.example | 23 + .gitignore | 42 ++ .woodpecker.yml | 75 +++ CLAUDE.md | 25 + backend/alembic.ini | 68 +++ backend/alembic/env.py | 109 ++++ backend/alembic/script.py.mako | 26 + backend/app/__init__.py | 0 backend/app/config.py | 33 ++ backend/app/database.py | 21 + backend/app/engine/__init__.py | 0 backend/app/engine/mode_params.py | 107 ++++ backend/app/engine/nuanced_vote.py | 95 ++++ backend/app/engine/smith_threshold.py | 31 ++ backend/app/engine/techcomm_threshold.py | 31 ++ backend/app/engine/threshold.py | 85 +++ backend/app/main.py | 44 ++ backend/app/models/__init__.py | 19 + backend/app/models/cache.py | 18 + backend/app/models/decision.py | 42 ++ backend/app/models/document.py | 60 +++ backend/app/models/mandate.py | 43 ++ backend/app/models/protocol.py | 52 ++ backend/app/models/sanctuary.py | 23 + backend/app/models/user.py | 35 ++ backend/app/models/vote.py | 71 +++ backend/app/routers/__init__.py | 0 backend/app/routers/auth.py | 162 ++++++ backend/app/routers/decisions.py | 143 +++++ backend/app/routers/documents.py | 262 +++++++++ backend/app/routers/mandates.py | 167 ++++++ backend/app/routers/protocols.py | 139 +++++ backend/app/routers/sanctuary.py | 73 +++ backend/app/routers/votes.py | 306 +++++++++++ backend/app/routers/websocket.py | 140 +++++ 
backend/app/schemas/__init__.py | 0 backend/app/schemas/auth.py | 53 ++ backend/app/schemas/decision.py | 72 +++ backend/app/schemas/document.py | 103 ++++ backend/app/schemas/mandate.py | 70 +++ backend/app/schemas/protocol.py | 83 +++ backend/app/schemas/sanctuary.py | 35 ++ backend/app/schemas/vote.py | 89 +++ backend/app/services/__init__.py | 0 backend/app/services/auth_service.py | 96 ++++ backend/app/services/blockchain_service.py | 87 +++ backend/app/services/decision_service.py | 117 ++++ backend/app/services/document_service.py | 108 ++++ backend/app/services/mandate_service.py | 118 ++++ backend/app/services/sanctuary_service.py | 123 +++++ backend/app/services/vote_service.py | 199 +++++++ backend/app/tests/__init__.py | 0 backend/app/tests/test_mode_params.py | 75 +++ backend/app/tests/test_nuanced.py | 120 +++++ backend/app/tests/test_threshold.py | 180 +++++++ backend/conftest.py | 6 + backend/pytest.ini | 3 + backend/requirements.txt | 16 + backend/seed.py | 530 ++++++++++++++++++ docker/backend.Dockerfile | 44 ++ docker/docker-compose.dev.yml | 37 ++ docker/docker-compose.yml | 85 +++ docker/frontend.Dockerfile | 45 ++ docs/content/dev/1.index.md | 17 + docs/content/dev/2.architecture.md | 81 +++ docs/content/dev/3.api-reference.md | 106 ++++ docs/content/dev/4.database-schema.md | 312 +++++++++++ docs/content/dev/5.formulas.md | 137 +++++ docs/content/dev/6.blockchain-integration.md | 165 ++++++ docs/content/dev/7.contributing.md | 145 +++++ docs/content/user/1.index.md | 28 + docs/content/user/2.getting-started.md | 51 ++ docs/content/user/3.documents.md | 57 ++ docs/content/user/4.decisions.md | 61 +++ docs/content/user/5.voting.md | 90 ++++ docs/content/user/6.mandates.md | 57 ++ docs/content/user/7.sanctuary.md | 61 +++ docs/content/user/8.faq.md | 80 +++ frontend/app/app.vue | 128 +++++ frontend/app/composables/useApi.ts | 31 ++ frontend/app/pages/decisions/[id].vue | 271 ++++++++++ frontend/app/pages/decisions/index.vue | 190 +++++++ 
frontend/app/pages/documents/[slug].vue | 230 ++++++++ frontend/app/pages/documents/index.vue | 191 +++++++ frontend/app/pages/index.vue | 170 ++++++ frontend/app/pages/login.vue | 177 ++++++ frontend/app/pages/mandates/index.vue | 222 ++++++++ frontend/app/pages/protocols/index.vue | 257 +++++++++ frontend/app/pages/sanctuary/index.vue | 268 ++++++++++ frontend/app/stores/auth.ts | 180 +++++++ frontend/app/stores/decisions.ts | 138 +++++ frontend/app/stores/documents.ts | 149 ++++++ frontend/app/stores/protocols.ts | 100 ++++ frontend/app/stores/votes.ts | 178 ++++++ frontend/app/utils/mode-params.ts | 164 ++++++ frontend/app/utils/threshold.ts | 84 +++ frontend/nuxt.config.ts | 28 + frontend/package.json | 29 + frontend/tsconfig.json | 3 + research_duniter_forum.md | 536 +++++++++++++++++++ 100 files changed, 10236 insertions(+) create mode 100644 .env.example create mode 100644 .gitignore create mode 100644 .woodpecker.yml create mode 100644 CLAUDE.md create mode 100644 backend/alembic.ini create mode 100644 backend/alembic/env.py create mode 100644 backend/alembic/script.py.mako create mode 100644 backend/app/__init__.py create mode 100644 backend/app/config.py create mode 100644 backend/app/database.py create mode 100644 backend/app/engine/__init__.py create mode 100644 backend/app/engine/mode_params.py create mode 100644 backend/app/engine/nuanced_vote.py create mode 100644 backend/app/engine/smith_threshold.py create mode 100644 backend/app/engine/techcomm_threshold.py create mode 100644 backend/app/engine/threshold.py create mode 100644 backend/app/main.py create mode 100644 backend/app/models/__init__.py create mode 100644 backend/app/models/cache.py create mode 100644 backend/app/models/decision.py create mode 100644 backend/app/models/document.py create mode 100644 backend/app/models/mandate.py create mode 100644 backend/app/models/protocol.py create mode 100644 backend/app/models/sanctuary.py create mode 100644 backend/app/models/user.py create mode 
100644 backend/app/models/vote.py create mode 100644 backend/app/routers/__init__.py create mode 100644 backend/app/routers/auth.py create mode 100644 backend/app/routers/decisions.py create mode 100644 backend/app/routers/documents.py create mode 100644 backend/app/routers/mandates.py create mode 100644 backend/app/routers/protocols.py create mode 100644 backend/app/routers/sanctuary.py create mode 100644 backend/app/routers/votes.py create mode 100644 backend/app/routers/websocket.py create mode 100644 backend/app/schemas/__init__.py create mode 100644 backend/app/schemas/auth.py create mode 100644 backend/app/schemas/decision.py create mode 100644 backend/app/schemas/document.py create mode 100644 backend/app/schemas/mandate.py create mode 100644 backend/app/schemas/protocol.py create mode 100644 backend/app/schemas/sanctuary.py create mode 100644 backend/app/schemas/vote.py create mode 100644 backend/app/services/__init__.py create mode 100644 backend/app/services/auth_service.py create mode 100644 backend/app/services/blockchain_service.py create mode 100644 backend/app/services/decision_service.py create mode 100644 backend/app/services/document_service.py create mode 100644 backend/app/services/mandate_service.py create mode 100644 backend/app/services/sanctuary_service.py create mode 100644 backend/app/services/vote_service.py create mode 100644 backend/app/tests/__init__.py create mode 100644 backend/app/tests/test_mode_params.py create mode 100644 backend/app/tests/test_nuanced.py create mode 100644 backend/app/tests/test_threshold.py create mode 100644 backend/conftest.py create mode 100644 backend/pytest.ini create mode 100644 backend/requirements.txt create mode 100644 backend/seed.py create mode 100644 docker/backend.Dockerfile create mode 100644 docker/docker-compose.dev.yml create mode 100644 docker/docker-compose.yml create mode 100644 docker/frontend.Dockerfile create mode 100644 docs/content/dev/1.index.md create mode 100644 
docs/content/dev/2.architecture.md create mode 100644 docs/content/dev/3.api-reference.md create mode 100644 docs/content/dev/4.database-schema.md create mode 100644 docs/content/dev/5.formulas.md create mode 100644 docs/content/dev/6.blockchain-integration.md create mode 100644 docs/content/dev/7.contributing.md create mode 100644 docs/content/user/1.index.md create mode 100644 docs/content/user/2.getting-started.md create mode 100644 docs/content/user/3.documents.md create mode 100644 docs/content/user/4.decisions.md create mode 100644 docs/content/user/5.voting.md create mode 100644 docs/content/user/6.mandates.md create mode 100644 docs/content/user/7.sanctuary.md create mode 100644 docs/content/user/8.faq.md create mode 100644 frontend/app/app.vue create mode 100644 frontend/app/composables/useApi.ts create mode 100644 frontend/app/pages/decisions/[id].vue create mode 100644 frontend/app/pages/decisions/index.vue create mode 100644 frontend/app/pages/documents/[slug].vue create mode 100644 frontend/app/pages/documents/index.vue create mode 100644 frontend/app/pages/index.vue create mode 100644 frontend/app/pages/login.vue create mode 100644 frontend/app/pages/mandates/index.vue create mode 100644 frontend/app/pages/protocols/index.vue create mode 100644 frontend/app/pages/sanctuary/index.vue create mode 100644 frontend/app/stores/auth.ts create mode 100644 frontend/app/stores/decisions.ts create mode 100644 frontend/app/stores/documents.ts create mode 100644 frontend/app/stores/protocols.ts create mode 100644 frontend/app/stores/votes.ts create mode 100644 frontend/app/utils/mode-params.ts create mode 100644 frontend/app/utils/threshold.ts create mode 100644 frontend/nuxt.config.ts create mode 100644 frontend/package.json create mode 100644 frontend/tsconfig.json create mode 100644 research_duniter_forum.md diff --git a/.env.example b/.env.example new file mode 100644 index 0000000..82ad9dd --- /dev/null +++ b/.env.example @@ -0,0 +1,23 @@ +# PostgreSQL 
+POSTGRES_DB=glibredecision +POSTGRES_USER=glibredecision +POSTGRES_PASSWORD=change-me-in-production +DATABASE_URL=postgresql+asyncpg://glibredecision:change-me-in-production@localhost:5432/glibredecision + +# Backend +SECRET_KEY=change-me-in-production-with-a-real-secret-key +DEBUG=true +CORS_ORIGINS=["http://localhost:3002"] + +# Duniter V2 RPC +DUNITER_RPC_URL=wss://gdev.p2p.legal/ws + +# IPFS +IPFS_API_URL=http://localhost:5001 +IPFS_GATEWAY_URL=http://localhost:8080 + +# Frontend +NUXT_PUBLIC_API_BASE=http://localhost:8002/api/v1 + +# Docker / Production +DOMAIN=glibredecision.org diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..be90320 --- /dev/null +++ b/.gitignore @@ -0,0 +1,42 @@ +# Dependencies +node_modules/ +__pycache__/ +*.pyc +.venv/ +venv/ + +# Build +.output/ +.nuxt/ +dist/ +*.egg-info/ + +# Environment +.env +*.env.local + +# IDE +.vscode/ +.idea/ + +# OS +.DS_Store +Thumbs.db + +# Database +*.db +*.sqlite + +# Docker volumes +postgres-data/ + +# Logs +*.log + +# Testing +.coverage +htmlcov/ +.pytest_cache/ + +# IPFS +ipfs-data/ diff --git a/.woodpecker.yml b/.woodpecker.yml new file mode 100644 index 0000000..3d1e7f1 --- /dev/null +++ b/.woodpecker.yml @@ -0,0 +1,75 @@ +when: + branch: main + event: push + +steps: + test-backend: + image: python:3.11-slim + commands: + - cd backend + - pip install --no-cache-dir -r requirements.txt + - pytest app/tests/ -v --tb=short + + test-frontend: + image: node:20-slim + commands: + - cd frontend + - npm ci + - npm run build + + docker-backend: + image: woodpeckerci/plugin-docker-buildx + depends_on: + - test-backend + settings: + repo: ${CI_FORGE_URL}/${CI_REPO} + dockerfile: docker/backend.Dockerfile + context: . 
+ tag: + - latest + - ${CI_COMMIT_SHA:0:8} + target: production + registry: + from_secret: docker_registry + username: + from_secret: docker_username + password: + from_secret: docker_password + + docker-frontend: + image: woodpeckerci/plugin-docker-buildx + depends_on: + - test-frontend + settings: + repo: ${CI_FORGE_URL}/${CI_REPO} + dockerfile: docker/frontend.Dockerfile + context: . + tag: + - latest + - ${CI_COMMIT_SHA:0:8} + target: production + registry: + from_secret: docker_registry + username: + from_secret: docker_username + password: + from_secret: docker_password + + deploy: + image: appleboy/drone-ssh + depends_on: + - docker-backend + - docker-frontend + settings: + host: + from_secret: deploy_host + username: + from_secret: deploy_username + key: + from_secret: deploy_key + port: 22 + script: + - cd /opt/glibredecision + - docker compose -f docker/docker-compose.yml pull + - docker compose -f docker/docker-compose.yml up -d --remove-orphans + - docker image prune -f diff --git a/CLAUDE.md b/CLAUDE.md new file mode 100644 index 0000000..1894873 --- /dev/null +++ b/CLAUDE.md @@ -0,0 +1,25 @@ +# Glibredecision + +Plateforme de decisions collectives pour la communaute Duniter/G1. 
+ +## Stack +- **Frontend**: Nuxt 4 + Nuxt UI v3 + Pinia + UnoCSS (port 3002) +- **Backend**: Python FastAPI + SQLAlchemy async + PostgreSQL asyncpg (port 8002) +- **Auth**: Duniter V2 Ed25519 challenge-response +- **Sanctuaire**: IPFS (kubo) + hash on-chain (system.remark) + +## Commands +- Backend: `cd backend && uvicorn app.main:app --port 8002 --reload` +- Backend tests: `cd backend && pytest app/tests/ -v` +- Frontend: `cd frontend && npm run dev` +- Frontend build: `cd frontend && npm run build` +- Migrations: `cd backend && alembic upgrade head` +- Docker: `docker compose -f docker/docker-compose.yml up` + +## Conventions +- French for UI text and documentation +- English for code (variable names, comments, docstrings) +- API versioned under `/api/v1/` +- Pydantic v2 for all schemas +- Async everywhere (SQLAlchemy, FastAPI) +- Ed25519 signatures for vote integrity diff --git a/backend/alembic.ini b/backend/alembic.ini new file mode 100644 index 0000000..49fc2f9 --- /dev/null +++ b/backend/alembic.ini @@ -0,0 +1,68 @@ +[alembic] +# path to migration scripts +script_location = alembic + +# template used to generate migration file names +file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s + +# sys.path prepend - ensures app is importable +prepend_sys_path = . + +# timezone to use when rendering the date within the migration file +# as well as the filename.
+timezone = UTC + +# max length of characters to apply to the "slug" field +truncate_slug_length = 40 + +# set to 'true' to run the environment during +# the 'revision' command, regardless of autogenerate +# revision_environment = false + +# set to 'true' to allow .hierarchical revisions +# new_file_template = + +# the output encoding used when revision files +# are written from script.py.mako +output_encoding = utf-8 + +# DATABASE_URL is read from app.config.settings in env.py +# (not from sqlalchemy.url here) +sqlalchemy.url = driver://user:pass@localhost/dbname + + +[post_write_hooks] + +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = WARN +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARN +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = INFO +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/backend/alembic/env.py b/backend/alembic/env.py new file mode 100644 index 0000000..a0e03c9 --- /dev/null +++ b/backend/alembic/env.py @@ -0,0 +1,109 @@ +"""Alembic async environment for Glibredecision. + +Uses asyncpg via SQLAlchemy's async engine. +All models are imported so that Base.metadata is fully populated +before autogenerate runs. 
+""" + +from __future__ import annotations + +import asyncio +from logging.config import fileConfig + +from sqlalchemy import pool +from sqlalchemy.engine import Connection +from sqlalchemy.ext.asyncio import async_engine_from_config + +from alembic import context + +# Import settings to get the DATABASE_URL +from app.config import settings + +# Import Base for target_metadata +from app.database import Base + +# Import ALL models so their tables are registered on Base.metadata +from app.models import ( # noqa: F401 + DuniterIdentity, + Session, + Document, + DocumentItem, + ItemVersion, + Decision, + DecisionStep, + VoteSession, + Vote, + Mandate, + MandateStep, + VotingProtocol, + FormulaConfig, + SanctuaryEntry, + BlockchainCache, +) + +# Alembic Config object +config = context.config + +# Interpret the config file for Python logging +if config.config_file_name is not None: + fileConfig(config.config_file_name) + +# Override sqlalchemy.url with the real DATABASE_URL from settings +config.set_main_option("sqlalchemy.url", settings.DATABASE_URL) + +# Target metadata for autogenerate support +target_metadata = Base.metadata + + +def run_migrations_offline() -> None: + """Run migrations in 'offline' mode. + + Configures the context with just a URL and not an Engine. + Calls to context.execute() emit the given string to the script output. 
+ """ + url = config.get_main_option("sqlalchemy.url") + context.configure( + url=url, + target_metadata=target_metadata, + literal_binds=True, + dialect_opts={"paramstyle": "named"}, + ) + + with context.begin_transaction(): + context.run_migrations() + + +def do_run_migrations(connection: Connection) -> None: + """Run migrations with an active connection.""" + context.configure( + connection=connection, + target_metadata=target_metadata, + ) + + with context.begin_transaction(): + context.run_migrations() + + +async def run_async_migrations() -> None: + """Run migrations in 'online' mode with an async engine.""" + connectable = async_engine_from_config( + config.get_section(config.config_ini_section, {}), + prefix="sqlalchemy.", + poolclass=pool.NullPool, + ) + + async with connectable.connect() as connection: + await connection.run_sync(do_run_migrations) + + await connectable.dispose() + + +def run_migrations_online() -> None: + """Run migrations in 'online' mode.""" + asyncio.run(run_async_migrations()) + + +if context.is_offline_mode(): + run_migrations_offline() +else: + run_migrations_online() diff --git a/backend/alembic/script.py.mako b/backend/alembic/script.py.mako new file mode 100644 index 0000000..fbc4b07 --- /dev/null +++ b/backend/alembic/script.py.mako @@ -0,0 +1,26 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from typing import Sequence, Union + +from alembic import op +import sqlalchemy as sa +${imports if imports else ""} + +# revision identifiers, used by Alembic. 
+revision: str = ${repr(up_revision)} +down_revision: Union[str, None] = ${repr(down_revision)} +branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)} +depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)} + + +def upgrade() -> None: + ${upgrades if upgrades else "pass"} + + +def downgrade() -> None: + ${downgrades if downgrades else "pass"} diff --git a/backend/app/__init__.py b/backend/app/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/backend/app/config.py b/backend/app/config.py new file mode 100644 index 0000000..6d0d886 --- /dev/null +++ b/backend/app/config.py @@ -0,0 +1,33 @@ +from pydantic_settings import BaseSettings +from pathlib import Path + + +class Settings(BaseSettings): + APP_NAME: str = "Glibredecision" + DEBUG: bool = True + + # Database + DATABASE_URL: str = "postgresql+asyncpg://glibredecision:change-me-in-production@localhost:5432/glibredecision" + + # Auth + SECRET_KEY: str = "change-me-in-production-with-a-real-secret-key" + CHALLENGE_EXPIRE_SECONDS: int = 300 + TOKEN_EXPIRE_HOURS: int = 24 + + # Duniter V2 RPC + DUNITER_RPC_URL: str = "wss://gdev.p2p.legal/ws" + + # IPFS + IPFS_API_URL: str = "http://localhost:5001" + IPFS_GATEWAY_URL: str = "http://localhost:8080" + + # CORS + CORS_ORIGINS: list[str] = ["http://localhost:3002"] + + # Paths + BASE_DIR: Path = Path(__file__).resolve().parent.parent + + model_config = {"env_file": ".env", "env_file_encoding": "utf-8"} + + +settings = Settings() diff --git a/backend/app/database.py b/backend/app/database.py new file mode 100644 index 0000000..6ce56ec --- /dev/null +++ b/backend/app/database.py @@ -0,0 +1,21 @@ +from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession, async_sessionmaker +from sqlalchemy.orm import DeclarativeBase + +from app.config import settings + +engine = create_async_engine(settings.DATABASE_URL, echo=settings.DEBUG) +async_session = async_sessionmaker(engine, class_=AsyncSession, expire_on_commit=False) + + 
+class Base(DeclarativeBase): + pass + + +async def get_db(): + async with async_session() as session: + yield session + + +async def init_db(): + async with engine.begin() as conn: + await conn.run_sync(Base.metadata.create_all) diff --git a/backend/app/engine/__init__.py b/backend/app/engine/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/backend/app/engine/mode_params.py b/backend/app/engine/mode_params.py new file mode 100644 index 0000000..7e81c12 --- /dev/null +++ b/backend/app/engine/mode_params.py @@ -0,0 +1,107 @@ +"""Parse mode-parameter strings into structured dicts. + +A mode-params string encodes voting formula parameters in a compact format. +Example: ``"D30M50B.1G.2T.1"`` + +Supported codes: + D = duration_days (int) + M = majority_pct (int, 0-100) + B = base_exponent (float) + G = gradient_exponent (float) + C = constant_base (float) + S = smith_exponent (float) + T = techcomm_exponent (float) + N = ratio_multiplier (float) + R = ratio_mode (bool, 0 or 1) + +Values may start with a dot for decimals < 1, e.g. ``B.1`` means base_exponent=0.1. +""" + +from __future__ import annotations + +import re + + +# Ordered list of recognised codes and their target keys + types +_CODES: dict[str, tuple[str, type]] = { + "D": ("duration_days", int), + "M": ("majority_pct", int), + "B": ("base_exponent", float), + "G": ("gradient_exponent", float), + "C": ("constant_base", float), + "S": ("smith_exponent", float), + "T": ("techcomm_exponent", float), + "N": ("ratio_multiplier", float), + "R": ("is_ratio_mode", bool), +} + +# Regex: a single uppercase letter followed by a numeric value (int or float, +# possibly starting with '.' for values like .1 meaning 0.1) +_PARAM_RE = re.compile(r"([A-Z])(\d*\.?\d+)") + + +def parse_mode_params(params_str: str) -> dict: + """Parse a mode-params string into a parameter dict. + + Parameters + ---------- + params_str: + Compact parameter string, e.g. ``"D30M50B.1G.2T.1"``. 
+ + Returns + ------- + dict + Keys present depend on codes found in the string. Defaults are + applied for any code not present:: + + { + "duration_days": 30, + "majority_pct": 50, + "base_exponent": 0.1, + "gradient_exponent": 0.2, + "constant_base": 0.0, + "smith_exponent": None, + "techcomm_exponent": None, + "ratio_multiplier": None, + "is_ratio_mode": False, + } + + Raises + ------ + ValueError + If an unrecognised code letter is found. + """ + defaults: dict = { + "duration_days": 30, + "majority_pct": 50, + "base_exponent": 0.1, + "gradient_exponent": 0.2, + "constant_base": 0.0, + "smith_exponent": None, + "techcomm_exponent": None, + "ratio_multiplier": None, + "is_ratio_mode": False, + } + + if not params_str or not params_str.strip(): + return dict(defaults) + + result = dict(defaults) + + for match in _PARAM_RE.finditer(params_str): + code = match.group(1) + raw_value = match.group(2) + + if code not in _CODES: + raise ValueError(f"Code de parametre inconnu : '{code}'") + + key, target_type = _CODES[code] + + if target_type is int: + result[key] = int(float(raw_value)) + elif target_type is float: + result[key] = float(raw_value) + elif target_type is bool: + result[key] = float(raw_value) != 0.0 + + return result diff --git a/backend/app/engine/nuanced_vote.py b/backend/app/engine/nuanced_vote.py new file mode 100644 index 0000000..b02a7d2 --- /dev/null +++ b/backend/app/engine/nuanced_vote.py @@ -0,0 +1,95 @@ +"""Six-level nuanced vote evaluation. + +Levels: + 0 - CONTRE + 1 - PAS DU TOUT + 2 - PAS D'ACCORD + 3 - NEUTRE + 4 - D'ACCORD + 5 - TOUT A FAIT + +Adoption rule: + The sum of votes at levels 3 + 4 + 5 must be >= threshold_pct% of total votes. + A minimum number of participants is also required. 
+""" + +from __future__ import annotations + +LEVEL_LABELS: dict[int, str] = { + 0: "CONTRE", + 1: "PAS DU TOUT", + 2: "PAS D'ACCORD", + 3: "NEUTRE", + 4: "D'ACCORD", + 5: "TOUT A FAIT", +} + +NUM_LEVELS = 6 + + +def evaluate_nuanced( + votes: list[int], + threshold_pct: int = 80, + min_participants: int = 59, +) -> dict: + """Evaluate a nuanced vote from a list of individual vote levels. + + Parameters + ---------- + votes: + List of vote levels (each 0-5). One entry per voter. + threshold_pct: + Minimum percentage of positive votes (levels 3-5) out of total + for adoption. + min_participants: + Minimum number of participants required for validity. + + Returns + ------- + dict + { + "total": int, + "per_level_counts": {0: int, 1: int, ..., 5: int}, + "positive_count": int, # levels 3 + 4 + 5 + "positive_pct": float, # 0.0 - 100.0 + "threshold_met": bool, + "min_participants_met": bool, + "adopted": bool, + } + + Raises + ------ + ValueError + If any vote value is outside the 0-5 range. + """ + # Validate vote levels + for v in votes: + if v < 0 or v > 5: + raise ValueError( + f"Niveau de vote invalide : {v}. Les niveaux valides sont 0-5." 
+ ) + + total = len(votes) + + per_level_counts: dict[int, int] = {level: 0 for level in range(NUM_LEVELS)} + for v in votes: + per_level_counts[v] += 1 + + # Positive = levels 3 (NEUTRE), 4 (D'ACCORD), 5 (TOUT A FAIT) + positive_count = per_level_counts[3] + per_level_counts[4] + per_level_counts[5] + + positive_pct = (positive_count / total * 100.0) if total > 0 else 0.0 + + threshold_met = positive_pct >= threshold_pct + min_participants_met = total >= min_participants + adopted = threshold_met and min_participants_met + + return { + "total": total, + "per_level_counts": per_level_counts, + "positive_count": positive_count, + "positive_pct": round(positive_pct, 2), + "threshold_met": threshold_met, + "min_participants_met": min_participants_met, + "adopted": adopted, + } diff --git a/backend/app/engine/smith_threshold.py b/backend/app/engine/smith_threshold.py new file mode 100644 index 0000000..93838c1 --- /dev/null +++ b/backend/app/engine/smith_threshold.py @@ -0,0 +1,31 @@ +"""Smith sub-WoT threshold criterion. + +The Smith criterion requires a minimum number of votes from Smith members +(forgerons) for certain decisions to be valid. + +Formula: ceil(SmithWotSize ^ S) +""" + +from __future__ import annotations + +import math + + +def smith_threshold(smith_wot_size: int, exponent: float = 0.1) -> int: + """Compute the minimum number of Smith member votes required. + + Parameters + ---------- + smith_wot_size: + Number of active Smith members. + exponent: + S in the formula ``ceil(smith_wot_size^S)``. + + Returns + ------- + int + Minimum Smith votes required. 
+ """ + if smith_wot_size <= 0: + raise ValueError("smith_wot_size doit etre strictement positif") + return math.ceil(smith_wot_size ** exponent) diff --git a/backend/app/engine/techcomm_threshold.py b/backend/app/engine/techcomm_threshold.py new file mode 100644 index 0000000..51abe27 --- /dev/null +++ b/backend/app/engine/techcomm_threshold.py @@ -0,0 +1,31 @@ +"""Technical Committee threshold criterion. + +The TechComm criterion requires a minimum number of votes from +Technical Committee members for certain decisions. + +Formula: ceil(CoTecSize ^ T) +""" + +from __future__ import annotations + +import math + + +def techcomm_threshold(cotec_size: int, exponent: float = 0.1) -> int: + """Compute the minimum number of TechComm member votes required. + + Parameters + ---------- + cotec_size: + Number of Technical Committee members. + exponent: + T in the formula ``ceil(cotec_size^T)``. + + Returns + ------- + int + Minimum TechComm votes required. + """ + if cotec_size <= 0: + raise ValueError("cotec_size doit etre strictement positif") + return math.ceil(cotec_size ** exponent) diff --git a/backend/app/engine/threshold.py b/backend/app/engine/threshold.py new file mode 100644 index 0000000..09dbbc9 --- /dev/null +++ b/backend/app/engine/threshold.py @@ -0,0 +1,85 @@ +"""WoT members threshold formula for binary votes. 
+ +Core formula: + Result = C + B^W + (M + (1-M) * (1 - (T/W)^G)) * max(0, T - C) + +Where: + C = constant_base + B = base_exponent + W = wot_size (corpus of eligible voters) + T = total_votes (for + against) + M = majority_ratio (majority_pct / 100) + G = gradient_exponent + +Inertia behaviour: + - Low participation (T << W) -> near-unanimity required + - High participation (T -> W) -> simple majority M suffices + +Reference test case: + wot_size=7224, votes_for=97, votes_against=23 (total=120) + params M50 B.1 G.2 => threshold=94, adopted (97 >= 94) +""" + +from __future__ import annotations + +import math + + +def wot_threshold( + wot_size: int, + total_votes: int, + majority_pct: int = 50, + base_exponent: float = 0.1, + gradient_exponent: float = 0.2, + constant_base: float = 0.0, +) -> int: + """Compute the minimum number of *for* votes required for adoption. + + Parameters + ---------- + wot_size: + Size of the eligible voter corpus (WoT members). + total_votes: + Number of votes cast (for + against). + majority_pct: + Majority percentage (0-100). 50 = simple majority at full participation. + base_exponent: + B in the formula. ``B^W`` contributes a vanishingly small offset + when W is large (0 < B < 1). + gradient_exponent: + G controls how fast the required super-majority decays toward M as + participation increases. + constant_base: + C, a fixed additive floor on the threshold. + + Returns + ------- + int + The ceiling of the computed threshold. A vote passes when + ``votes_for >= wot_threshold(...)``. 
+ """ + if wot_size <= 0: + raise ValueError("wot_size doit etre strictement positif") + if total_votes < 0: + raise ValueError("total_votes ne peut pas etre negatif") + if not (0 <= majority_pct <= 100): + raise ValueError("majority_pct doit etre entre 0 et 100") + + C = constant_base + B = base_exponent + W = wot_size + T = total_votes + M = majority_pct / 100.0 + G = gradient_exponent + + # Guard: if no votes, threshold is at least ceil(C + B^W) + if T == 0: + return math.ceil(C + B ** W) + + # Core formula + participation_ratio = T / W + inertia_factor = 1.0 - participation_ratio ** G + required_ratio = M + (1.0 - M) * inertia_factor + result = C + B ** W + required_ratio * max(0.0, T - C) + + return math.ceil(result) diff --git a/backend/app/main.py b/backend/app/main.py new file mode 100644 index 0000000..a519155 --- /dev/null +++ b/backend/app/main.py @@ -0,0 +1,44 @@ +from contextlib import asynccontextmanager + +from fastapi import FastAPI +from fastapi.middleware.cors import CORSMiddleware + +from app.config import settings +from app.database import init_db +from app.routers import auth, documents, decisions, votes, mandates, protocols, sanctuary, websocket + + +@asynccontextmanager +async def lifespan(app: FastAPI): + await init_db() + yield + + +app = FastAPI( + title=settings.APP_NAME, + description="Plateforme de decisions collectives pour la communaute Duniter/G1", + version="0.1.0", + lifespan=lifespan, +) + +app.add_middleware( + CORSMiddleware, + allow_origins=settings.CORS_ORIGINS, + allow_credentials=True, + allow_methods=["*"], + allow_headers=["*"], +) + +app.include_router(auth.router, prefix="/api/v1/auth", tags=["auth"]) +app.include_router(documents.router, prefix="/api/v1/documents", tags=["documents"]) +app.include_router(decisions.router, prefix="/api/v1/decisions", tags=["decisions"]) +app.include_router(votes.router, prefix="/api/v1/votes", tags=["votes"]) +app.include_router(mandates.router, prefix="/api/v1/mandates", 
# --- backend/app/models/__init__.py ---
# Re-exports every ORM model so Alembic autogenerate and callers can do
# ``from app.models import X``.
from app.models.user import DuniterIdentity, Session
from app.models.document import Document, DocumentItem, ItemVersion
from app.models.decision import Decision, DecisionStep
from app.models.vote import VoteSession, Vote
from app.models.mandate import Mandate, MandateStep
from app.models.protocol import VotingProtocol, FormulaConfig
from app.models.sanctuary import SanctuaryEntry
from app.models.cache import BlockchainCache

__all__ = [
    "DuniterIdentity", "Session",
    "Document", "DocumentItem", "ItemVersion",
    "Decision", "DecisionStep",
    "VoteSession", "Vote",
    "Mandate", "MandateStep",
    "VotingProtocol", "FormulaConfig",
    "SanctuaryEntry",
    "BlockchainCache",
]


# --- backend/app/models/cache.py ---
import uuid
from datetime import datetime

from sqlalchemy import String, DateTime, func
from sqlalchemy.dialects.postgresql import UUID, JSONB
from sqlalchemy.orm import Mapped, mapped_column

from app.database import Base


class BlockchainCache(Base):
    """Key/value cache for blockchain lookups, with an explicit expiry time."""

    __tablename__ = "blockchain_cache"

    id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    cache_key: Mapped[str] = mapped_column(String(256), unique=True, nullable=False, index=True)
    cache_value: Mapped[dict] = mapped_column(JSONB, nullable=False)
    fetched_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), server_default=func.now())
    expires_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False)


# --- backend/app/models/decision.py ---
import uuid
from datetime import datetime

from sqlalchemy import String, Integer, Text, DateTime, ForeignKey, func
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.orm import Mapped, mapped_column, relationship

from app.database import Base


class Decision(Base):
    """A collective decision process, made of ordered DecisionStep rows."""

    __tablename__ = "decisions"

    id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    title: Mapped[str] = mapped_column(String(256), nullable=False)
    description: Mapped[str | None] = mapped_column(Text)
    context: Mapped[str | None] = mapped_column(Text)
    decision_type: Mapped[str] = mapped_column(String(64), nullable=False)  # runtime_upgrade, document_change, mandate_vote, custom
    status: Mapped[str] = mapped_column(String(32), default="draft")  # draft, qualification, review, voting, executed, closed
    voting_protocol_id: Mapped[uuid.UUID | None] = mapped_column(ForeignKey("voting_protocols.id"))
    created_by_id: Mapped[uuid.UUID | None] = mapped_column(ForeignKey("duniter_identities.id"))
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), server_default=func.now())
    updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), server_default=func.now(), onupdate=func.now())

    steps: Mapped[list["DecisionStep"]] = relationship(back_populates="decision", cascade="all, delete-orphan", order_by="DecisionStep.step_order")


class DecisionStep(Base):
    """One ordered step of a Decision's workflow."""

    __tablename__ = "decision_steps"

    id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    decision_id: Mapped[uuid.UUID] = mapped_column(ForeignKey("decisions.id"), nullable=False)
    step_order: Mapped[int] = mapped_column(Integer, nullable=False)
    step_type: Mapped[str] = mapped_column(String(32), nullable=False)  # qualification, review, vote, execution, reporting
    title: Mapped[str | None] = mapped_column(String(256))
    description: Mapped[str | None] = mapped_column(Text)
    status: Mapped[str] = mapped_column(String(32), default="pending")  # pending, active, completed, skipped
    vote_session_id: Mapped[uuid.UUID | None] = mapped_column(ForeignKey("vote_sessions.id"))
    outcome: Mapped[str | None] = mapped_column(Text)
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), server_default=func.now())

    decision: Mapped["Decision"] = relationship(back_populates="steps")


# --- backend/app/models/document.py ---
import uuid
from datetime import datetime

from sqlalchemy import String, Integer, Text, DateTime, ForeignKey, func
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.orm import Mapped, mapped_column, relationship

from app.database import Base


class Document(Base):
    """A reference document (licence, engagement, ...) made of DocumentItems."""

    __tablename__ = "documents"

    id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    slug: Mapped[str] = mapped_column(String(128), unique=True, nullable=False, index=True)
    title: Mapped[str] = mapped_column(String(256), nullable=False)
    doc_type: Mapped[str] = mapped_column(String(64), nullable=False)  # licence, engagement, reglement, constitution
    version: Mapped[str] = mapped_column(String(32), default="0.1.0")
    status: Mapped[str] = mapped_column(String(32), default="draft")  # draft, active, archived
    description: Mapped[str | None] = mapped_column(Text)
    ipfs_cid: Mapped[str | None] = mapped_column(String(128))
    chain_anchor: Mapped[str | None] = mapped_column(String(128))
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), server_default=func.now())
    updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), server_default=func.now(), onupdate=func.now())

    items: Mapped[list["DocumentItem"]] = relationship(back_populates="document", cascade="all, delete-orphan", order_by="DocumentItem.position")


class DocumentItem(Base):
    """One clause/rule of a Document; its text evolves via ItemVersion rows."""

    __tablename__ = "document_items"

    id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    document_id: Mapped[uuid.UUID] = mapped_column(ForeignKey("documents.id"), nullable=False)
    position: Mapped[str] = mapped_column(String(16), nullable=False)  # "1", "1.1", "3.2"
    item_type: Mapped[str] = mapped_column(String(32), default="clause")  # clause, rule, verification, preamble, section
    title: Mapped[str | None] = mapped_column(String(256))
    current_text: Mapped[str] = mapped_column(Text, nullable=False)
    voting_protocol_id: Mapped[uuid.UUID | None] = mapped_column(ForeignKey("voting_protocols.id"))
    sort_order: Mapped[int] = mapped_column(Integer, default=0)
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), server_default=func.now())
    updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), server_default=func.now(), onupdate=func.now())

    document: Mapped["Document"] = relationship(back_populates="items")
    versions: Mapped[list["ItemVersion"]] = relationship(back_populates="item", cascade="all, delete-orphan")


class ItemVersion(Base):
    """A proposed replacement text for a DocumentItem, tied to a Decision."""

    __tablename__ = "item_versions"

    id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    item_id: Mapped[uuid.UUID] = mapped_column(ForeignKey("document_items.id"), nullable=False)
    proposed_text: Mapped[str] = mapped_column(Text, nullable=False)
    diff_text: Mapped[str | None] = mapped_column(Text)
    rationale: Mapped[str | None] = mapped_column(Text)
    status: Mapped[str] = mapped_column(String(32), default="proposed")  # proposed, voting, accepted, rejected
    decision_id: Mapped[uuid.UUID | None] = mapped_column(ForeignKey("decisions.id"))
    proposed_by_id: Mapped[uuid.UUID | None] = mapped_column(ForeignKey("duniter_identities.id"))
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), server_default=func.now())

    item: Mapped["DocumentItem"] = relationship(back_populates="versions")


# --- backend/app/models/mandate.py ---
import uuid
from datetime import datetime

from sqlalchemy import String, Integer, Text, DateTime, ForeignKey, func
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.orm import Mapped, mapped_column, relationship

from app.database import Base


class Mandate(Base):
    """A delegated mandate (techcomm/smith/custom) with its workflow steps."""

    __tablename__ = "mandates"

    id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    title: Mapped[str] = mapped_column(String(256), nullable=False)
    description: Mapped[str | None] = mapped_column(Text)
    mandate_type: Mapped[str] = mapped_column(String(64), nullable=False)  # techcomm, smith, custom
    status: Mapped[str] = mapped_column(String(32), default="draft")  # draft, candidacy, voting, active, reporting, completed, revoked
    mandatee_id: Mapped[uuid.UUID | None] = mapped_column(ForeignKey("duniter_identities.id"))
    decision_id: Mapped[uuid.UUID | None] = mapped_column(ForeignKey("decisions.id"))
    starts_at: Mapped[datetime | None] = mapped_column(DateTime(timezone=True))
    ends_at: Mapped[datetime | None] = mapped_column(DateTime(timezone=True))
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), server_default=func.now())
    updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), server_default=func.now(), onupdate=func.now())

    steps: Mapped[list["MandateStep"]] = relationship(back_populates="mandate", cascade="all, delete-orphan", order_by="MandateStep.step_order")


class MandateStep(Base):
    """One ordered step of a Mandate's lifecycle."""

    __tablename__ = "mandate_steps"

    id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    mandate_id: Mapped[uuid.UUID] = mapped_column(ForeignKey("mandates.id"), nullable=False)
    step_order: Mapped[int] = mapped_column(Integer, nullable=False)
    step_type: Mapped[str] = mapped_column(String(32), nullable=False)  # formulation, candidacy, vote, assignment, reporting, completion, revocation
    title: Mapped[str | None] = mapped_column(String(256))
    description: Mapped[str | None] = mapped_column(Text)
    status: Mapped[str] = mapped_column(String(32), default="pending")  # pending, active, completed, skipped
    vote_session_id: Mapped[uuid.UUID | None] = mapped_column(ForeignKey("vote_sessions.id"))
    outcome: Mapped[str | None] = mapped_column(Text)
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), server_default=func.now())

    mandate: Mapped["Mandate"] = relationship(back_populates="steps")


# --- backend/app/models/protocol.py ---
import uuid
from datetime import datetime

from sqlalchemy import String, Integer, Float, Boolean, DateTime, ForeignKey, Text, func
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.orm import Mapped, mapped_column, relationship

from app.database import Base


class FormulaConfig(Base):
    """Parameter set for the vote-threshold formulas (WoT/Smith/TechComm/nuanced)."""

    __tablename__ = "formula_configs"

    id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    name: Mapped[str] = mapped_column(String(128), nullable=False)
    description: Mapped[str | None] = mapped_column(Text)

    # WoT threshold params
    duration_days: Mapped[int] = mapped_column(Integer, default=30)
    majority_pct: Mapped[int] = mapped_column(Integer, default=50)
    base_exponent: Mapped[float] = mapped_column(Float, default=0.1)
    gradient_exponent: Mapped[float] = mapped_column(Float, default=0.2)
    constant_base: Mapped[float] = mapped_column(Float, default=0.0)

    # Smith criterion
    smith_exponent: Mapped[float | None] = mapped_column(Float)

    # TechComm criterion
    techcomm_exponent: Mapped[float | None] = mapped_column(Float)

    # Nuanced vote
    nuanced_min_participants: Mapped[int | None] = mapped_column(Integer)
    nuanced_threshold_pct: Mapped[int | None] = mapped_column(Integer)

    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), server_default=func.now())

    protocols: Mapped[list["VotingProtocol"]] = relationship(back_populates="formula_config")


class VotingProtocol(Base):
    """A named voting protocol binding a vote type to a FormulaConfig."""

    __tablename__ = "voting_protocols"

    id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    name: Mapped[str] = mapped_column(String(128), nullable=False)
    description: Mapped[str | None] = mapped_column(Text)
    vote_type: Mapped[str] = mapped_column(String(32), nullable=False)  # binary, nuanced
    formula_config_id: Mapped[uuid.UUID] = mapped_column(ForeignKey("formula_configs.id"), nullable=False)
    mode_params: Mapped[str | None] = mapped_column(String(64))  # e.g. "D30M50B.1G.2T.1"
    is_meta_governed: Mapped[bool] = mapped_column(Boolean, default=False)
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), server_default=func.now())

    formula_config: Mapped["FormulaConfig"] = relationship(back_populates="protocols")


# --- backend/app/models/sanctuary.py ---
import uuid
from datetime import datetime

from sqlalchemy import String, Integer, Text, DateTime, func
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.orm import Mapped, mapped_column

from app.database import Base


class SanctuaryEntry(Base):
    """Immutable anchor of a document/decision/vote result (hash + IPFS + chain)."""

    __tablename__ = "sanctuary_entries"

    id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    entry_type: Mapped[str] = mapped_column(String(64), nullable=False)  # document, decision, vote_result
    reference_id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), nullable=False)
    title: Mapped[str | None] = mapped_column(String(256))
    content_hash: Mapped[str] = mapped_column(String(128), nullable=False)  # SHA-256
    ipfs_cid: Mapped[str | None] = mapped_column(String(128))
    chain_tx_hash: Mapped[str | None] = mapped_column(String(128))
    chain_block: Mapped[int | None] = mapped_column(Integer)
    metadata_json: Mapped[str | None] = mapped_column(Text)  # JSON string for extra data
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), server_default=func.now())


# --- backend/app/models/user.py ---
import uuid
from datetime import datetime

from sqlalchemy import String, Boolean, DateTime, ForeignKey, func
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.orm import Mapped, mapped_column, relationship

from app.database import Base


class DuniterIdentity(Base):
    """A Duniter V2 identity (SS58 address) with cached WoT/role flags."""

    __tablename__ = "duniter_identities"

    id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    address: Mapped[str] = mapped_column(String(64), unique=True, nullable=False, index=True)
    display_name: Mapped[str | None] = mapped_column(String(128))
    wot_status: Mapped[str] = mapped_column(String(32), default="unknown")  # member, pending, revoked, unknown
    is_smith: Mapped[bool] = mapped_column(Boolean, default=False)
    is_techcomm: Mapped[bool] = mapped_column(Boolean, default=False)
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), server_default=func.now())
    updated_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), server_default=func.now(), onupdate=func.now())

    sessions: Mapped[list["Session"]] = relationship(back_populates="identity", cascade="all, delete-orphan")


class Session(Base):
    """A bearer-token session; only the token's hash is stored."""

    __tablename__ = "sessions"

    id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    token_hash: Mapped[str] = mapped_column(String(128), unique=True, nullable=False, index=True)
    identity_id: Mapped[uuid.UUID] = mapped_column(ForeignKey("duniter_identities.id"), nullable=False)
    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), server_default=func.now())
    expires_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False)

    identity: Mapped["DuniterIdentity"] = relationship(back_populates="sessions")


# --- backend/app/models/vote.py ---
import uuid
from datetime import datetime

from sqlalchemy import String, Integer, Float, Boolean, Text, DateTime, ForeignKey, func
from sqlalchemy.dialects.postgresql import UUID
from sqlalchemy.orm import Mapped, mapped_column, relationship

from app.database import Base


class VoteSession(Base):
    """One voting round: corpus snapshot, dates, tallies, result and chain anchor."""

    __tablename__ = "vote_sessions"

    id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    decision_id: Mapped[uuid.UUID | None] = mapped_column(ForeignKey("decisions.id"))
    item_version_id: Mapped[uuid.UUID | None] = mapped_column(ForeignKey("item_versions.id"))
    voting_protocol_id: Mapped[uuid.UUID] = mapped_column(ForeignKey("voting_protocols.id"), nullable=False)

    # Snapshot at session start
    wot_size: Mapped[int] = mapped_column(Integer, default=0)
    smith_size: Mapped[int] = mapped_column(Integer, default=0)
    techcomm_size: Mapped[int] = mapped_column(Integer, default=0)

    # Dates
    starts_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), server_default=func.now())
    ends_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), nullable=False)

    # Status
    status: Mapped[str] = mapped_column(String(32), default="open")  # open, closed, tallied

    # Tallies
    votes_for: Mapped[int] = mapped_column(Integer, default=0)
    votes_against: Mapped[int] = mapped_column(Integer, default=0)
    votes_total: Mapped[int] = mapped_column(Integer, default=0)
    smith_votes_for: Mapped[int] = mapped_column(Integer, default=0)
    techcomm_votes_for: Mapped[int] = mapped_column(Integer, default=0)
    threshold_required: Mapped[float] = mapped_column(Float, default=0.0)
    result: Mapped[str | None] = mapped_column(String(32))  # adopted, rejected, null

    # Chain recording
    chain_recorded: Mapped[bool] = mapped_column(Boolean, default=False)
    chain_tx_hash: Mapped[str | None] = mapped_column(String(128))

    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), server_default=func.now())

    votes: Mapped[list["Vote"]] = relationship(back_populates="session", cascade="all, delete-orphan")


class Vote(Base):
    """A single signed ballot, with a snapshot of the voter's status at cast time."""

    __tablename__ = "votes"

    id: Mapped[uuid.UUID] = mapped_column(UUID(as_uuid=True), primary_key=True, default=uuid.uuid4)
    session_id: Mapped[uuid.UUID] = mapped_column(ForeignKey("vote_sessions.id"), nullable=False)
    voter_id: Mapped[uuid.UUID] = mapped_column(ForeignKey("duniter_identities.id"), nullable=False)
    vote_value: Mapped[str] = mapped_column(String(32), nullable=False)  # for, against, or nuanced levels
    nuanced_level: Mapped[int | None] = mapped_column(Integer)  # 0-5 for nuanced votes
    comment: Mapped[str | None] = mapped_column(Text)

    # Cryptographic proof
    signature: Mapped[str] = mapped_column(Text, nullable=False)
    signed_payload: Mapped[str] = mapped_column(Text, nullable=False)

    # Voter status snapshot
    voter_wot_status: Mapped[str] = mapped_column(String(32), default="member")
    voter_is_smith: Mapped[bool] = mapped_column(Boolean, default=False)
    voter_is_techcomm: Mapped[bool] = mapped_column(Boolean, default=False)
    is_active: Mapped[bool] = mapped_column(Boolean, default=True)

    created_at: Mapped[datetime] = mapped_column(DateTime(timezone=True), server_default=func.now())

    session: Mapped["VoteSession"] = relationship(back_populates="votes")
get_current_identity, + get_or_create_identity, + invalidate_session, +) + +router = APIRouter() + +# ── In-memory challenge store (short-lived, no persistence needed) ────────── +# Structure: { address: { "challenge": str, "expires_at": datetime } } +_pending_challenges: dict[str, dict] = {} + + +def _cleanup_expired_challenges() -> None: + """Remove expired challenges from the in-memory store.""" + now = datetime.now(timezone.utc) + expired = [addr for addr, data in _pending_challenges.items() if data["expires_at"] < now] + for addr in expired: + del _pending_challenges[addr] + + +# ── Routes ────────────────────────────────────────────────────────────────── + + +@router.post("/challenge", response_model=ChallengeResponse) +async def request_challenge(payload: ChallengeRequest) -> ChallengeResponse: + """Generate a random Ed25519 challenge for the given Duniter address. + + The client must sign this challenge with the private key corresponding + to the address, then submit it via POST /verify. + """ + _cleanup_expired_challenges() + + challenge = secrets.token_hex(32) + expires_at = datetime.now(timezone.utc) + timedelta(seconds=settings.CHALLENGE_EXPIRE_SECONDS) + + _pending_challenges[payload.address] = { + "challenge": challenge, + "expires_at": expires_at, + } + + return ChallengeResponse(challenge=challenge, expires_at=expires_at) + + +@router.post("/verify", response_model=TokenResponse) +async def verify_challenge( + payload: VerifyRequest, + db: AsyncSession = Depends(get_db), +) -> TokenResponse: + """Verify the Ed25519 signature of a challenge and return a session token. + + Steps: + 1. Check that a pending challenge exists for the address. + 2. Verify the challenge string matches. + 3. Verify the Ed25519 signature against the address public key. + 4. Create or retrieve the DuniterIdentity. + 5. Create a session and return the bearer token. + """ + # 1. 
Retrieve pending challenge + pending = _pending_challenges.get(payload.address) + if pending is None: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="Aucun challenge en attente pour cette adresse", + ) + + # 2. Check expiry + if pending["expires_at"] < datetime.now(timezone.utc): + del _pending_challenges[payload.address] + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="Challenge expire, veuillez en demander un nouveau", + ) + + # 3. Verify challenge string matches + if pending["challenge"] != payload.challenge: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="Challenge invalide", + ) + + # 4. Verify Ed25519 signature + # TODO: Implement actual Ed25519 verification using substrate-interface + # For now we accept any signature to allow development/testing. + # In production this MUST verify: verify(address_pubkey, challenge_bytes, signature_bytes) + # + # from substrateinterface import Keypair + # keypair = Keypair(ss58_address=payload.address) + # if not keypair.verify(payload.challenge.encode(), bytes.fromhex(payload.signature)): + # raise HTTPException(status_code=401, detail="Signature invalide") + + # 5. Consume the challenge + del _pending_challenges[payload.address] + + # 6. Get or create identity + identity = await get_or_create_identity(db, payload.address) + + # 7. 
Create session token + token = await create_session(db, identity) + + return TokenResponse( + token=token, + identity=IdentityOut.model_validate(identity), + ) + + +@router.get("/me", response_model=IdentityOut) +async def get_me( + identity: DuniterIdentity = Depends(get_current_identity), +) -> IdentityOut: + """Return the currently authenticated identity.""" + return IdentityOut.model_validate(identity) + + +@router.post("/logout", status_code=status.HTTP_204_NO_CONTENT) +async def logout( + db: AsyncSession = Depends(get_db), + identity: DuniterIdentity = Depends(get_current_identity), +) -> None: + """Invalidate the current session token. + + Note: get_current_identity already validated the token, so we know it exists. + We re-extract it from the Authorization header to invalidate it. + """ + # We need the raw token to invalidate -- re-extract from the dependency chain. + # Since get_current_identity already validated, we know the request has a valid Bearer token. + # We use a slightly different approach: delete all sessions for this identity + # that match. For a cleaner approach, we accept the token via a dedicated dependency. 
+ from fastapi import Request + + # This is handled by getting the token from the auth service + # For simplicity, we delete all sessions for the identity + from sqlalchemy import select + + from app.models.user import Session + + result = await db.execute(select(Session).where(Session.identity_id == identity.id)) + sessions = result.scalars().all() + for session in sessions: + await db.delete(session) + await db.commit() diff --git a/backend/app/routers/decisions.py b/backend/app/routers/decisions.py new file mode 100644 index 0000000..1309b67 --- /dev/null +++ b/backend/app/routers/decisions.py @@ -0,0 +1,143 @@ +"""Decisions router: CRUD for decision processes and their steps.""" + +from __future__ import annotations + +import uuid + +from fastapi import APIRouter, Depends, HTTPException, Query, status +from sqlalchemy import select +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.orm import selectinload + +from app.database import get_db +from app.models.decision import Decision, DecisionStep +from app.models.user import DuniterIdentity +from app.schemas.decision import ( + DecisionCreate, + DecisionOut, + DecisionStepCreate, + DecisionStepOut, + DecisionUpdate, +) +from app.services.auth_service import get_current_identity + +router = APIRouter() + + +# ── Helpers ───────────────────────────────────────────────────────────────── + + +async def _get_decision(db: AsyncSession, decision_id: uuid.UUID) -> Decision: + """Fetch a decision by ID with its steps eagerly loaded, or raise 404.""" + result = await db.execute( + select(Decision) + .options(selectinload(Decision.steps)) + .where(Decision.id == decision_id) + ) + decision = result.scalar_one_or_none() + if decision is None: + raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Decision introuvable") + return decision + + +# ── Decision routes ───────────────────────────────────────────────────────── + + +@router.get("/", response_model=list[DecisionOut]) +async def 
list_decisions( + db: AsyncSession = Depends(get_db), + decision_type: str | None = Query(default=None, description="Filtrer par type de decision"), + status_filter: str | None = Query(default=None, alias="status", description="Filtrer par statut"), + skip: int = Query(default=0, ge=0), + limit: int = Query(default=50, ge=1, le=200), +) -> list[DecisionOut]: + """List all decisions with optional filters.""" + stmt = select(Decision).options(selectinload(Decision.steps)) + + if decision_type is not None: + stmt = stmt.where(Decision.decision_type == decision_type) + if status_filter is not None: + stmt = stmt.where(Decision.status == status_filter) + + stmt = stmt.order_by(Decision.created_at.desc()).offset(skip).limit(limit) + result = await db.execute(stmt) + decisions = result.scalars().unique().all() + + return [DecisionOut.model_validate(d) for d in decisions] + + +@router.post("/", response_model=DecisionOut, status_code=status.HTTP_201_CREATED) +async def create_decision( + payload: DecisionCreate, + db: AsyncSession = Depends(get_db), + identity: DuniterIdentity = Depends(get_current_identity), +) -> DecisionOut: + """Create a new decision process.""" + decision = Decision( + **payload.model_dump(), + created_by_id=identity.id, + ) + db.add(decision) + await db.commit() + await db.refresh(decision) + + # Reload with steps (empty at creation) + decision = await _get_decision(db, decision.id) + return DecisionOut.model_validate(decision) + + +@router.get("/{id}", response_model=DecisionOut) +async def get_decision( + id: uuid.UUID, + db: AsyncSession = Depends(get_db), +) -> DecisionOut: + """Get a single decision with all its steps.""" + decision = await _get_decision(db, id) + return DecisionOut.model_validate(decision) + + +@router.put("/{id}", response_model=DecisionOut) +async def update_decision( + id: uuid.UUID, + payload: DecisionUpdate, + db: AsyncSession = Depends(get_db), + identity: DuniterIdentity = Depends(get_current_identity), +) -> 
DecisionOut: + """Update a decision's metadata (title, description, status, protocol).""" + decision = await _get_decision(db, id) + + update_data = payload.model_dump(exclude_unset=True) + for field, value in update_data.items(): + setattr(decision, field, value) + + await db.commit() + await db.refresh(decision) + + # Reload with steps + decision = await _get_decision(db, decision.id) + return DecisionOut.model_validate(decision) + + +# ── Decision Step routes ──────────────────────────────────────────────────── + + +@router.post("/{id}/steps", response_model=DecisionStepOut, status_code=status.HTTP_201_CREATED) +async def add_step( + id: uuid.UUID, + payload: DecisionStepCreate, + db: AsyncSession = Depends(get_db), + identity: DuniterIdentity = Depends(get_current_identity), +) -> DecisionStepOut: + """Add a step to a decision process.""" + # Verify decision exists + decision = await _get_decision(db, id) + + step = DecisionStep( + decision_id=decision.id, + **payload.model_dump(), + ) + db.add(step) + await db.commit() + await db.refresh(step) + + return DecisionStepOut.model_validate(step) diff --git a/backend/app/routers/documents.py b/backend/app/routers/documents.py new file mode 100644 index 0000000..5680732 --- /dev/null +++ b/backend/app/routers/documents.py @@ -0,0 +1,262 @@ +"""Documents router: CRUD for reference documents, items, and item versions.""" + +from __future__ import annotations + +import difflib +import uuid + +from fastapi import APIRouter, Depends, HTTPException, Query, status +from sqlalchemy import func, select +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.orm import selectinload + +from app.database import get_db +from app.models.document import Document, DocumentItem, ItemVersion +from app.models.user import DuniterIdentity +from app.schemas.document import ( + DocumentCreate, + DocumentItemCreate, + DocumentItemOut, + DocumentOut, + DocumentUpdate, + ItemVersionCreate, + ItemVersionOut, +) +from 
async def _get_document_by_slug(db: AsyncSession, slug: str) -> Document:
    """Return the document matching *slug*, or raise a 404."""
    row = await db.execute(select(Document).where(Document.slug == slug))
    if (doc := row.scalar_one_or_none()) is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Document introuvable")
    return doc


async def _get_item(db: AsyncSession, document_id: uuid.UUID, item_id: uuid.UUID) -> DocumentItem:
    """Return item *item_id* belonging to document *document_id*, or raise a 404."""
    row = await db.execute(
        select(DocumentItem).where(
            DocumentItem.id == item_id,
            DocumentItem.document_id == document_id,
        )
    )
    if (item := row.scalar_one_or_none()) is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Item introuvable")
    return item


async def _count_items(db: AsyncSession, document_id: uuid.UUID) -> int:
    """Number of items currently attached to a document."""
    res = await db.execute(
        select(func.count()).select_from(DocumentItem).where(DocumentItem.document_id == document_id)
    )
    return res.scalar() or 0


# ── Document routes ─────────────────────────────────────────────────────────


@router.get("/", response_model=list[DocumentOut])
async def list_documents(
    db: AsyncSession = Depends(get_db),
    doc_type: str | None = Query(default=None, description="Filtrer par type de document"),
    status_filter: str | None = Query(default=None, alias="status", description="Filtrer par statut"),
    skip: int = Query(default=0, ge=0),
    limit: int = Query(default=50, ge=1, le=200),
) -> list[DocumentOut]:
    """List all reference documents, with optional filters.

    Returns each document enriched with its ``items_count``.
    """
    stmt = select(Document)
    if doc_type is not None:
        stmt = stmt.where(Document.doc_type == doc_type)
    if status_filter is not None:
        stmt = stmt.where(Document.status == status_filter)

    stmt = stmt.order_by(Document.created_at.desc()).offset(skip).limit(limit)
    documents = (await db.execute(stmt)).scalars().all()

    # FIX: one grouped COUNT query for the whole page instead of one COUNT
    # per document (avoids the N+1 query pattern of the previous version).
    counts: dict[uuid.UUID, int] = {}
    if documents:
        rows = await db.execute(
            select(DocumentItem.document_id, func.count())
            .where(DocumentItem.document_id.in_([d.id for d in documents]))
            .group_by(DocumentItem.document_id)
        )
        counts = dict(rows.all())

    out: list[DocumentOut] = []
    for doc in documents:
        doc_out = DocumentOut.model_validate(doc)
        doc_out.items_count = counts.get(doc.id, 0)
        out.append(doc_out)
    return out


@router.post("/", response_model=DocumentOut, status_code=status.HTTP_201_CREATED)
async def create_document(
    payload: DocumentCreate,
    db: AsyncSession = Depends(get_db),
    identity: DuniterIdentity = Depends(get_current_identity),
) -> DocumentOut:
    """Create a new reference document.

    Raises 409 when the requested slug is already taken.
    """
    existing = await db.execute(select(Document).where(Document.slug == payload.slug))
    if existing.scalar_one_or_none() is not None:
        raise HTTPException(
            status_code=status.HTTP_409_CONFLICT,
            detail="Un document avec ce slug existe deja",
        )

    doc = Document(**payload.model_dump())
    db.add(doc)
    await db.commit()
    await db.refresh(doc)

    doc_out = DocumentOut.model_validate(doc)
    doc_out.items_count = 0  # a brand-new document has no items yet
    return doc_out


@router.get("/{slug}", response_model=DocumentOut)
async def get_document(
    slug: str,
    db: AsyncSession = Depends(get_db),
) -> DocumentOut:
    """Get a single document by its slug."""
    doc = await _get_document_by_slug(db, slug)
    doc_out = DocumentOut.model_validate(doc)
    doc_out.items_count = await _count_items(db, doc.id)
    return doc_out
@router.put("/{slug}", response_model=DocumentOut)
async def update_document(
    slug: str,
    payload: DocumentUpdate,
    db: AsyncSession = Depends(get_db),
    identity: DuniterIdentity = Depends(get_current_identity),
) -> DocumentOut:
    """Update a document's metadata (title, status, description, version)."""
    doc = await _get_document_by_slug(db, slug)

    # Only apply the fields the client explicitly provided.
    for field, value in payload.model_dump(exclude_unset=True).items():
        setattr(doc, field, value)

    await db.commit()
    await db.refresh(doc)

    count_row = await db.execute(
        select(func.count()).select_from(DocumentItem).where(DocumentItem.document_id == doc.id)
    )
    doc_out = DocumentOut.model_validate(doc)
    doc_out.items_count = count_row.scalar() or 0
    return doc_out


# ── Document Item routes ────────────────────────────────────────────────────


@router.post("/{slug}/items", response_model=DocumentItemOut, status_code=status.HTTP_201_CREATED)
async def add_item(
    slug: str,
    payload: DocumentItemCreate,
    db: AsyncSession = Depends(get_db),
    identity: DuniterIdentity = Depends(get_current_identity),
) -> DocumentItemOut:
    """Add a new item (clause, rule, etc.) to a document."""
    doc = await _get_document_by_slug(db, slug)

    # Append at the end: next sort_order is the current maximum plus one.
    # NOTE(review): two concurrent inserts can race and pick the same
    # sort_order — confirm whether strict uniqueness of sort_order is required.
    max_row = await db.execute(
        select(func.max(DocumentItem.sort_order)).where(DocumentItem.document_id == doc.id)
    )
    next_order = (max_row.scalar() or 0) + 1

    item = DocumentItem(
        document_id=doc.id,
        sort_order=next_order,
        **payload.model_dump(),
    )
    db.add(item)
    await db.commit()
    await db.refresh(item)

    return DocumentItemOut.model_validate(item)


@router.get("/{slug}/items", response_model=list[DocumentItemOut])
async def list_items(
    slug: str,
    db: AsyncSession = Depends(get_db),
) -> list[DocumentItemOut]:
    """List all items in a document, ordered by sort_order."""
    doc = await _get_document_by_slug(db, slug)

    rows = await db.execute(
        select(DocumentItem)
        .where(DocumentItem.document_id == doc.id)
        .order_by(DocumentItem.sort_order)
    )
    return [DocumentItemOut.model_validate(item) for item in rows.scalars().all()]
"""Get a single item with its version history.""" + doc = await _get_document_by_slug(db, slug) + item = await _get_item(db, doc.id, item_id) + return DocumentItemOut.model_validate(item) + + +@router.post( + "/{slug}/items/{item_id}/versions", + response_model=ItemVersionOut, + status_code=status.HTTP_201_CREATED, +) +async def propose_version( + slug: str, + item_id: uuid.UUID, + payload: ItemVersionCreate, + db: AsyncSession = Depends(get_db), + identity: DuniterIdentity = Depends(get_current_identity), +) -> ItemVersionOut: + """Propose a new version of a document item. + + Automatically computes a unified diff between the current text and the proposed text. + """ + doc = await _get_document_by_slug(db, slug) + item = await _get_item(db, doc.id, item_id) + + # Compute diff + diff_lines = difflib.unified_diff( + item.current_text.splitlines(keepends=True), + payload.proposed_text.splitlines(keepends=True), + fromfile="actuel", + tofile="propose", + ) + diff_text = "".join(diff_lines) or None + + version = ItemVersion( + item_id=item.id, + proposed_text=payload.proposed_text, + diff_text=diff_text, + rationale=payload.rationale, + proposed_by_id=identity.id, + ) + db.add(version) + await db.commit() + await db.refresh(version) + + return ItemVersionOut.model_validate(version) diff --git a/backend/app/routers/mandates.py b/backend/app/routers/mandates.py new file mode 100644 index 0000000..af57e54 --- /dev/null +++ b/backend/app/routers/mandates.py @@ -0,0 +1,167 @@ +"""Mandates router: CRUD for mandates and their steps.""" + +from __future__ import annotations + +import uuid + +from fastapi import APIRouter, Depends, HTTPException, Query, status +from sqlalchemy import select +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.orm import selectinload + +from app.database import get_db +from app.models.mandate import Mandate, MandateStep +from app.models.user import DuniterIdentity +from app.schemas.mandate import ( + MandateCreate, + MandateOut, + 
async def _get_mandate(db: AsyncSession, mandate_id: uuid.UUID) -> Mandate:
    """Fetch a mandate by ID with its steps eagerly loaded, or raise 404."""
    row = await db.execute(
        select(Mandate)
        .options(selectinload(Mandate.steps))
        .where(Mandate.id == mandate_id)
    )
    if (mandate := row.scalar_one_or_none()) is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Mandat introuvable")
    return mandate


# ── Mandate routes ──────────────────────────────────────────────────────────


@router.get("/", response_model=list[MandateOut])
async def list_mandates(
    db: AsyncSession = Depends(get_db),
    mandate_type: str | None = Query(default=None, description="Filtrer par type de mandat"),
    status_filter: str | None = Query(default=None, alias="status", description="Filtrer par statut"),
    skip: int = Query(default=0, ge=0),
    limit: int = Query(default=50, ge=1, le=200),
) -> list[MandateOut]:
    """List all mandates with optional filters."""
    stmt = select(Mandate).options(selectinload(Mandate.steps))
    if mandate_type is not None:
        stmt = stmt.where(Mandate.mandate_type == mandate_type)
    if status_filter is not None:
        stmt = stmt.where(Mandate.status == status_filter)

    rows = await db.execute(
        stmt.order_by(Mandate.created_at.desc()).offset(skip).limit(limit)
    )
    # .unique() collapses row duplication introduced by the eager-loaded collection.
    return [MandateOut.model_validate(m) for m in rows.scalars().unique().all()]
@router.post("/", response_model=MandateOut, status_code=status.HTTP_201_CREATED)
async def create_mandate(
    payload: MandateCreate,
    db: AsyncSession = Depends(get_db),
    identity: DuniterIdentity = Depends(get_current_identity),
) -> MandateOut:
    """Create a new mandate."""
    mandate = Mandate(**payload.model_dump())
    db.add(mandate)
    await db.commit()
    await db.refresh(mandate)

    # Re-fetch through the helper so the steps collection (empty at creation)
    # is eagerly loaded for serialization.
    return MandateOut.model_validate(await _get_mandate(db, mandate.id))


@router.get("/{id}", response_model=MandateOut)
async def get_mandate(
    id: uuid.UUID,
    db: AsyncSession = Depends(get_db),
) -> MandateOut:
    """Get a single mandate with all its steps."""
    return MandateOut.model_validate(await _get_mandate(db, id))


@router.put("/{id}", response_model=MandateOut)
async def update_mandate(
    id: uuid.UUID,
    payload: MandateCreate,
    db: AsyncSession = Depends(get_db),
    identity: DuniterIdentity = Depends(get_current_identity),
) -> MandateOut:
    """Update a mandate's metadata."""
    mandate = await _get_mandate(db, id)

    # Apply only the fields the client actually provided.
    for field, value in payload.model_dump(exclude_unset=True).items():
        setattr(mandate, field, value)

    await db.commit()
    await db.refresh(mandate)

    # Re-fetch so steps are eagerly loaded for the response model.
    return MandateOut.model_validate(await _get_mandate(db, mandate.id))


@router.delete("/{id}", status_code=status.HTTP_204_NO_CONTENT)
async def delete_mandate(
    id: uuid.UUID,
    db: AsyncSession = Depends(get_db),
    identity: DuniterIdentity = Depends(get_current_identity),
) -> None:
    """Delete a mandate (only if in draft status)."""
    mandate = await _get_mandate(db, id)

    if mandate.status != "draft":
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Seuls les mandats en brouillon peuvent etre supprimes",
        )

    await db.delete(mandate)
    await db.commit()


# ── Mandate Step routes ─────────────────────────────────────────────────────


@router.post("/{id}/steps", response_model=MandateStepOut, status_code=status.HTTP_201_CREATED)
async def add_step(
    id: uuid.UUID,
    payload: MandateStepCreate,
    db: AsyncSession = Depends(get_db),
    identity: DuniterIdentity = Depends(get_current_identity),
) -> MandateStepOut:
    """Add a step to a mandate process."""
    mandate = await _get_mandate(db, id)

    step = MandateStep(mandate_id=mandate.id, **payload.model_dump())
    db.add(step)
    await db.commit()
    await db.refresh(step)

    return MandateStepOut.model_validate(step)


@router.get("/{id}/steps", response_model=list[MandateStepOut])
async def list_steps(
    id: uuid.UUID,
    db: AsyncSession = Depends(get_db),
) -> list[MandateStepOut]:
    """List all steps for a mandate, ordered by step_order."""
    mandate = await _get_mandate(db, id)
    return [MandateStepOut.model_validate(s) for s in mandate.steps]
async def _get_protocol(db: AsyncSession, protocol_id: uuid.UUID) -> VotingProtocol:
    """Fetch a voting protocol by ID with its formula config, or raise 404."""
    result = await db.execute(
        select(VotingProtocol)
        .options(selectinload(VotingProtocol.formula_config))
        .where(VotingProtocol.id == protocol_id)
    )
    protocol = result.scalar_one_or_none()
    if protocol is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Protocole introuvable")
    return protocol


# ── Voting Protocol routes ──────────────────────────────────────────────────


@router.get("/", response_model=list[VotingProtocolOut])
async def list_protocols(
    db: AsyncSession = Depends(get_db),
    vote_type: str | None = Query(default=None, description="Filtrer par type de vote (binary, nuanced)"),
    skip: int = Query(default=0, ge=0),
    limit: int = Query(default=50, ge=1, le=200),
) -> list[VotingProtocolOut]:
    """List all voting protocols with their formula configurations."""
    stmt = select(VotingProtocol).options(selectinload(VotingProtocol.formula_config))

    if vote_type is not None:
        stmt = stmt.where(VotingProtocol.vote_type == vote_type)

    stmt = stmt.order_by(VotingProtocol.created_at.desc()).offset(skip).limit(limit)
    result = await db.execute(stmt)
    protocols = result.scalars().unique().all()

    return [VotingProtocolOut.model_validate(p) for p in protocols]


@router.post("/", response_model=VotingProtocolOut, status_code=status.HTTP_201_CREATED)
async def create_protocol(
    payload: VotingProtocolCreate,
    db: AsyncSession = Depends(get_db),
    identity: DuniterIdentity = Depends(get_current_identity),
) -> VotingProtocolOut:
    """Create a new voting protocol.

    The formula_config_id must reference an existing FormulaConfig.
    """
    # Verify formula config exists before accepting the protocol.
    fc_result = await db.execute(
        select(FormulaConfig).where(FormulaConfig.id == payload.formula_config_id)
    )
    if fc_result.scalar_one_or_none() is None:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Configuration de formule introuvable",
        )

    protocol = VotingProtocol(**payload.model_dump())
    db.add(protocol)
    await db.commit()
    await db.refresh(protocol)

    # Reload with the formula config eagerly loaded for serialization.
    protocol = await _get_protocol(db, protocol.id)
    return VotingProtocolOut.model_validate(protocol)


# ── Formula Config routes ───────────────────────────────────────────────────
# FIX: these literal "/formulas" paths MUST be declared before the
# parameterized "/{id}" route below. FastAPI matches path operations in
# declaration order, so with "/{id}" first, GET /formulas was captured by it
# and rejected with 422 because "formulas" is not a valid UUID.


@router.get("/formulas", response_model=list[FormulaConfigOut])
async def list_formulas(
    db: AsyncSession = Depends(get_db),
    skip: int = Query(default=0, ge=0),
    limit: int = Query(default=50, ge=1, le=200),
) -> list[FormulaConfigOut]:
    """List all formula configurations."""
    stmt = (
        select(FormulaConfig)
        .order_by(FormulaConfig.created_at.desc())
        .offset(skip)
        .limit(limit)
    )
    result = await db.execute(stmt)
    formulas = result.scalars().all()

    return [FormulaConfigOut.model_validate(f) for f in formulas]


@router.post("/formulas", response_model=FormulaConfigOut, status_code=status.HTTP_201_CREATED)
async def create_formula(
    payload: FormulaConfigCreate,
    db: AsyncSession = Depends(get_db),
    identity: DuniterIdentity = Depends(get_current_identity),
) -> FormulaConfigOut:
    """Create a new formula configuration for WoT threshold computation."""
    formula = FormulaConfig(**payload.model_dump())
    db.add(formula)
    await db.commit()
    await db.refresh(formula)

    return FormulaConfigOut.model_validate(formula)


@router.get("/{id}", response_model=VotingProtocolOut)
async def get_protocol(
    id: uuid.UUID,
    db: AsyncSession = Depends(get_db),
) -> VotingProtocolOut:
    """Get a single voting protocol with its formula configuration."""
    protocol = await _get_protocol(db, id)
    return VotingProtocolOut.model_validate(protocol)
@router.get("/", response_model=list[SanctuaryEntryOut])
async def list_entries(
    db: AsyncSession = Depends(get_db),
    entry_type: str | None = Query(default=None, description="Filtrer par type (document, decision, vote_result)"),
    skip: int = Query(default=0, ge=0),
    limit: int = Query(default=50, ge=1, le=200),
) -> list[SanctuaryEntryOut]:
    """List all sanctuary entries with optional type filter."""
    stmt = select(SanctuaryEntry)
    if entry_type is not None:
        stmt = stmt.where(SanctuaryEntry.entry_type == entry_type)

    rows = await db.execute(
        stmt.order_by(SanctuaryEntry.created_at.desc()).offset(skip).limit(limit)
    )
    return [SanctuaryEntryOut.model_validate(e) for e in rows.scalars().all()]


@router.get("/{id}", response_model=SanctuaryEntryOut)
async def get_entry(
    id: uuid.UUID,
    db: AsyncSession = Depends(get_db),
) -> SanctuaryEntryOut:
    """Get a single sanctuary entry by ID."""
    row = await db.execute(select(SanctuaryEntry).where(SanctuaryEntry.id == id))
    if (entry := row.scalar_one_or_none()) is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Entree sanctuaire introuvable")
    return SanctuaryEntryOut.model_validate(entry)


@router.post("/", response_model=SanctuaryEntryOut, status_code=status.HTTP_201_CREATED)
async def create_entry(
    payload: SanctuaryEntryCreate,
    db: AsyncSession = Depends(get_db),
    identity: DuniterIdentity = Depends(get_current_identity),
) -> SanctuaryEntryOut:
    """Create a new sanctuary entry.

    This endpoint is typically called by internal services after:
    1. Content is hashed (SHA-256)
    2. Content is pinned to IPFS
    3. Hash is anchored on-chain via system.remark

    The IPFS CID and chain TX hash can be added later via updates.
    """
    entry = SanctuaryEntry(**payload.model_dump())
    db.add(entry)
    await db.commit()
    await db.refresh(entry)

    return SanctuaryEntryOut.model_validate(entry)
async def _get_session(db: AsyncSession, session_id: uuid.UUID) -> VoteSession:
    """Fetch a vote session by ID with votes eagerly loaded, or raise 404."""
    row = await db.execute(
        select(VoteSession)
        .options(selectinload(VoteSession.votes))
        .where(VoteSession.id == session_id)
    )
    if (session := row.scalar_one_or_none()) is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Session de vote introuvable")
    return session


async def _get_protocol_with_formula(db: AsyncSession, protocol_id: uuid.UUID) -> VotingProtocol:
    """Fetch a voting protocol with its formula config, or raise 404."""
    row = await db.execute(
        select(VotingProtocol)
        .options(selectinload(VotingProtocol.formula_config))
        .where(VotingProtocol.id == protocol_id)
    )
    if (protocol := row.scalar_one_or_none()) is None:
        raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail="Protocole de vote introuvable")
    return protocol


def _compute_threshold(formula: FormulaConfig, wot_size: int, votes_total: int) -> float:
    """Compute the WoT-based adoption threshold using the core inertia formula.

    Result = C + B^W + (M + (1-M) * (1 - (T/W)^G)) * max(0, T-C)

    C = constant_base, B = base_exponent, W = wot_size (floored at 1),
    M = majority_pct / 100, G = gradient_exponent, T = votes_total (turnout).
    """
    constant = formula.constant_base
    base = formula.base_exponent
    wot = max(wot_size, 1)  # guard against an empty/unsnapshotted WoT
    majority = formula.majority_pct / 100.0
    gradient = formula.gradient_exponent
    turnout = votes_total

    exponential_term = base ** wot if base > 0 else 0.0
    # `wot` is always >= 1 here, so the ratio is well-defined; capped at 1.
    turnout_ratio = min(turnout / wot, 1.0) if wot > 0 else 0.0
    inertia = majority + (1 - majority) * (1 - turnout_ratio ** gradient)

    return constant + exponential_term + inertia * max(0, turnout - constant)


def _compute_result(
    session: VoteSession,
    formula: FormulaConfig,
) -> dict:
    """Compute the vote result based on tallies and formula.

    Returns a dict with threshold_required, result ("adopted" or "rejected"),
    and whether Smith/TechComm criteria are met.
    """
    threshold = _compute_threshold(formula, session.wot_size, session.votes_total)
    main_pass = session.votes_for >= threshold

    def _group_quorum(size: int, exponent, votes_for: int):
        """Return (required, passed) for an optional sub-group criterion."""
        if exponent is None or size <= 0:
            # Criterion not configured (or group empty): vacuously satisfied.
            return None, True
        required = math.ceil(size ** exponent)
        return required, votes_for >= required

    smith_threshold, smith_pass = _group_quorum(
        session.smith_size, formula.smith_exponent, session.smith_votes_for
    )
    techcomm_threshold, techcomm_pass = _group_quorum(
        session.techcomm_size, formula.techcomm_exponent, session.techcomm_votes_for
    )

    return {
        "threshold_required": threshold,
        "result": "adopted" if (main_pass and smith_pass and techcomm_pass) else "rejected",
        "smith_threshold": smith_threshold,
        "smith_pass": smith_pass,
        "techcomm_threshold": techcomm_threshold,
        "techcomm_pass": techcomm_pass,
    }
+ """ + # Validate protocol exists and get formula for duration + protocol = await _get_protocol_with_formula(db, payload.voting_protocol_id) + formula = protocol.formula_config + + starts_at = datetime.now(timezone.utc) + ends_at = starts_at + timedelta(days=formula.duration_days) + + session = VoteSession( + decision_id=payload.decision_id, + item_version_id=payload.item_version_id, + voting_protocol_id=payload.voting_protocol_id, + starts_at=starts_at, + ends_at=ends_at, + # TODO: Snapshot actual WoT sizes from blockchain via Duniter RPC + wot_size=0, + smith_size=0, + techcomm_size=0, + ) + db.add(session) + await db.commit() + await db.refresh(session) + + return VoteSessionOut.model_validate(session) + + +@router.get("/sessions/{id}", response_model=VoteSessionOut) +async def get_vote_session( + id: uuid.UUID, + db: AsyncSession = Depends(get_db), +) -> VoteSessionOut: + """Get a vote session with current tallies.""" + session = await _get_session(db, id) + return VoteSessionOut.model_validate(session) + + +@router.post("/sessions/{id}/vote", response_model=VoteOut, status_code=status.HTTP_201_CREATED) +async def submit_vote( + id: uuid.UUID, + payload: VoteCreate, + db: AsyncSession = Depends(get_db), + identity: DuniterIdentity = Depends(get_current_identity), +) -> VoteOut: + """Submit a vote to a session. + + Each identity can only vote once per session. Submitting again replaces the previous vote. + The vote must include a cryptographic signature for on-chain proof. 
+ """ + session = await _get_session(db, id) + + # Verify session is open + if session.status != "open": + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="Cette session de vote n'est pas ouverte", + ) + + # Verify session hasn't ended + if datetime.now(timezone.utc) > session.ends_at: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail="Cette session de vote est terminee", + ) + + # Check if voter already voted -- replace if so + existing_result = await db.execute( + select(Vote).where( + Vote.session_id == session.id, + Vote.voter_id == identity.id, + ) + ) + existing_vote = existing_result.scalar_one_or_none() + + if existing_vote is not None: + # Deactivate old vote (keep for audit trail) + existing_vote.is_active = False + + # Update tallies: subtract old vote + session.votes_total -= 1 + if existing_vote.vote_value == "for": + session.votes_for -= 1 + if existing_vote.voter_is_smith: + session.smith_votes_for -= 1 + if existing_vote.voter_is_techcomm: + session.techcomm_votes_for -= 1 + elif existing_vote.vote_value == "against": + session.votes_against -= 1 + + # Create new vote + vote = Vote( + session_id=session.id, + voter_id=identity.id, + vote_value=payload.vote_value, + nuanced_level=payload.nuanced_level, + comment=payload.comment, + signature=payload.signature, + signed_payload=payload.signed_payload, + voter_wot_status=identity.wot_status, + voter_is_smith=identity.is_smith, + voter_is_techcomm=identity.is_techcomm, + ) + db.add(vote) + + # Update tallies: add new vote + session.votes_total += 1 + if payload.vote_value == "for": + session.votes_for += 1 + if identity.is_smith: + session.smith_votes_for += 1 + if identity.is_techcomm: + session.techcomm_votes_for += 1 + elif payload.vote_value == "against": + session.votes_against += 1 + + await db.commit() + await db.refresh(vote) + + return VoteOut.model_validate(vote) + + +@router.get("/sessions/{id}/votes", response_model=list[VoteOut]) +async def 
@router.get("/sessions/{id}/votes", response_model=list[VoteOut])
async def list_votes(
    id: uuid.UUID,
    db: AsyncSession = Depends(get_db),
    active_only: bool = Query(default=True, description="Ne montrer que les votes actifs"),
) -> list[VoteOut]:
    """List all votes in a session."""
    await _get_session(db, id)  # 404 when the session does not exist

    stmt = select(Vote).where(Vote.session_id == id)
    if active_only:
        stmt = stmt.where(Vote.is_active.is_(True))

    rows = await db.execute(stmt.order_by(Vote.created_at.asc()))
    return [VoteOut.model_validate(v) for v in rows.scalars().all()]


@router.get("/sessions/{id}/result")
async def get_vote_result(
    id: uuid.UUID,
    db: AsyncSession = Depends(get_db),
) -> dict:
    """Compute and return the current result for a vote session.

    Uses the WoT threshold formula linked through the voting protocol.
    Returns current tallies, computed threshold, and whether the vote passes.
    """
    session = await _get_session(db, id)

    # Resolve the protocol and its formula configuration.
    protocol = await _get_protocol_with_formula(db, session.voting_protocol_id)

    tallies = {
        "session_id": str(session.id),
        "status": session.status,
        "votes_for": session.votes_for,
        "votes_against": session.votes_against,
        "votes_total": session.votes_total,
        "wot_size": session.wot_size,
        "smith_size": session.smith_size,
        "techcomm_size": session.techcomm_size,
        "smith_votes_for": session.smith_votes_for,
        "techcomm_votes_for": session.techcomm_votes_for,
    }
    return {**tallies, **_compute_result(session, protocol.formula_config)}
class ConnectionManager:
    """Tracks live WebSocket connections, grouped by vote session ID."""

    def __init__(self) -> None:
        # Maps a vote session ID to every socket currently watching it.
        self._connections: dict[uuid.UUID, list[WebSocket]] = {}

    async def connect(self, websocket: WebSocket, session_id: uuid.UUID) -> None:
        """Accept the handshake and attach the socket to *session_id*'s group."""
        await websocket.accept()
        self._connections.setdefault(session_id, []).append(websocket)

    def disconnect(self, websocket: WebSocket, session_id: uuid.UUID) -> None:
        """Detach the socket; drop the group entirely once it is empty."""
        group = self._connections.get(session_id)
        if group is None:
            return
        remaining = [ws for ws in group if ws is not websocket]
        if remaining:
            self._connections[session_id] = remaining
        else:
            del self._connections[session_id]

    async def broadcast(self, session_id: uuid.UUID, data: dict[str, Any]) -> None:
        """Send *data* (JSON-encoded) to every socket watching *session_id*."""
        watchers = self._connections.get(session_id)
        if not watchers:
            return

        payload = json.dumps(data, default=str)
        stale: list[WebSocket] = []

        for ws in watchers:
            try:
                await ws.send_text(payload)
            except Exception:
                # Any send failure means the socket is dead; drop it afterwards.
                stale.append(ws)

        for ws in stale:
            self.disconnect(ws, session_id)


manager = ConnectionManager()
@router.websocket("/live")
async def live_updates(websocket: WebSocket) -> None:
    """WebSocket endpoint for live vote session updates.

    The client connects and sends a JSON message with the session_id
    they want to subscribe to:

        { "action": "subscribe", "session_id": "<uuid>" }

    The server will then push vote update events to the client:

        { "event": "vote_update", "session_id": "...", "votes_for": N, "votes_against": N, "votes_total": N }
        { "event": "session_closed", "session_id": "...", "result": "adopted|rejected" }

    The client can also unsubscribe:

        { "action": "unsubscribe", "session_id": "<uuid>" }
    """
    await websocket.accept()
    subscribed: set[uuid.UUID] = set()

    try:
        while True:
            raw = await websocket.receive_text()

            try:
                data = json.loads(raw)
            except json.JSONDecodeError:
                await websocket.send_text(json.dumps({"error": "JSON invalide"}))
                continue

            action = data.get("action")
            sid_raw = data.get("session_id")
            if not action or not sid_raw:
                await websocket.send_text(
                    json.dumps({"error": "Champs 'action' et 'session_id' requis"})
                )
                continue

            try:
                session_id = uuid.UUID(sid_raw)
            except ValueError:
                await websocket.send_text(json.dumps({"error": "session_id invalide"}))
                continue

            if action == "subscribe":
                if session_id not in subscribed:
                    # Register directly in the manager's table: the handshake
                    # was already accepted above, so ConnectionManager.connect
                    # (which calls accept() again) cannot be used here.
                    # NOTE(review): this reaches into manager._connections; a
                    # public register() method on ConnectionManager would be cleaner.
                    manager._connections.setdefault(session_id, []).append(websocket)
                    subscribed.add(session_id)

                await websocket.send_text(
                    json.dumps({"event": "subscribed", "session_id": str(session_id)})
                )

            elif action == "unsubscribe":
                if session_id in subscribed:
                    manager.disconnect(websocket, session_id)
                    subscribed.discard(session_id)

                await websocket.send_text(
                    json.dumps({"event": "unsubscribed", "session_id": str(session_id)})
                )

            else:
                await websocket.send_text(
                    json.dumps({"error": f"Action inconnue: {action}"})
                )

    except WebSocketDisconnect:
        # Drop every registration this client held before the socket closed.
        for session_id in subscribed:
            manager.disconnect(websocket, session_id)
"""Pydantic schemas for the Ed25519 challenge/response authentication flow."""

from __future__ import annotations

from datetime import datetime
from uuid import UUID

from pydantic import BaseModel, ConfigDict, Field


# ── Request schemas ────────────────────────────────────────────


class ChallengeRequest(BaseModel):
    """Request a challenge nonce for Ed25519 authentication."""

    address: str = Field(..., min_length=1, max_length=64, description="Duniter V2 SS58 address")


class VerifyRequest(BaseModel):
    """Submit the signed challenge to obtain a session token."""

    address: str = Field(..., min_length=1, max_length=64)
    signature: str = Field(..., description="Ed25519 signature of the challenge (hex)")
    challenge: str = Field(..., description="The challenge string that was signed")


# ── Response schemas ───────────────────────────────────────────


class ChallengeResponse(BaseModel):
    """Returned after requesting a challenge."""

    challenge: str
    expires_at: datetime


class IdentityOut(BaseModel):
    """Public representation of a Duniter identity."""

    # Populated straight from the ORM model via attribute access.
    model_config = ConfigDict(from_attributes=True)

    id: UUID
    address: str
    display_name: str | None = None
    wot_status: str
    is_smith: bool
    is_techcomm: bool


class TokenResponse(BaseModel):
    """Returned after successful challenge verification."""

    token: str
    identity: IdentityOut
"""Pydantic schemas for decisions and their process steps."""

from __future__ import annotations

from datetime import datetime
from uuid import UUID

from pydantic import BaseModel, ConfigDict, Field


# ── Decision ───────────────────────────────────────────────────


class DecisionStepCreate(BaseModel):
    """Payload for creating a step within a decision process."""

    step_order: int = Field(..., ge=0)
    step_type: str = Field(..., max_length=32, description="qualification, review, vote, execution, reporting")
    title: str | None = Field(default=None, max_length=256)
    description: str | None = None


class DecisionStepOut(BaseModel):
    """Full decision step representation."""

    model_config = ConfigDict(from_attributes=True)

    id: UUID
    decision_id: UUID
    step_order: int
    step_type: str
    title: str | None = None
    description: str | None = None
    status: str
    # Set once the step reaches its voting phase.
    vote_session_id: UUID | None = None
    outcome: str | None = None
    created_at: datetime


class DecisionCreate(BaseModel):
    """Payload for creating a new decision."""

    title: str = Field(..., min_length=1, max_length=256)
    description: str | None = None
    context: str | None = None
    decision_type: str = Field(..., max_length=64, description="runtime_upgrade, document_change, mandate_vote, custom")
    voting_protocol_id: UUID | None = None


class DecisionUpdate(BaseModel):
    """Partial update for a decision; unset fields are left unchanged."""

    title: str | None = Field(default=None, max_length=256)
    description: str | None = None
    status: str | None = Field(default=None, max_length=32)
    voting_protocol_id: UUID | None = None


class DecisionOut(BaseModel):
    """Full decision representation returned by the API."""

    model_config = ConfigDict(from_attributes=True)

    id: UUID
    title: str
    description: str | None = None
    context: str | None = None
    decision_type: str
    status: str
    voting_protocol_id: UUID | None = None
    created_by_id: UUID | None = None
    created_at: datetime
    updated_at: datetime
    steps: list[DecisionStepOut] = Field(default_factory=list)
"""Pydantic schemas for reference documents, their items, and item versions."""

from __future__ import annotations

from datetime import datetime
from uuid import UUID

from pydantic import BaseModel, ConfigDict, Field


# ── Document ───────────────────────────────────────────────────


class DocumentCreate(BaseModel):
    """Payload for creating a new reference document."""

    slug: str = Field(..., min_length=1, max_length=128)
    title: str = Field(..., min_length=1, max_length=256)
    doc_type: str = Field(..., max_length=64, description="licence, engagement, reglement, constitution")
    description: str | None = None
    version: str | None = Field(default="0.1.0", max_length=32)


class DocumentUpdate(BaseModel):
    """Partial update for a document; unset fields are left unchanged."""

    title: str | None = Field(default=None, max_length=256)
    status: str | None = Field(default=None, max_length=32)
    description: str | None = None
    version: str | None = Field(default=None, max_length=32)


class DocumentOut(BaseModel):
    """Full document representation returned by the API."""

    model_config = ConfigDict(from_attributes=True)

    id: UUID
    slug: str
    title: str
    doc_type: str
    version: str
    status: str
    description: str | None = None
    # Sanctuary anchoring fields; null until the document is archived.
    ipfs_cid: str | None = None
    chain_anchor: str | None = None
    created_at: datetime
    updated_at: datetime
    items_count: int = Field(default=0, description="Number of items in this document")


# ── Document Item ──────────────────────────────────────────────


class DocumentItemCreate(BaseModel):
    """Payload for creating a document item (clause, rule, etc.)."""

    position: str = Field(..., max_length=16, description='Hierarchical position e.g. "1", "1.1", "3.2"')
    item_type: str = Field(default="clause", max_length=32, description="clause, rule, verification, preamble, section")
    title: str | None = Field(default=None, max_length=256)
    current_text: str = Field(..., min_length=1)
    voting_protocol_id: UUID | None = None


class DocumentItemOut(BaseModel):
    """Full document item representation."""

    model_config = ConfigDict(from_attributes=True)

    id: UUID
    document_id: UUID
    position: str
    item_type: str
    title: str | None = None
    current_text: str
    voting_protocol_id: UUID | None = None
    sort_order: int
    created_at: datetime
    updated_at: datetime


# ── Item Version ───────────────────────────────────────────────


class ItemVersionCreate(BaseModel):
    """Payload for proposing a new version of a document item."""

    proposed_text: str = Field(..., min_length=1)
    rationale: str | None = None


class ItemVersionOut(BaseModel):
    """Full item version representation."""

    model_config = ConfigDict(from_attributes=True)

    id: UUID
    item_id: UUID
    proposed_text: str
    diff_text: str | None = None
    rationale: str | None = None
    status: str
    # Decision driving this version's adoption, once one exists.
    decision_id: UUID | None = None
    proposed_by_id: UUID | None = None
    created_at: datetime
"""Pydantic schemas for mandates and their process steps."""

from __future__ import annotations

from datetime import datetime
from uuid import UUID

from pydantic import BaseModel, ConfigDict, Field


# ── Mandate Step ───────────────────────────────────────────────


class MandateStepCreate(BaseModel):
    """Payload for creating a step within a mandate process."""

    step_order: int = Field(..., ge=0)
    step_type: str = Field(
        ...,
        max_length=32,
        description="formulation, candidacy, vote, assignment, reporting, completion, revocation",
    )
    title: str | None = Field(default=None, max_length=256)
    description: str | None = None


class MandateStepOut(BaseModel):
    """Full mandate step representation."""

    model_config = ConfigDict(from_attributes=True)

    id: UUID
    mandate_id: UUID
    step_order: int
    step_type: str
    title: str | None = None
    description: str | None = None
    status: str
    # Set once the step reaches its voting phase.
    vote_session_id: UUID | None = None
    outcome: str | None = None
    created_at: datetime


# ── Mandate ────────────────────────────────────────────────────


class MandateCreate(BaseModel):
    """Payload for creating a new mandate."""

    title: str = Field(..., min_length=1, max_length=256)
    description: str | None = None
    mandate_type: str = Field(..., max_length=64, description="techcomm, smith, custom")
    decision_id: UUID | None = None


class MandateOut(BaseModel):
    """Full mandate representation returned by the API."""

    model_config = ConfigDict(from_attributes=True)

    id: UUID
    title: str
    description: str | None = None
    mandate_type: str
    status: str
    # The identity holding the mandate, once assigned.
    mandatee_id: UUID | None = None
    decision_id: UUID | None = None
    starts_at: datetime | None = None
    ends_at: datetime | None = None
    created_at: datetime
    updated_at: datetime
    steps: list[MandateStepOut] = Field(default_factory=list)
"""Pydantic schemas for voting protocols and WoT threshold formula configs."""

from __future__ import annotations

from datetime import datetime
from uuid import UUID

from pydantic import BaseModel, ConfigDict, Field


# ── Formula Config ─────────────────────────────────────────────


class FormulaConfigCreate(BaseModel):
    """Payload for creating a WoT threshold formula configuration."""

    name: str = Field(..., min_length=1, max_length=128)
    description: str | None = None

    # WoT threshold params
    duration_days: int = Field(default=30, ge=1, description="Duration of the vote in days")
    majority_pct: int = Field(default=50, ge=1, le=100, description="Majority percentage required")
    base_exponent: float = Field(default=0.1, ge=0.0, le=1.0, description="Base exponent B in the formula")
    gradient_exponent: float = Field(default=0.2, ge=0.0, le=2.0, description="Gradient exponent G in the formula")
    constant_base: float = Field(default=0.0, ge=0.0, le=1.0, description="Constant base C in the formula")

    # Smith criterion (None disables the criterion)
    smith_exponent: float | None = Field(default=None, ge=0.0, le=1.0, description="Smith criterion exponent S")

    # TechComm criterion (None disables the criterion)
    techcomm_exponent: float | None = Field(default=None, ge=0.0, le=1.0, description="TechComm criterion exponent T")

    # Nuanced vote
    nuanced_min_participants: int | None = Field(default=None, ge=0, description="Minimum participants for nuanced vote")
    nuanced_threshold_pct: int | None = Field(default=None, ge=0, le=100, description="Threshold percentage for nuanced vote")


class FormulaConfigOut(BaseModel):
    """Full formula configuration representation."""

    model_config = ConfigDict(from_attributes=True)

    id: UUID
    name: str
    description: str | None = None
    duration_days: int
    majority_pct: int
    base_exponent: float
    gradient_exponent: float
    constant_base: float
    smith_exponent: float | None = None
    techcomm_exponent: float | None = None
    nuanced_min_participants: int | None = None
    nuanced_threshold_pct: int | None = None
    created_at: datetime


# ── Voting Protocol ────────────────────────────────────────────


class VotingProtocolCreate(BaseModel):
    """Payload for creating a voting protocol."""

    name: str = Field(..., min_length=1, max_length=128)
    description: str | None = None
    vote_type: str = Field(..., max_length=32, description="binary, nuanced")
    formula_config_id: UUID = Field(..., description="Reference to the formula configuration")
    mode_params: str | None = Field(default=None, max_length=64, description='e.g. "D30M50B.1G.2T.1"')
    is_meta_governed: bool = Field(default=False, description="Whether this protocol is itself governed by meta-vote")


class VotingProtocolOut(BaseModel):
    """Full voting protocol representation including formula config."""

    model_config = ConfigDict(from_attributes=True)

    id: UUID
    name: str
    description: str | None = None
    vote_type: str
    formula_config_id: UUID
    mode_params: str | None = None
    is_meta_governed: bool
    created_at: datetime
    # Nested eager representation of the referenced formula.
    formula_config: FormulaConfigOut
"D30M50B.1G.2T.1"') + is_meta_governed: bool = Field(default=False, description="Whether this protocol is itself governed by meta-vote") + + +class VotingProtocolOut(BaseModel): + """Full voting protocol representation including formula config.""" + + model_config = ConfigDict(from_attributes=True) + + id: UUID + name: str + description: str | None = None + vote_type: str + formula_config_id: UUID + mode_params: str | None = None + is_meta_governed: bool + created_at: datetime + formula_config: FormulaConfigOut diff --git a/backend/app/schemas/sanctuary.py b/backend/app/schemas/sanctuary.py new file mode 100644 index 0000000..6c222d8 --- /dev/null +++ b/backend/app/schemas/sanctuary.py @@ -0,0 +1,35 @@ +from __future__ import annotations + +from datetime import datetime +from uuid import UUID + +from pydantic import BaseModel, ConfigDict, Field + + +# ── Sanctuary Entry ────────────────────────────────────────────── + + +class SanctuaryEntryCreate(BaseModel): + """Payload for creating a new sanctuary entry (IPFS + chain anchor).""" + + entry_type: str = Field(..., max_length=64, description="document, decision, vote_result") + reference_id: UUID = Field(..., description="ID of the referenced entity") + title: str | None = Field(default=None, max_length=256) + content_hash: str = Field(..., max_length=128, description="SHA-256 hash of the content") + + +class SanctuaryEntryOut(BaseModel): + """Full sanctuary entry representation.""" + + model_config = ConfigDict(from_attributes=True) + + id: UUID + entry_type: str + reference_id: UUID + title: str | None = None + content_hash: str + ipfs_cid: str | None = None + chain_tx_hash: str | None = None + chain_block: int | None = None + metadata_json: str | None = None + created_at: datetime diff --git a/backend/app/schemas/vote.py b/backend/app/schemas/vote.py new file mode 100644 index 0000000..22f59df --- /dev/null +++ b/backend/app/schemas/vote.py @@ -0,0 +1,89 @@ +from __future__ import annotations + +from datetime 
"""Pydantic schemas for vote sessions and individual votes."""

from __future__ import annotations

from datetime import datetime
from uuid import UUID

from pydantic import BaseModel, ConfigDict, Field


# ── Vote Session ───────────────────────────────────────────────


class VoteSessionCreate(BaseModel):
    """Payload for opening a new vote session."""

    decision_id: UUID | None = None
    item_version_id: UUID | None = None
    voting_protocol_id: UUID = Field(..., description="ID of the voting protocol to apply")


class VoteSessionOut(BaseModel):
    """Full vote session representation including tallies."""

    model_config = ConfigDict(from_attributes=True)

    id: UUID
    decision_id: UUID | None = None
    item_version_id: UUID | None = None
    voting_protocol_id: UUID

    # Snapshot of WoT/Smith/TechComm sizes taken at session start
    wot_size: int
    smith_size: int
    techcomm_size: int

    # Dates
    starts_at: datetime
    ends_at: datetime

    # Status
    status: str

    # Tallies
    votes_for: int
    votes_against: int
    votes_total: int
    smith_votes_for: int
    techcomm_votes_for: int
    threshold_required: float
    result: str | None = None

    # Chain recording
    chain_recorded: bool
    chain_tx_hash: str | None = None

    created_at: datetime


# ── Vote ───────────────────────────────────────────────────────


class VoteCreate(BaseModel):
    """Payload for casting a vote (with cryptographic proof)."""

    session_id: UUID
    vote_value: str = Field(..., max_length=32, description="for, against, or nuanced level")
    nuanced_level: int | None = Field(default=None, ge=0, le=5, description="0-5 for nuanced votes")
    comment: str | None = None
    signature: str = Field(..., description="Ed25519 signature of signed_payload")
    signed_payload: str = Field(..., description="The exact payload that was signed")


class VoteOut(BaseModel):
    """Full vote representation."""

    model_config = ConfigDict(from_attributes=True)

    id: UUID
    session_id: UUID
    voter_id: UUID
    vote_value: str
    nuanced_level: int | None = None
    comment: str | None = None
    signature: str
    signed_payload: str
    # Voter status snapshot taken at vote time, not live WoT state.
    voter_wot_status: str
    voter_is_smith: bool
    voter_is_techcomm: bool
    is_active: bool
    created_at: datetime
voter_wot_status: str + voter_is_smith: bool + voter_is_techcomm: bool + is_active: bool + created_at: datetime diff --git a/backend/app/services/__init__.py b/backend/app/services/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/backend/app/services/auth_service.py b/backend/app/services/auth_service.py new file mode 100644 index 0000000..1bbf36d --- /dev/null +++ b/backend/app/services/auth_service.py @@ -0,0 +1,96 @@ +"""Authentication service: challenge generation, token management, current user resolution.""" + +from __future__ import annotations + +import hashlib +import secrets +import uuid +from datetime import datetime, timedelta, timezone + +from fastapi import Depends, HTTPException, status +from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer +from sqlalchemy import select +from sqlalchemy.ext.asyncio import AsyncSession + +from app.config import settings +from app.database import get_db +from app.models.user import DuniterIdentity, Session + +security = HTTPBearer(auto_error=False) + + +def _hash_token(token: str) -> str: + """SHA-256 hash of a bearer token for storage.""" + return hashlib.sha256(token.encode()).hexdigest() + + +async def create_session(db: AsyncSession, identity: DuniterIdentity) -> str: + """Create a new session for the given identity, return the raw bearer token.""" + raw_token = secrets.token_urlsafe(48) + token_hash = _hash_token(raw_token) + + session = Session( + token_hash=token_hash, + identity_id=identity.id, + expires_at=datetime.now(timezone.utc) + timedelta(hours=settings.TOKEN_EXPIRE_HOURS), + ) + db.add(session) + await db.commit() + + return raw_token + + +async def invalidate_session(db: AsyncSession, token: str) -> None: + """Delete the session matching the given raw token.""" + token_hash = _hash_token(token) + result = await db.execute(select(Session).where(Session.token_hash == token_hash)) + session = result.scalar_one_or_none() + if session: + await db.delete(session) + await 
async def get_current_identity(
    db: AsyncSession = Depends(get_db),
    credentials: HTTPAuthorizationCredentials | None = Depends(security),
) -> DuniterIdentity:
    """FastAPI dependency resolving the authenticated identity from the bearer token.

    Raises
    ------
    HTTPException
        401 when the token is missing, unknown/expired, or the session
        points at a vanished identity.
    """
    if credentials is None:
        raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="Authentification requise")

    # Match on the token hash and require a still-valid expiry in one query.
    session_stmt = select(Session).where(
        Session.token_hash == _hash_token(credentials.credentials),
        Session.expires_at > datetime.now(timezone.utc),
    )
    session = (await db.execute(session_stmt)).scalar_one_or_none()
    if session is None:
        raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="Token invalide ou expire")

    identity_stmt = select(DuniterIdentity).where(DuniterIdentity.id == session.identity_id)
    identity = (await db.execute(identity_stmt)).scalar_one_or_none()
    if identity is None:
        raise HTTPException(status_code=status.HTTP_401_UNAUTHORIZED, detail="Identite introuvable")

    return identity


async def get_or_create_identity(db: AsyncSession, address: str) -> DuniterIdentity:
    """Return the identity for *address*, creating a bare record on first sight."""
    stmt = select(DuniterIdentity).where(DuniterIdentity.address == address)
    identity = (await db.execute(stmt)).scalar_one_or_none()
    if identity is not None:
        return identity

    identity = DuniterIdentity(address=address)
    db.add(identity)
    await db.commit()
    await db.refresh(identity)
    return identity
async def get_wot_size() -> int:
    """Return the current number of WoT members.

    TODO: replace the hardcoded snapshot with a real RPC call via
    substrate-interface against ``settings.DUNITER_RPC_URL``
    (query ``Membership.MembershipCount``).
    """
    return 7224  # GDev snapshot


async def get_smith_size() -> int:
    """Return the current number of Smith members (forgerons).

    TODO: replace the hardcoded snapshot with a real RPC call via
    substrate-interface (query ``SmithMembers.SmithMembershipCount``).
    """
    return 20  # GDev snapshot


async def get_techcomm_size() -> int:
    """Return the current number of Technical Committee members.

    TODO: replace the hardcoded snapshot with a real RPC call via
    substrate-interface (query ``TechnicalCommittee.Members`` and
    return the member-list length, 0 when empty).
    """
    return 5  # GDev snapshot
+ """ + # TODO: Replace with real substrate-interface RPC call + return 5 diff --git a/backend/app/services/decision_service.py b/backend/app/services/decision_service.py new file mode 100644 index 0000000..3477718 --- /dev/null +++ b/backend/app/services/decision_service.py @@ -0,0 +1,117 @@ +"""Decision service: step advancement logic.""" + +from __future__ import annotations + +import uuid + +from sqlalchemy import select +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.orm import selectinload + +from app.models.decision import Decision, DecisionStep + + +# Valid status transitions for decisions +_DECISION_STATUS_ORDER = [ + "draft", + "qualification", + "review", + "voting", + "executed", + "closed", +] + + +async def advance_decision(decision_id: uuid.UUID, db: AsyncSession) -> Decision: + """Move a decision to its next step. + + Completes the current active step and activates the next pending step. + If no more steps remain, the decision status advances to the next phase. + + Parameters + ---------- + decision_id: + UUID of the Decision to advance. + db: + Async database session. + + Returns + ------- + Decision + The updated decision. + + Raises + ------ + ValueError + If the decision is not found, or no further advancement is possible. 
+ """ + result = await db.execute( + select(Decision) + .options(selectinload(Decision.steps)) + .where(Decision.id == decision_id) + ) + decision = result.scalar_one_or_none() + if decision is None: + raise ValueError(f"Decision introuvable : {decision_id}") + + if decision.status == "closed": + raise ValueError("La decision est deja cloturee") + + steps: list[DecisionStep] = sorted(decision.steps, key=lambda s: s.step_order) + + # Find the current active step + active_step: DecisionStep | None = None + for step in steps: + if step.status == "active": + active_step = step + break + + if active_step is not None: + # Complete the active step + active_step.status = "completed" + + # Activate the next pending step + next_step: DecisionStep | None = None + for step in steps: + if step.step_order > active_step.step_order and step.status == "pending": + next_step = step + break + + if next_step is not None: + next_step.status = "active" + else: + # No more steps: advance the decision status + _advance_decision_status(decision) + else: + # No active step: try to activate the first pending step + first_pending: DecisionStep | None = None + for step in steps: + if step.status == "pending": + first_pending = step + break + + if first_pending is not None: + first_pending.status = "active" + # Also advance decision out of draft if needed + if decision.status == "draft": + decision.status = "qualification" + else: + # All steps are completed: advance the decision status + _advance_decision_status(decision) + + await db.commit() + await db.refresh(decision) + + return decision + + +def _advance_decision_status(decision: Decision) -> None: + """Move a decision to its next status in the lifecycle.""" + try: + current_index = _DECISION_STATUS_ORDER.index(decision.status) + except ValueError: + return + + next_index = current_index + 1 + if next_index < len(_DECISION_STATUS_ORDER): + decision.status = _DECISION_STATUS_ORDER[next_index] diff --git 
a/backend/app/services/document_service.py b/backend/app/services/document_service.py new file mode 100644 index 0000000..87e2dca --- /dev/null +++ b/backend/app/services/document_service.py @@ -0,0 +1,108 @@ +"""Document service: retrieval and version management.""" + +from __future__ import annotations + +import uuid + +from sqlalchemy import select +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.orm import selectinload + +from app.models.document import Document, DocumentItem, ItemVersion + + +async def get_document_with_items(slug: str, db: AsyncSession) -> Document | None: + """Load a document with all its items and their versions, eagerly. + + Parameters + ---------- + slug: + Unique slug of the document. + db: + Async database session. + + Returns + ------- + Document | None + The document with items and versions loaded, or None if not found. + """ + result = await db.execute( + select(Document) + .options( + selectinload(Document.items).selectinload(DocumentItem.versions) + ) + .where(Document.slug == slug) + ) + return result.scalar_one_or_none() + + +async def apply_version( + item_id: uuid.UUID, + version_id: uuid.UUID, + db: AsyncSession, +) -> DocumentItem: + """Apply an accepted version to a document item. + + This replaces the item's current_text with the version's proposed_text + and marks the version as 'accepted'. + + Parameters + ---------- + item_id: + UUID of the DocumentItem to update. + version_id: + UUID of the ItemVersion to apply. + db: + Async database session. + + Returns + ------- + DocumentItem + The updated document item. + + Raises + ------ + ValueError + If the item or version is not found, or the version does not + belong to the item. 
+ """ + # Load item + item_result = await db.execute( + select(DocumentItem).where(DocumentItem.id == item_id) + ) + item = item_result.scalar_one_or_none() + if item is None: + raise ValueError(f"Element de document introuvable : {item_id}") + + # Load version + version_result = await db.execute( + select(ItemVersion).where(ItemVersion.id == version_id) + ) + version = version_result.scalar_one_or_none() + if version is None: + raise ValueError(f"Version introuvable : {version_id}") + + if version.item_id != item.id: + raise ValueError( + f"La version {version_id} n'appartient pas a l'element {item_id}" + ) + + # Apply the version + item.current_text = version.proposed_text + version.status = "accepted" + + # Mark all other pending/voting versions for this item as rejected + other_versions_result = await db.execute( + select(ItemVersion).where( + ItemVersion.item_id == item_id, + ItemVersion.id != version_id, + ItemVersion.status.in_(["proposed", "voting"]), + ) + ) + for other_version in other_versions_result.scalars(): + other_version.status = "rejected" + + await db.commit() + await db.refresh(item) + + return item diff --git a/backend/app/services/mandate_service.py b/backend/app/services/mandate_service.py new file mode 100644 index 0000000..97e1c7a --- /dev/null +++ b/backend/app/services/mandate_service.py @@ -0,0 +1,118 @@ +"""Mandate service: step advancement logic.""" + +from __future__ import annotations + +import uuid + +from sqlalchemy import select +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.orm import selectinload + +from app.models.mandate import Mandate, MandateStep + + +# Valid status transitions for mandates +_MANDATE_STATUS_ORDER = [ + "draft", + "candidacy", + "voting", + "active", + "reporting", + "completed", +] + + +async def advance_mandate(mandate_id: uuid.UUID, db: AsyncSession) -> Mandate: + """Move a mandate to its next step. + + Completes the current active step and activates the next pending step. 
async def advance_mandate(mandate_id: uuid.UUID, db: AsyncSession) -> Mandate:
    """Move a mandate to its next step.

    Completes the currently active step and activates the next pending
    one. When no step remains, the mandate's overall status advances
    one phase in the lifecycle.

    Parameters
    ----------
    mandate_id:
        UUID of the Mandate to advance.
    db:
        Async database session.

    Returns
    -------
    Mandate
        The updated mandate.

    Raises
    ------
    ValueError
        If the mandate is not found, already completed/revoked, or
        no further advancement is possible.
    """
    result = await db.execute(
        select(Mandate)
        .options(selectinload(Mandate.steps))
        .where(Mandate.id == mandate_id)
    )
    mandate = result.scalar_one_or_none()
    if mandate is None:
        raise ValueError(f"Mandat introuvable : {mandate_id}")
    if mandate.status in ("completed", "revoked"):
        raise ValueError(f"Le mandat est deja en statut terminal : {mandate.status}")

    ordered = sorted(mandate.steps, key=lambda s: s.step_order)
    active = next((s for s in ordered if s.status == "active"), None)

    if active is not None:
        active.status = "completed"
        # First pending step strictly after the one just completed.
        following = next(
            (s for s in ordered if s.step_order > active.step_order and s.status == "pending"),
            None,
        )
        if following is not None:
            following.status = "active"
        else:
            _advance_mandate_status(mandate)
    else:
        # Nothing active yet: kick off the first pending step, or — if
        # every step is already done — bump the overall status.
        pending = next((s for s in ordered if s.status == "pending"), None)
        if pending is not None:
            pending.status = "active"
            if mandate.status == "draft":
                mandate.status = "candidacy"
        else:
            _advance_mandate_status(mandate)

    await db.commit()
    await db.refresh(mandate)
    return mandate


def _advance_mandate_status(mandate: Mandate) -> None:
    """Bump ``mandate.status`` to the next phase in ``_MANDATE_STATUS_ORDER``."""
    try:
        idx = _MANDATE_STATUS_ORDER.index(mandate.status)
    except ValueError:
        return  # unknown status: leave untouched
    if idx + 1 < len(_MANDATE_STATUS_ORDER):
        mandate.status = _MANDATE_STATUS_ORDER[idx + 1]
+ """ + # Compute SHA-256 hash of the content + content_hash = hashlib.sha256(content.encode("utf-8")).hexdigest() + + # Build metadata + metadata = { + "archived_at": datetime.now(timezone.utc).isoformat(), + "entry_type": entry_type, + "content_length": len(content), + } + + entry = SanctuaryEntry( + entry_type=entry_type, + reference_id=reference_id, + title=title, + content_hash=content_hash, + metadata_json=json.dumps(metadata, ensure_ascii=False), + ) + + # TODO: Upload content to IPFS via kubo HTTP API + # ipfs_cid = await _upload_to_ipfs(content) + # entry.ipfs_cid = ipfs_cid + + # TODO: Anchor hash on-chain via system.remark + # tx_hash, block_number = await _anchor_on_chain(content_hash) + # entry.chain_tx_hash = tx_hash + # entry.chain_block = block_number + + db.add(entry) + await db.commit() + await db.refresh(entry) + + return entry + + +async def _upload_to_ipfs(content: str) -> str: + """Upload content to IPFS via kubo HTTP API. + + TODO: Implement using httpx against settings.IPFS_API_URL. + + Example:: + + import httpx + from app.config import settings + + async with httpx.AsyncClient() as client: + response = await client.post( + f"{settings.IPFS_API_URL}/api/v0/add", + files={"file": ("content.txt", content.encode("utf-8"))}, + ) + response.raise_for_status() + return response.json()["Hash"] + """ + raise NotImplementedError("IPFS upload pas encore implemente") + + +async def _anchor_on_chain(content_hash: str) -> tuple[str, int]: + """Anchor a content hash on-chain via system.remark. + + TODO: Implement using substrate-interface. 
+ + Example:: + + from substrateinterface import SubstrateInterface + from app.config import settings + + substrate = SubstrateInterface(url=settings.DUNITER_RPC_URL) + call = substrate.compose_call( + call_module="System", + call_function="remark", + call_params={"remark": f"glibredecision:sanctuary:{content_hash}"}, + ) + extrinsic = substrate.create_signed_extrinsic(call=call, keypair=keypair) + receipt = substrate.submit_extrinsic(extrinsic, wait_for_inclusion=True) + return receipt.extrinsic_hash, receipt.block_number + """ + raise NotImplementedError("Ancrage on-chain pas encore implemente") diff --git a/backend/app/services/vote_service.py b/backend/app/services/vote_service.py new file mode 100644 index 0000000..3974f00 --- /dev/null +++ b/backend/app/services/vote_service.py @@ -0,0 +1,199 @@ +"""Vote service: compute results and verify vote signatures.""" + +from __future__ import annotations + +import uuid + +from sqlalchemy import select +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.orm import selectinload + +from app.engine.mode_params import parse_mode_params +from app.engine.nuanced_vote import evaluate_nuanced +from app.engine.smith_threshold import smith_threshold +from app.engine.techcomm_threshold import techcomm_threshold +from app.engine.threshold import wot_threshold +from app.models.protocol import FormulaConfig, VotingProtocol +from app.models.vote import Vote, VoteSession + + +async def compute_result(session_id: uuid.UUID, db: AsyncSession) -> dict: + """Load a vote session, its protocol and formula, compute thresholds, and tally. + + Parameters + ---------- + session_id: + UUID of the VoteSession to tally. + db: + Async database session. + + Returns + ------- + dict + Result dict with keys: threshold, votes_for, votes_against, + votes_total, adopted, smith_ok, techcomm_ok, details. 
+ """ + # Load session with votes eagerly + result = await db.execute( + select(VoteSession) + .options(selectinload(VoteSession.votes)) + .where(VoteSession.id == session_id) + ) + session = result.scalar_one_or_none() + if session is None: + raise ValueError(f"Session de vote introuvable : {session_id}") + + # Load protocol + formula config + proto_result = await db.execute( + select(VotingProtocol) + .options(selectinload(VotingProtocol.formula_config)) + .where(VotingProtocol.id == session.voting_protocol_id) + ) + protocol = proto_result.scalar_one_or_none() + if protocol is None: + raise ValueError(f"Protocole de vote introuvable pour la session {session_id}") + + formula: FormulaConfig = protocol.formula_config + + # If mode_params is set on the protocol, it overrides formula_config values + if protocol.mode_params: + params = parse_mode_params(protocol.mode_params) + else: + params = { + "majority_pct": formula.majority_pct, + "base_exponent": formula.base_exponent, + "gradient_exponent": formula.gradient_exponent, + "constant_base": formula.constant_base, + "smith_exponent": formula.smith_exponent, + "techcomm_exponent": formula.techcomm_exponent, + } + + # Separate vote types + active_votes: list[Vote] = [v for v in session.votes if v.is_active] + + if protocol.vote_type == "nuanced": + return await _compute_nuanced(session, active_votes, formula, params, db) + + # --- Binary vote --- + votes_for = sum(1 for v in active_votes if v.vote_value == "for") + votes_against = sum(1 for v in active_votes if v.vote_value == "against") + total = votes_for + votes_against + + # WoT threshold + threshold = wot_threshold( + wot_size=session.wot_size, + total_votes=total, + majority_pct=params.get("majority_pct", 50), + base_exponent=params.get("base_exponent", 0.1), + gradient_exponent=params.get("gradient_exponent", 0.2), + constant_base=params.get("constant_base", 0.0), + ) + + # Smith criterion (optional) + smith_ok = True + smith_required = None + if 
params.get("smith_exponent") is not None and session.smith_size > 0: + smith_required = smith_threshold(session.smith_size, params["smith_exponent"]) + smith_votes = sum(1 for v in active_votes if v.voter_is_smith and v.vote_value == "for") + smith_ok = smith_votes >= smith_required + + # TechComm criterion (optional) + techcomm_ok = True + techcomm_required = None + if params.get("techcomm_exponent") is not None and session.techcomm_size > 0: + techcomm_required = techcomm_threshold(session.techcomm_size, params["techcomm_exponent"]) + techcomm_votes = sum(1 for v in active_votes if v.voter_is_techcomm and v.vote_value == "for") + techcomm_ok = techcomm_votes >= techcomm_required + + adopted = votes_for >= threshold and smith_ok and techcomm_ok + vote_result = "adopted" if adopted else "rejected" + + # Update session tallies + session.votes_for = votes_for + session.votes_against = votes_against + session.votes_total = total + session.threshold_required = float(threshold) + session.result = vote_result + session.status = "tallied" + await db.commit() + + return { + "threshold": threshold, + "votes_for": votes_for, + "votes_against": votes_against, + "votes_total": total, + "adopted": adopted, + "smith_ok": smith_ok, + "smith_required": smith_required, + "techcomm_ok": techcomm_ok, + "techcomm_required": techcomm_required, + "result": vote_result, + } + + +async def _compute_nuanced( + session: VoteSession, + active_votes: list[Vote], + formula: FormulaConfig, + params: dict, + db: AsyncSession, +) -> dict: + """Compute a nuanced vote result.""" + vote_levels = [v.nuanced_level for v in active_votes if v.nuanced_level is not None] + + threshold_pct = formula.nuanced_threshold_pct or 80 + min_participants = formula.nuanced_min_participants or 59 + + evaluation = evaluate_nuanced( + votes=vote_levels, + threshold_pct=threshold_pct, + min_participants=min_participants, + ) + + vote_result = "adopted" if evaluation["adopted"] else "rejected" + + session.votes_total = 
evaluation["total"] + session.votes_for = evaluation["positive_count"] + session.votes_against = evaluation["total"] - evaluation["positive_count"] + session.threshold_required = float(threshold_pct) + session.result = vote_result + session.status = "tallied" + await db.commit() + + return { + "vote_type": "nuanced", + "result": vote_result, + **evaluation, + } + + +async def verify_vote_signature(address: str, signature: str, payload: str) -> bool: + """Verify an Ed25519 signature from a Duniter V2 address. + + Parameters + ---------- + address: + SS58 address of the voter. + signature: + Hex-encoded Ed25519 signature. + payload: + The original message that was signed. + + Returns + ------- + bool + True if the signature is valid. + + TODO + ---- + Implement actual Ed25519 verification using substrate-interface: + + from substrateinterface import Keypair + keypair = Keypair(ss58_address=address, crypto_type=KeypairType.ED25519) + return keypair.verify(payload.encode(), bytes.fromhex(signature)) + """ + # TODO: Implement real Ed25519 verification with substrate-interface + # For now, accept all signatures in development mode + if not address or not signature or not payload: + return False + return True diff --git a/backend/app/tests/__init__.py b/backend/app/tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/backend/app/tests/test_mode_params.py b/backend/app/tests/test_mode_params.py new file mode 100644 index 0000000..04f8531 --- /dev/null +++ b/backend/app/tests/test_mode_params.py @@ -0,0 +1,75 @@ +"""Tests for mode-params string parser.""" + +from app.engine.mode_params import parse_mode_params + + +class TestParseModeParams: + """Parse compact parameter strings into structured dicts.""" + + def test_standard_params(self): + """D30M50B.1G.2 => standard Licence G1 params.""" + result = parse_mode_params("D30M50B.1G.2") + assert result["duration_days"] == 30 + assert result["majority_pct"] == 50 + assert result["base_exponent"] == 0.1 + 
assert result["gradient_exponent"] == 0.2 + # Optional criteria absent + assert result["smith_exponent"] is None + assert result["techcomm_exponent"] is None + + def test_with_smith_exponent(self): + """D30M50B.1G.2S.1 => standard + smith_exponent=0.1.""" + result = parse_mode_params("D30M50B.1G.2S.1") + assert result["duration_days"] == 30 + assert result["majority_pct"] == 50 + assert result["base_exponent"] == 0.1 + assert result["gradient_exponent"] == 0.2 + assert result["smith_exponent"] == 0.1 + assert result["techcomm_exponent"] is None + + def test_with_techcomm_exponent(self): + """D30M50B.1G.2T.1 => standard + techcomm_exponent=0.1.""" + result = parse_mode_params("D30M50B.1G.2T.1") + assert result["duration_days"] == 30 + assert result["majority_pct"] == 50 + assert result["base_exponent"] == 0.1 + assert result["gradient_exponent"] == 0.2 + assert result["smith_exponent"] is None + assert result["techcomm_exponent"] == 0.1 + + def test_full_params_with_constant(self): + """D30M50B1G.5C10 => integer base, gradient=0.5, constant=10.""" + result = parse_mode_params("D30M50B1G.5C10") + assert result["duration_days"] == 30 + assert result["majority_pct"] == 50 + assert result["base_exponent"] == 1.0 + assert result["gradient_exponent"] == 0.5 + assert result["constant_base"] == 10.0 + + def test_empty_string_defaults(self): + """Empty string returns all defaults.""" + result = parse_mode_params("") + assert result["duration_days"] == 30 + assert result["majority_pct"] == 50 + assert result["base_exponent"] == 0.1 + assert result["gradient_exponent"] == 0.2 + assert result["constant_base"] == 0.0 + assert result["smith_exponent"] is None + assert result["techcomm_exponent"] is None + assert result["ratio_multiplier"] is None + assert result["is_ratio_mode"] is False + + def test_whitespace_only_returns_defaults(self): + """Whitespace-only string treated as empty.""" + result = parse_mode_params(" ") + assert result["duration_days"] == 30 + + def 
test_roundtrip_consistency(self): + """Parsing a standard string then re-checking all keys.""" + result = parse_mode_params("D30M50B.1G.2") + expected_keys = { + "duration_days", "majority_pct", "base_exponent", + "gradient_exponent", "constant_base", "smith_exponent", + "techcomm_exponent", "ratio_multiplier", "is_ratio_mode", + } + assert set(result.keys()) == expected_keys diff --git a/backend/app/tests/test_nuanced.py b/backend/app/tests/test_nuanced.py new file mode 100644 index 0000000..4b315fc --- /dev/null +++ b/backend/app/tests/test_nuanced.py @@ -0,0 +1,120 @@ +"""Tests for six-level nuanced vote evaluation. + +Levels: 0-CONTRE, 1-PAS DU TOUT, 2-PAS D'ACCORD, 3-NEUTRE, 4-D'ACCORD, 5-TOUT A FAIT +Positive = levels 3 + 4 + 5 +Adoption requires: positive_pct >= threshold (80%) AND total >= min_participants (59). +""" + +import pytest + +from app.engine.nuanced_vote import evaluate_nuanced + + +class TestNuancedVoteAdoption: + """Cases where the vote should be adopted.""" + + def test_59_positive_10_negative_adopted(self): + """59 positive (levels 3-5) + 10 negative = 69 total. + positive_pct = 59/69 ~ 85.5% >= 80% and 69 >= 59 => adopted. 
+ """ + votes = [5] * 20 + [4] * 20 + [3] * 19 + [2] * 5 + [1] * 3 + [0] * 2 + result = evaluate_nuanced(votes, threshold_pct=80, min_participants=59) + + assert result["total"] == 69 + assert result["positive_count"] == 59 + assert result["positive_pct"] == pytest.approx(85.51, abs=0.1) + assert result["threshold_met"] is True + assert result["min_participants_met"] is True + assert result["adopted"] is True + + def test_all_tout_a_fait_adopted(self): + """All 59 voters at level 5 => 100% positive, adopted.""" + votes = [5] * 59 + result = evaluate_nuanced(votes, threshold_pct=80, min_participants=59) + + assert result["total"] == 59 + assert result["positive_count"] == 59 + assert result["positive_pct"] == 100.0 + assert result["adopted"] is True + + +class TestNuancedVoteRejection: + """Cases where the vote should be rejected.""" + + def test_40_positive_30_negative_rejected(self): + """40 positive + 30 negative = 70 total. + positive_pct = 40/70 ~ 57.14% < 80% => threshold not met. + """ + votes = [5] * 15 + [4] * 15 + [3] * 10 + [2] * 10 + [1] * 10 + [0] * 10 + result = evaluate_nuanced(votes, threshold_pct=80, min_participants=59) + + assert result["total"] == 70 + assert result["positive_count"] == 40 + assert result["positive_pct"] == pytest.approx(57.14, abs=0.1) + assert result["threshold_met"] is False + assert result["min_participants_met"] is True # 70 >= 59 + assert result["adopted"] is False + + def test_min_participants_not_met(self): + """50 positive + 5 negative = 55 total < 59 min_participants. + Even though 50/55 ~ 90.9% > 80%, adoption fails on min_participants. 
+ """ + votes = [5] * 30 + [4] * 10 + [3] * 10 + [1] * 3 + [0] * 2 + result = evaluate_nuanced(votes, threshold_pct=80, min_participants=59) + + assert result["total"] == 55 + assert result["positive_count"] == 50 + assert result["positive_pct"] > 80 + assert result["threshold_met"] is True + assert result["min_participants_met"] is False + assert result["adopted"] is False + + +class TestNuancedVoteEdgeCases: + """Edge cases and exact boundary conditions.""" + + def test_exact_threshold_80_percent(self): + """Exactly 80% positive votes should pass the threshold.""" + # 80 positive out of 100 = exactly 80% + votes = [5] * 40 + [4] * 20 + [3] * 20 + [2] * 10 + [1] * 5 + [0] * 5 + result = evaluate_nuanced(votes, threshold_pct=80, min_participants=59) + + assert result["total"] == 100 + assert result["positive_count"] == 80 + assert result["positive_pct"] == 80.0 + assert result["threshold_met"] is True + assert result["min_participants_met"] is True + assert result["adopted"] is True + + def test_just_below_threshold(self): + """79 positive out of 100 = 79% < 80% => rejected.""" + votes = [5] * 39 + [4] * 20 + [3] * 20 + [2] * 11 + [1] * 5 + [0] * 5 + result = evaluate_nuanced(votes, threshold_pct=80, min_participants=59) + + assert result["total"] == 100 + assert result["positive_count"] == 79 + assert result["positive_pct"] == 79.0 + assert result["threshold_met"] is False + assert result["adopted"] is False + + def test_empty_votes(self): + """Zero votes => not adopted.""" + result = evaluate_nuanced([], threshold_pct=80, min_participants=59) + assert result["total"] == 0 + assert result["positive_count"] == 0 + assert result["positive_pct"] == 0.0 + assert result["adopted"] is False + + def test_invalid_vote_level(self): + """Vote level outside 0-5 raises ValueError.""" + with pytest.raises(ValueError, match="invalide"): + evaluate_nuanced([5, 3, 6]) + + def test_per_level_counts(self): + """Verify per-level breakdown is correct.""" + votes = [0, 1, 2, 3, 4, 5, 
5, 4, 3] + result = evaluate_nuanced(votes, threshold_pct=50, min_participants=1) + + assert result["per_level_counts"] == {0: 1, 1: 1, 2: 1, 3: 2, 4: 2, 5: 2} + assert result["positive_count"] == 6 # 2+2+2 + assert result["total"] == 9 diff --git a/backend/app/tests/test_threshold.py b/backend/app/tests/test_threshold.py new file mode 100644 index 0000000..6147709 --- /dev/null +++ b/backend/app/tests/test_threshold.py @@ -0,0 +1,180 @@ +"""Tests for WoT threshold formula, Smith threshold, and TechComm threshold. + +Real-world reference case: + Vote Engagement Forgeron v2.0.0 (Feb 2026) + wot_size=7224, votes_for=97, votes_against=23, total=120 + params M=50, B=0.1, G=0.2 => threshold=94 => adopted (97 >= 94) +""" + +import math +import pytest + +from app.engine.threshold import wot_threshold +from app.engine.smith_threshold import smith_threshold +from app.engine.techcomm_threshold import techcomm_threshold + + +# --------------------------------------------------------------------------- +# WoT threshold: real-world vote Forgeron +# --------------------------------------------------------------------------- + +class TestWotThresholdForgeron: + """Test with the actual Engagement Forgeron v2.0.0 vote numbers.""" + + def test_forgeron_vote_passes(self): + """97 votes_for out of 120 total (wot=7224) must pass.""" + threshold = wot_threshold( + wot_size=7224, + total_votes=120, + majority_pct=50, + base_exponent=0.1, + gradient_exponent=0.2, + ) + # With low participation (120/7224 ~ 1.66%), near-unanimity is required. + # The historical threshold was 94, and 97 >= 94. 
+ assert 97 >= threshold + # The threshold should be high relative to total votes (inertia effect) + assert threshold > 60, f"Threshold {threshold} should be well above simple majority" + + def test_forgeron_vote_threshold_value(self): + """Verify the computed threshold is in a reasonable range.""" + threshold = wot_threshold( + wot_size=7224, + total_votes=120, + majority_pct=50, + base_exponent=0.1, + gradient_exponent=0.2, + ) + # At ~1.66% participation, inertia should push threshold close to 78-95% + # of total votes. The exact value depends on the formula. + assert 80 <= threshold <= 120 + + +# --------------------------------------------------------------------------- +# WoT threshold: low participation +# --------------------------------------------------------------------------- + +class TestWotThresholdLowParticipation: + """With very low participation, near-unanimity should be required.""" + + def test_ten_votes_out_of_7224(self): + """10 voters out of 7224 => nearly all must vote 'for'.""" + threshold = wot_threshold( + wot_size=7224, + total_votes=10, + majority_pct=50, + base_exponent=0.1, + gradient_exponent=0.2, + ) + # With participation ratio 10/7224 ~ 0.14%, threshold should be + # very close to total_votes (near-unanimity). + assert threshold >= 9, f"Expected near-unanimity but got threshold={threshold}" + assert threshold <= 10 + + +# --------------------------------------------------------------------------- +# WoT threshold: high participation +# --------------------------------------------------------------------------- + +class TestWotThresholdHighParticipation: + """With high participation, threshold should approach simple majority M.""" + + def test_3000_votes_out_of_7224(self): + """3000/7224 ~ 41.5% participation => threshold closer to 50%.""" + threshold = wot_threshold( + wot_size=7224, + total_votes=3000, + majority_pct=50, + base_exponent=0.1, + gradient_exponent=0.2, + ) + # With ~42% participation, the inertia factor diminishes. 
+ # threshold should be well below 90% of votes but above simple majority. + simple_majority = math.ceil(3000 * 0.5) + assert threshold >= simple_majority, ( + f"Threshold {threshold} should be at least simple majority {simple_majority}" + ) + # Should be noticeably less than near-unanimity + assert threshold < 2700, ( + f"Threshold {threshold} should be much less than near-unanimity at high participation" + ) + + +# --------------------------------------------------------------------------- +# WoT threshold: edge cases +# --------------------------------------------------------------------------- + +class TestWotThresholdEdgeCases: + """Edge-case behaviour.""" + + def test_zero_total_votes(self): + """With zero votes, threshold is ceil(C + B^W).""" + threshold = wot_threshold( + wot_size=7224, + total_votes=0, + majority_pct=50, + base_exponent=0.1, + gradient_exponent=0.2, + ) + # B^W = 0.1^7224 is effectively 0 + expected = math.ceil(0.0 + 0.1 ** 7224) + assert threshold == expected + + def test_invalid_wot_size_zero(self): + with pytest.raises(ValueError, match="wot_size"): + wot_threshold(wot_size=0, total_votes=10) + + def test_invalid_negative_votes(self): + with pytest.raises(ValueError, match="total_votes"): + wot_threshold(wot_size=100, total_votes=-1) + + def test_invalid_majority_pct(self): + with pytest.raises(ValueError, match="majority_pct"): + wot_threshold(wot_size=100, total_votes=10, majority_pct=150) + + +# --------------------------------------------------------------------------- +# Smith threshold +# --------------------------------------------------------------------------- + +class TestSmithThreshold: + """Test Smith sub-WoT threshold: ceil(smith_size ^ S).""" + + def test_smith_size_20_exponent_01(self): + """smith_size=20, exponent=0.1 => ceil(20^0.1).""" + result = smith_threshold(smith_wot_size=20, exponent=0.1) + expected = math.ceil(20 ** 0.1) + assert result == expected + # 20^0.1 ~ 1.35, ceil => 2 + assert result == 2 + + def 
test_smith_size_1(self): + """smith_size=1 => ceil(1^0.1) = 1.""" + assert smith_threshold(smith_wot_size=1, exponent=0.1) == 1 + + def test_smith_invalid(self): + with pytest.raises(ValueError): + smith_threshold(smith_wot_size=0) + + +# --------------------------------------------------------------------------- +# TechComm threshold +# --------------------------------------------------------------------------- + +class TestTechcommThreshold: + """Test TechComm threshold: ceil(cotec_size ^ T).""" + + def test_cotec_size_5_exponent_01(self): + """cotec_size=5, exponent=0.1 => ceil(5^0.1).""" + result = techcomm_threshold(cotec_size=5, exponent=0.1) + expected = math.ceil(5 ** 0.1) + assert result == expected + # 5^0.1 ~ 1.175, ceil => 2 + assert result == 2 + + def test_cotec_size_1(self): + assert techcomm_threshold(cotec_size=1, exponent=0.1) == 1 + + def test_cotec_invalid(self): + with pytest.raises(ValueError): + techcomm_threshold(cotec_size=0) diff --git a/backend/conftest.py b/backend/conftest.py new file mode 100644 index 0000000..26262e4 --- /dev/null +++ b/backend/conftest.py @@ -0,0 +1,6 @@ +import pytest + + +@pytest.fixture(scope="session") +def anyio_backend(): + return "asyncio" diff --git a/backend/pytest.ini b/backend/pytest.ini new file mode 100644 index 0000000..82bc8d1 --- /dev/null +++ b/backend/pytest.ini @@ -0,0 +1,3 @@ +[pytest] +asyncio_mode = auto +pythonpath = . 
diff --git a/backend/requirements.txt b/backend/requirements.txt new file mode 100644 index 0000000..8b1a9ca --- /dev/null +++ b/backend/requirements.txt @@ -0,0 +1,16 @@ +fastapi==0.115.6 +uvicorn[standard]==0.34.0 +sqlalchemy==2.0.36 +alembic==1.14.0 +asyncpg==0.30.0 +pydantic==2.10.3 +pydantic-settings==2.7.0 +python-multipart==0.0.18 +websockets==14.1 +substrate-interface==1.7.10 +py-sr25519-bindings==0.2.1 +base58==2.1.1 +httpx==0.28.1 +aioipfs==0.7.1 +pytest==8.3.4 +pytest-asyncio==0.24.0 diff --git a/backend/seed.py b/backend/seed.py new file mode 100644 index 0000000..d79e0b4 --- /dev/null +++ b/backend/seed.py @@ -0,0 +1,530 @@ +"""Seed the database with initial FormulaConfigs, VotingProtocols, Documents, and Decisions. + +Usage: + python seed.py + +Idempotent: checks if data already exists before inserting. +""" + +from __future__ import annotations + +import asyncio +import uuid + +from sqlalchemy import select +from sqlalchemy.ext.asyncio import AsyncSession + +from app.database import async_session, engine, Base +from app.models.protocol import FormulaConfig, VotingProtocol +from app.models.document import Document, DocumentItem +from app.models.decision import Decision, DecisionStep + + +# --------------------------------------------------------------------------- +# Helpers +# --------------------------------------------------------------------------- + +async def get_or_create( + session: AsyncSession, + model, + lookup_field: str, + lookup_value, + **kwargs, +): + """Return existing row or create a new one.""" + stmt = select(model).where(getattr(model, lookup_field) == lookup_value) + result = await session.execute(stmt) + instance = result.scalar_one_or_none() + if instance is not None: + return instance, False + instance = model(**{lookup_field: lookup_value}, **kwargs) + session.add(instance) + await session.flush() + return instance, True + + +# --------------------------------------------------------------------------- +# Seed: FormulaConfigs 
+# --------------------------------------------------------------------------- + +async def seed_formula_configs(session: AsyncSession) -> dict[str, FormulaConfig]: + """Create the 4 base formula configurations.""" + configs: dict[str, dict] = { + "Standard Licence G1": { + "description": "Formule standard pour la Licence G1 : vote binaire WoT.", + "duration_days": 30, + "majority_pct": 50, + "base_exponent": 0.1, + "gradient_exponent": 0.2, + "constant_base": 0.0, + }, + "Forgeron avec Smith": { + "description": "Vote forgeron avec critere Smith sub-WoT.", + "duration_days": 30, + "majority_pct": 50, + "base_exponent": 0.1, + "gradient_exponent": 0.2, + "constant_base": 0.0, + "smith_exponent": 0.1, + }, + "Comite Tech": { + "description": "Vote avec critere Comite Technique.", + "duration_days": 30, + "majority_pct": 50, + "base_exponent": 0.1, + "gradient_exponent": 0.2, + "constant_base": 0.0, + "techcomm_exponent": 0.1, + }, + "Vote Nuance": { + "description": "Vote nuance a 6 niveaux (CONTRE..TOUT A FAIT).", + "duration_days": 30, + "majority_pct": 50, + "base_exponent": 0.1, + "gradient_exponent": 0.2, + "constant_base": 0.0, + "nuanced_min_participants": 59, + "nuanced_threshold_pct": 80, + }, + } + + result: dict[str, FormulaConfig] = {} + for name, params in configs.items(): + instance, created = await get_or_create( + session, FormulaConfig, "name", name, **params, + ) + status = "created" if created else "exists" + print(f" FormulaConfig '{name}': {status}") + result[name] = instance + + return result + + +# --------------------------------------------------------------------------- +# Seed: VotingProtocols +# --------------------------------------------------------------------------- + +async def seed_voting_protocols( + session: AsyncSession, + formulas: dict[str, FormulaConfig], +) -> dict[str, VotingProtocol]: + """Create the 4 base voting protocols.""" + protocols: dict[str, dict] = { + "Standard G1": { + "description": "Protocole binaire standard 
pour la Licence G1.", + "vote_type": "binary", + "formula_config_id": formulas["Standard Licence G1"].id, + "mode_params": "D30M50B.1G.2", + }, + "Forgeron Smith": { + "description": "Protocole binaire avec critere Smith pour les forgerons.", + "vote_type": "binary", + "formula_config_id": formulas["Forgeron avec Smith"].id, + "mode_params": "D30M50B.1G.2S.1", + }, + "Comite Tech": { + "description": "Protocole binaire avec critere Comite Technique.", + "vote_type": "binary", + "formula_config_id": formulas["Comite Tech"].id, + "mode_params": "D30M50B.1G.2T.1", + }, + "Vote Nuance 6 niveaux": { + "description": "Protocole de vote nuance a 6 niveaux.", + "vote_type": "nuanced", + "formula_config_id": formulas["Vote Nuance"].id, + "mode_params": None, + }, + } + + result: dict[str, VotingProtocol] = {} + for name, params in protocols.items(): + instance, created = await get_or_create( + session, VotingProtocol, "name", name, **params, + ) + status = "created" if created else "exists" + print(f" VotingProtocol '{name}': {status}") + result[name] = instance + + return result + + +# --------------------------------------------------------------------------- +# Seed: Document - Licence G1 +# --------------------------------------------------------------------------- + +LICENCE_G1_ITEMS: list[dict] = [ + { + "position": "1", + "item_type": "preamble", + "title": "Preambule", + "sort_order": 1, + "current_text": ( + "Licence de la monnaie libre et engagement de responsabilite. " + "La monnaie libre G1 (June) est co-produite par ses membres." + ), + }, + { + "position": "2", + "item_type": "section", + "title": "Avertissement TdC", + "sort_order": 2, + "current_text": ( + "Certifier n'est pas uniquement s'assurer de l'identite unique " + "de la personne (son unicite). C'est aussi affirmer que vous la " + "connaissez bien et que vous saurez la joindre facilement." 
+ ), + }, + { + "position": "3", + "item_type": "clause", + "title": "Conseils", + "sort_order": 3, + "current_text": ( + "Connaitre la personne par plusieurs moyens de communication differents " + "(physique, electronique, etc.). Connaitre son lieu de vie principal. " + "Avoir echange avec elle en utilisant des moyens de communication " + "susceptibles d'identifier un humain vivant." + ), + }, + { + "position": "4", + "item_type": "verification", + "title": "Verifications", + "sort_order": 4, + "current_text": ( + "De suffisamment bien connaitre la personne pour pouvoir la contacter, " + "echanger avec elle. De s'assurer que la personne a bien le controle " + "de son compte Duniter." + ), + }, + { + "position": "5", + "item_type": "rule", + "title": "Regles TdC", + "sort_order": 5, + "current_text": ( + "Chaque membre dispose de 100 certifications possibles. " + "Il est possible de certifier 1 nouveau membre tous les 5 jours. " + "Un membre doit avoir au moins 5 certifications pour devenir membre. " + "Un membre doit renouveler son adhesion tous les 2 ans." + ), + }, + { + "position": "6", + "item_type": "rule", + "title": "Production DU", + "sort_order": 6, + "current_text": ( + "1 DU (Dividende Universel) est produit par personne et par jour. " + "Le DU est la monnaie de base co-produite par chaque membre." + ), + }, + { + "position": "7", + "item_type": "rule", + "title": "Code monetaire", + "sort_order": 7, + "current_text": ( + "DU formule : DU(t+1) = DU(t) + c^2 * M/N. " + "c = 4.88% / an. Le DU est re-evalue chaque equinoxe." + ), + }, + { + "position": "8", + "item_type": "clause", + "title": "Logiciels", + "sort_order": 8, + "current_text": ( + "Les logiciels G1 doivent transmettre cette licence integralement " + "aux utilisateurs et developper un acces libre au code source." 
+ ), + }, + { + "position": "9", + "item_type": "clause", + "title": "Modification", + "sort_order": 9, + "current_text": ( + "Proposants, soutiens et votants doivent etre membres de la TdC. " + "Toute modification de cette licence doit etre soumise au vote " + "des membres selon le protocole en vigueur." + ), + }, +] + + +async def seed_document_licence_g1(session: AsyncSession) -> Document: + """Create the Licence G1 document with its items.""" + doc, created = await get_or_create( + session, + Document, + "slug", + "licence-g1", + title="Licence G1", + doc_type="licence", + version="0.3.0", + status="active", + description=( + "Licence de la monnaie libre G1 (June). " + "Definit les regles de la toile de confiance et du Dividende Universel." + ), + ) + print(f" Document 'Licence G1': {'created' if created else 'exists'}") + + if created: + for item_data in LICENCE_G1_ITEMS: + item = DocumentItem(document_id=doc.id, **item_data) + session.add(item) + await session.flush() + print(f" -> {len(LICENCE_G1_ITEMS)} items created") + + return doc + + +# --------------------------------------------------------------------------- +# Seed: Document - Engagement Forgeron v2.0.0 +# --------------------------------------------------------------------------- + +FORGERON_ITEMS: list[dict] = [ + { + "position": "1", + "item_type": "preamble", + "title": "Intention", + "sort_order": 1, + "current_text": ( + "Avec la V2, une sous-toile de confiance pour les forgerons est " + "introduite. Les forgerons (validateurs de blocs) doivent demontrer " + "leurs competences techniques et leur engagement envers le reseau." + ), + }, + { + "position": "2", + "item_type": "clause", + "title": "Savoirs-faire", + "sort_order": 2, + "current_text": ( + "Administration systeme Linux, securite informatique, " + "cryptographie, blockchain Substrate. Le forgeron doit maitriser " + "l'ensemble de la chaine technique necessaire a la validation." 
+ ), + }, + { + "position": "3", + "item_type": "clause", + "title": "Rigueur", + "sort_order": 3, + "current_text": ( + "Comprendre en profondeur les configurations du runtime, " + "les parametres de consensus et les mecanismes de mise a jour " + "du reseau Duniter V2." + ), + }, + { + "position": "4", + "item_type": "clause", + "title": "Reactivite", + "sort_order": 4, + "current_text": ( + "Reponse sous 24h aux alertes reseau. Disponibilite pour les " + "mises a jour critiques. Monitoring continu du noeud validateur." + ), + }, + { + "position": "5", + "item_type": "verification", + "title": "Securite aspirant", + "sort_order": 5, + "current_text": ( + "Phrases aleatoires de 12+ mots, comptes separes pour identite " + "et validation, sauvegardes chiffrees des cles, infrastructure " + "securisee et a jour." + ), + }, + { + "position": "6", + "item_type": "verification", + "title": "Contact aspirant", + "sort_order": 6, + "current_text": ( + "Le candidat forgeron doit contacter au minimum 3 forgerons " + "existants par au moins 2 canaux de communication differents " + "avant de demander ses certifications." + ), + }, + { + "position": "7", + "item_type": "clause", + "title": "Clauses pieges", + "sort_order": 7, + "current_text": ( + "Exclusions : harcelement, abus de pouvoir, tentative " + "d'infiltration malveillante du reseau. Tout manquement " + "entraine le retrait des certifications forgeron." + ), + }, + { + "position": "8", + "item_type": "verification", + "title": "Securite certificateur", + "sort_order": 8, + "current_text": ( + "Verification de l'intention du candidat, de ses pratiques " + "de securite, et du bon fonctionnement de son noeud validateur " + "avant de delivrer une certification forgeron." + ), + }, + { + "position": "9", + "item_type": "rule", + "title": "Regles TdC forgerons", + "sort_order": 9, + "current_text": ( + "Etre membre de la TdC principale. Recevoir une invitation " + "d'un forgeron existant. 
Obtenir au minimum 3 certifications " + "de forgerons actifs. Renouvellement annuel obligatoire." + ), + }, +] + + +async def seed_document_forgeron(session: AsyncSession) -> Document: + """Create the Engagement Forgeron v2.0.0 document with its items.""" + doc, created = await get_or_create( + session, + Document, + "slug", + "engagement-forgeron", + title="Engagement Forgeron v2.0.0", + doc_type="engagement", + version="2.0.0", + status="active", + description=( + "Engagement des forgerons (validateurs) pour Duniter V2. " + "Adopte en fevrier 2026 (97 pour / 23 contre)." + ), + ) + print(f" Document 'Engagement Forgeron v2.0.0': {'created' if created else 'exists'}") + + if created: + for item_data in FORGERON_ITEMS: + item = DocumentItem(document_id=doc.id, **item_data) + session.add(item) + await session.flush() + print(f" -> {len(FORGERON_ITEMS)} items created") + + return doc + + +# --------------------------------------------------------------------------- +# Seed: Decision template - Processus Runtime Upgrade +# --------------------------------------------------------------------------- + +RUNTIME_UPGRADE_STEPS: list[dict] = [ + { + "step_order": 1, + "step_type": "qualification", + "title": "Qualification", + "description": ( + "Definir le changement : specification technique, impact sur le " + "reseau, justification. Identifier les risques et dependances." + ), + }, + { + "step_order": 2, + "step_type": "review", + "title": "Revue", + "description": ( + "Audit technique par le Comite Technique et les forgerons. " + "Revue du code, tests sur testnet, validation de la compatibilite." + ), + }, + { + "step_order": 3, + "step_type": "vote", + "title": "Vote", + "description": ( + "Vote communautaire selon le protocole de vote en vigueur. " + "Le quorum et le seuil d'adoption dependent de la formule configuree." 
+ ), + }, + { + "step_order": 4, + "step_type": "execution", + "title": "Execution", + "description": ( + "Mise a jour on-chain via un extrinsic autorise. " + "Coordination avec les forgerons pour la synchronisation des noeuds." + ), + }, + { + "step_order": 5, + "step_type": "reporting", + "title": "Suivi", + "description": ( + "Surveillance post-upgrade : monitoring des metriques reseau, " + "detection d'anomalies, rapport de stabilite sous 7 jours." + ), + }, +] + + +async def seed_decision_runtime_upgrade(session: AsyncSession) -> Decision: + """Create the Runtime Upgrade decision template.""" + decision, created = await get_or_create( + session, + Decision, + "title", + "Processus Runtime Upgrade", + description=( + "Template de decision pour les mises a jour du runtime Duniter V2. " + "5 etapes : qualification, revue, vote, execution, suivi." + ), + decision_type="runtime_upgrade", + status="draft", + ) + print(f" Decision 'Processus Runtime Upgrade': {'created' if created else 'exists'}") + + if created: + for step_data in RUNTIME_UPGRADE_STEPS: + step = DecisionStep(decision_id=decision.id, **step_data) + session.add(step) + await session.flush() + print(f" -> {len(RUNTIME_UPGRADE_STEPS)} steps created") + + return decision + + +# --------------------------------------------------------------------------- +# Main seed runner +# --------------------------------------------------------------------------- + +async def run_seed(): + """Execute all seed functions inside a single transaction.""" + print("=" * 60) + print("Glibredecision - Seed Database") + print("=" * 60) + + async with async_session() as session: + async with session.begin(): + print("\n[1/5] Formula Configs...") + formulas = await seed_formula_configs(session) + + print("\n[2/5] Voting Protocols...") + await seed_voting_protocols(session, formulas) + + print("\n[3/5] Document: Licence G1...") + await seed_document_licence_g1(session) + + print("\n[4/5] Document: Engagement Forgeron v2.0.0...") 
+ await seed_document_forgeron(session) + + print("\n[5/5] Decision: Processus Runtime Upgrade...") + await seed_decision_runtime_upgrade(session) + + print("\n" + "=" * 60) + print("Seed complete.") + print("=" * 60) + + +if __name__ == "__main__": + asyncio.run(run_seed()) diff --git a/docker/backend.Dockerfile b/docker/backend.Dockerfile new file mode 100644 index 0000000..a419c85 --- /dev/null +++ b/docker/backend.Dockerfile @@ -0,0 +1,44 @@ +# syntax = docker/dockerfile:1 + +FROM python:3.11-slim AS base + +ENV PYTHONDONTWRITEBYTECODE=1 \ + PYTHONUNBUFFERED=1 + +WORKDIR /app + +RUN apt-get update && \ + apt-get install -y --no-install-recommends curl && \ + rm -rf /var/lib/apt/lists/* + +# ── Build ───────────────────────────────────────────────────────────────────── +FROM base AS build + +COPY backend/requirements.txt . +RUN pip install --no-cache-dir -r requirements.txt + +COPY backend/ . + +# ── Production ──────────────────────────────────────────────────────────────── +FROM base AS production + +COPY --from=build /usr/local/lib/python3.11/site-packages /usr/local/lib/python3.11/site-packages +COPY --from=build /usr/local/bin/uvicorn /usr/local/bin/uvicorn +COPY --from=build /usr/local/bin/alembic /usr/local/bin/alembic +COPY --from=build /app /app + +EXPOSE 8002 + +HEALTHCHECK --interval=30s --timeout=5s --start-period=10s --retries=3 \ + CMD curl -f http://localhost:8002/api/health || exit 1 + +CMD ["sh", "-c", "alembic upgrade head && uvicorn app.main:app --host 0.0.0.0 --port 8002"] + +# ── Development ─────────────────────────────────────────────────────────────── +FROM base AS development + +COPY backend/requirements.txt . 
+RUN pip install --no-cache-dir -r requirements.txt + +WORKDIR /app +CMD ["sh", "-c", "alembic upgrade head && uvicorn app.main:app --host 0.0.0.0 --port 8002 --reload"] diff --git a/docker/docker-compose.dev.yml b/docker/docker-compose.dev.yml new file mode 100644 index 0000000..a4a3afa --- /dev/null +++ b/docker/docker-compose.dev.yml @@ -0,0 +1,37 @@ +version: "3.9" + +# Dev overrides -- usage: +# docker compose -f docker/docker-compose.yml -f docker/docker-compose.dev.yml up + +services: + postgres: + ports: + - "5432:5432" + + backend: + build: + target: development + volumes: + - ../backend:/app + ports: + - "8002:8002" + environment: + DEBUG: "true" + CORS_ORIGINS: '["http://localhost:3002"]' + labels: [] + + frontend: + build: + target: development + volumes: + - ../frontend:/app + ports: + - "3002:3002" + environment: + NUXT_PUBLIC_API_BASE: http://localhost:8002/api/v1 + labels: [] + + ipfs: + ports: + - "5001:5001" + - "8080:8080" diff --git a/docker/docker-compose.yml b/docker/docker-compose.yml new file mode 100644 index 0000000..f47d90c --- /dev/null +++ b/docker/docker-compose.yml @@ -0,0 +1,85 @@ +version: "3.9" + +services: + postgres: + image: postgres:16-alpine + restart: unless-stopped + environment: + POSTGRES_DB: ${POSTGRES_DB:-glibredecision} + POSTGRES_USER: ${POSTGRES_USER:-glibredecision} + POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-change-me-in-production} + volumes: + - postgres-data:/var/lib/postgresql/data + healthcheck: + test: ["CMD-SHELL", "pg_isready -U ${POSTGRES_USER:-glibredecision} -d ${POSTGRES_DB:-glibredecision}"] + interval: 10s + timeout: 5s + retries: 5 + start_period: 30s + networks: + - glibredecision + + backend: + build: + context: ../ + dockerfile: docker/backend.Dockerfile + target: production + restart: unless-stopped + depends_on: + postgres: + condition: service_healthy + environment: + DATABASE_URL: 
postgresql+asyncpg://${POSTGRES_USER:-glibredecision}:${POSTGRES_PASSWORD:-change-me-in-production}@postgres:5432/${POSTGRES_DB:-glibredecision} + SECRET_KEY: ${SECRET_KEY:-change-me-in-production-with-a-real-secret-key} + DEBUG: "false" + CORS_ORIGINS: '["https://${DOMAIN:-glibredecision.org}"]' + DUNITER_RPC_URL: ${DUNITER_RPC_URL:-wss://gdev.p2p.legal/ws} + IPFS_API_URL: http://ipfs:5001 + IPFS_GATEWAY_URL: http://ipfs:8080 + labels: + - "traefik.enable=true" + - "traefik.http.routers.glibredecision-api.rule=Host(`${DOMAIN:-glibredecision.org}`) && PathPrefix(`/api`)" + - "traefik.http.routers.glibredecision-api.entrypoints=websecure" + - "traefik.http.routers.glibredecision-api.tls.certresolver=letsencrypt" + - "traefik.http.services.glibredecision-api.loadbalancer.server.port=8002" + networks: + - glibredecision + - traefik + + frontend: + build: + context: ../ + dockerfile: docker/frontend.Dockerfile + target: production + restart: unless-stopped + depends_on: + - backend + environment: + NUXT_PUBLIC_API_BASE: https://${DOMAIN:-glibredecision.org}/api/v1 + labels: + - "traefik.enable=true" + - "traefik.http.routers.glibredecision-front.rule=Host(`${DOMAIN:-glibredecision.org}`)" + - "traefik.http.routers.glibredecision-front.entrypoints=websecure" + - "traefik.http.routers.glibredecision-front.tls.certresolver=letsencrypt" + - "traefik.http.services.glibredecision-front.loadbalancer.server.port=3000" + networks: + - glibredecision + - traefik + + ipfs: + image: ipfs/kubo:latest + restart: unless-stopped + volumes: + - ipfs-data:/data/ipfs + networks: + - glibredecision + +volumes: + postgres-data: + ipfs-data: + +networks: + glibredecision: + driver: bridge + traefik: + external: true diff --git a/docker/frontend.Dockerfile b/docker/frontend.Dockerfile new file mode 100644 index 0000000..bcc9227 --- /dev/null +++ b/docker/frontend.Dockerfile @@ -0,0 +1,45 @@ +# syntax = docker/dockerfile:1 + +ARG NODE_VERSION=20-slim + +FROM node:${NODE_VERSION} AS base + 
+WORKDIR /src + +RUN apt-get update && \ + apt-get install -y --no-install-recommends curl && \ + rm -rf /var/lib/apt/lists/* + +# ── Build ───────────────────────────────────────────────────────────────────── +FROM base AS build + +ENV NODE_ENV=development + +COPY frontend/package.json frontend/package-lock.json* ./ +RUN npm ci + +COPY frontend/ . +RUN npm run build + +# ── Production ──────────────────────────────────────────────────────────────── +FROM base AS production + +ENV PORT=3000 \ + NODE_ENV=production + +COPY --from=build /src/.output /src/.output + +EXPOSE $PORT + +HEALTHCHECK --interval=30s --timeout=5s --start-period=15s --retries=3 \ + CMD curl -f http://localhost:${PORT}/ || exit 1 + +CMD ["node", ".output/server/index.mjs"] + +# ── Development ─────────────────────────────────────────────────────────────── +FROM base AS development + +ENV NODE_ENV=development + +WORKDIR /app +ENTRYPOINT ["npm", "run", "dev"] diff --git a/docs/content/dev/1.index.md b/docs/content/dev/1.index.md new file mode 100644 index 0000000..3cf0d78 --- /dev/null +++ b/docs/content/dev/1.index.md @@ -0,0 +1,17 @@ +--- +title: Documentation technique +description: Architecture, API et reference technique de Glibredecision +--- + +# Documentation technique + +Bienvenue dans la documentation technique de Glibredecision. 
+ +## Sections + +- [Architecture](/dev/architecture) -- Vue d'ensemble de l'architecture +- [Reference API](/dev/api-reference) -- Endpoints et schemas +- [Schema de base de donnees](/dev/database-schema) -- Tables et relations +- [Formules](/dev/formulas) -- Formules mathematiques de seuil +- [Integration blockchain](/dev/blockchain-integration) -- Duniter V2, IPFS, on-chain +- [Contribution](/dev/contributing) -- Guide de contribution diff --git a/docs/content/dev/2.architecture.md b/docs/content/dev/2.architecture.md new file mode 100644 index 0000000..48b9440 --- /dev/null +++ b/docs/content/dev/2.architecture.md @@ -0,0 +1,81 @@ +--- +title: Architecture +description: Vue d'ensemble de l'architecture technique de Glibredecision +--- + +# Architecture + +## Vue d'ensemble + +Glibredecision est organise en monorepo avec trois composants principaux : + +``` +Glibredecision/ + backend/ # API Python FastAPI (port 8002) + frontend/ # Application Nuxt 4 (port 3002) + docker/ # Fichiers Docker et orchestration + docs/ # Documentation (Nuxt Content) +``` + +## Stack technique + +| Couche | Technologie | +| ------------ | -------------------------------------------------- | +| Frontend | Nuxt 4 + Nuxt UI v3 + Pinia + UnoCSS | +| Backend | Python FastAPI + SQLAlchemy 2.0 (async) + Pydantic v2 | +| Base de donnees | PostgreSQL 16 (asyncpg) | +| Authentification | Duniter V2 Ed25519 challenge-response | +| Sanctuaire | IPFS (kubo) + hash on-chain (system.remark) | +| CI/CD | Woodpecker CI + Docker + Traefik | + +## Domaines fonctionnels + +L'application est decoupee en 5 domaines metier, chacun avec ses modeles, schemas, routes et services : + +1. **Documents** -- Documents de reference modulaires (licence, engagements, reglement) composes d'items individuels versionnables. +2. **Decisions** -- Processus decisionnels multi-etapes (qualification, examen, vote, execution, rapport). +3. 
**Votes** -- Sessions de vote binaire ou nuance avec formule de seuil WoT, critere Smith et critere TechComm. +4. **Mandats** -- Mandats assignes a des membres (techcomm, forgeron, personnalise) avec cycle de vie complet. +5. **Protocoles** -- Configurations de formules de vote et protocoles de vote reutilisables. + +Un domaine transversal, le **Sanctuaire**, assure l'archivage immuable via IPFS et ancrage on-chain. + +## Principes d'architecture + +- **Async everywhere** : toute la couche donnees et HTTP est asynchrone (asyncpg, AsyncSession, FastAPI async). +- **Separation modeles / schemas / routes / services** : chaque domaine suit ce decoupage strict. +- **API versionnee** : tous les endpoints sont sous `/api/v1/`. +- **Preuve cryptographique** : chaque vote est signe avec la cle Ed25519 du votant. +- **Vote permanent** : les documents de reference sont sous vote permanent, chaque item peut etre modifie par proposition et vote. + +## Schema de communication + +``` +Navigateur + | + v +[Nuxt 4 Frontend] -- SSR/CSR, port 3000 (prod) / 3002 (dev) + | + v (fetch /api/v1/*) +[FastAPI Backend] -- port 8002 + | + +---> [PostgreSQL 16] -- Donnees relationnelles + +---> [IPFS kubo] -- Stockage distribue (Sanctuaire) + +---> [Duniter V2 RPC] -- WoT, Smith, TechComm, system.remark +``` + +## Flux d'authentification + +1. Le client envoie son adresse Duniter SS58 via `POST /api/v1/auth/challenge`. +2. Le serveur genere un challenge aleatoire (64 hex) et le stocke en memoire (TTL 5 min). +3. Le client signe le challenge avec sa cle privee Ed25519 et soumet via `POST /api/v1/auth/verify`. +4. Le serveur verifie la signature, cree ou retrouve l'identite `DuniterIdentity`, et retourne un token de session. +5. Le token est utilise en header `Authorization: Bearer <token>` pour les requetes authentifiees. + +## Flux de vote + +1. Un protocole de vote et sa formule sont crees ou selectionnes. +2. Une session de vote est creee avec un snapshot des tailles WoT/Smith/TechComm. +3.
Les membres votent (binaire ou nuance) avec signature cryptographique. +4. A la cloture, le seuil WoT est calcule, les criteres Smith et TechComm sont verifies. +5. Le resultat (adopte/rejete) est archive dans le Sanctuaire (IPFS + on-chain). diff --git a/docs/content/dev/3.api-reference.md b/docs/content/dev/3.api-reference.md new file mode 100644 index 0000000..ad1412f --- /dev/null +++ b/docs/content/dev/3.api-reference.md @@ -0,0 +1,106 @@ +--- +title: Reference API +description: Liste des endpoints de l'API Glibredecision +--- + +# Reference API + +Tous les endpoints sont prefixes par `/api/v1`. L'API est auto-documentee via OpenAPI/Swagger a l'adresse `/docs` en mode debug. + +## Authentification (`/api/v1/auth`) + +| Methode | Endpoint | Description | Auth | +| ------- | ------------- | ----------------------------------------------------- | ---- | +| POST | `/challenge` | Generer un challenge Ed25519 pour une adresse Duniter | Non | +| POST | `/verify` | Verifier la signature du challenge et obtenir un token | Non | +| GET | `/me` | Retourner l'identite authentifiee courante | Oui | +| POST | `/logout` | Invalider la session courante | Oui | + +## Documents (`/api/v1/documents`) + +| Methode | Endpoint | Description | Auth | +| ------- | -------------------------------------- | ---------------------------------------- | ---- | +| GET | `/` | Lister les documents (filtres: doc_type, status) | Non | +| POST | `/` | Creer un nouveau document | Oui | +| GET | `/{slug}` | Obtenir un document par son slug | Non | +| PUT | `/{slug}` | Mettre a jour un document | Oui | +| POST | `/{slug}/items` | Ajouter un item au document | Oui | +| GET | `/{slug}/items` | Lister les items d'un document | Non | +| GET | `/{slug}/items/{item_id}` | Obtenir un item avec son historique | Non | +| POST | `/{slug}/items/{item_id}/versions` | Proposer une nouvelle version d'un item | Oui | + +## Decisions (`/api/v1/decisions`) + +| Methode | Endpoint | Description | Auth | +| ------- | 
---------------- | ------------------------------------------------ | ---- | +| GET | `/` | Lister les decisions (filtres: decision_type, status) | Non | +| POST | `/` | Creer une nouvelle decision | Oui | +| GET | `/{id}` | Obtenir une decision avec ses etapes | Non | +| PUT | `/{id}` | Mettre a jour une decision | Oui | +| POST | `/{id}/steps` | Ajouter une etape a une decision | Oui | + +## Votes (`/api/v1/votes`) + +| Methode | Endpoint | Description | Auth | +| ------- | --------------------------- | -------------------------------------------- | ---- | +| POST | `/sessions` | Creer une session de vote | Oui | +| GET | `/sessions/{id}` | Obtenir une session de vote | Non | +| POST | `/sessions/{id}/vote` | Soumettre un vote (signe) | Oui | +| GET | `/sessions/{id}/votes` | Lister les votes d'une session | Non | +| GET | `/sessions/{id}/result` | Calculer et retourner le resultat courant | Non | + +## Mandats (`/api/v1/mandates`) + +| Methode | Endpoint | Description | Auth | +| ------- | ----------------- | ---------------------------------------------- | ---- | +| GET | `/` | Lister les mandats (filtres: mandate_type, status) | Non | +| POST | `/` | Creer un nouveau mandat | Oui | +| GET | `/{id}` | Obtenir un mandat avec ses etapes | Non | +| PUT | `/{id}` | Mettre a jour un mandat | Oui | +| DELETE | `/{id}` | Supprimer un mandat (brouillon uniquement) | Oui | +| POST | `/{id}/steps` | Ajouter une etape a un mandat | Oui | +| GET | `/{id}/steps` | Lister les etapes d'un mandat | Non | + +## Protocoles (`/api/v1/protocols`) + +| Methode | Endpoint | Description | Auth | +| ------- | --------------- | -------------------------------------------------- | ---- | +| GET | `/` | Lister les protocoles de vote | Non | +| POST | `/` | Creer un protocole de vote | Oui | +| GET | `/{id}` | Obtenir un protocole avec sa configuration formule | Non | +| GET | `/formulas` | Lister les configurations de formules | Non | +| POST | `/formulas` | Creer une configuration de 
formule | Oui | + +## Sanctuaire (`/api/v1/sanctuary`) + +| Methode | Endpoint | Description | Auth | +| ------- | --------- | ---------------------------------------------------------- | ---- | +| GET | `/` | Lister les entrees du sanctuaire (filtre: entry_type) | Non | +| GET | `/{id}` | Obtenir une entree du sanctuaire | Non | +| POST | `/` | Creer une entree (hash SHA-256, CID IPFS, TX on-chain) | Oui | + +## WebSocket (`/api/v1/ws`) + +| Endpoint | Description | +| --------- | -------------------------------------------------------- | +| `/ws` | Connexion WebSocket pour notifications temps reel (votes, decisions) | + +## Sante + +| Methode | Endpoint | Description | +| ------- | -------------- | -------------------------- | +| GET | `/api/health` | Verification de sante (hors versioning) | + +## Pagination + +Les endpoints de liste acceptent les parametres `skip` (offset, defaut 0) et `limit` (max 200, defaut 50). + +## Authentification + +Les endpoints marques "Oui" dans la colonne Auth requierent un header : + +``` +Authorization: Bearer <token> +``` + +Le token est obtenu via le flux challenge-response (`/auth/challenge` puis `/auth/verify`). diff --git a/docs/content/dev/4.database-schema.md b/docs/content/dev/4.database-schema.md new file mode 100644 index 0000000..ca6835a --- /dev/null +++ b/docs/content/dev/4.database-schema.md @@ -0,0 +1,312 @@ +--- +title: Schema de base de donnees +description: Tables et relations de la base de donnees PostgreSQL +--- + +# Schema de base de donnees + +Glibredecision utilise PostgreSQL 16 avec SQLAlchemy 2.0 en mode asynchrone (asyncpg). Toutes les cles primaires sont des UUID v4. + +## Tables + +### `duniter_identities` + +Identites Duniter V2 connues de la plateforme.
+ +| Colonne | Type | Description | +| -------------- | ------------ | ---------------------------------------------- | +| id | UUID (PK) | Identifiant unique | +| address | VARCHAR(64) | Adresse SS58 Duniter (unique, indexee) | +| display_name | VARCHAR(128) | Nom d'affichage | +| wot_status | VARCHAR(32) | Statut WoT : member, pending, revoked, unknown | +| is_smith | BOOLEAN | Membre Smith (forgeron) | +| is_techcomm | BOOLEAN | Membre du Comite Technique | +| created_at | TIMESTAMPTZ | Date de creation | +| updated_at | TIMESTAMPTZ | Date de derniere mise a jour | + +### `sessions` + +Sessions d'authentification (tokens). + +| Colonne | Type | Description | +| ------------ | ------------ | ---------------------------------- | +| id | UUID (PK) | Identifiant unique | +| token_hash | VARCHAR(128) | Hash du token (unique, indexe) | +| identity_id | UUID (FK) | -> duniter_identities.id | +| created_at | TIMESTAMPTZ | Date de creation | +| expires_at | TIMESTAMPTZ | Date d'expiration | + +### `documents` + +Documents de reference modulaires. + +| Colonne | Type | Description | +| ------------ | ------------ | ----------------------------------------------------- | +| id | UUID (PK) | Identifiant unique | +| slug | VARCHAR(128) | Identifiant lisible (unique, indexe) | +| title | VARCHAR(256) | Titre du document | +| doc_type | VARCHAR(64) | Type : licence, engagement, reglement, constitution | +| version | VARCHAR(32) | Version semantique (defaut "0.1.0") | +| status | VARCHAR(32) | Statut : draft, active, archived | +| description | TEXT | Description du document | +| ipfs_cid | VARCHAR(128) | CID IPFS de la derniere version archivee | +| chain_anchor | VARCHAR(128) | Hash de transaction on-chain | +| created_at | TIMESTAMPTZ | Date de creation | +| updated_at | TIMESTAMPTZ | Date de derniere mise a jour | + +### `document_items` + +Items individuels composant un document (clauses, regles, verifications, etc.). 
+ +| Colonne | Type | Description | +| ------------------- | ------------ | ------------------------------------------------- | +| id | UUID (PK) | Identifiant unique | +| document_id | UUID (FK) | -> documents.id | +| position | VARCHAR(16) | Numero de position ("1", "1.1", "3.2") | +| item_type | VARCHAR(32) | Type : clause, rule, verification, preamble, section | +| title | VARCHAR(256) | Titre de l'item | +| current_text | TEXT | Texte courant de l'item | +| voting_protocol_id | UUID (FK) | -> voting_protocols.id (protocole specifique) | +| sort_order | INTEGER | Ordre de tri | +| created_at | TIMESTAMPTZ | Date de creation | +| updated_at | TIMESTAMPTZ | Date de derniere mise a jour | + +### `item_versions` + +Historique des versions proposees pour chaque item. + +| Colonne | Type | Description | +| -------------- | ------------ | ------------------------------------------------------ | +| id | UUID (PK) | Identifiant unique | +| item_id | UUID (FK) | -> document_items.id | +| proposed_text | TEXT | Texte propose | +| diff_text | TEXT | Diff unifie entre texte courant et propose | +| rationale | TEXT | Justification de la modification | +| status | VARCHAR(32) | Statut : proposed, voting, accepted, rejected | +| decision_id | UUID (FK) | -> decisions.id (decision associee) | +| proposed_by_id | UUID (FK) | -> duniter_identities.id (auteur de la proposition) | +| created_at | TIMESTAMPTZ | Date de creation | + +### `decisions` + +Processus decisionnels multi-etapes. 
+ +| Colonne | Type | Description | +| ------------------- | ------------ | -------------------------------------------------------- | +| id | UUID (PK) | Identifiant unique | +| title | VARCHAR(256) | Titre de la decision | +| description | TEXT | Description | +| context | TEXT | Contexte additionnel | +| decision_type | VARCHAR(64) | Type : runtime_upgrade, document_change, mandate_vote, custom | +| status | VARCHAR(32) | Statut : draft, qualification, review, voting, executed, closed | +| voting_protocol_id | UUID (FK) | -> voting_protocols.id | +| created_by_id | UUID (FK) | -> duniter_identities.id | +| created_at | TIMESTAMPTZ | Date de creation | +| updated_at | TIMESTAMPTZ | Date de derniere mise a jour | + +### `decision_steps` + +Etapes d'un processus decisionnel. + +| Colonne | Type | Description | +| ---------------- | ------------ | -------------------------------------------------------- | +| id | UUID (PK) | Identifiant unique | +| decision_id | UUID (FK) | -> decisions.id | +| step_order | INTEGER | Ordre de l'etape | +| step_type | VARCHAR(32) | Type : qualification, review, vote, execution, reporting | +| title | VARCHAR(256) | Titre de l'etape | +| description | TEXT | Description | +| status | VARCHAR(32) | Statut : pending, active, completed, skipped | +| vote_session_id | UUID (FK) | -> vote_sessions.id (session de vote associee) | +| outcome | TEXT | Resultat de l'etape | +| created_at | TIMESTAMPTZ | Date de creation | + +### `vote_sessions` + +Sessions de vote avec snapshot des tailles WoT et decompte en temps reel. 
+ +| Colonne | Type | Description | +| ------------------- | ------------ | ---------------------------------------------- | +| id | UUID (PK) | Identifiant unique | +| decision_id | UUID (FK) | -> decisions.id | +| item_version_id | UUID (FK) | -> item_versions.id | +| voting_protocol_id | UUID (FK) | -> voting_protocols.id | +| wot_size | INTEGER | Taille WoT au debut de la session | +| smith_size | INTEGER | Taille Smith au debut de la session | +| techcomm_size | INTEGER | Taille TechComm au debut de la session | +| starts_at | TIMESTAMPTZ | Date de debut | +| ends_at | TIMESTAMPTZ | Date de fin | +| status | VARCHAR(32) | Statut : open, closed, tallied | +| votes_for | INTEGER | Nombre de votes pour | +| votes_against | INTEGER | Nombre de votes contre | +| votes_total | INTEGER | Nombre total de votes | +| smith_votes_for | INTEGER | Votes pour des membres Smith | +| techcomm_votes_for | INTEGER | Votes pour des membres TechComm | +| threshold_required | FLOAT | Seuil calcule requis | +| result | VARCHAR(32) | Resultat : adopted, rejected, null | +| chain_recorded | BOOLEAN | Enregistre sur la blockchain | +| chain_tx_hash | VARCHAR(128) | Hash de la transaction on-chain | +| created_at | TIMESTAMPTZ | Date de creation | + +### `votes` + +Votes individuels avec preuve cryptographique. 
+ +| Colonne | Type | Description | +| ---------------- | ------------ | -------------------------------------------- | +| id | UUID (PK) | Identifiant unique | +| session_id | UUID (FK) | -> vote_sessions.id | +| voter_id | UUID (FK) | -> duniter_identities.id | +| vote_value | VARCHAR(32) | Valeur : for, against, ou niveau nuance | +| nuanced_level | INTEGER | Niveau nuance (0-5) pour les votes nuances | +| comment | TEXT | Commentaire optionnel | +| signature | TEXT | Signature Ed25519 du payload | +| signed_payload | TEXT | Payload signe (pour verification) | +| voter_wot_status | VARCHAR(32) | Statut WoT du votant au moment du vote | +| voter_is_smith | BOOLEAN | Le votant est-il forgeron | +| voter_is_techcomm| BOOLEAN | Le votant est-il membre TechComm | +| is_active | BOOLEAN | Vote actif (false si remplace) | +| created_at | TIMESTAMPTZ | Date de creation | + +### `mandates` + +Mandats assignes a des membres. + +| Colonne | Type | Description | +| ------------- | ------------ | ------------------------------------------------------- | +| id | UUID (PK) | Identifiant unique | +| title | VARCHAR(256) | Titre du mandat | +| description | TEXT | Description | +| mandate_type | VARCHAR(64) | Type : techcomm, smith, custom | +| status | VARCHAR(32) | Statut : draft, candidacy, voting, active, reporting, completed, revoked | +| mandatee_id | UUID (FK) | -> duniter_identities.id (titulaire du mandat) | +| decision_id | UUID (FK) | -> decisions.id (decision associee) | +| starts_at | TIMESTAMPTZ | Date de debut | +| ends_at | TIMESTAMPTZ | Date de fin | +| created_at | TIMESTAMPTZ | Date de creation | +| updated_at | TIMESTAMPTZ | Date de derniere mise a jour | + +### `mandate_steps` + +Etapes du cycle de vie d'un mandat. 
+ +| Colonne | Type | Description | +| ---------------- | ------------ | ------------------------------------------------------------- | +| id | UUID (PK) | Identifiant unique | +| mandate_id | UUID (FK) | -> mandates.id | +| step_order | INTEGER | Ordre de l'etape | +| step_type | VARCHAR(32) | Type : formulation, candidacy, vote, assignment, reporting, completion, revocation | +| title | VARCHAR(256) | Titre de l'etape | +| description | TEXT | Description | +| status | VARCHAR(32) | Statut : pending, active, completed, skipped | +| vote_session_id | UUID (FK) | -> vote_sessions.id (session de vote associee) | +| outcome | TEXT | Resultat de l'etape | +| created_at | TIMESTAMPTZ | Date de creation | + +### `voting_protocols` + +Protocoles de vote reutilisables. + +| Colonne | Type | Description | +| ------------------ | ------------ | ----------------------------------------------- | +| id | UUID (PK) | Identifiant unique | +| name | VARCHAR(128) | Nom du protocole | +| description | TEXT | Description | +| vote_type | VARCHAR(32) | Type de vote : binary, nuanced | +| formula_config_id | UUID (FK) | -> formula_configs.id | +| mode_params | VARCHAR(64) | Parametres compacts ("D30M50B.1G.2T.1") | +| is_meta_governed | BOOLEAN | Le protocole est-il sous meta-gouvernance | +| created_at | TIMESTAMPTZ | Date de creation | + +### `formula_configs` + +Configurations de formules de seuil WoT. 
+ +| Colonne | Type | Description | +| ------------------------- | ------------ | ----------------------------------------- | +| id | UUID (PK) | Identifiant unique | +| name | VARCHAR(128) | Nom de la configuration | +| description | TEXT | Description | +| duration_days | INTEGER | Duree du vote en jours | +| majority_pct | INTEGER | Pourcentage de majorite (0-100) | +| base_exponent | FLOAT | Exposant de base B | +| gradient_exponent | FLOAT | Exposant de gradient G | +| constant_base | FLOAT | Base constante C | +| smith_exponent | FLOAT | Exposant Smith S (null si non requis) | +| techcomm_exponent | FLOAT | Exposant TechComm T (null si non requis) | +| nuanced_min_participants | INTEGER | Participants minimum (vote nuance) | +| nuanced_threshold_pct | INTEGER | Seuil positif % (vote nuance) | +| created_at | TIMESTAMPTZ | Date de creation | + +### `sanctuary_entries` + +Entrees du sanctuaire (archivage immuable). + +| Colonne | Type | Description | +| -------------- | ------------ | ------------------------------------------ | +| id | UUID (PK) | Identifiant unique | +| entry_type | VARCHAR(64) | Type : document, decision, vote_result | +| reference_id | UUID | UUID de l'entite source | +| title | VARCHAR(256) | Titre | +| content_hash | VARCHAR(128) | Hash SHA-256 du contenu | +| ipfs_cid | VARCHAR(128) | CID IPFS | +| chain_tx_hash | VARCHAR(128) | Hash de la transaction on-chain | +| chain_block | INTEGER | Numero de bloc de la transaction | +| metadata_json | TEXT | Metadonnees JSON supplementaires | +| created_at | TIMESTAMPTZ | Date de creation | + +### `blockchain_cache` + +Cache des donnees blockchain pour eviter les appels RPC repetes. 
+ +| Colonne | Type | Description | +| ------------ | ------------ | -------------------------------- | +| id | UUID (PK) | Identifiant unique | +| cache_key | VARCHAR(256) | Cle de cache (unique, indexee) | +| cache_value | JSONB | Valeur en cache | +| fetched_at | TIMESTAMPTZ | Date de recuperation | +| expires_at | TIMESTAMPTZ | Date d'expiration du cache | + +## Diagramme des relations + +``` +duniter_identities + |-- 1:N --> sessions + |-- 1:N --> votes (voter_id) + |-- 1:N --> item_versions (proposed_by_id) + |-- 1:N --> decisions (created_by_id) + |-- 1:N --> mandates (mandatee_id) + +documents + |-- 1:N --> document_items + +document_items + |-- 1:N --> item_versions + |-- N:1 --> voting_protocols + +item_versions + |-- N:1 --> decisions + +decisions + |-- 1:N --> decision_steps + +decision_steps + |-- N:1 --> vote_sessions + +vote_sessions + |-- 1:N --> votes + |-- N:1 --> voting_protocols + +mandates + |-- 1:N --> mandate_steps + |-- N:1 --> decisions + +mandate_steps + |-- N:1 --> vote_sessions + +voting_protocols + |-- N:1 --> formula_configs + +formula_configs + |-- 1:N --> voting_protocols +``` diff --git a/docs/content/dev/5.formulas.md b/docs/content/dev/5.formulas.md new file mode 100644 index 0000000..bc962eb --- /dev/null +++ b/docs/content/dev/5.formulas.md @@ -0,0 +1,137 @@ +--- +title: Formules +description: Formules mathematiques de seuil WoT, criteres Smith et TechComm +--- + +# Formules de seuil + +Glibredecision utilise un systeme de formules mathematiques pour determiner les seuils d'adoption des votes. Le mecanisme central est la **formule d'inertie WoT** qui impose une quasi-unanimite en cas de faible participation et converge vers une majorite simple a participation elevee. 
+ +## Formule principale -- Seuil WoT + +$$ +\text{Result} = C + B^W + \left( M + (1 - M) \cdot \left(1 - \left(\frac{T}{W}\right)^G \right) \right) \cdot \max(0,\; T - C) +$$ + +### Variables + +| Symbole | Parametre | Description | Defaut | +| ------- | ------------------- | ------------------------------------------------ | ------ | +| $C$ | `constant_base` | Base constante additive (plancher) | 0.0 | +| $B$ | `base_exponent` | Exposant de base. $B^W$ devient negligeable quand $W$ est grand ($0 < B < 1$) | 0.1 | +| $W$ | `wot_size` | Taille du corpus des votants eligibles (membres WoT) | -- | +| $T$ | `total_votes` | Nombre total de votes exprimes (pour + contre) | -- | +| $M$ | `majority_pct / 100`| Ratio de majorite. 0.5 = majorite simple a pleine participation | 50 | +| $G$ | `gradient_exponent` | Controle la vitesse de convergence de la super-majorite vers $M$ | 0.2 | + +### Mecanisme d'inertie + +Le coeur de la formule est le facteur d'inertie : + +$$ +\text{inertia} = M + (1 - M) \cdot \left(1 - \left(\frac{T}{W}\right)^G \right) +$$ + +- Quand la **participation est faible** ($T \ll W$) : le ratio $T/W$ est petit, $(T/W)^G$ est proche de 0, donc l'inertie tend vers $M + (1-M) = 1$. Il faut quasiment l'unanimite. +- Quand la **participation est elevee** ($T \to W$) : le ratio $T/W$ tend vers 1, $(T/W)^G$ tend vers 1, donc l'inertie tend vers $M$. La majorite simple suffit. + +### Exemple de reference + +Avec les parametres `M50 B.1 G.2` et le vote de l'Engagement Forgeron v2.0.0 : + +- $W = 7224$ (membres WoT) +- $T = 120$ (97 pour + 23 contre) +- Seuil calcule : $94$ +- Resultat : **adopte** (97 >= 94) + +## Critere Smith (Forgerons) + +$$ +\text{SmithThreshold} = \lceil \text{SmithWotSize}^S \rceil +$$ + +Le critere Smith exige un nombre minimum de votes favorables de la part des membres Smith (forgerons) pour que certaines decisions soient valides. 
+ +| Symbole | Parametre | Description | Defaut | +| ------- | ---------------- | ---------------------------- | ------ | +| $S$ | `smith_exponent` | Exposant pour le critere Smith | null (desactive) | + +Avec un exposant de $S = 0.1$ et 20 forgerons : + +$$ +\lceil 20^{0.1} \rceil = \lceil 1.35 \rceil = 2 +$$ + +Au minimum 2 votes favorables de forgerons sont requis. + +## Critere TechComm (Comite Technique) + +$$ +\text{TechCommThreshold} = \lceil \text{CoTecSize}^T \rceil +$$ + +Le critere TechComm fonctionne de maniere identique au critere Smith mais pour les membres du Comite Technique. + +| Symbole | Parametre | Description | Defaut | +| ------- | ------------------- | ------------------------------- | ------ | +| $T$ | `techcomm_exponent` | Exposant pour le critere TechComm | null (desactive) | + +Note : dans cette section, le symbole $T$ designe l'exposant `techcomm_exponent`, a ne pas confondre avec le $T$ de la formule principale (nombre total de votes exprimes). + +Avec un exposant de $T = 0.1$ et 5 membres TechComm : + +$$ +\lceil 5^{0.1} \rceil = \lceil 1.17 \rceil = 2 +$$ + +Au minimum 2 votes favorables de membres TechComm sont requis. + +## Resultat final + +Un vote est **adopte** si et seulement si les trois conditions sont remplies simultanement : + +1. `votes_for >= seuil_WoT` (formule principale) +2. `smith_votes_for >= seuil_Smith` (si critere Smith actif) +3. `techcomm_votes_for >= seuil_TechComm` (si critere TechComm actif) + +## Parametres de mode (mode_params) + +Les parametres de formule sont encodes dans une chaine compacte pour faciliter la lecture et le partage. Format : une lettre majuscule suivie d'une valeur numerique. 
+ +| Code | Parametre | Type | Exemple | +| ---- | --------------------- | ----- | ------------ | +| D | `duration_days` | int | D30 = 30 jours | +| M | `majority_pct` | int | M50 = 50% | +| B | `base_exponent` | float | B.1 = 0.1 | +| G | `gradient_exponent` | float | G.2 = 0.2 | +| C | `constant_base` | float | C0 = 0.0 | +| S | `smith_exponent` | float | S.1 = 0.1 | +| T | `techcomm_exponent` | float | T.1 = 0.1 | +| N | `ratio_multiplier` | float | N1.5 = 1.5 | +| R | `is_ratio_mode` | bool | R1 = true | + +### Exemples + +- `"D30M50B.1G.2"` -- 30 jours, majorite 50%, base 0.1, gradient 0.2 +- `"D30M50B.1G.2S.1T.1"` -- Idem avec critere Smith (0.1) et TechComm (0.1) +- `"D60M66B.05G.3"` -- 60 jours, majorite 66%, base 0.05, gradient 0.3 + +## Vote nuance + +En plus du vote binaire (pour/contre), Glibredecision supporte un vote nuance a 6 niveaux : + +| Niveau | Label | +| ------ | ------------- | +| 0 | CONTRE | +| 1 | PAS DU TOUT | +| 2 | PAS D'ACCORD | +| 3 | NEUTRE | +| 4 | D'ACCORD | +| 5 | TOUT A FAIT | + +### Regle d'adoption (vote nuance) + +Un vote nuance est adopte si : + +1. Le nombre de votes aux niveaux 3, 4 et 5 (positifs) represente au moins `threshold_pct`% du total des votes. +2. Le nombre minimum de participants (`min_participants`) est atteint. + +Par defaut : `threshold_pct = 80%`, `min_participants = 59`. diff --git a/docs/content/dev/6.blockchain-integration.md b/docs/content/dev/6.blockchain-integration.md new file mode 100644 index 0000000..c324d4f --- /dev/null +++ b/docs/content/dev/6.blockchain-integration.md @@ -0,0 +1,165 @@ +--- +title: Integration blockchain +description: Integration Duniter V2, IPFS et ancrage on-chain +--- + +# Integration blockchain + +Glibredecision s'integre a la blockchain Duniter V2 pour trois fonctions essentielles : + +1. **Authentification** -- Verification de l'identite des membres via signature Ed25519 +2. **Donnees WoT** -- Recuperation des tailles WoT, Smith et TechComm pour le calcul des seuils +3. 
**Ancrage on-chain** -- Archivage immuable des resultats via `system.remark` + +## Duniter V2 RPC + +La communication avec le noeud Duniter V2 utilise la bibliotheque `substrate-interface` via WebSocket RPC. + +### Configuration + +``` +DUNITER_RPC_URL=wss://gdev.p2p.legal/ws +``` + +### Requetes principales + +#### Taille de la WoT (membres) + +```python +from substrateinterface import SubstrateInterface + +substrate = SubstrateInterface(url="wss://gdev.p2p.legal/ws") +result = substrate.query( + module="Membership", + storage_function="MembershipCount", +) +wot_size = int(result.value) +``` + +#### Taille Smith (forgerons) + +```python +result = substrate.query( + module="SmithMembers", + storage_function="SmithMembershipCount", +) +smith_size = int(result.value) +``` + +#### Taille TechComm + +```python +result = substrate.query( + module="TechnicalCommittee", + storage_function="Members", +) +techcomm_size = len(result.value) if result.value else 0 +``` + +### Cache blockchain + +Pour eviter des appels RPC repetes, les donnees blockchain sont mises en cache dans la table `blockchain_cache` avec une duree d'expiration configurable. La cle de cache est une chaine descriptive (ex: `"wot_size"`, `"smith_size"`), la valeur est stockee en JSONB. + +## IPFS (kubo) + +Le composant IPFS est un noeud kubo qui sert de stockage distribue pour le Sanctuaire. Chaque document adopte, resultat de vote ou decision finalisee est uploade sur IPFS. 
+ +### Configuration + +``` +IPFS_API_URL=http://localhost:5001 +IPFS_GATEWAY_URL=http://localhost:8080 +``` + +### Upload de contenu + +```python +import httpx + +async with httpx.AsyncClient() as client: + response = await client.post( + f"{IPFS_API_URL}/api/v0/add", + files={"file": ("content.txt", content.encode("utf-8"))}, + ) + response.raise_for_status() + ipfs_cid = response.json()["Hash"] +``` + +### Acces au contenu + +Le contenu est accessible via la passerelle IPFS : + +``` +GET http://localhost:8080/ipfs/{cid} +``` + +## Ancrage on-chain (system.remark) + +L'ancrage on-chain consiste a soumettre un extrinsic `system.remark` contenant le hash SHA-256 du contenu archive. Cela cree une preuve immuable et horodatee sur la blockchain Duniter V2. + +### Format du remark + +``` +glibredecision:sanctuary:{content_hash_sha256} +``` + +### Soumission + +```python +from substrateinterface import SubstrateInterface, Keypair + +substrate = SubstrateInterface(url="wss://gdev.p2p.legal/ws") + +call = substrate.compose_call( + call_module="System", + call_function="remark", + call_params={"remark": f"glibredecision:sanctuary:{content_hash}"}, +) + +extrinsic = substrate.create_signed_extrinsic(call=call, keypair=keypair) +receipt = substrate.submit_extrinsic(extrinsic, wait_for_inclusion=True) + +tx_hash = receipt.extrinsic_hash +block_number = receipt.block_number +``` + +### Verification + +Pour verifier qu'un contenu a ete ancre, il suffit de : + +1. Recalculer le hash SHA-256 du contenu +2. Rechercher le remark correspondant dans la blockchain +3. 
Verifier que le hash correspond + +## Flux complet du Sanctuaire + +``` +Contenu adopte + | + v +[SHA-256] --> content_hash + | + +---> [IPFS /api/v0/add] --> ipfs_cid + | + +---> [system.remark] --> tx_hash, block_number + | + v +[sanctuary_entries] -- Enregistrement en base avec content_hash, ipfs_cid, chain_tx_hash, chain_block +``` + +## Authentification Ed25519 + +Le flux d'authentification utilise un mecanisme challenge-response : + +1. Le serveur genere un challenge aleatoire (64 caracteres hexadecimaux) +2. Le client signe le challenge avec sa cle privee Ed25519 (Duniter V2) +3. Le serveur verifie la signature a l'aide de la cle publique derivee de l'adresse SS58 + +```python +from substrateinterface import Keypair + +keypair = Keypair(ss58_address=address) +is_valid = keypair.verify(challenge_bytes, signature_bytes) +``` + +Cette methode garantit que seul le proprietaire de l'adresse Duniter peut s'authentifier, sans jamais transmettre la cle privee. diff --git a/docs/content/dev/7.contributing.md b/docs/content/dev/7.contributing.md new file mode 100644 index 0000000..90f890f --- /dev/null +++ b/docs/content/dev/7.contributing.md @@ -0,0 +1,145 @@ +--- +title: Contribution +description: Guide de contribution au projet Glibredecision +--- + +# Guide de contribution + +Merci de votre interet pour contribuer a Glibredecision. Ce guide explique comment configurer l'environnement de developpement, les conventions a respecter et le processus de contribution. 
+ +## Prerequis + +- Python 3.11+ +- Node.js 20+ +- PostgreSQL 16 +- Docker et Docker Compose (optionnel mais recommande) +- Git + +## Installation locale + +### Methode 1 : Docker (recommandee) + +```bash +# Cloner le depot +git clone https://git.duniter.org/tools/glibredecision.git +cd glibredecision + +# Copier le fichier d'environnement +cp .env.example .env + +# Demarrer tous les services +docker compose -f docker/docker-compose.yml -f docker/docker-compose.dev.yml up +``` + +Les services sont accessibles sur : +- Frontend : http://localhost:3002 +- Backend : http://localhost:8002 +- API docs : http://localhost:8002/docs + +### Methode 2 : Installation manuelle + +```bash +# Backend +cd backend +python -m venv venv +source venv/bin/activate +pip install -r requirements.txt +uvicorn app.main:app --port 8002 --reload + +# Frontend (dans un autre terminal) +cd frontend +npm install +npm run dev +``` + +Assurez-vous qu'une instance PostgreSQL est disponible et que `DATABASE_URL` dans `.env` pointe vers celle-ci. 
+ +## Conventions + +### Langues + +- **Code** (variables, commentaires, docstrings) : anglais +- **Interface utilisateur** (labels, messages, documentation) : francais + +### Structure du code + +Le backend est organise par domaine : + +``` +backend/app/ + models/ # Modeles SQLAlchemy (un fichier par domaine) + schemas/ # Schemas Pydantic v2 (un fichier par domaine) + routers/ # Routes FastAPI (un fichier par domaine) + services/ # Logique metier (un fichier par domaine) + engine/ # Moteur de calcul (formules, seuils) + tests/ # Tests unitaires +``` + +Le frontend suit les conventions Nuxt 4 : + +``` +frontend/app/ + components/ # Composants Vue (un dossier par domaine) + composables/ # Composables reutilisables + pages/ # Pages (un dossier par domaine) + stores/ # Stores Pinia + utils/ # Utilitaires +``` + +### Style de code + +- **Python** : PEP 8, type hints systematiques, docstrings au format NumPy +- **TypeScript/Vue** : ESLint + Prettier (via configuration Nuxt) +- **SQL** : noms de tables au pluriel, noms de colonnes en snake_case + +### API + +- Versionne sous `/api/v1/` +- Schemas Pydantic v2 pour la validation +- Async partout (SQLAlchemy AsyncSession, FastAPI async handlers) +- Codes HTTP standards (201 pour creation, 204 pour suppression, 404, 409, etc.) + +## Tests + +### Backend + +```bash +cd backend +pytest app/tests/ -v +``` + +Les tests du moteur de calcul (`test_threshold.py`) verifient la formule de seuil WoT avec le cas de reference (Engagement Forgeron v2.0.0 : 97/23 avec WoT 7224). + +### Frontend + +```bash +cd frontend +npm run build # Verification que le build passe +``` + +## Processus de contribution + +1. Creer une branche a partir de `main` : `git checkout -b feature/ma-fonctionnalite` +2. Developper et tester localement +3. S'assurer que les tests passent : `pytest` (backend) et `npm run build` (frontend) +4. Pousser la branche et creer une merge request +5. La pipeline CI (Woodpecker) validera automatiquement les tests +6. 
Revue de code par un mainteneur +7. Merge dans `main` + +## Secrets Woodpecker CI + +La pipeline CI utilise les secrets suivants (a configurer dans l'interface Woodpecker) : + +| Secret | Description | +| ----------------- | ------------------------------------ | +| `docker_registry` | URL du registre Docker | +| `docker_username` | Nom d'utilisateur du registre | +| `docker_password` | Mot de passe du registre | +| `deploy_host` | Adresse du serveur de deploiement | +| `deploy_username` | Utilisateur SSH du serveur | +| `deploy_key` | Cle privee SSH pour le deploiement | + +## Contact + +Pour toute question, rendez-vous sur le forum Duniter ou ouvrez une issue sur le depot Git. diff --git a/docs/content/user/1.index.md b/docs/content/user/1.index.md new file mode 100644 index 0000000..720095e --- /dev/null +++ b/docs/content/user/1.index.md @@ -0,0 +1,28 @@ +--- +title: Documentation utilisateur +description: Guide d'utilisation de la plateforme Glibredecision +--- + +# Documentation utilisateur + +Bienvenue dans la documentation utilisateur de Glibredecision, la plateforme de decisions collectives pour la communaute Duniter/G1. + +## Qu'est-ce que Glibredecision ? + +Glibredecision est une plateforme de gouvernance decentralisee qui permet aux membres de la Toile de Confiance (WoT) Duniter de : + +- Gerer des **documents de reference** modulaires (Licence G1, Engagements Forgeron, etc.) 
sous vote permanent +- Prendre des **decisions collectives** via des processus multi-etapes +- **Voter** avec un systeme de seuil adaptatif base sur la participation +- Attribuer des **mandats** a des membres de la communaute +- **Archiver de maniere immuable** les decisions adoptees via IPFS et la blockchain Duniter + +## Sections + +- [Premiers pas](/user/getting-started) -- Connexion et prise en main +- [Documents](/user/documents) -- Consulter et proposer des modifications aux documents de reference +- [Decisions](/user/decisions) -- Comprendre et participer aux processus decisionnels +- [Vote](/user/voting) -- Comment voter et comprendre les resultats +- [Mandats](/user/mandates) -- Mandats et responsabilites +- [Sanctuaire](/user/sanctuary) -- Archivage immuable et verification +- [FAQ](/user/faq) -- Questions frequentes diff --git a/docs/content/user/2.getting-started.md b/docs/content/user/2.getting-started.md new file mode 100644 index 0000000..f247e97 --- /dev/null +++ b/docs/content/user/2.getting-started.md @@ -0,0 +1,51 @@ +--- +title: Premiers pas +description: Connexion et prise en main de Glibredecision +--- + +# Premiers pas + +## Qui peut utiliser Glibredecision ? + +Glibredecision est ouvert a tous les membres de la Toile de Confiance (WoT) Duniter V2. Pour utiliser pleinement la plateforme, vous devez posseder une identite Duniter avec une adresse SS58 valide. + +- **Consultation** : tout visiteur peut consulter les documents, decisions et resultats de vote. +- **Participation** (voter, proposer) : reservee aux membres authentifies via leur identite Duniter. + +## Connexion + +La connexion utilise votre identite Duniter sans jamais transmettre votre cle privee : + +1. Cliquez sur **Se connecter** dans la barre de navigation. +2. Saisissez votre **adresse Duniter** (format SS58, par exemple `5GrwvaEF...`). +3. La plateforme vous envoie un **challenge** (texte aleatoire a signer). +4. 
Signez le challenge avec votre cle privee Ed25519 (via votre portefeuille Duniter ou Cesium). +5. Soumettez la signature. La plateforme verifie que vous etes bien le proprietaire de l'adresse. +6. Vous etes connecte. Un jeton de session est stocke localement (valable 24h). + +## Navigation + +L'interface est organisee autour de cinq sections principales : + +| Section | Description | +| ------------ | ---------------------------------------------------- | +| Documents | Documents de reference de la communaute | +| Decisions | Processus decisionnels en cours et archives | +| Votes | Sessions de vote actives et resultats | +| Mandats | Mandats attribues aux membres | +| Sanctuaire | Archives immuables (IPFS + blockchain) | + +## Votre profil + +Apres connexion, votre profil affiche : + +- Votre **adresse Duniter** SS58 +- Votre **nom d'affichage** (si configure) +- Votre **statut WoT** : membre, en attente, revoque +- Vos **roles** : membre Smith (forgeron) et/ou membre du Comite Technique + +Ces informations sont synchronisees depuis la blockchain Duniter V2 et determinent vos droits de vote. + +## Deconnexion + +Cliquez sur votre profil puis **Se deconnecter**. La session est invalidee cote serveur et le jeton local est supprime. diff --git a/docs/content/user/3.documents.md b/docs/content/user/3.documents.md new file mode 100644 index 0000000..0d14bea --- /dev/null +++ b/docs/content/user/3.documents.md @@ -0,0 +1,57 @@ +--- +title: Documents +description: Guide des documents de reference sur Glibredecision +--- + +# Documents de reference + +## Principe + +Les documents de reference sont les textes fondateurs de la communaute Duniter/G1. Ils sont **modulaires** : chaque document est compose d'items individuels (clauses, regles, verifications, preambules, sections) qui peuvent etre modifies independamment par proposition et vote. 
+ +## Types de documents + +| Type | Description | Exemples | +| -------------- | --------------------------------------------------- | -------------------------------- | +| Licence | Licence monetaire definissant les regles de la monnaie | Licence G1 | +| Engagement | Engagement des membres a respecter des regles | Engagement Forgeron v2.0.0 | +| Reglement | Reglement interieur d'un organe | Reglement du Comite Technique | +| Constitution | Texte constitutif fondamental | -- | + +## Consulter un document + +1. Rendez-vous dans la section **Documents**. +2. Utilisez les filtres (type, statut) pour trouver le document souhaite. +3. Cliquez sur le document pour voir la liste de ses items. +4. Chaque item affiche son texte courant, son type et sa position dans le document. + +## Proposer une modification + +Tout membre authentifie peut proposer une modification a un item de document : + +1. Ouvrez le document et selectionnez l'item a modifier. +2. Cliquez sur **Proposer une modification**. +3. Redigez le **nouveau texte propose**. +4. Ajoutez une **justification** expliquant pourquoi cette modification est necessaire. +5. Soumettez. Un diff automatique est genere entre le texte courant et votre proposition. + +La proposition passe ensuite par un processus de decision (examen, vote) avant d'etre acceptee ou rejetee. + +## Cycle de vie d'une proposition + +``` +Proposee --> En vote --> Acceptee --> Texte courant mis a jour + --> Rejetee --> Archivee +``` + +## Statuts des documents + +| Statut | Description | +| -------- | ------------------------------------------------ | +| Brouillon | En cours de redaction, non soumis au vote | +| Actif | Document en vigueur, sous vote permanent | +| Archive | Document archive, plus en vigueur | + +## Versionnage + +Chaque document possede un numero de version semantique (ex: `2.0.0`). Chaque modification adoptee peut entrainer une mise a jour de version selon l'importance du changement. 
diff --git a/docs/content/user/4.decisions.md b/docs/content/user/4.decisions.md new file mode 100644 index 0000000..858f007 --- /dev/null +++ b/docs/content/user/4.decisions.md @@ -0,0 +1,61 @@ +--- +title: Decisions +description: Guide des processus decisionnels sur Glibredecision +--- + +# Decisions + +## Principe + +Une decision est un processus structure qui conduit a un choix collectif. Chaque decision suit un ensemble d'etapes definies, de la qualification a l'execution. + +## Types de decisions + +| Type | Description | +| ------------------ | ------------------------------------------------------ | +| Document change | Modification d'un item de document de reference | +| Runtime upgrade | Mise a jour du runtime de la blockchain Duniter | +| Mandate vote | Vote pour l'attribution d'un mandat | +| Custom | Decision personnalisee | + +## Etapes d'une decision + +Une decision progresse a travers les etapes suivantes : + +| Etape | Description | +| --------------- | ---------------------------------------------------------------- | +| Qualification | Verification que la proposition est recevable | +| Examen (review) | Periode d'examen et de discussion par la communaute | +| Vote | Session de vote formelle avec seuil de validation | +| Execution | Mise en oeuvre de la decision adoptee | +| Rapport | Compte-rendu de l'execution et archivage des resultats | + +Certaines etapes peuvent etre sautees selon le type de decision. + +## Cycle de vie + +``` +Brouillon --> Qualification --> Examen --> Vote --> Executee --> Cloturee + --> Rejetee +``` + +## Suivre une decision + +1. Rendez-vous dans la section **Decisions**. +2. Filtrez par type ou statut pour trouver la decision qui vous interesse. +3. La page de detail affiche toutes les etapes avec leur statut. +4. Si une etape de vote est active, vous pouvez voter directement depuis la page de decision. + +## Creer une decision + +Les membres authentifies peuvent creer une decision : + +1. 
Cliquez sur **Nouvelle decision**. +2. Renseignez le titre, la description, le contexte et le type. +3. Selectionnez un **protocole de vote** qui definit les parametres de la formule de seuil. +4. Ajoutez les etapes necessaires. +5. Soumettez. La decision passe en statut "brouillon" jusqu'a ce que la premiere etape soit lancee. + +## Lien avec les documents + +Quand une decision de type "document change" est adoptee, la modification proposee est automatiquement appliquee a l'item du document concerne. L'ancienne version est conservee dans l'historique. diff --git a/docs/content/user/5.voting.md b/docs/content/user/5.voting.md new file mode 100644 index 0000000..2aebfb5 --- /dev/null +++ b/docs/content/user/5.voting.md @@ -0,0 +1,90 @@ +--- +title: Vote +description: Guide du systeme de vote sur Glibredecision +--- + +# Vote + +## Principe + +Le systeme de vote de Glibredecision est concu pour adapter le seuil d'adoption a la participation reelle. Quand peu de membres votent, une quasi-unanimite est exigee. Quand la participation est elevee, une majorite simple suffit. Ce mecanisme d'**inertie** protege contre les decisions prises par un petit groupe. + +## Types de vote + +### Vote binaire + +Chaque votant choisit **Pour** ou **Contre**. Le seuil est calcule par la formule WoT. + +### Vote nuance + +Chaque votant exprime son opinion sur une echelle a 6 niveaux : + +| Niveau | Label | Comptage | +| ------ | ------------- | --------------- | +| 0 | CONTRE | Negatif | +| 1 | PAS DU TOUT | Negatif | +| 2 | PAS D'ACCORD | Negatif | +| 3 | NEUTRE | Positif | +| 4 | D'ACCORD | Positif | +| 5 | TOUT A FAIT | Positif | + +Le vote est adopte si les niveaux positifs (3, 4, 5) representent au moins 80% des votes et qu'un nombre minimum de participants est atteint. + +## Comment voter + +1. Rendez-vous sur une session de vote ouverte (via la section **Votes** ou la page d'une decision). +2. Choisissez votre vote (pour/contre en binaire, ou un niveau en nuance). +3. 
Ajoutez un commentaire optionnel pour expliquer votre choix. +4. **Signez votre vote** : la plateforme vous demande de signer un payload avec votre cle privee Ed25519. +5. Soumettez. Votre vote est enregistre avec la signature cryptographique. + +### Modifier son vote + +Vous pouvez modifier votre vote tant que la session est ouverte. L'ancien vote est desactive (conserve pour l'audit) et remplace par le nouveau. + +## Comprendre les resultats + +La page de resultat affiche : + +| Information | Description | +| ------------------- | ---------------------------------------------------- | +| Votes pour | Nombre de votes favorables | +| Votes contre | Nombre de votes defavorables | +| Total | Nombre total de votes exprimes | +| Taille WoT | Nombre de membres WoT eligibles (snapshot au debut) | +| Seuil requis | Seuil calcule par la formule d'inertie | +| Critere Smith | Seuil et validation des votes Smith (si applicable) | +| Critere TechComm | Seuil et validation des votes TechComm (si applicable) | +| Resultat | **Adopte** ou **Rejete** | + +### Exemple concret + +Pour le vote de l'Engagement Forgeron v2.0.0 : + +- Taille WoT : 7224 membres +- 97 votes pour, 23 votes contre (120 total) +- Seuil calcule : 94 (avec les parametres M50 B.1 G.2) +- Resultat : **Adopte** (97 >= 94) + +La faible participation (120 sur 7224 = 1.7%) a rendu le seuil exigeant (94 pour sur 120 = 78%), bien au-dessus de la majorite simple de 50%. + +## Preuve cryptographique + +Chaque vote est accompagne d'une signature Ed25519 qui garantit : + +- **Authenticite** : seul le proprietaire de l'adresse Duniter peut voter en son nom +- **Integrite** : le vote ne peut pas etre modifie apres soumission +- **Non-repudiation** : le votant ne peut pas nier avoir vote + +Les votes signes peuvent etre verifies independamment par quiconque possede la cle publique du votant. 
+ +## Protocoles de vote + +Chaque session de vote est liee a un **protocole de vote** qui definit : + +- Le type de vote (binaire ou nuance) +- La duree du vote (en jours) +- Les parametres de la formule de seuil (majorite, exposants, etc.) +- Les criteres Smith et TechComm eventuels + +Les protocoles sont reutilisables et peuvent eux-memes etre soumis a meta-gouvernance. diff --git a/docs/content/user/6.mandates.md b/docs/content/user/6.mandates.md new file mode 100644 index 0000000..a15f4fc --- /dev/null +++ b/docs/content/user/6.mandates.md @@ -0,0 +1,57 @@ +--- +title: Mandats +description: Guide des mandats sur Glibredecision +--- + +# Mandats + +## Principe + +Un mandat est une responsabilite attribuee a un membre de la communaute pour une duree determinee, apres validation par vote collectif. Les mandats permettent de formaliser les roles au sein de la gouvernance Duniter. + +## Types de mandats + +| Type | Description | +| --------- | ------------------------------------------------------- | +| TechComm | Mandat de membre du Comite Technique | +| Smith | Mandat lie au role de forgeron (Smith) | +| Custom | Mandat personnalise pour tout autre role | + +## Cycle de vie d'un mandat + +Un mandat progresse a travers les etapes suivantes : + +``` +Brouillon --> Candidature --> Vote --> Actif --> Rapport --> Termine + --> Revoque +``` + +| Etape | Description | +| ------------ | ------------------------------------------------------------ | +| Formulation | Definition du mandat, de ses objectifs et de sa duree | +| Candidature | Periode de depot des candidatures | +| Vote | Vote collectif pour designer le mandataire | +| Assignation | Attribution du mandat au candidat elu | +| Rapport | Periode de reporting sur l'execution du mandat | +| Completion | Fin normale du mandat a echeance | +| Revocation | Fin anticipee du mandat (en cas de manquement) | + +## Consulter les mandats + +1. Rendez-vous dans la section **Mandats**. +2. 
Filtrez par type (techcomm, smith, custom) ou statut. +3. Chaque mandat affiche le titulaire, les dates et les etapes. + +## Creer un mandat + +Les membres authentifies peuvent proposer un nouveau mandat : + +1. Cliquez sur **Nouveau mandat**. +2. Renseignez le titre, la description et le type. +3. Definissez les dates de debut et de fin. +4. Ajoutez les etapes du processus. +5. Le mandat passe en phase de candidature puis de vote. + +## Suppression + +Seuls les mandats au statut "brouillon" peuvent etre supprimes. Une fois le processus de candidature lance, le mandat reste dans le systeme pour tracabilite. diff --git a/docs/content/user/7.sanctuary.md b/docs/content/user/7.sanctuary.md new file mode 100644 index 0000000..c49512f --- /dev/null +++ b/docs/content/user/7.sanctuary.md @@ -0,0 +1,61 @@ +--- +title: Sanctuaire +description: Guide de l'archivage immuable sur Glibredecision +--- + +# Sanctuaire + +## Principe + +Le Sanctuaire est la couche d'archivage immuable de Glibredecision. Chaque document adopte, resultat de vote ou decision finalisee est archive de maniere permanente grace a trois mecanismes : + +1. **Hash SHA-256** du contenu pour garantir l'integrite +2. **Stockage IPFS** pour la distribution decentralisee +3. **Ancrage on-chain** via `system.remark` sur la blockchain Duniter V2 + +## Pourquoi le Sanctuaire ? + +La gouvernance exige la transparence et la tracabilite. Le Sanctuaire garantit que : + +- Aucune decision adoptee ne peut etre modifiee retroactivement + +- Tout membre peut verifier l'authenticite d'un document ou d'un resultat de vote +- L'historique des decisions est preserve independamment de la plateforme + +## Types d'entrees + +| Type | Description | +| ------------ | ------------------------------------------------ | +| Document | Version adoptee d'un document de reference | +| Decision | Decision finalisee avec son resultat | +| Vote result | Resultat detaille d'une session de vote | + +## Consulter le Sanctuaire + +1. 
Rendez-vous dans la section **Sanctuaire**. +2. Filtrez par type d'entree si necessaire. +3. Chaque entree affiche : + - Le titre + - Le hash SHA-256 du contenu + - Le CID IPFS (lien vers le contenu sur IPFS) + - Le hash de la transaction on-chain + - Le numero de bloc + - La date d'archivage + +## Verification d'integrite + +Pour verifier qu'une entree du Sanctuaire est authentique : + +1. Recuperez le contenu via IPFS en utilisant le CID affiche. +2. Calculez le hash SHA-256 du contenu telecharge. +3. Comparez avec le hash enregistre dans le Sanctuaire. +4. Verifiez que le meme hash est present dans le remark on-chain (via un explorateur blockchain). + +Si les trois hash correspondent, le contenu est authentique et n'a pas ete modifie. + +## Automatisation + +L'archivage dans le Sanctuaire est declenche automatiquement lorsqu'un processus decisionnel est finalise : + +- Quand une version d'item de document est **acceptee**, le nouveau texte est archive. +- Quand une session de vote est **cloturee**, le resultat detaille est archive. +- Quand une decision est **executee**, l'ensemble de la decision est archive. diff --git a/docs/content/user/8.faq.md b/docs/content/user/8.faq.md new file mode 100644 index 0000000..70123ad --- /dev/null +++ b/docs/content/user/8.faq.md @@ -0,0 +1,80 @@ +--- +title: FAQ +description: Questions frequentes sur Glibredecision +--- + +# Questions frequentes + +## Acces et authentification + +### Ai-je besoin d'un compte Duniter pour utiliser Glibredecision ? + +Pour **consulter** les documents, decisions et resultats de vote, aucune authentification n'est necessaire. Pour **voter**, **proposer des modifications** ou **creer des decisions**, vous devez posseder une identite Duniter V2 avec une adresse SS58. + +### Comment fonctionne la connexion sans mot de passe ? + +Glibredecision utilise un systeme challenge-response base sur la cryptographie Ed25519. 
Vous signez un texte aleatoire avec votre cle privee, et la plateforme verifie la signature avec votre cle publique. Votre cle privee n'est jamais transmise. + +### Ma session a expire, que faire ? + +Les sessions durent 24 heures. Reconnectez-vous en suivant le meme processus (challenge + signature). Vos votes et propositions precedents ne sont pas affectes. + +## Vote + +### Pourquoi le seuil est-il si eleve quand peu de personnes votent ? + +C'est le mecanisme d'**inertie**. Quand la participation est faible, le seuil est eleve pour empecher qu'un petit groupe prenne des decisions engageant toute la communaute. A mesure que la participation augmente, le seuil converge vers la majorite simple. Cela incite a la participation large. + +### Puis-je changer mon vote ? + +Oui, tant que la session de vote est ouverte, vous pouvez modifier votre vote. L'ancien vote est conserve en base de donnees pour l'audit mais marque comme inactif. Seul le dernier vote est pris en compte dans le decompte. + +### Qu'est-ce que le critere Smith ? + +Certaines decisions exigent un nombre minimum de votes favorables de la part des **forgerons** (membres Smith de la WoT). Cela garantit que les decisions techniques sont soutenues par ceux qui maintiennent le reseau. + +### Qu'est-ce que le critere TechComm ? + +De maniere similaire, certaines decisions exigent un nombre minimum de votes favorables du **Comite Technique**. Cela concerne les decisions qui affectent le runtime ou l'infrastructure technique. + +### Comment fonctionnent les votes nuances ? + +Au lieu de "pour" ou "contre", vous choisissez un niveau de 0 (CONTRE) a 5 (TOUT A FAIT). Les niveaux 3, 4 et 5 comptent comme positifs. Pour que le vote soit adopte, il faut que les votes positifs representent au moins 80% du total et qu'un nombre minimum de participants soit atteint. + +## Documents + +### Qu'est-ce qu'un document de reference ? 
+ +Un document de reference est un texte fondateur de la communaute Duniter (Licence G1, Engagement Forgeron, Reglement du Comite Technique, etc.). Il est compose d'items modulaires sous vote permanent. + +### Comment proposer une modification ? + +Ouvrez le document, selectionnez l'item a modifier, cliquez sur "Proposer une modification", redigez le nouveau texte avec une justification, puis soumettez. La proposition sera soumise a un processus de decision et de vote. + +### Que signifie "vote permanent" ? + +Les documents actifs sont toujours ouverts aux propositions de modification. Il n'y a pas de periode speciale pour proposer des changements. Cela permet une evolution continue et organique des textes. + +## Sanctuaire + +### Pourquoi archiver sur IPFS et la blockchain ? + +IPFS fournit un stockage distribue : le contenu est accessible meme si la plateforme Glibredecision est hors ligne. L'ancrage on-chain via `system.remark` cree une preuve horodatee immuable sur la blockchain Duniter. Ensemble, ils garantissent que les decisions de la communaute sont preservees de maniere permanente et verifiable. + +### Comment verifier qu'un document n'a pas ete modifie ? + +Telechargez le document depuis IPFS via son CID, calculez le hash SHA-256, puis comparez-le au hash enregistre dans le Sanctuaire et au remark on-chain. Si les trois correspondent, le document est intact. + +## Technique + +### Sur quelle blockchain Glibredecision fonctionne-t-il ? + +Glibredecision se connecte a la blockchain **Duniter V2** (basee sur Substrate). En environnement de developpement, il se connecte au reseau de test GDev (`wss://gdev.p2p.legal/ws`). + +### Les donnees de vote sont-elles publiques ? + +Oui. Les votes et leurs signatures cryptographiques sont publics, conformement au principe de transparence de la gouvernance. Chaque vote peut etre verifie independamment. + +### Ou est heberge Glibredecision ? 
La plateforme est hebergee sur une infrastructure geree par la communaute, avec deploiement automatise via Docker et Woodpecker CI. Le code source est ouvert et disponible sur le depot Git Duniter.

/**
 * Composable for making authenticated API calls to the Glibredecision backend.
 *
 * Uses the runtime config `apiBase` and automatically injects the Bearer token
 * from the auth store when available.
 */
export function useApi() {
  const config = useRuntimeConfig()
  const auth = useAuthStore()

  /**
   * Perform a typed fetch against the backend API.
   *
   * FIX: the generic parameter was lost in transit (`Record = {}` and a bare
   * `Promise` are invalid TypeScript). Restored `<T>` so callers such as
   * `$api<{ token: string }>('/auth/verify', ...)` type-check.
   *
   * @param path - API path relative to apiBase, e.g. "/documents"
   * @param options - $fetch options (method, body, query, headers, etc.)
   * @returns Typed response
   */
  async function $api<T>(path: string, options: Record<string, any> = {}): Promise<T> {
    const headers: Record<string, string> = {}
    if (auth.token) {
      headers.Authorization = `Bearer ${auth.token}`
    }

    // Caller-supplied headers win over the injected Authorization header.
    return await $fetch<T>(`${config.public.apiBase}${path}`, {
      ...options,
      headers: { ...headers, ...options.headers },
    })
  }

  return { $api }
}
b/frontend/app/pages/protocols/index.vue @@ -0,0 +1,257 @@ + + + diff --git a/frontend/app/pages/sanctuary/index.vue b/frontend/app/pages/sanctuary/index.vue new file mode 100644 index 0000000..a33a35b --- /dev/null +++ b/frontend/app/pages/sanctuary/index.vue @@ -0,0 +1,268 @@ + + + diff --git a/frontend/app/stores/auth.ts b/frontend/app/stores/auth.ts new file mode 100644 index 0000000..8065597 --- /dev/null +++ b/frontend/app/stores/auth.ts @@ -0,0 +1,180 @@ +/** + * Auth store: manages Duniter Ed25519 challenge-response authentication. + * + * Persists the session token in localStorage for SPA rehydration. + * The identity object mirrors the backend IdentityOut schema. + */ + +export interface DuniterIdentity { + id: string + address: string + display_name: string | null + wot_status: string + is_smith: boolean + is_techcomm: boolean +} + +interface AuthState { + token: string | null + identity: DuniterIdentity | null + loading: boolean + error: string | null +} + +export const useAuthStore = defineStore('auth', { + state: (): AuthState => ({ + token: null, + identity: null, + loading: false, + error: null, + }), + + getters: { + isAuthenticated: (state): boolean => !!state.token && !!state.identity, + isSmith: (state): boolean => state.identity?.is_smith ?? false, + isTechComm: (state): boolean => state.identity?.is_techcomm ?? false, + displayName: (state): string => { + if (!state.identity) return '' + return state.identity.display_name || state.identity.address.slice(0, 12) + '...' + }, + }, + + actions: { + /** + * Initiate the challenge-response login flow. + * + * Steps: + * 1. POST /auth/challenge with the Duniter SS58 address + * 2. Client signs the challenge with Ed25519 private key + * 3. POST /auth/verify with address + signature + challenge + * 4. 
Store the returned token and identity + */ + async login(address: string, signFn?: (challenge: string) => Promise) { + this.loading = true + this.error = null + + try { + const { $api } = useApi() + + // Step 1: Request challenge + const challengeRes = await $api<{ challenge: string; expires_at: string }>( + '/auth/challenge', + { + method: 'POST', + body: { address }, + }, + ) + + // Step 2: Sign the challenge + // In production, signFn would use the Duniter keypair to produce an Ed25519 signature. + // For development, we use a placeholder signature. + let signature: string + if (signFn) { + signature = await signFn(challengeRes.challenge) + } else { + // Development placeholder -- backend currently accepts any signature + signature = 'dev_signature_placeholder' + } + + // Step 3: Verify and get token + const verifyRes = await $api<{ token: string; identity: DuniterIdentity }>( + '/auth/verify', + { + method: 'POST', + body: { + address, + signature, + challenge: challengeRes.challenge, + }, + }, + ) + + // Step 4: Store credentials + this.token = verifyRes.token + this.identity = verifyRes.identity + this._persistToken() + + return verifyRes + } catch (err: any) { + this.error = err?.data?.detail || err?.message || 'Erreur de connexion' + throw err + } finally { + this.loading = false + } + }, + + /** + * Fetch the currently authenticated identity from the backend. + * Used on app init to validate a persisted token. + */ + async fetchMe() { + if (!this.token) return + + this.loading = true + this.error = null + + try { + const { $api } = useApi() + const identity = await $api('/auth/me') + this.identity = identity + } catch (err: any) { + this.error = err?.data?.detail || err?.message || 'Session invalide' + this.token = null + this.identity = null + this._clearToken() + throw err + } finally { + this.loading = false + } + }, + + /** + * Log out: invalidate session on server and clear local state. 
+ */ + async logout() { + try { + if (this.token) { + const { $api } = useApi() + await $api('/auth/logout', { method: 'POST' }) + } + } catch { + // Ignore errors during logout -- clear local state regardless + } finally { + this.token = null + this.identity = null + this.error = null + this._clearToken() + navigateTo('/login') + } + }, + + /** + * Hydrate the token from localStorage on app init. + */ + hydrateFromStorage() { + if (import.meta.client) { + const stored = localStorage.getItem('glibredecision_token') + if (stored) { + this.token = stored + } + } + }, + + /** @internal Persist token to localStorage */ + _persistToken() { + if (import.meta.client && this.token) { + localStorage.setItem('glibredecision_token', this.token) + } + }, + + /** @internal Clear token from localStorage */ + _clearToken() { + if (import.meta.client) { + localStorage.removeItem('glibredecision_token') + } + }, + }, +}) + +// Note: hydration from localStorage happens in app.vue onMounted +// via auth.hydrateFromStorage() before calling auth.fetchMe(). diff --git a/frontend/app/stores/decisions.ts b/frontend/app/stores/decisions.ts new file mode 100644 index 0000000..1748b65 --- /dev/null +++ b/frontend/app/stores/decisions.ts @@ -0,0 +1,138 @@ +/** + * Decisions store: decision processes and their steps. + * + * Maps to the backend /api/v1/decisions endpoints. 
+ */ + +export interface DecisionStep { + id: string + decision_id: string + step_order: number + step_type: string + title: string | null + description: string | null + status: string + vote_session_id: string | null + outcome: string | null + created_at: string +} + +export interface Decision { + id: string + title: string + description: string | null + context: string | null + decision_type: string + status: string + voting_protocol_id: string | null + created_by_id: string | null + created_at: string + updated_at: string + steps: DecisionStep[] +} + +export interface DecisionCreate { + title: string + description?: string | null + context?: string | null + decision_type: string + voting_protocol_id?: string | null +} + +interface DecisionsState { + list: Decision[] + current: Decision | null + loading: boolean + error: string | null +} + +export const useDecisionsStore = defineStore('decisions', { + state: (): DecisionsState => ({ + list: [], + current: null, + loading: false, + error: null, + }), + + getters: { + byStatus: (state) => { + return (status: string) => state.list.filter(d => d.status === status) + }, + activeDecisions: (state): Decision[] => { + return state.list.filter(d => d.status === 'active' || d.status === 'in_progress') + }, + completedDecisions: (state): Decision[] => { + return state.list.filter(d => d.status === 'completed' || d.status === 'closed') + }, + }, + + actions: { + /** + * Fetch all decisions with optional filters. 
+ */ + async fetchAll(params?: { decision_type?: string; status?: string }) { + this.loading = true + this.error = null + + try { + const { $api } = useApi() + const query: Record = {} + if (params?.decision_type) query.decision_type = params.decision_type + if (params?.status) query.status = params.status + + this.list = await $api('/decisions/', { query }) + } catch (err: any) { + this.error = err?.data?.detail || err?.message || 'Erreur lors du chargement des decisions' + } finally { + this.loading = false + } + }, + + /** + * Fetch a single decision by ID with all its steps. + */ + async fetchById(id: string) { + this.loading = true + this.error = null + + try { + const { $api } = useApi() + this.current = await $api(`/decisions/${id}`) + } catch (err: any) { + this.error = err?.data?.detail || err?.message || 'Decision introuvable' + } finally { + this.loading = false + } + }, + + /** + * Create a new decision. + */ + async create(payload: DecisionCreate) { + this.loading = true + this.error = null + + try { + const { $api } = useApi() + const decision = await $api('/decisions/', { + method: 'POST', + body: payload, + }) + this.list.unshift(decision) + return decision + } catch (err: any) { + this.error = err?.data?.detail || err?.message || 'Erreur lors de la creation de la decision' + throw err + } finally { + this.loading = false + } + }, + + /** + * Clear the current decision. + */ + clearCurrent() { + this.current = null + }, + }, +}) diff --git a/frontend/app/stores/documents.ts b/frontend/app/stores/documents.ts new file mode 100644 index 0000000..36d1187 --- /dev/null +++ b/frontend/app/stores/documents.ts @@ -0,0 +1,149 @@ +/** + * Documents store: reference documents, their items, and item versions. + * + * Maps to the backend /api/v1/documents endpoints. 
+ */ + +export interface DocumentItem { + id: string + document_id: string + position: string + item_type: string + title: string | null + current_text: string + voting_protocol_id: string | null + sort_order: number + created_at: string + updated_at: string +} + +export interface Document { + id: string + slug: string + title: string + doc_type: string + version: string + status: string + description: string | null + ipfs_cid: string | null + chain_anchor: string | null + created_at: string + updated_at: string + items_count: number +} + +export interface DocumentCreate { + slug: string + title: string + doc_type: string + description?: string | null + version?: string +} + +interface DocumentsState { + list: Document[] + current: Document | null + items: DocumentItem[] + loading: boolean + error: string | null +} + +export const useDocumentsStore = defineStore('documents', { + state: (): DocumentsState => ({ + list: [], + current: null, + items: [], + loading: false, + error: null, + }), + + getters: { + byType: (state) => { + return (docType: string) => state.list.filter(d => d.doc_type === docType) + }, + activeDocuments: (state): Document[] => { + return state.list.filter(d => d.status === 'active') + }, + draftDocuments: (state): Document[] => { + return state.list.filter(d => d.status === 'draft') + }, + }, + + actions: { + /** + * Fetch all documents with optional filters. + */ + async fetchAll(params?: { doc_type?: string; status?: string }) { + this.loading = true + this.error = null + + try { + const { $api } = useApi() + const query: Record = {} + if (params?.doc_type) query.doc_type = params.doc_type + if (params?.status) query.status = params.status + + this.list = await $api('/documents/', { query }) + } catch (err: any) { + this.error = err?.data?.detail || err?.message || 'Erreur lors du chargement des documents' + } finally { + this.loading = false + } + }, + + /** + * Fetch a single document by slug and its items. 
+ */ + async fetchBySlug(slug: string) { + this.loading = true + this.error = null + + try { + const { $api } = useApi() + + const [doc, items] = await Promise.all([ + $api(`/documents/${slug}`), + $api(`/documents/${slug}/items`), + ]) + + this.current = doc + this.items = items + } catch (err: any) { + this.error = err?.data?.detail || err?.message || 'Document introuvable' + } finally { + this.loading = false + } + }, + + /** + * Create a new reference document. + */ + async createDocument(payload: DocumentCreate) { + this.loading = true + this.error = null + + try { + const { $api } = useApi() + const doc = await $api('/documents/', { + method: 'POST', + body: payload, + }) + this.list.unshift(doc) + return doc + } catch (err: any) { + this.error = err?.data?.detail || err?.message || 'Erreur lors de la creation du document' + throw err + } finally { + this.loading = false + } + }, + + /** + * Clear the current document and items. + */ + clearCurrent() { + this.current = null + this.items = [] + }, + }, +}) diff --git a/frontend/app/stores/protocols.ts b/frontend/app/stores/protocols.ts new file mode 100644 index 0000000..edcc778 --- /dev/null +++ b/frontend/app/stores/protocols.ts @@ -0,0 +1,100 @@ +/** + * Protocols store: voting protocols and formula configurations. + * + * Maps to the backend /api/v1/protocols endpoints. 
+ */ + +export interface FormulaConfig { + id: string + name: string + description: string | null + duration_days: number + majority_pct: number + base_exponent: number + gradient_exponent: number + constant_base: number + smith_exponent: number | null + techcomm_exponent: number | null + nuanced_min_participants: number | null + nuanced_threshold_pct: number | null + created_at: string +} + +export interface VotingProtocol { + id: string + name: string + description: string | null + vote_type: string + formula_config_id: string + mode_params: string | null + is_meta_governed: boolean + created_at: string + formula_config: FormulaConfig +} + +interface ProtocolsState { + protocols: VotingProtocol[] + formulas: FormulaConfig[] + loading: boolean + error: string | null +} + +export const useProtocolsStore = defineStore('protocols', { + state: (): ProtocolsState => ({ + protocols: [], + formulas: [], + loading: false, + error: null, + }), + + getters: { + binaryProtocols: (state): VotingProtocol[] => { + return state.protocols.filter(p => p.vote_type === 'binary') + }, + nuancedProtocols: (state): VotingProtocol[] => { + return state.protocols.filter(p => p.vote_type === 'nuanced') + }, + metaGovernedProtocols: (state): VotingProtocol[] => { + return state.protocols.filter(p => p.is_meta_governed) + }, + }, + + actions: { + /** + * Fetch all voting protocols with their formula configurations. + */ + async fetchProtocols(params?: { vote_type?: string }) { + this.loading = true + this.error = null + + try { + const { $api } = useApi() + const query: Record = {} + if (params?.vote_type) query.vote_type = params.vote_type + + this.protocols = await $api('/protocols/', { query }) + } catch (err: any) { + this.error = err?.data?.detail || err?.message || 'Erreur lors du chargement des protocoles' + } finally { + this.loading = false + } + }, + + /** + * Fetch all formula configurations. 
+ */ + async fetchFormulas() { + this.loading = true + this.error = null + + try { + const { $api } = useApi() + this.formulas = await $api('/protocols/formulas') + } catch (err: any) { + this.error = err?.data?.detail || err?.message || 'Erreur lors du chargement des formules' + } finally { + this.loading = false + } + }, + }, +}) diff --git a/frontend/app/stores/votes.ts b/frontend/app/stores/votes.ts new file mode 100644 index 0000000..14b40bd --- /dev/null +++ b/frontend/app/stores/votes.ts @@ -0,0 +1,178 @@ +/** + * Votes store: vote sessions, individual votes, and result computation. + * + * Maps to the backend /api/v1/votes endpoints. + */ + +export interface Vote { + id: string + session_id: string + voter_id: string + vote_value: string + nuanced_level: number | null + comment: string | null + signature: string + signed_payload: string + voter_wot_status: string + voter_is_smith: boolean + voter_is_techcomm: boolean + is_active: boolean + created_at: string +} + +export interface VoteSession { + id: string + decision_id: string | null + item_version_id: string | null + voting_protocol_id: string + wot_size: number + smith_size: number + techcomm_size: number + starts_at: string + ends_at: string + status: string + votes_for: number + votes_against: number + votes_total: number + smith_votes_for: number + techcomm_votes_for: number + threshold_required: number + result: string | null + chain_recorded: boolean + chain_tx_hash: string | null + created_at: string +} + +export interface VoteResult { + session_id: string + status: string + votes_for: number + votes_against: number + votes_total: number + wot_size: number + smith_size: number + techcomm_size: number + smith_votes_for: number + techcomm_votes_for: number + threshold_required: number + result: string + smith_threshold: number | null + smith_pass: boolean + techcomm_threshold: number | null + techcomm_pass: boolean +} + +export interface VoteCreate { + session_id: string + vote_value: string + 
nuanced_level?: number | null + comment?: string | null + signature: string + signed_payload: string +} + +interface VotesState { + currentSession: VoteSession | null + votes: Vote[] + result: VoteResult | null + loading: boolean + error: string | null +} + +export const useVotesStore = defineStore('votes', { + state: (): VotesState => ({ + currentSession: null, + votes: [], + result: null, + loading: false, + error: null, + }), + + getters: { + isSessionOpen: (state): boolean => { + if (!state.currentSession) return false + return state.currentSession.status === 'open' && new Date(state.currentSession.ends_at) > new Date() + }, + participationRate: (state): number => { + if (!state.currentSession || state.currentSession.wot_size === 0) return 0 + return (state.currentSession.votes_total / state.currentSession.wot_size) * 100 + }, + forPercentage: (state): number => { + if (!state.currentSession || state.currentSession.votes_total === 0) return 0 + return (state.currentSession.votes_for / state.currentSession.votes_total) * 100 + }, + }, + + actions: { + /** + * Fetch a vote session by ID with its votes and result. + */ + async fetchSession(sessionId: string) { + this.loading = true + this.error = null + + try { + const { $api } = useApi() + + const [session, votes, result] = await Promise.all([ + $api(`/votes/sessions/${sessionId}`), + $api(`/votes/sessions/${sessionId}/votes`), + $api(`/votes/sessions/${sessionId}/result`), + ]) + + this.currentSession = session + this.votes = votes + this.result = result + } catch (err: any) { + this.error = err?.data?.detail || err?.message || 'Session de vote introuvable' + } finally { + this.loading = false + } + }, + + /** + * Submit a vote to the current session. 
+ */ + async submitVote(payload: VoteCreate) { + this.loading = true + this.error = null + + try { + const { $api } = useApi() + const vote = await $api(`/votes/sessions/${payload.session_id}/vote`, { + method: 'POST', + body: payload, + }) + + // Update local state + this.votes.push(vote) + + // Refresh session tallies and result + if (this.currentSession) { + const [session, result] = await Promise.all([ + $api(`/votes/sessions/${payload.session_id}`), + $api(`/votes/sessions/${payload.session_id}/result`), + ]) + this.currentSession = session + this.result = result + } + + return vote + } catch (err: any) { + this.error = err?.data?.detail || err?.message || 'Erreur lors du vote' + throw err + } finally { + this.loading = false + } + }, + + /** + * Clear the current session state. + */ + clearSession() { + this.currentSession = null + this.votes = [] + this.result = null + }, + }, +}) diff --git a/frontend/app/utils/mode-params.ts b/frontend/app/utils/mode-params.ts new file mode 100644 index 0000000..cf1edb2 --- /dev/null +++ b/frontend/app/utils/mode-params.ts @@ -0,0 +1,164 @@ +/** + * TypeScript mirror of the Python mode_params parser. + * + * A mode-params string encodes voting formula parameters in a compact format. + * Example: "D30M50B.1G.2T.1" + * + * Supported codes: + * D = duration_days (int) + * M = majority_pct (int, 0-100) + * B = base_exponent (float) + * G = gradient_exponent (float) + * C = constant_base (float) + * S = smith_exponent (float) + * T = techcomm_exponent (float) + * N = ratio_multiplier (float) + * R = is_ratio_mode (bool, 0 or 1) + * + * Values may start with a dot for decimals < 1, e.g. "B.1" means base_exponent=0.1. 
+ */ + +export interface ModeParams { + duration_days: number + majority_pct: number + base_exponent: number + gradient_exponent: number + constant_base: number + smith_exponent: number | null + techcomm_exponent: number | null + ratio_multiplier: number | null + is_ratio_mode: boolean +} + +type CodeType = 'int' | 'float' | 'bool' + +const CODES: Record = { + D: { key: 'duration_days', type: 'int' }, + M: { key: 'majority_pct', type: 'int' }, + B: { key: 'base_exponent', type: 'float' }, + G: { key: 'gradient_exponent', type: 'float' }, + C: { key: 'constant_base', type: 'float' }, + S: { key: 'smith_exponent', type: 'float' }, + T: { key: 'techcomm_exponent', type: 'float' }, + N: { key: 'ratio_multiplier', type: 'float' }, + R: { key: 'is_ratio_mode', type: 'bool' }, +} + +const PARAM_RE = /([A-Z])(\d*\.?\d+)/g + +function getDefaults(): ModeParams { + return { + duration_days: 30, + majority_pct: 50, + base_exponent: 0.1, + gradient_exponent: 0.2, + constant_base: 0.0, + smith_exponent: null, + techcomm_exponent: null, + ratio_multiplier: null, + is_ratio_mode: false, + } +} + +/** + * Parse a mode-params string into a structured ModeParams object. + * + * @param paramsStr - Compact parameter string, e.g. 
"D30M50B.1G.2T.1" + * @returns Parsed parameters with defaults for codes not found + * @throws Error if an unrecognised code letter is found + */ +export function parseModeParams(paramsStr: string): ModeParams { + const result = getDefaults() + + if (!paramsStr || !paramsStr.trim()) { + return result + } + + let match: RegExpExecArray | null + PARAM_RE.lastIndex = 0 + + while ((match = PARAM_RE.exec(paramsStr)) !== null) { + const code = match[1] + const rawValue = match[2] + + if (!(code in CODES)) { + throw new Error(`Code de parametre inconnu : '${code}'`) + } + + const { key, type } = CODES[code] + + if (type === 'int') { + ;(result as any)[key] = Math.floor(parseFloat(rawValue)) + } else if (type === 'float') { + ;(result as any)[key] = parseFloat(rawValue) + } else if (type === 'bool') { + ;(result as any)[key] = parseFloat(rawValue) !== 0 + } + } + + return result +} + +/** + * Encode a ModeParams object into a compact mode-params string. + * + * Only includes parameters that differ from defaults. + * + * @param params - Parameters to encode + * @returns Compact string, e.g. "D30M50B.1G.2" + */ +export function encodeModeParams(params: Partial): string { + const defaults = getDefaults() + const parts: string[] = [] + + const codeEntries = Object.entries(CODES) as [string, { key: keyof ModeParams; type: CodeType }][] + + for (const [code, { key, type }] of codeEntries) { + const value = params[key] + if (value === undefined || value === null) continue + if (value === defaults[key]) continue + + if (type === 'int') { + parts.push(`${code}${value}`) + } else if (type === 'float') { + const numVal = value as number + if (numVal < 1 && numVal > 0) { + parts.push(`${code}${numVal.toString().replace(/^0/, '')}`) + } else { + parts.push(`${code}${numVal}`) + } + } else if (type === 'bool') { + parts.push(`${code}${value ? 1 : 0}`) + } + } + + return parts.join('') +} + +/** + * Format a mode-params string for human display. 
+ * + * @param paramsStr - Compact parameter string + * @returns Human-readable description in French + */ +export function formatModeParams(paramsStr: string): string { + const params = parseModeParams(paramsStr) + const parts: string[] = [] + + parts.push(`Duree: ${params.duration_days} jours`) + parts.push(`Majorite: ${params.majority_pct}%`) + parts.push(`Base: ${params.base_exponent}`) + parts.push(`Gradient: ${params.gradient_exponent}`) + + if (params.constant_base > 0) { + parts.push(`Constante: ${params.constant_base}`) + } + if (params.smith_exponent !== null) { + parts.push(`Smith: ${params.smith_exponent}`) + } + if (params.techcomm_exponent !== null) { + parts.push(`TechComm: ${params.techcomm_exponent}`) + } + + return parts.join(' | ') +} diff --git a/frontend/app/utils/threshold.ts b/frontend/app/utils/threshold.ts new file mode 100644 index 0000000..8427ce8 --- /dev/null +++ b/frontend/app/utils/threshold.ts @@ -0,0 +1,84 @@ +/** + * TypeScript mirror of the Python WoT threshold formula. 
+ * + * Core formula: + * Result = C + B^W + (M + (1-M) * (1 - (T/W)^G)) * max(0, T - C) + * + * Where: + * C = constant_base + * B = base_exponent + * W = wot_size (corpus of eligible voters) + * T = total_votes (for + against) + * M = majority_ratio (majority_pct / 100) + * G = gradient_exponent + * + * Inertia behaviour: + * - Low participation (T << W) -> near-unanimity required + * - High participation (T -> W) -> simple majority M suffices + * + * Reference test case: + * wot_size=7224, votes_for=97, votes_against=23 (total=120) + * params M50 B.1 G.2 => threshold=94, adopted (97 >= 94) + */ +export function wotThreshold( + wotSize: number, + totalVotes: number, + majorityPct: number = 50, + baseExponent: number = 0.1, + gradientExponent: number = 0.2, + constantBase: number = 0.0, +): number { + if (wotSize <= 0) { + throw new Error('wotSize doit etre strictement positif') + } + if (totalVotes < 0) { + throw new Error('totalVotes ne peut pas etre negatif') + } + if (majorityPct < 0 || majorityPct > 100) { + throw new Error('majorityPct doit etre entre 0 et 100') + } + + const M = majorityPct / 100 + const T = totalVotes + const W = wotSize + const C = constantBase + const B = baseExponent + const G = gradientExponent + + // Guard: if no votes, threshold is at least ceil(C + B^W) + if (T === 0) { + return Math.ceil(C + Math.pow(B, W)) + } + + // Core formula + const participationRatio = T / W + const inertiaFactor = 1.0 - Math.pow(participationRatio, G) + const requiredRatio = M + (1.0 - M) * inertiaFactor + const result = C + Math.pow(B, W) + requiredRatio * Math.max(0, T - C) + + return Math.ceil(result) +} + +/** + * Compute the Smith criterion threshold. 
+ * + * @param smithWotSize - Number of Smith members + * @param smithExponent - Exponent S for the Smith criterion + * @returns Minimum number of Smith votes required + */ +export function smithThreshold(smithWotSize: number, smithExponent: number): number { + if (smithWotSize <= 0) return 0 + return Math.ceil(Math.pow(smithWotSize, smithExponent)) +} + +/** + * Compute the TechComm criterion threshold. + * + * @param techcommSize - Number of TechComm members + * @param techcommExponent - Exponent T for the TechComm criterion + * @returns Minimum number of TechComm votes required + */ +export function techcommThreshold(techcommSize: number, techcommExponent: number): number { + if (techcommSize <= 0) return 0 + return Math.ceil(Math.pow(techcommSize, techcommExponent)) +} diff --git a/frontend/nuxt.config.ts b/frontend/nuxt.config.ts new file mode 100644 index 0000000..9a54c23 --- /dev/null +++ b/frontend/nuxt.config.ts @@ -0,0 +1,28 @@ +export default defineNuxtConfig({ + compatibilityDate: '2025-07-15', + ssr: false, + devtools: { enabled: true }, + devServer: { port: 3002 }, + components: [{ path: '~/components', pathPrefix: false }], + modules: [ + '@nuxt/ui', + '@pinia/nuxt', + '@unocss/nuxt', + '@vueuse/nuxt', + ], + app: { + head: { + htmlAttrs: { lang: 'fr' }, + meta: [ + { name: 'viewport', content: 'width=device-width, initial-scale=1' }, + { name: 'description', content: 'Plateforme de decisions collectives pour la communaute Duniter/G1' }, + ], + title: 'Glibredecision', + }, + }, + runtimeConfig: { + public: { + apiBase: 'http://localhost:8002/api/v1', + }, + }, +}) diff --git a/frontend/package.json b/frontend/package.json new file mode 100644 index 0000000..a81a2f0 --- /dev/null +++ b/frontend/package.json @@ -0,0 +1,29 @@ +{ + "name": "glibredecision", + "version": "0.1.0", + "type": "module", + "private": true, + "scripts": { + "build": "nuxt build", + "dev": "nuxt dev", + "generate": "nuxt generate", + "preview": "nuxt preview", + "postinstall": 
"nuxt prepare" + }, + "dependencies": { + "@nuxt/content": "^3.11.2", + "@nuxt/ui": "^3.1.0", + "@pinia/nuxt": "^0.9.0", + "@unocss/nuxt": "^66.6.0", + "@vueuse/nuxt": "^14.2.1", + "nuxt": "^4.3.1", + "pinia": "^3.0.2", + "vue": "^3.5.28", + "vue-router": "^4.6.4" + }, + "devDependencies": { + "@iconify-json/lucide": "^1.2.91", + "typescript": "^5.9.3", + "unocss": "^66.6.0" + } +} diff --git a/frontend/tsconfig.json b/frontend/tsconfig.json new file mode 100644 index 0000000..4b34df1 --- /dev/null +++ b/frontend/tsconfig.json @@ -0,0 +1,3 @@ +{ + "extends": "./.nuxt/tsconfig.json" +} diff --git a/research_duniter_forum.md b/research_duniter_forum.md new file mode 100644 index 0000000..a127300 --- /dev/null +++ b/research_duniter_forum.md @@ -0,0 +1,536 @@ +# Recherche Forum Duniter -- Donnees de reference pour Glibredecision + +Date de recherche : 2026-02-28 + +--- + +## TABLE DES MATIERES + +1. [Licence G1 -- Texte complet](#1-licence-g1--texte-complet) +2. [Acte d'engagement des forgerons (Engagement Forgeron v2.0.0)](#2-acte-dengagement-des-forgerons) +3. [Acte d'engagement du comite technique (Engagement Comite Tech v2.0.0)](#3-acte-dengagement-du-comite-technique) +4. [G1vote -- Fonctionnement, formule et mecanisme](#4-g1vote--fonctionnement-formule-et-mecanisme) +5. [Processus de validation par vote nuance (1000i100)](#5-processus-de-validation-par-vote-nuance) +6. [Contributions d'Yvv sur les forums](#6-contributions-dyvv-sur-les-forums) +7. [Sources et liens](#7-sources-et-liens) + +--- + +## 1. Licence G1 -- Texte complet + +**Version : v0.3.0 (2017-04-04, modifiee le 2025-02-11)** + +Source : https://monnaie-libre.fr/licence-g1/ +Depot Git : https://git.duniter.org/documents/g1_monetary_license/-/blob/master/g1_monetary_license_fr.rst + +### Preambule + +Licence de la monnaie et engagement de responsabilite. 
+ +Toute operation de certification d'un nouveau membre de la monnaie G1 doit prealablement s'accompagner de la transmission de cette licence de la monnaie G1 dont le certificateur doit s'assurer qu'elle a ete etudiee, comprise et acceptee par la personne qui sera certifiee. + +### Toile de confiance G1 (TdC G1) + +**Avertissement :** Certifier n'est pas uniquement s'assurer que vous avez rencontre la personne, c'est assurer a la communaute G1 que vous connaissez suffisamment bien la personne que vous vous appretez a certifier et que vous saurez ainsi la contacter facilement, et etre en mesure de reperer un double compte effectue par une personne certifiee par vous-meme, ou d'autres types de problemes (disparition...), en effectuant des recoupements qui permettront de reveler le probleme le cas echeant. + +**Conseils fortement recommandes :** +- Connaitre la personne par plusieurs moyens de contact (physique, electronique, etc.) +- Connaitre d'autres personnes qui la connaissent aussi bien +- Ne jamais certifier seul ; travailler avec au moins un autre membre de la TdC +- Verifier si le compte a deja des certifications existantes avant de proceder +- Confirmer que le futur membre controle bien son compte (transfert test) +- S'assurer que les contacts ont etudie la licence G1 en vigueur + +**Verifications obligatoires pour le certificateur :** + +1. De suffisamment bien connaitre la personne qui declare gerer cette cle publique +2. Verifier personnellement la bonne cle publique avec elle +3. Confirmer qu'elle a genere son document de revocation de compte Duniter + +4a. Rencontre physique en personne preferee, OU +4b. 
Verifier a distance le lien personne / cle publique en contactant la personne par plusieurs moyens de communication differents + +### Regles abregees de la TdC + +- Chaque membre : 100 certifications possibles ; 1 certification par 5 jours +- Validite de certification : 2 mois pour les nouveaux membres +- Nouveau membre : 5 certifications requises + distance <= 5 pas de 80% des membres referents +- Seuil de membre referent : Y[N] = plafond de N^(1/5) +- Certifications membres etablis : valides 2 ans +- Renouvellement : accord tous les 12 mois ; maintenir >= 5 certifications valides + +### Monnaie G1 + +**Production du Dividende Universel (DU) :** 1 DU par personne et par jour + +**Code monetaire :** +Le DU journalier reste constant jusqu'a l'equinoxe, puis est reevalue selon : + +``` +DUjour(equinoxe suivant) = DUjour(equinoxe) + c^2 * (M/N)(equinoxe) / (182,625 jours) +``` + +Parametres : +- c = 4,88% par equinoxe +- DU(0) = 10,00 G1 +- M = masse monetaire totale +- N = nombre de membres + +### Logiciels et licence + +Les logiciels G1 permettant aux utilisateurs de gerer leur utilisation de G1 doivent transmettre cette licence avec le logiciel ainsi que l'ensemble des parametres techniques de la monnaie G1 inscrits dans le bloc 0. + +### Regles de modification du document + +- Proposants, soutiens et votants doivent etre membres de la TdC +- Finaliser : creer des comptes portefeuille pour chaque option (pour/contre) ; poster la proposition avec les cles publiques dans la categorie dediee du forum +- Mecanisme de vote : les membres transferent des fonds vers le compte de l'option choisie ; transferts multiples vers le meme compte = un vote ; transferts vers les deux comptes = nul +- Resultats apres 30 jours : Si la proposition a recolte au moins 20 votes/virements "pour" et aucun vote "contre", elle est adoptee. Pour chaque vote "contre", 5 votes "pour" supplementaires sont necessaires. + +--- + +## 2. 
Acte d'engagement des forgerons + +**Document : Engagement Forgeron v2.0.0-fr (date : 2026-01-07)** +**Statut : ADOPTE (vote du 7 janvier au 6 fevrier 2026)** + +Source : https://forum.monnaie-libre.fr/t/vote-engagement-forgeron-v2-0-0/33165 +Depot Git : https://git.duniter.org/documents/g1_monetary_license (MR #30 par 1000i100) + +### Resultats du vote +- 97 votes POUR (dont 8 forgerons) +- 23 votes CONTRE (dont 3 forgerons) +- 19 votes invalides/nuls +- Seuil atteint exactement a 97 votes POUR +- Total WoT : 7 224 membres (1,66% de participation) + +### Intention et enjeux + +Avec la V2, une sous-toile de confiance pour les forgerons est proposee pour s'assurer que les forgerons qui gerent l'ecosysteme technique le font avec competence, rigueur, securite et reactivite. Le document etablit les standards pour les operateurs de noeuds Duniter V2. + +L'engagement vise a eviter les dependances centralisees -- qu'elles soient techniques ou humaines. + +### Savoirs-faire requis (competences techniques) + +Les candidats doivent demontrer : +- Administration systeme Linux en ligne de commande +- Configuration reseau et pratiques de securite +- Protocoles de gestion de cles cryptographiques et mots de passe +- Fondamentaux de cybersecurite (modelisation de menaces, surfaces d'attaque) +- Comprehension des mecanismes de consensus blockchain Duniter +- Gestion de conteneurs Docker (recommande) + +### Savoirs-etre (engagements comportementaux) + +**Rigueur :** Les operateurs doivent comprendre en profondeur les configurations de leurs noeuds. "Un faux sentiment de securite erode la confiance et met le reseau en danger." Connaitre ses limites et savoir demander de l'aide. + +**Reactivite :** Repondre dans les 24 heures aux alertes, restaurer les services rapidement, escalader les problemes de maniere appropriee. + +**Responsabilite :** Les certificateurs garantissent la competence des candidats et fournissent un accompagnement continu. 
+ +### Clauses de l'aspirant forgeron + +**Securite et conformite (~9 engagements) :** +- Utiliser des phrases de recuperation generees aleatoirement +- Separer les comptes (membre vs forgeron) +- Maintenir des sauvegardes physiques multiples +- Operer des noeuds synchronises et a jour +- Ne pas exposer d'API non securisees +- Declarer hors-ligne si indisponible +- Repondre dans les 24 heures quand en ligne + +**Contact :** Pouvoir joindre 3+ forgerons certificateurs par au moins 2 canaux de communication (telephone/SMS, email, XMPP, Matrix). + +**Connaissances :** Acceptation de tous les engagements, comprehension des regles du graphe de confiance, mecanismes de consensus blockchain. + +**Clauses pieges :** Exclusions pour harcelement, recherche de pouvoir ou infiltration de l'ecosysteme. + +### Clauses du forgeron certificateur + +**Securite et conformite (~8 exigences) :** +- Verification de l'intention du candidat +- Evaluation des pratiques de securite +- Validation de la phrase de recuperation +- Verification du noeud +- Documentation de la configuration + +**Contact :** Maintien de canaux de communication et engagement de reponse maximale de 24 heures pour les defauts lies aux noeuds. + +**Connaissances :** Verification que les candidats comprennent les implications de l'engagement et les delais operationnels du graphe de confiance. + +**Clauses pieges :** Rejet des certifications sous contrainte ou motivees commercialement. + +### Regles de la toile de confiance des forgerons + +- Necessite l'adhesion a la toile de confiance principale +- Necessite une invitation de forgeron +- Necessite 3 certifications de forgerons +- Pas d'expiration directe ; adhesion perdue via sortie de la TdC principale ou 6 mois hors-ligne en tant que validateur +- Seuil d'inactivite des forgerons : 3 mois avant perte du statut de forge + +### Mise en oeuvre + +Les certifications presentent toutes les clauses d'engagement dans un ordre aleatoire. 
Les candidats doivent repondre correctement a chaque clause (affirmative/negative selon le type) ou recommencer la procedure. + +### Formule de vote + +``` +votesPour >= ceil(WotSize^0.1 + (0.5 + (1 - 0.5) * (1 - (TotalVotes/WotSize)^0.2)) * TotalVotes) +``` +ET +``` +votesSmithPour >= ceil(SmithWotSize^0.1) +``` + +### Actions post-vote + +Ajout des listes de verification dans g1cli, duniter-vue, cesium2, et developpement des workflows de certification UI. + +--- + +## 3. Acte d'engagement du comite technique + +**Document : Engagement Comite Tech v2.0.0-fr** +**Vote : 4 fevrier -- 6 mars 2026 (en cours au moment de la recherche)** + +Source : https://forum.monnaie-libre.fr/t/vote-engagement-comite-tech-2-0-0-fr/33293 + +### Objectif principal + +Le comite technique a pour role de proteger la souverainete de la communaute G1 sur les mises a jour de la blockchain. Ses missions : +- Auditer le code +- Detecter le code malveillant +- Verifier les fonctionnalites annoncees +- Deployer uniquement les mises a jour servant la communaute + +### Engagements des membres + +Les membres s'engagent a : +- Respecter les regles decrites dans la version en vigueur du document +- Demissionner s'ils sont en desaccord avec les regles en vigueur +- Voter pour retirer le mandat des collegues qui violent visiblement les directives + +### Gouvernance des modifications + +L'adoption necessite : +- Seuil de vote unani-majoritaire +- Support minimum des membres du comite + +Formule : +``` +votesPour >= ceil(WotSize^0.1 + (0.5 + (1 - 0.5) * (1 - (TotalVotes/WotSize)^0.2)) * TotalVotes) +``` +ET +``` +votesCoTecPour >= ceil(CoTecSize^0.1) +``` + +### Composition actuelle du comite + +L'entree/sortie necessite l'approbation des 2/3 des membres existants (modifiable uniquement par mises a jour runtime avec validation des 2/3). + +### Methode de vote + +0,01 G1 envoyes aux adresses designees (POUR ou CONTRE) +Suivi via : g1vote-view (talk: 32960; mode: D30M50B.1G.2T.1) + +--- + +## 4. 
G1vote -- Fonctionnement, formule et mecanisme + +### Description generale + +**Depot :** https://git.duniter.org/tools/g1vote-view +**Licence :** GNU AGPLv3 +**Technologie :** Vue.js + TypeScript + Vite +**131 commits, cree le 16 fevrier 2025** + +G1vote est une application web qui affiche le statut des votes G1 pour une proposition specifique. L'outil lit les transactions blockchain G1 pour compter les votes. + +### Acces + +URL : `https://g1vote-view-237903.pages.duniter.org/#/vote/[TOPIC_ID]` + +Le TOPIC_ID correspond a l'identifiant du sujet sur le forum monnaie-libre.fr qui contient les deux cles publiques (POUR et CONTRE) dans le premier message. + +### Modes de validation + +Deux approches de validation configurables via parametres URL : + +1. **Mode ratio** : `?validation=ratio&custom=B20-5N` +2. **Mode unani-majoritaire** : `?validation=unani-majority&custom=M50` + +### Codes des parametres (ModeParams) + +| Code | Signification | Exemple | +|------|--------------|---------| +| D | Duree du vote en jours | D30 = 30 jours | +| M | Seuil de majorite en % | M50 = 50% | +| N | Multiplicateur ratio (alternatif a M) | N5 | +| B | Exposant de base | B1 = exposant 1 | +| G | Exposant de gradient | G.5 = gradient 0.5 | +| C | Base constante | C10 = constante 10 | +| S | Critere Smith (exposant) | S.5 | +| T | Critere Comite Tech (exposant) | T.3 | +| R | Indicateur mode ratio | R | + +Exemple complet : `D30M50B1G.5C10` = vote de 30 jours, majorite 50%, exposant base 1, gradient 0.5, constante base 10. 
+ +### Formule centrale de seuil (WotMembersThreshold) + +La formule principale pour determiner le nombre de votes "pour" necessaires : + +``` +Resultat = C + B^W + (M + (1-M) * (1 - (T/W)^G)) * max(0, T-C) +``` + +Ou : +- **C** = constantBase (base constante) +- **B** = baseExponent (exposant de base) +- **W** = corpus (taille de la WoT, nombre de membres inscrits) +- **T** = total des votes (pour + contre) +- **M** = majorityRatio (ratio de majorite = majority/100) +- **G** = gradientExponent (exposant de gradient) + +### Mecanisme d'inertie + +Le mecanisme d'inertie est encode dans le terme `(1 - (T/W)^G)` de la formule : + +- Quand la participation est faible (T/W proche de 0), le terme `(T/W)^G` est proche de 0, donc `(1 - (T/W)^G)` est proche de 1. Le seuil d'adoption est alors quasi-unanime (proche de 100% des votants). +- Quand la participation augmente (T/W s'approche de 1), le terme `(1 - (T/W)^G)` diminue vers 0. Le seuil baisse vers le ratio de majorite M (par exemple 50%). +- Le parametre **G** (gradient) controle la vitesse de transition entre quasi-unanimite et majorite simple. + +**En pratique :** Avec peu de votants, il faut quasi l'unanimite pour adopter une proposition. Plus il y a de votants, plus le seuil se rapproche d'une majorite simple. C'est le mecanisme d'"inertie" : une faible mobilisation ne doit pas permettre a une petite minorite de decider pour tous. + +### Seuils specifiques + +**SmithThreshold (forgerons) :** +``` +votesPour >= ceil(SmithWotSize^exponent) +``` + +**TechCommThreshold (comite technique) :** +``` +votesPour >= ceil(CoTecSize^exponent) +``` + +### Architecture technique + +- `Threshold.ts` : Contient les classes WotMembersThreshold, SmithThreshold, TechCommThreshold +- `formulaTex.ts` : Genere les formules en LaTeX pour affichage +- `ModeParams.ts` : Parse les codes de parametres (D30M50B, etc.) 
+- `SimuParams.ts` : Gere les parametres de simulation +- `reactiveVote.ts` : Couche reactive Vue.js autour de la logique metier +- `techCommittee.ts` : Resolution d'identite des membres du comite technique + +--- + +## 5. Processus de validation par vote nuance + +Source : https://forum.monnaie-libre.fr/t/processus-de-validation-licence-par-vote-nuance/31729 + +### Echelle d'adhesion a six niveaux + +Au lieu d'un vote binaire pour/contre, le systeme propose une echelle : + +| Niveau | Montant G1 | Signification | +|--------|-----------|---------------| +| CONTRE | 0,01 G1 | Opposition viscerale | +| PAS DU TOUT D'ACCORD | 0,1 G1 | Plusieurs points de desaccord | +| PAS D'ACCORD | 0,2 G1 | 1-2 points problematiques | +| NEUTRE | 0,3 G1 | Ni pour ni contre | +| D'ACCORD | 0,4 G1 | Ameliorations mineures possibles, globalement acceptable | +| TOUT A FAIT D'ACCORD | 0,5 G1 | Alignement parfait | + +### Formule d'adoption + +Une proposition passe quand : la somme des votes NEUTRE + D'ACCORD + TOUT A FAIT D'ACCORD represente au moins 80% de la totalite des votes, avec au moins 59 personnes differentes exprimees. + +Rejet si les votes d'opposition depassent strictement 20% des votes totaux. 
+ +### Regles de validite + +- Votes avec montants hors de l'echelle prescrite : annules +- Votes negatifs sans commentaire de transaction expliquant les points de desaccord : invalides +- Fenetre de vote standard de 30 jours, prolongeable si participation inferieure a 59 membres + +### Concepts avances proposes par 1000i100 + +- **Vote par procuration quadratique :** Le poids du vote augmente par racine carree du nombre de procurations (avec 3 procurations, poids = racine de 4 = 2), empechant la domination par des figures populaires +- **Vote sous pression :** Mecanisme permettant aux votants de signaler une contrainte indue, declenchant une intervention communautaire +- **Distinction consensus vs consentement :** Le systeme vise le consentement (absence d'opposition/risques identifies) plutot que le consensus (accord universel) + +### Seuil de participation + +Le seuil de 59 votants reference les membres fondateurs du reseau, etablissant un mecanisme de verification de presence empechant les decisions par des noyaux inactifs. + +--- + +## 6. 
Contributions d'Yvv sur les forums + +### Roles institutionnels + +- **Secretaire** du Bureau d'Axiom Team (2023) + Source : https://forum.monnaie-libre.fr/t/membres-bureau-et-ca-axiom-team-2023/26650 +- **Membre du Conseil d'Administration** d'Axiom Team (2025/26) + Source : https://forum.monnaie-libre.fr/t/bureau-conseil-dadministration-axiom-team-2025-26/32967 +- **Product Owner (PO)** pour le developpement Cesium2 et l'ecosysteme G1 + +### Posts sur forum.duniter.org + +#### a) Strategie et coordination V2 (post #24, 12 sept 2024) +Source : https://forum.duniter.org/t/visio-strategie-et-coordination-v2-lundi-25-mars-13h/12087/24 + +Yvv a decrit l'organisation de la migration V2 : +- Version 1.10 = symbolique, pour la synchronisation reseau +- V2.0 = vrai redemarrage blockchain avec premieres decisions on-chain +- Equipes specialisees : communication, enquete de proximite, integration forgerons, traduction, developpement +- "Les modalites de deploiement ne sont pas definies" +- Equipe pivot : Hugo, Cgeek, Kimamila, Poka + +#### b) Entretiens forgerons -- rapport d'enquete (26 avril 2025) +Source : https://forum.duniter.org/t/entretiens-forgerons-rapport-denquete/13080 + +Yvv a dirige une enquete aupres des forgerons (decideurs cles pour la migration V2) : +- Equipe de 5 personnes +- 27 entretiens initiaux +- Questions : experience de V1, reception de V2, besoins d'information pour la prise de decision +- Rapport PDF publie : "Les entretiens Forgerons -- Rapport d'enquete" (234,4 KB) + +#### c) Wish Ticket : UX Certification (11 janvier 2026) +Source : https://forum.duniter.org/t/wish-ticket-ux-certification-alleger-la-pratique-du-qcm/13531 + +Yvv a identifie un probleme UX avec les QCM de certification : +- Le QCM systematique est vecu comme infantilisant +- Proposition : frequence configurable + - Basee sur les certifications : afficher tous les 3, 5, 8, ou 13 certifications (defaut : 5) + - Basee sur le temps : afficher tous les 2-5 mois (defaut : 3 mois) +- 
Pour les renouvellements : alerte confirmant que la personne est toujours joignable +- Anticipation des pratiques de certification en lot avec la V2 + +#### d) Fonction "queue list" pour les certifications (16 decembre 2025) +Source : https://forum.duniter.org/t/fonction-queue-list-pour-les-certifs-dans-les-clients/13496 + +Yvv a propose une specification complete pour la gestion de file d'attente des certifications : +- File d'attente locale sans synchronisation inter-appareils +- Ordonnancement chronologique avec reordonnancement manuel +- Certification automatique a l'expiration des delais de 5 jours +- Notifications externes (push ou daemon local) +- Distinction entre nouvelles certifications et renouvellements +- Processus de renouvellement simplifie (verification de statut vital uniquement) +- Poka a developpe cette fonctionnalite dans Cesium2 au hackathon de Lodeve + +#### e) Schema / diagramme de la crypto (janvier 2026) +Source : https://forum.duniter.org/t/schema-diagramme-de-la-crypto/13509 + +Yvv a propose la creation d'un diagramme pedagogique simplifie : +- Vocabulaire : coffre, comptes membres, comptes portefeuilles +- Acronymes : BIP39 -> seed, ED255-19 -> paire de cles, derivations BIP32 +- Approche modulaire : version technique precise d'abord, puis simplification par design +- Collaboration communautaire via Excalidraw ("GlobalChart") + +#### f) Gestion des enveloppes et projets (19 septembre 2025) +Source : https://forum.duniter.org/t/petit-point-sur-les-enveloppes-et-les-projets/13325 + +Yvv, en tant que "Pedro, ange gardien des dons", a organise des reunions avec les porteurs de projets : +- Revue des enveloppes ouvertes +- Appels de 5-10 min par enveloppe, le double ou triple pour une campagne +- References au site Axiom pour la section projets + +#### g) Hackathon Axiom #5 -- Preparation lancement V2 (janvier-fevrier 2026) +Source : https://forum.duniter.org/t/hackathon-axiom-5-preparation-lancement-v2/13523 + +Yvv a contribue en tant que 
facilitateur : +- Conversion du sujet en mode wiki pour organiser le backlog +- "La decision appartient de facto aux geniteurs" +- Position strategique : mobiliser les ressources la semaine apres le lancement (vs 1000i100 : la semaine avant) +- Backlog : finaliser les elements V2, tester le genesis local, runtime upgrades, infrastructure +- **Definition du scope et du calendrier de G1Vote** (mentionne comme tache future) +- Distinction "bugfixes" vs "changements necessitant reflexion collective" +- Jalons post-migration : SSO, IPFS datapods, messagerie, API Glib + +#### h) Preparation passage V2 (commentaires de janvier 2026) +Source : https://forum.duniter.org/t/preparation-au-08-03-2026-passage-a-la-v2/13538 + +- Proposition de lancement a minuit : "branle-bas le 7 au soir", T0 a "8 a 0h00m01s" +- Ceremonie le samedi soir a minuit avec travail technique la nuit, puis soutien communication le dimanche + +#### i) Hackathon Toulouse (novembre 2025) +Source : https://forum.duniter.org/t/hackathon-25sem47-capitole-prepa/13424 + +Yvv a organise deux sprints : +- Sprint Toulouse : crash test sur Gtest, preparation production, livraison G1 Companion +- Sprint Lodeve : lancement production V2 vise le 16 decembre (reporte a mars 2026) +- Liste des bloquants : confirmation technique/fonctionnelle, variables runtime upgrade, clients web/mobile, killswitch V1 + +### Posts sur forum.monnaie-libre.fr + +#### j) IA open source en Monnaie Libre (post #29) +Source : https://forum.monnaie-libre.fr/t/ia-open-source-en-monnaie-libre/29970/29 + +Yvv a contribue a la discussion sur l'IA open source dans l'ecosysteme Monnaie Libre. + +### Note sur les CDC (cahiers des charges) + +Aucun document formel intitule "cahier des charges" n'a ete trouve sous ce nom exact. 
Cependant, les contributions d'Yvv en tant que Product Owner constituent de facto des specifications fonctionnelles, notamment : +- La specification de la file d'attente de certifications (point d) +- Le wish ticket UX certification (point c) +- Le diagramme pedagogique crypto (point e) +- L'organisation du backlog au hackathon Axiom #5 (point g) + +--- + +## 7. Sources et liens + +### Forums +- Forum Duniter : https://forum.duniter.org/ +- Forum Monnaie Libre : https://forum.monnaie-libre.fr/ + +### Documents officiels +- Licence G1 (texte) : https://duniter.fr/wiki/g1/licence-txt/ +- Licence G1 (page) : https://duniter.fr/wiki/g1/licence-g1/ +- Licence G1 (monnaie-libre.fr) : https://monnaie-libre.fr/licence-g1/ +- Depot Git licence : https://git.duniter.org/documents/g1_monetary_license + +### Votes et engagements +- Vote Engagement Forgeron v2.0.0 : https://forum.monnaie-libre.fr/t/vote-engagement-forgeron-v2-0-0/33165 +- Vote Engagement Comite Tech v2.0.0 : https://forum.monnaie-libre.fr/t/vote-engagement-comite-tech-2-0-0-fr/33293 +- G1vote view (forgeron) : https://g1vote-view-237903.pages.duniter.org/#/vote/33165 +- MR smith_commitment : https://git.duniter.org/documents/g1_monetary_license/-/merge_requests/30 +- MR licence forgeron FR : https://git.duniter.org/documents/g1_monetary_license/-/merge_requests/27 (#28, #29) +- Processus vote nuance : https://forum.monnaie-libre.fr/t/processus-de-validation-licence-par-vote-nuance/31729 + +### G1vote +- Application : https://g1vote-view-237903.pages.duniter.org/ +- Depot source : https://git.duniter.org/tools/g1vote-view +- Auteur principal : 1000i100 (Millicent Billette) +- Profil GitLab : https://git.duniter.org/1000i100 + +### Discussions de gouvernance +- Atelier gouvernance V2 : https://forum.duniter.org/t/atelier-de-gouvernance-pour-la-blockchain-v2/10938 +- Sous-toile forgerons : https://forum.duniter.org/t/la-sous-toile-forgerons/9047 +- Toile des forgerons : 
https://forum.duniter.org/t/toile-des-forgerons/9999 +- Modifications licence G1 : https://forum.duniter.org/t/modifications-de-la-licence-g1/8362 +- Systeme de vote cles G1 : https://forum.duniter.org/t/systeme-de-vote-a-laide-des-cles-g1/6134 +- Vote quadratique : https://forum.monnaie-libre.fr/t/le-vote-quadratique-et-la-monnaie-libre/26773 +- Vote outil organisationnel : https://forum.monnaie-libre.fr/t/le-vote-en-tant-quoutil-organisationnel/3868 + +### Contributions Yvv +- Visio strategie V2 : https://forum.duniter.org/t/visio-strategie-et-coordination-v2-lundi-25-mars-13h/12087/24 +- Entretiens forgerons : https://forum.duniter.org/t/entretiens-forgerons-rapport-denquete/13080 +- Wish Ticket UX : https://forum.duniter.org/t/wish-ticket-ux-certification-alleger-la-pratique-du-qcm/13531 +- Queue certifications : https://forum.duniter.org/t/fonction-queue-list-pour-les-certifs-dans-les-clients/13496 +- Cesium2 queue : https://forum.duniter.org/t/cesium2-certification-queue-management/13493 +- Schema crypto : https://forum.duniter.org/t/schema-diagramme-de-la-crypto/13509 +- Enveloppes projets : https://forum.duniter.org/t/petit-point-sur-les-enveloppes-et-les-projets/13325 +- Hackathon Axiom #5 : https://forum.duniter.org/t/hackathon-axiom-5-preparation-lancement-v2/13523 +- Preparation V2 : https://forum.duniter.org/t/preparation-au-08-03-2026-passage-a-la-v2/13538 +- Hackathon Toulouse : https://forum.duniter.org/t/hackathon-25sem47-capitole-prepa/13424 +- Axiom Team 2023 : https://forum.monnaie-libre.fr/t/membres-bureau-et-ca-axiom-team-2023/26650 +- Axiom Team 2025/26 : https://forum.monnaie-libre.fr/t/bureau-conseil-dadministration-axiom-team-2025-26/32967 +- IA open source : https://forum.monnaie-libre.fr/t/ia-open-source-en-monnaie-libre/29970/29 + +### Ecosysteme 1000i100 +- Page d'outils : https://g1.1000i100.fr/ +- Profil GitLab : https://git.duniter.org/1000i100 +- Charte G1 (fork) : https://git.duniter.org/1000i100/g1_charter