feat: découverte automatique des nœuds squid via duniter_peerings
ci/woodpecker/push/woodpecker Pipeline was successful
ci/woodpecker/push/woodpecker Pipeline was successful
- PeerDiscovery.ts : appel duniter_peerings sur rpc.duniter.org, extraction des endpoints squid, normalisation URLs, cache localStorage 24h - EndpointPopover : section "Réseau Ğ1" avec nœuds découverts auto-testés à l'ouverture, bouton actualiser pour forcer un refresh du cache - FlowMap : zone de hit des arcs réduite (max 12→4 px) pour ne plus interférer avec le zoom/déplacement de la carte Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
This commit is contained in:
@@ -0,0 +1,62 @@
|
||||
// Duniter JSON-RPC node queried for the network's peering documents.
const DUNITER_RPC = 'https://rpc.duniter.org';

// localStorage key under which discovered peers are cached (versioned suffix
// so a future format change can invalidate old entries).
const CACHE_KEY = 'geoflux-peers-v1';

// Cache lifetime: 24 hours, in milliseconds.
const CACHE_TTL_MS = 24 * 60 * 60 * 1000;

// Shape of the JSON blob persisted under CACHE_KEY.
interface PeerCache {
  // Normalized squid GraphQL endpoint URLs, in discovery order.
  urls: string[];
  // Epoch milliseconds of the fetch that produced `urls`.
  fetchedAt: number;
}
|
||||
|
||||
function normalizeSquidUrl(raw: string): string {
|
||||
const url = raw.replace(/\/$/, '');
|
||||
return url.endsWith('/v1/graphql') ? url : `${url}/v1/graphql`;
|
||||
}
|
||||
|
||||
export async function discoverSquidNodes(): Promise<string[]> {
|
||||
try {
|
||||
const cached = localStorage.getItem(CACHE_KEY);
|
||||
if (cached) {
|
||||
const parsed: PeerCache = JSON.parse(cached);
|
||||
if (Date.now() - parsed.fetchedAt < CACHE_TTL_MS) return parsed.urls;
|
||||
}
|
||||
} catch { /* ignore */ }
|
||||
|
||||
const controller = new AbortController();
|
||||
const timer = setTimeout(() => controller.abort(), 8_000);
|
||||
try {
|
||||
const res = await fetch(DUNITER_RPC, {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ jsonrpc: '2.0', method: 'duniter_peerings', params: [], id: 1 }),
|
||||
signal: controller.signal,
|
||||
});
|
||||
const data = await res.json();
|
||||
const peers: { peer_id: string; endpoints: { protocol: string; address: string }[] }[] =
|
||||
data?.result?.peerings ?? [];
|
||||
|
||||
const seen = new Set<string>();
|
||||
const urls: string[] = [];
|
||||
for (const peer of peers) {
|
||||
for (const ep of peer.endpoints ?? []) {
|
||||
if (ep.protocol === 'squid' && ep.address) {
|
||||
const normalized = normalizeSquidUrl(ep.address);
|
||||
if (!seen.has(normalized)) {
|
||||
seen.add(normalized);
|
||||
urls.push(normalized);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
localStorage.setItem(CACHE_KEY, JSON.stringify({ urls, fetchedAt: Date.now() }));
|
||||
return urls;
|
||||
} catch {
|
||||
return [];
|
||||
} finally {
|
||||
clearTimeout(timer);
|
||||
}
|
||||
}
|
||||
|
||||
export function clearPeerCache(): void {
|
||||
localStorage.removeItem(CACHE_KEY);
|
||||
}
|
||||
Reference in New Issue
Block a user