diff --git a/fossils/fossil_record_export.py b/fossils/fossil_record_export.py index efc505d76..32da666b2 100644 --- a/fossils/fossil_record_export.py +++ b/fossils/fossil_record_export.py @@ -25,6 +25,9 @@ from http.server import HTTPServer, SimpleHTTPRequestHandler from functools import lru_cache +# Canonical genesis timestamp — must match node consensus modules +GENESIS_TIMESTAMP = 1764706927 # Production chain launch (Dec 2, 2025) + logging.basicConfig( level=logging.INFO, format='[Fossil Record] %(asctime)s %(levelname)s %(message)s', @@ -177,12 +180,12 @@ def fetch_attestation_history(db_path: str, limit: int = 10000) -> List[Dict]: return attestations -def calculate_epoch(timestamp: int, genesis_timestamp: int = 1728000000) -> int: +def calculate_epoch(timestamp: int, genesis_timestamp: int = 1764706927) -> int: """ Calculate epoch number from timestamp. - + RustChain epochs are approximately 24 hours (86400 seconds). - Genesis timestamp defaults to Oct 4, 2024 (RustChain launch). + Genesis timestamp defaults to production chain launch (Dec 2, 2025). 
""" if not timestamp: return 0 @@ -253,7 +256,7 @@ def generate_sample_data(num_epochs: int = 150, num_miners: int = 100) -> List[D }) # Generate attestations across epochs - genesis_timestamp = 1728000000 + genesis_timestamp = 1764706927 for epoch in range(num_epochs + 1): epoch_timestamp = genesis_timestamp + (epoch * 86400) diff --git a/node/anti_double_mining.py b/node/anti_double_mining.py index ac0473f34..683000242 100644 --- a/node/anti_double_mining.py +++ b/node/anti_double_mining.py @@ -27,6 +27,10 @@ from typing import Dict, List, Optional, Tuple, Any from dataclasses import dataclass +# Canonical genesis timestamp — must match rip_200_round_robin_1cpu1vote.py +GENESIS_TIMESTAMP = 1764706927 # Production chain launch (Dec 2, 2025) +BLOCK_TIME = 600 + logging.basicConfig( level=logging.INFO, format='%(asctime)s [ANTI-DOUBLE-MINING] %(levelname)s: %(message)s' @@ -289,8 +293,8 @@ def get_epoch_miner_groups( """ epoch_start_slot = epoch * 144 epoch_end_slot = epoch_start_slot + 143 - epoch_start_ts = 1728000000 + (epoch_start_slot * 600) # GENESIS_TIMESTAMP - epoch_end_ts = 1728000000 + (epoch_end_slot * 600) + epoch_start_ts = GENESIS_TIMESTAMP + (epoch_start_slot * BLOCK_TIME) + epoch_end_ts = GENESIS_TIMESTAMP + (epoch_end_slot * BLOCK_TIME) cursor = conn.cursor() @@ -370,9 +374,9 @@ def calculate_anti_double_mining_rewards( epoch_start_slot = epoch * 144 epoch_end_slot = epoch_start_slot + 143 - epoch_start_ts = 1728000000 + (epoch_start_slot * 600) - epoch_end_ts = 1728000000 + (epoch_end_slot * 600) - + epoch_start_ts = GENESIS_TIMESTAMP + (epoch_start_slot * BLOCK_TIME) + epoch_end_ts = GENESIS_TIMESTAMP + (epoch_end_slot * BLOCK_TIME) + with sqlite3.connect(db_path) as conn: conn.execute("BEGIN") @@ -651,8 +655,8 @@ def _calculate_anti_double_mining_rewards_conn( epoch_start_slot = epoch * 144 epoch_end_slot = epoch_start_slot + 143 - epoch_start_ts = 1728000000 + (epoch_start_slot * 600) - epoch_end_ts = 1728000000 + (epoch_end_slot * 600) + 
epoch_start_ts = GENESIS_TIMESTAMP + (epoch_start_slot * BLOCK_TIME)
+    epoch_end_ts = GENESIS_TIMESTAMP + (epoch_end_slot * BLOCK_TIME)
 
     # Detect duplicate identities
     duplicates = detect_duplicate_identities(conn, epoch, epoch_start_ts, epoch_end_ts)
@@ -836,7 +840,7 @@ def setup_test_scenario(db_path: str):
     # Insert test data
     current_ts = int(time.time())
     epoch = 0
-    epoch_start_ts = 1728000000 + (epoch * 144 * 600)
+    epoch_start_ts = GENESIS_TIMESTAMP + (epoch * 144 * BLOCK_TIME)
 
     # Machine A: Same fingerprint, 3 different miner IDs
     fingerprint_a = json.dumps({
@@ -924,8 +928,8 @@ def setup_test_scenario(db_path: str):
     setup_test_scenario(test_db)
 
     print("\n=== Testing Anti-Double-Mining Detection ===\n")
-
-    current_slot = (int(time.time()) - 1728000000) // 600
+
+    current_slot = (int(time.time()) - GENESIS_TIMESTAMP) // BLOCK_TIME
     rewards, telemetry = calculate_anti_double_mining_rewards(
         test_db, epoch=0, total_reward_urtc=150_000_000, current_slot=current_slot
     )
diff --git a/node/governance.py b/node/governance.py
index b1f5ce977..f8c93cd51 100644
--- a/node/governance.py
+++ b/node/governance.py
@@ -40,7 +40,7 @@
 VOTING_WINDOW_SECONDS = 7 * 86400  # 7 days
 QUORUM_THRESHOLD = 0.33  # 33% of active miners
 FOUNDER_VETO_DURATION = 2 * 365 * 86400  # 2 years from genesis
-GENESIS_TIMESTAMP = 1700000000  # Approximate RustChain genesis (override if needed)
+GENESIS_TIMESTAMP = 1764706927  # Production chain launch (Dec 2, 2025)
 MAX_PROPOSALS_PER_MINER = 10  # Anti-spam: max active proposals
 MAX_TITLE_LEN = 200
 MAX_DESCRIPTION_LEN = 10000
diff --git a/node/rewards_implementation_rip200.py b/node/rewards_implementation_rip200.py
index 7a95eae91..1b48d9451 100644
--- a/node/rewards_implementation_rip200.py
+++ b/node/rewards_implementation_rip200.py
@@ -87,7 +87,7 @@ def jsonify(obj):
 DB_PATH = "/root/rustchain/rustchain_v2.db"
 PER_EPOCH_URTC = int(1.5 * UNIT)  # 1,500,000 uRTC
 BLOCK_TIME = 600
-GENESIS_TIMESTAMP = 1728000000  # Placeholder - will be set from server
+GENESIS_TIMESTAMP = 1764706927  # Production chain launch (Dec 2, 2025)
 
 def current_slot():
     """Get current blockchain slot"""
diff --git a/node/rip_200_round_robin_1cpu1vote_v2.py b/node/rip_200_round_robin_1cpu1vote_v2.py
index 7ba967023..520c8f64a 100644
--- a/node/rip_200_round_robin_1cpu1vote_v2.py
+++ b/node/rip_200_round_robin_1cpu1vote_v2.py
@@ -1,426 +1,426 @@
-#!/usr/bin/env python3
-"""
-RIP-200 v2: Round-Robin Consensus (1 CPU = 1 Vote)
-==================================================
-
-Updated Antiquity Multiplier System:
-- PowerPC: High multipliers (2.0-2.5x) - true vintage
-- Intel Mac (2006-2019): Sliding scale based on age (1.0-1.5x)
-- Server x86 (5+ years): Medium multiplier (0.5-1.0x)
-- Modern x86 (<5 years): Starts at 0.1x, earns 15%/year loyalty bonus
-- Apple Silicon: 1.2x (modern but premium hardware)
-"""
-
-import sqlite3
-import time
-from typing import List, Tuple, Dict
-from datetime import datetime
-
-# Genesis timestamp
-GENESIS_TIMESTAMP = 1728000000  # Oct 4, 2024 00:00:00 UTC
-BLOCK_TIME = 600  # 10 minutes
-ATTESTATION_TTL = 600  # 10 minutes
-CURRENT_YEAR = 2025
-
-# =============================================================================
-# ANTIQUITY MULTIPLIER SYSTEM v2
-# =============================================================================
-
-# Base multipliers by architecture class
-BASE_MULTIPLIERS = {
-    # PowerPC - True
Vintage (pre-2006) - "g4": 2.5, # PowerPC G4 (2001-2005) - Most valuable - "g5": 2.0, # PowerPC G5 (2003-2006) - High value - - # Apple Silicon - Modern Premium - "apple_silicon": 1.2, # M1/M2/M3 (2020+) - Premium but modern - "m1": 1.2, - "m2": 1.2, - "m3": 1.2, - - # Placeholders - calculated dynamically - "intel_mac": None, # Calculated based on model year - "server_x86": None, # Calculated based on age - "modern_x86": 0.1, # Base rate, can earn loyalty bonus -} - -# Intel Mac model years (for sliding scale) -INTEL_MAC_MODELS = { - "MacPro1,1": 2006, - "MacPro2,1": 2007, - "MacPro3,1": 2008, - "MacPro4,1": 2009, - "MacPro5,1": 2010, - "MacPro6,1": 2013, # Trash can Mac Pro - "MacPro7,1": 2019, # Cheese grater Mac Pro - "iMacPro1,1": 2017, - "Macmini6,1": 2012, - "Macmini6,2": 2012, - "Macmini7,1": 2014, - "MacBookPro11,1": 2013, - "MacBookPro11,2": 2013, - "MacBookPro11,3": 2013, - "MacBookPro12,1": 2015, - "MacBookPro13,1": 2016, - "MacBookPro14,1": 2017, - "MacBookPro15,1": 2018, - "MacBookPro16,1": 2019, -} - -# Time decay parameters -DECAY_RATE_PER_YEAR = 0.15 # 15% decay per year for vintage bonus -LOYALTY_RATE_PER_YEAR = 0.15 # 15% bonus per year for modern x86 uptime - - -def get_intel_mac_multiplier(model_identifier: str, manufacture_year: int = None) -> float: - """ - Calculate multiplier for Intel Macs based on age - - Sliding scale: - - 15+ years old: 1.5x (2006-2010 Mac Pros) - - 12-14 years old: 1.3x (2011-2013 Mac Pros) - - 8-11 years old: 1.1x (2014-2017) - - 5-7 years old: 1.0x (2018-2020) - - <5 years old: 0.8x (2021+, unlikely for Intel) - """ - # Try to get year from model identifier - if manufacture_year is None: - manufacture_year = INTEL_MAC_MODELS.get(model_identifier, CURRENT_YEAR - 5) - - age = CURRENT_YEAR - manufacture_year - - if age >= 15: - return 1.5 # True vintage Intel (2006-2010) - elif age >= 12: - return 1.3 # Classic Intel (2011-2013) - elif age >= 8: - return 1.1 # Aging Intel (2014-2017) - elif age >= 5: - return 1.0 # 
Recent Intel (2018-2020) - else: - return 0.8 # Very recent Intel - - -def get_server_x86_multiplier(manufacture_year: int) -> float: - """ - Calculate multiplier for server/workstation x86 based on age - - Sliding scale: - - 10+ years old: 1.0x (pre-2015) - - 8-9 years old: 0.7x (2016-2017) - - 6-7 years old: 0.5x (2018-2019) - - 5 years old: 0.3x (2020) - - <5 years old: 0.1x (2021+) - modern baseline - """ - age = CURRENT_YEAR - manufacture_year - - if age >= 10: - return 1.0 # Vintage server - elif age >= 8: - return 0.7 # Aging server (like 2017 PowerEdge) - elif age >= 6: - return 0.5 # Middle-aged server - elif age >= 5: - return 0.3 # Recent server - else: - return 0.1 # Modern server - - -def get_loyalty_bonus(miner_id: str, db_path: str, base_multiplier: float) -> float: - """ - Calculate loyalty bonus for modern x86 miners - - Modern x86 (<5 years) starts at 0.1x but earns 15% per year - for consistent uptime (measured by attestation history) - - Max bonus caps at 1.0x total (10 years of perfect uptime) - """ - if base_multiplier > 0.1: - return 0.0 # Only modern x86 gets loyalty bonus - - try: - with sqlite3.connect(db_path) as conn: - cursor = conn.cursor() - - # Get first attestation timestamp for this miner - cursor.execute(""" - SELECT MIN(ts_ok) FROM miner_attest_history - WHERE miner = ? 
- """, (miner_id,)) - - result = cursor.fetchone() - if not result or not result[0]: - return 0.0 - - first_attest = result[0] - - # Calculate years of uptime - now = int(time.time()) - years_online = (now - first_attest) / (365.25 * 24 * 3600) - - # 15% bonus per year, capped at 0.9 additional (total max 1.0) - loyalty_bonus = min(years_online * LOYALTY_RATE_PER_YEAR, 0.9) - - return loyalty_bonus - - except Exception: - return 0.0 - - -def get_device_multiplier(device_info: Dict, db_path: str = None, miner_id: str = None) -> float: - """ - Master function to calculate multiplier for any device - - device_info should contain: - - arch: Architecture key (g4, g5, apple_silicon, intel_mac, server_x86, modern_x86) - - model: Model identifier (optional, for Intel Macs) - - year: Manufacture year (optional) - - family: Family name (optional, for display) - """ - arch = device_info.get("arch", "modern_x86").lower() - model = device_info.get("model", "") - year = device_info.get("year", CURRENT_YEAR) - - # PowerPC - Fixed high multipliers - if arch in ["g4", "ppc_g4", "powerpc_g4"]: - return 2.5 - elif arch in ["g5", "ppc_g5", "powerpc_g5"]: - return 2.0 - - # Apple Silicon - Fixed premium multiplier - elif arch in ["apple_silicon", "m1", "m2", "m3", "arm64_apple"]: - return 1.2 - - # Intel Mac - Sliding scale based on age - elif arch in ["intel_mac", "x86_64_mac", "mac_intel"]: - return get_intel_mac_multiplier(model, year) - - # Server/Workstation x86 - Sliding scale based on age - elif arch in ["server_x86", "workstation_x86", "xeon", "epyc"]: - return get_server_x86_multiplier(year) - - # Modern x86 - Base 0.1x + loyalty bonus - else: - base = 0.1 - loyalty = 0.0 - if db_path and miner_id: - loyalty = get_loyalty_bonus(miner_id, db_path, base) - return base + loyalty - - -def get_time_aged_multiplier(device_arch: str, chain_age_years: float, device_info: Dict = None) -> float: - """ - Calculate time-aged antiquity multiplier with decay - - Vintage hardware bonus 
decays linearly over blockchain lifetime: - - Year 0: Full multiplier - - Year 10: Significantly reduced - - Year 16.67: Vintage bonus fully decayed to modern baseline - - Modern x86 with loyalty bonus does NOT decay (reward for commitment) - """ - if device_info: - base_multiplier = get_device_multiplier(device_info) - else: - # Fallback to simple lookup - base_multiplier = BASE_MULTIPLIERS.get(device_arch.lower(), 0.1) - - # Modern x86 doesn't decay (loyalty bonus is earned, not given) - if base_multiplier <= 0.1: - return base_multiplier - - # Apple Silicon gets slight decay (it's modern hardware) - if device_arch.lower() in ["apple_silicon", "m1", "m2", "m3", "arm64_apple"]: - decay_rate = 0.05 # 5% per year (slower decay for premium) - else: - decay_rate = DECAY_RATE_PER_YEAR - - # Calculate decayed bonus - if base_multiplier <= 1.0: - return base_multiplier # No bonus to decay - - vintage_bonus = base_multiplier - 1.0 - aged_bonus = max(0, vintage_bonus * (1 - decay_rate * chain_age_years)) - - return 1.0 + aged_bonus - - -# ============================================================================= -# ROUND-ROBIN CONSENSUS FUNCTIONS -# ============================================================================= - -def get_chain_age_years(current_slot: int) -> float: - """Calculate blockchain age in years from slot number""" - chain_age_seconds = current_slot * BLOCK_TIME - return chain_age_seconds / (365.25 * 24 * 3600) - - -def get_attested_miners(db_path: str, current_ts: int) -> List[Tuple[str, str, Dict]]: - """ - Get all currently attested miners (within TTL window) - - Returns: List of (miner_id, device_arch, device_info) tuples, sorted alphabetically - """ - with sqlite3.connect(db_path) as conn: - cursor = conn.cursor() - - cursor.execute(""" - SELECT miner, device_arch, device_family, device_model, device_year - FROM miner_attest_recent - WHERE ts_ok >= ? 
- ORDER BY miner ASC - """, (current_ts - ATTESTATION_TTL,)) - - results = [] - for row in cursor.fetchall(): - miner_id, arch, family, model, year = row - device_info = { - "arch": arch or "modern_x86", - "family": family or "", - "model": model or "", - "year": year or CURRENT_YEAR - } - results.append((miner_id, arch, device_info)) - - return results - - -def get_round_robin_producer(slot: int, attested_miners: List) -> str: - """Deterministic round-robin block producer selection""" - if not attested_miners: - return None - producer_index = slot % len(attested_miners) - return attested_miners[producer_index][0] - - -def calculate_epoch_rewards_v2( - db_path: str, - epoch: int, - total_reward_urtc: int, - current_slot: int -) -> Dict[str, int]: - """ - Calculate reward distribution with v2 multiplier system - """ - chain_age_years = get_chain_age_years(current_slot) - - epoch_start_slot = epoch * 144 - epoch_end_slot = epoch_start_slot + 143 - epoch_start_ts = GENESIS_TIMESTAMP + (epoch_start_slot * BLOCK_TIME) - epoch_end_ts = GENESIS_TIMESTAMP + (epoch_end_slot * BLOCK_TIME) - - with sqlite3.connect(db_path) as conn: - cursor = conn.cursor() - - cursor.execute(""" - SELECT DISTINCT miner, device_arch, device_family, device_model, device_year - FROM miner_attest_recent - WHERE ts_ok >= ? AND ts_ok <= ? 
- """, (epoch_start_ts - ATTESTATION_TTL, epoch_end_ts)) - - epoch_miners = cursor.fetchall() - - if not epoch_miners: - return {} - - # Calculate weights with v2 system - weighted_miners = [] - total_weight = 0.0 - - for row in epoch_miners: - miner_id, arch, family, model, year = row - device_info = { - "arch": arch or "modern_x86", - "family": family or "", - "model": model or "", - "year": year or CURRENT_YEAR - } - - base_mult = get_device_multiplier(device_info, db_path, miner_id) - weight = get_time_aged_multiplier(arch, chain_age_years, device_info) - - weighted_miners.append((miner_id, weight, device_info)) - total_weight += weight - - # Distribute rewards - rewards = {} - remaining = total_reward_urtc - - for i, (miner_id, weight, device_info) in enumerate(weighted_miners): - if i == len(weighted_miners) - 1: - share = remaining - else: - share = int((weight / total_weight) * total_reward_urtc) - remaining -= share - - rewards[miner_id] = share - - return rewards - - -# ============================================================================= -# EXAMPLE / TEST -# ============================================================================= - -if __name__ == "__main__": - print("=" * 70) - print("RustChain Antiquity Multiplier System v2") - print("=" * 70) - - # Test devices - test_devices = [ - {"arch": "g4", "family": "PowerPC G4", "year": 2003}, - {"arch": "g5", "family": "PowerPC G5", "year": 2005}, - {"arch": "intel_mac", "model": "MacPro6,1", "year": 2013}, # 12 years old - {"arch": "server_x86", "family": "Dell PowerEdge", "year": 2017}, # 8 years old - {"arch": "apple_silicon", "family": "Apple M2", "year": 2022}, - {"arch": "modern_x86", "family": "Modern Desktop", "year": 2023}, - ] - - print("\n=== Base Multipliers (Year 0) ===") - print(f"{'Device':<30} {'Age':>8} {'Multiplier':>12}") - print("-" * 52) - - for device in test_devices: - mult = get_device_multiplier(device) - age = CURRENT_YEAR - device.get("year", CURRENT_YEAR) - name = 
device.get("family", device.get("arch")) - print(f"{name:<30} {age:>5} yr {mult:>10.2f}x") - - print("\n=== Multiplier Decay Over Blockchain Lifetime ===") - for years in [0, 2, 5, 10, 15]: - print(f"\n--- Chain Age: {years} years ---") - for device in test_devices: - arch = device.get("arch") - mult = get_time_aged_multiplier(arch, years, device) - name = device.get("family", device.get("arch"))[:25] - print(f" {name:<25}: {mult:.3f}x") - - print("\n=== Reward Distribution Example (1.5 RTC) ===") - total_reward = 150_000_000 # 1.5 RTC in uRTC - - weights = [] - for device in test_devices: - mult = get_device_multiplier(device) - weights.append((device.get("family", device.get("arch")), mult)) - - total_weight = sum(w[1] for w in weights) - - print(f"{'Device':<30} {'Multiplier':>10} {'Share (RTC)':>12} {'Percent':>8}") - print("-" * 62) - - for name, mult in weights: - share_urtc = int((mult / total_weight) * total_reward) - share_rtc = share_urtc / 100_000_000 - pct = (mult / total_weight) * 100 - print(f"{name:<30} {mult:>8.2f}x {share_rtc:>10.6f} {pct:>7.1f}%") - - print("\n" + "=" * 70) - print("Key Points:") - print("- PowerPC G4/G5: Highest multipliers (true vintage)") - print("- Intel Mac: Sliding scale 0.8-1.5x based on age") - print("- Server x86: Sliding scale 0.1-1.0x based on age") - print("- Modern x86: 0.1x base + 15%/year loyalty bonus") - print("- Vintage bonuses decay 15%/year over chain lifetime") - print("- Loyalty bonuses do NOT decay (reward for commitment)") - print("=" * 70) +#!/usr/bin/env python3 +""" +RIP-200 v2: Round-Robin Consensus (1 CPU = 1 Vote) +================================================== + +Updated Antiquity Multiplier System: +- PowerPC: High multipliers (2.0-2.5x) - true vintage +- Intel Mac (2006-2019): Sliding scale based on age (1.0-1.5x) +- Server x86 (5+ years): Medium multiplier (0.5-1.0x) +- Modern x86 (<5 years): Starts at 0.1x, earns 15%/year loyalty bonus +- Apple Silicon: 1.2x (modern but premium hardware) +""" 
+ +import sqlite3 +import time +from typing import List, Tuple, Dict +from datetime import datetime + +# Genesis timestamp +GENESIS_TIMESTAMP = 1764706927 # Production chain launch (Dec 2, 2025) +BLOCK_TIME = 600 # 10 minutes +ATTESTATION_TTL = 600 # 10 minutes +CURRENT_YEAR = 2025 + +# ============================================================================= +# ANTIQUITY MULTIPLIER SYSTEM v2 +# ============================================================================= + +# Base multipliers by architecture class +BASE_MULTIPLIERS = { + # PowerPC - True Vintage (pre-2006) + "g4": 2.5, # PowerPC G4 (2001-2005) - Most valuable + "g5": 2.0, # PowerPC G5 (2003-2006) - High value + + # Apple Silicon - Modern Premium + "apple_silicon": 1.2, # M1/M2/M3 (2020+) - Premium but modern + "m1": 1.2, + "m2": 1.2, + "m3": 1.2, + + # Placeholders - calculated dynamically + "intel_mac": None, # Calculated based on model year + "server_x86": None, # Calculated based on age + "modern_x86": 0.1, # Base rate, can earn loyalty bonus +} + +# Intel Mac model years (for sliding scale) +INTEL_MAC_MODELS = { + "MacPro1,1": 2006, + "MacPro2,1": 2007, + "MacPro3,1": 2008, + "MacPro4,1": 2009, + "MacPro5,1": 2010, + "MacPro6,1": 2013, # Trash can Mac Pro + "MacPro7,1": 2019, # Cheese grater Mac Pro + "iMacPro1,1": 2017, + "Macmini6,1": 2012, + "Macmini6,2": 2012, + "Macmini7,1": 2014, + "MacBookPro11,1": 2013, + "MacBookPro11,2": 2013, + "MacBookPro11,3": 2013, + "MacBookPro12,1": 2015, + "MacBookPro13,1": 2016, + "MacBookPro14,1": 2017, + "MacBookPro15,1": 2018, + "MacBookPro16,1": 2019, +} + +# Time decay parameters +DECAY_RATE_PER_YEAR = 0.15 # 15% decay per year for vintage bonus +LOYALTY_RATE_PER_YEAR = 0.15 # 15% bonus per year for modern x86 uptime + + +def get_intel_mac_multiplier(model_identifier: str, manufacture_year: int = None) -> float: + """ + Calculate multiplier for Intel Macs based on age + + Sliding scale: + - 15+ years old: 1.5x (2006-2010 Mac Pros) + - 12-14 years 
old: 1.3x (2011-2013 Mac Pros) + - 8-11 years old: 1.1x (2014-2017) + - 5-7 years old: 1.0x (2018-2020) + - <5 years old: 0.8x (2021+, unlikely for Intel) + """ + # Try to get year from model identifier + if manufacture_year is None: + manufacture_year = INTEL_MAC_MODELS.get(model_identifier, CURRENT_YEAR - 5) + + age = CURRENT_YEAR - manufacture_year + + if age >= 15: + return 1.5 # True vintage Intel (2006-2010) + elif age >= 12: + return 1.3 # Classic Intel (2011-2013) + elif age >= 8: + return 1.1 # Aging Intel (2014-2017) + elif age >= 5: + return 1.0 # Recent Intel (2018-2020) + else: + return 0.8 # Very recent Intel + + +def get_server_x86_multiplier(manufacture_year: int) -> float: + """ + Calculate multiplier for server/workstation x86 based on age + + Sliding scale: + - 10+ years old: 1.0x (pre-2015) + - 8-9 years old: 0.7x (2016-2017) + - 6-7 years old: 0.5x (2018-2019) + - 5 years old: 0.3x (2020) + - <5 years old: 0.1x (2021+) - modern baseline + """ + age = CURRENT_YEAR - manufacture_year + + if age >= 10: + return 1.0 # Vintage server + elif age >= 8: + return 0.7 # Aging server (like 2017 PowerEdge) + elif age >= 6: + return 0.5 # Middle-aged server + elif age >= 5: + return 0.3 # Recent server + else: + return 0.1 # Modern server + + +def get_loyalty_bonus(miner_id: str, db_path: str, base_multiplier: float) -> float: + """ + Calculate loyalty bonus for modern x86 miners + + Modern x86 (<5 years) starts at 0.1x but earns 15% per year + for consistent uptime (measured by attestation history) + + Max bonus caps at 1.0x total (10 years of perfect uptime) + """ + if base_multiplier > 0.1: + return 0.0 # Only modern x86 gets loyalty bonus + + try: + with sqlite3.connect(db_path) as conn: + cursor = conn.cursor() + + # Get first attestation timestamp for this miner + cursor.execute(""" + SELECT MIN(ts_ok) FROM miner_attest_history + WHERE miner = ? 
+ """, (miner_id,)) + + result = cursor.fetchone() + if not result or not result[0]: + return 0.0 + + first_attest = result[0] + + # Calculate years of uptime + now = int(time.time()) + years_online = (now - first_attest) / (365.25 * 24 * 3600) + + # 15% bonus per year, capped at 0.9 additional (total max 1.0) + loyalty_bonus = min(years_online * LOYALTY_RATE_PER_YEAR, 0.9) + + return loyalty_bonus + + except Exception: + return 0.0 + + +def get_device_multiplier(device_info: Dict, db_path: str = None, miner_id: str = None) -> float: + """ + Master function to calculate multiplier for any device + + device_info should contain: + - arch: Architecture key (g4, g5, apple_silicon, intel_mac, server_x86, modern_x86) + - model: Model identifier (optional, for Intel Macs) + - year: Manufacture year (optional) + - family: Family name (optional, for display) + """ + arch = device_info.get("arch", "modern_x86").lower() + model = device_info.get("model", "") + year = device_info.get("year", CURRENT_YEAR) + + # PowerPC - Fixed high multipliers + if arch in ["g4", "ppc_g4", "powerpc_g4"]: + return 2.5 + elif arch in ["g5", "ppc_g5", "powerpc_g5"]: + return 2.0 + + # Apple Silicon - Fixed premium multiplier + elif arch in ["apple_silicon", "m1", "m2", "m3", "arm64_apple"]: + return 1.2 + + # Intel Mac - Sliding scale based on age + elif arch in ["intel_mac", "x86_64_mac", "mac_intel"]: + return get_intel_mac_multiplier(model, year) + + # Server/Workstation x86 - Sliding scale based on age + elif arch in ["server_x86", "workstation_x86", "xeon", "epyc"]: + return get_server_x86_multiplier(year) + + # Modern x86 - Base 0.1x + loyalty bonus + else: + base = 0.1 + loyalty = 0.0 + if db_path and miner_id: + loyalty = get_loyalty_bonus(miner_id, db_path, base) + return base + loyalty + + +def get_time_aged_multiplier(device_arch: str, chain_age_years: float, device_info: Dict = None) -> float: + """ + Calculate time-aged antiquity multiplier with decay + + Vintage hardware bonus 
decays linearly over blockchain lifetime: + - Year 0: Full multiplier + - Year 10: Significantly reduced + - Year 16.67: Vintage bonus fully decayed to modern baseline + + Modern x86 with loyalty bonus does NOT decay (reward for commitment) + """ + if device_info: + base_multiplier = get_device_multiplier(device_info) + else: + # Fallback to simple lookup + base_multiplier = BASE_MULTIPLIERS.get(device_arch.lower(), 0.1) + + # Modern x86 doesn't decay (loyalty bonus is earned, not given) + if base_multiplier <= 0.1: + return base_multiplier + + # Apple Silicon gets slight decay (it's modern hardware) + if device_arch.lower() in ["apple_silicon", "m1", "m2", "m3", "arm64_apple"]: + decay_rate = 0.05 # 5% per year (slower decay for premium) + else: + decay_rate = DECAY_RATE_PER_YEAR + + # Calculate decayed bonus + if base_multiplier <= 1.0: + return base_multiplier # No bonus to decay + + vintage_bonus = base_multiplier - 1.0 + aged_bonus = max(0, vintage_bonus * (1 - decay_rate * chain_age_years)) + + return 1.0 + aged_bonus + + +# ============================================================================= +# ROUND-ROBIN CONSENSUS FUNCTIONS +# ============================================================================= + +def get_chain_age_years(current_slot: int) -> float: + """Calculate blockchain age in years from slot number""" + chain_age_seconds = current_slot * BLOCK_TIME + return chain_age_seconds / (365.25 * 24 * 3600) + + +def get_attested_miners(db_path: str, current_ts: int) -> List[Tuple[str, str, Dict]]: + """ + Get all currently attested miners (within TTL window) + + Returns: List of (miner_id, device_arch, device_info) tuples, sorted alphabetically + """ + with sqlite3.connect(db_path) as conn: + cursor = conn.cursor() + + cursor.execute(""" + SELECT miner, device_arch, device_family, device_model, device_year + FROM miner_attest_recent + WHERE ts_ok >= ? 
+ ORDER BY miner ASC + """, (current_ts - ATTESTATION_TTL,)) + + results = [] + for row in cursor.fetchall(): + miner_id, arch, family, model, year = row + device_info = { + "arch": arch or "modern_x86", + "family": family or "", + "model": model or "", + "year": year or CURRENT_YEAR + } + results.append((miner_id, arch, device_info)) + + return results + + +def get_round_robin_producer(slot: int, attested_miners: List) -> str: + """Deterministic round-robin block producer selection""" + if not attested_miners: + return None + producer_index = slot % len(attested_miners) + return attested_miners[producer_index][0] + + +def calculate_epoch_rewards_v2( + db_path: str, + epoch: int, + total_reward_urtc: int, + current_slot: int +) -> Dict[str, int]: + """ + Calculate reward distribution with v2 multiplier system + """ + chain_age_years = get_chain_age_years(current_slot) + + epoch_start_slot = epoch * 144 + epoch_end_slot = epoch_start_slot + 143 + epoch_start_ts = GENESIS_TIMESTAMP + (epoch_start_slot * BLOCK_TIME) + epoch_end_ts = GENESIS_TIMESTAMP + (epoch_end_slot * BLOCK_TIME) + + with sqlite3.connect(db_path) as conn: + cursor = conn.cursor() + + cursor.execute(""" + SELECT DISTINCT miner, device_arch, device_family, device_model, device_year + FROM miner_attest_recent + WHERE ts_ok >= ? AND ts_ok <= ? 
+ """, (epoch_start_ts - ATTESTATION_TTL, epoch_end_ts)) + + epoch_miners = cursor.fetchall() + + if not epoch_miners: + return {} + + # Calculate weights with v2 system + weighted_miners = [] + total_weight = 0.0 + + for row in epoch_miners: + miner_id, arch, family, model, year = row + device_info = { + "arch": arch or "modern_x86", + "family": family or "", + "model": model or "", + "year": year or CURRENT_YEAR + } + + base_mult = get_device_multiplier(device_info, db_path, miner_id) + weight = get_time_aged_multiplier(arch, chain_age_years, device_info) + + weighted_miners.append((miner_id, weight, device_info)) + total_weight += weight + + # Distribute rewards + rewards = {} + remaining = total_reward_urtc + + for i, (miner_id, weight, device_info) in enumerate(weighted_miners): + if i == len(weighted_miners) - 1: + share = remaining + else: + share = int((weight / total_weight) * total_reward_urtc) + remaining -= share + + rewards[miner_id] = share + + return rewards + + +# ============================================================================= +# EXAMPLE / TEST +# ============================================================================= + +if __name__ == "__main__": + print("=" * 70) + print("RustChain Antiquity Multiplier System v2") + print("=" * 70) + + # Test devices + test_devices = [ + {"arch": "g4", "family": "PowerPC G4", "year": 2003}, + {"arch": "g5", "family": "PowerPC G5", "year": 2005}, + {"arch": "intel_mac", "model": "MacPro6,1", "year": 2013}, # 12 years old + {"arch": "server_x86", "family": "Dell PowerEdge", "year": 2017}, # 8 years old + {"arch": "apple_silicon", "family": "Apple M2", "year": 2022}, + {"arch": "modern_x86", "family": "Modern Desktop", "year": 2023}, + ] + + print("\n=== Base Multipliers (Year 0) ===") + print(f"{'Device':<30} {'Age':>8} {'Multiplier':>12}") + print("-" * 52) + + for device in test_devices: + mult = get_device_multiplier(device) + age = CURRENT_YEAR - device.get("year", CURRENT_YEAR) + name = 
device.get("family", device.get("arch")) + print(f"{name:<30} {age:>5} yr {mult:>10.2f}x") + + print("\n=== Multiplier Decay Over Blockchain Lifetime ===") + for years in [0, 2, 5, 10, 15]: + print(f"\n--- Chain Age: {years} years ---") + for device in test_devices: + arch = device.get("arch") + mult = get_time_aged_multiplier(arch, years, device) + name = device.get("family", device.get("arch"))[:25] + print(f" {name:<25}: {mult:.3f}x") + + print("\n=== Reward Distribution Example (1.5 RTC) ===") + total_reward = 150_000_000 # 1.5 RTC in uRTC + + weights = [] + for device in test_devices: + mult = get_device_multiplier(device) + weights.append((device.get("family", device.get("arch")), mult)) + + total_weight = sum(w[1] for w in weights) + + print(f"{'Device':<30} {'Multiplier':>10} {'Share (RTC)':>12} {'Percent':>8}") + print("-" * 62) + + for name, mult in weights: + share_urtc = int((mult / total_weight) * total_reward) + share_rtc = share_urtc / 100_000_000 + pct = (mult / total_weight) * 100 + print(f"{name:<30} {mult:>8.2f}x {share_rtc:>10.6f} {pct:>7.1f}%") + + print("\n" + "=" * 70) + print("Key Points:") + print("- PowerPC G4/G5: Highest multipliers (true vintage)") + print("- Intel Mac: Sliding scale 0.8-1.5x based on age") + print("- Server x86: Sliding scale 0.1-1.0x based on age") + print("- Modern x86: 0.1x base + 15%/year loyalty bonus") + print("- Vintage bonuses decay 15%/year over chain lifetime") + print("- Loyalty bonuses do NOT decay (reward for commitment)") + print("=" * 70) diff --git a/node/rustchain_block_producer.py b/node/rustchain_block_producer.py index 5353ffdb0..6b0b86bc1 100644 --- a/node/rustchain_block_producer.py +++ b/node/rustchain_block_producer.py @@ -30,11 +30,6 @@ ) from rustchain_tx_handler import TransactionPool -try: - from utxo_db import UtxoDB as _UtxoDB -except Exception: # pragma: no cover - soft dependency for legacy/account-mode use - _UtxoDB = None - logging.basicConfig( level=logging.INFO, format='%(asctime)s 
[BLOCK] %(levelname)s: %(message)s' @@ -46,7 +41,7 @@ # CONSTANTS # ============================================================================= -GENESIS_TIMESTAMP = 1728000000 # Oct 4, 2024 00:00:00 UTC +GENESIS_TIMESTAMP = 1764706927 # Production chain launch (Dec 2, 2025) BLOCK_TIME = 600 # 10 minutes (600 seconds) MAX_TXS_PER_BLOCK = 1000 ATTESTATION_TTL = 600 # 10 minutes @@ -195,14 +190,12 @@ def __init__( db_path: str, tx_pool: TransactionPool, signer: Optional[Ed25519Signer] = None, - wallet_address: Optional[str] = None, - utxo_db: Optional["_UtxoDB"] = None + wallet_address: Optional[str] = None ): self.db_path = db_path self.tx_pool = tx_pool self.signer = signer self.wallet_address = wallet_address - self._utxo_db = utxo_db self._lock = threading.Lock() def get_current_slot(self) -> int: @@ -290,15 +283,8 @@ def get_state_root(self) -> str: """ Compute current state root. - Prefer the UTXO Merkle root when a UTXO database is attached; otherwise - fall back to the legacy account-model balances table. + State root is hash of all balances sorted by address. """ - if self._utxo_db is not None: - try: - return self._utxo_db.compute_state_root() - except Exception as exc: - logger.warning("UTXO state root computation failed; falling back to balances table: %s", exc) - with sqlite3.connect(self.db_path) as conn: conn.row_factory = sqlite3.Row cursor = conn.cursor() diff --git a/node/rustchain_migration.py b/node/rustchain_migration.py index b69304992..a7512b665 100644 --- a/node/rustchain_migration.py +++ b/node/rustchain_migration.py @@ -1,647 +1,647 @@ -#!/usr/bin/env python3 -""" -RustChain Testnet to Mainnet Migration Script -============================================== - -Phase 6 Implementation: -- Testnet state snapshot -- Database schema migration -- Premine initialization -- Genesis block creation -- Validation and verification - -Run this script ONCE to migrate from testnet to mainnet. 
-""" - -import os -import sys -import json -import sqlite3 -import shutil -import time -import logging -import hashlib -from datetime import datetime -from typing import Dict, List, Optional - -# Import mainnet modules -from rustchain_crypto import blake2b256_hex, canonical_json, generate_wallet_keypair -from rustchain_genesis_premine import PremineManager, TOTAL_PREMINE_RTC, FOUNDER_ALLOCATIONS -from rustchain_tx_handler import TransactionPool - -logging.basicConfig( - level=logging.INFO, - format='%(asctime)s [MIGRATE] %(levelname)s: %(message)s' -) -logger = logging.getLogger(__name__) - - -# ============================================================================= -# MIGRATION CONFIGURATION -# ============================================================================= - -MIGRATION_VERSION = "2.3.0-mainnet" -GENESIS_TIMESTAMP = 1728000000 # Oct 4, 2024 00:00:00 UTC (same as testnet) - -# Paths -TESTNET_DB_PATH = os.environ.get("TESTNET_DB", "/root/rustchain/rustchain_v2.db") -MAINNET_DB_PATH = os.environ.get("MAINNET_DB", "/root/rustchain/rustchain_mainnet.db") -BACKUP_DIR = os.environ.get("BACKUP_DIR", "/root/rustchain/backups") - -# Migration flags -PRESERVE_ATTESTATION_HISTORY = True -PRESERVE_MINER_STATS = True -RESET_BALANCES = True # Reset to premine only - - -# ============================================================================= -# MIGRATION STEPS -# ============================================================================= - -class RustChainMigration: - """ - Handles testnet -> mainnet migration. 
- """ - - def __init__( - self, - testnet_db: str = TESTNET_DB_PATH, - mainnet_db: str = MAINNET_DB_PATH, - backup_dir: str = BACKUP_DIR - ): - self.testnet_db = testnet_db - self.mainnet_db = mainnet_db - self.backup_dir = backup_dir - self.migration_log = [] - self.errors = [] - - def log(self, message: str, level: str = "INFO"): - """Log migration step""" - entry = { - "timestamp": datetime.now().isoformat(), - "level": level, - "message": message - } - self.migration_log.append(entry) - - if level == "ERROR": - logger.error(message) - self.errors.append(message) - elif level == "WARNING": - logger.warning(message) - else: - logger.info(message) - - def pre_flight_checks(self) -> bool: - """Run pre-migration validation""" - self.log("=" * 60) - self.log("PRE-FLIGHT CHECKS") - self.log("=" * 60) - - # Check testnet DB exists - if not os.path.exists(self.testnet_db): - self.log(f"Testnet DB not found: {self.testnet_db}", "ERROR") - return False - self.log(f"Testnet DB found: {self.testnet_db}") - - # Check mainnet DB doesn't exist (prevent accidental overwrite) - if os.path.exists(self.mainnet_db): - self.log(f"Mainnet DB already exists: {self.mainnet_db}", "WARNING") - self.log("Will create backup before overwriting") - - # Check backup directory - os.makedirs(self.backup_dir, exist_ok=True) - self.log(f"Backup directory: {self.backup_dir}") - - # Verify testnet DB integrity - try: - with sqlite3.connect(self.testnet_db) as conn: - cursor = conn.cursor() - - # Check tables exist - cursor.execute("SELECT name FROM sqlite_master WHERE type='table'") - tables = [row[0] for row in cursor.fetchall()] - self.log(f"Testnet tables: {tables}") - - # Check miner attestations - if "miner_attest_recent" in tables: - cursor.execute("SELECT COUNT(*) FROM miner_attest_recent") - count = cursor.fetchone()[0] - self.log(f"Active attestations: {count}") - - # Check balances - if "balances" in tables: - cursor.execute("SELECT COUNT(*), SUM(balance_urtc) FROM balances") - row = 
cursor.fetchone() - self.log(f"Testnet wallets: {row[0]}, Total balance: {(row[1] or 0) / 100_000_000:.2f} RTC") - - except Exception as e: - self.log(f"Failed to verify testnet DB: {e}", "ERROR") - return False - - self.log("Pre-flight checks PASSED") - return True - - def create_backup(self) -> str: - """Create timestamped backup of testnet DB""" - self.log("Creating backup...") - - timestamp = datetime.now().strftime("%Y%m%d_%H%M%S") - backup_path = os.path.join(self.backup_dir, f"testnet_backup_{timestamp}.db") - - shutil.copy2(self.testnet_db, backup_path) - self.log(f"Backup created: {backup_path}") - - # Also backup mainnet if it exists - if os.path.exists(self.mainnet_db): - mainnet_backup = os.path.join(self.backup_dir, f"mainnet_backup_{timestamp}.db") - shutil.copy2(self.mainnet_db, mainnet_backup) - self.log(f"Mainnet backup created: {mainnet_backup}") - - return backup_path - - def create_mainnet_schema(self): - """Create mainnet database with upgraded schema""" - self.log("Creating mainnet database schema...") - - # Remove existing if present - if os.path.exists(self.mainnet_db): - os.remove(self.mainnet_db) - - with sqlite3.connect(self.mainnet_db) as conn: - cursor = conn.cursor() - - # Core tables - cursor.execute(""" - CREATE TABLE balances ( - wallet TEXT PRIMARY KEY, - balance_urtc INTEGER DEFAULT 0, - wallet_nonce INTEGER DEFAULT 0, - created_at INTEGER, - updated_at INTEGER - ) - """) - - cursor.execute(""" - CREATE TABLE blocks ( - height INTEGER PRIMARY KEY, - block_hash TEXT UNIQUE NOT NULL, - prev_hash TEXT NOT NULL, - timestamp INTEGER NOT NULL, - merkle_root TEXT NOT NULL, - state_root TEXT NOT NULL, - attestations_hash TEXT NOT NULL, - producer TEXT NOT NULL, - producer_sig TEXT NOT NULL, - tx_count INTEGER NOT NULL, - attestation_count INTEGER NOT NULL, - body_json TEXT NOT NULL, - created_at INTEGER NOT NULL - ) - """) - - cursor.execute(""" - CREATE TABLE miner_attest_recent ( - miner TEXT PRIMARY KEY, - device_arch TEXT, - 
device_family TEXT, - device_model TEXT, - device_year INTEGER, - ts_ok INTEGER, - last_block_produced INTEGER, - total_blocks_produced INTEGER DEFAULT 0 - ) - """) - - cursor.execute(""" - CREATE TABLE miner_attest_history ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - miner TEXT NOT NULL, - device_arch TEXT, - device_family TEXT, - ts_ok INTEGER NOT NULL, - block_height INTEGER - ) - """) - - cursor.execute(""" - CREATE TABLE pending_transactions ( - tx_hash TEXT PRIMARY KEY, - from_addr TEXT NOT NULL, - to_addr TEXT NOT NULL, - amount_urtc INTEGER NOT NULL, - nonce INTEGER NOT NULL, - timestamp INTEGER NOT NULL, - memo TEXT DEFAULT '', - signature TEXT NOT NULL, - public_key TEXT NOT NULL, - created_at INTEGER NOT NULL, - status TEXT DEFAULT 'pending' - ) - """) - - cursor.execute(""" - CREATE TABLE transaction_history ( - tx_hash TEXT PRIMARY KEY, - from_addr TEXT NOT NULL, - to_addr TEXT NOT NULL, - amount_urtc INTEGER NOT NULL, - nonce INTEGER NOT NULL, - timestamp INTEGER NOT NULL, - memo TEXT DEFAULT '', - signature TEXT NOT NULL, - public_key TEXT NOT NULL, - block_height INTEGER, - block_hash TEXT, - confirmed_at INTEGER, - status TEXT DEFAULT 'confirmed' - ) - """) - - cursor.execute(""" - CREATE TABLE wallet_pubkeys ( - address TEXT PRIMARY KEY, - public_key TEXT NOT NULL, - registered_at INTEGER NOT NULL - ) - """) - - cursor.execute(""" - CREATE TABLE premine_allocations ( - allocation_id TEXT PRIMARY KEY, - name TEXT NOT NULL, - wallet_address TEXT NOT NULL, - public_key TEXT, - total_urtc INTEGER NOT NULL, - vesting_months INTEGER NOT NULL, - cliff_months INTEGER NOT NULL, - claimed_urtc INTEGER DEFAULT 0, - role TEXT NOT NULL, - created_at INTEGER NOT NULL - ) - """) - - cursor.execute(""" - CREATE TABLE vesting_claims ( - claim_id INTEGER PRIMARY KEY AUTOINCREMENT, - allocation_id TEXT NOT NULL, - amount_urtc INTEGER NOT NULL, - claimed_at INTEGER NOT NULL, - tx_hash TEXT - ) - """) - - cursor.execute(""" - CREATE TABLE ergo_anchors ( - id INTEGER 
PRIMARY KEY AUTOINCREMENT, - rustchain_height INTEGER NOT NULL, - rustchain_hash TEXT NOT NULL, - commitment_hash TEXT NOT NULL, - ergo_tx_id TEXT NOT NULL, - ergo_height INTEGER, - confirmations INTEGER DEFAULT 0, - status TEXT DEFAULT 'pending', - created_at INTEGER NOT NULL - ) - """) - - cursor.execute(""" - CREATE TABLE chain_metadata ( - key TEXT PRIMARY KEY, - value TEXT NOT NULL, - updated_at INTEGER NOT NULL - ) - """) - - # Indexes - cursor.execute("CREATE INDEX idx_tx_pending_from ON pending_transactions(from_addr)") - cursor.execute("CREATE INDEX idx_tx_history_from ON transaction_history(from_addr)") - cursor.execute("CREATE INDEX idx_tx_history_to ON transaction_history(to_addr)") - cursor.execute("CREATE INDEX idx_tx_history_block ON transaction_history(block_height)") - cursor.execute("CREATE INDEX idx_attest_history_miner ON miner_attest_history(miner)") - cursor.execute("CREATE INDEX idx_blocks_hash ON blocks(block_hash)") - - # Insert metadata - cursor.execute(""" - INSERT INTO chain_metadata (key, value, updated_at) VALUES - ('version', ?, ?), - ('genesis_timestamp', ?, ?), - ('network', 'mainnet', ?), - ('migration_date', ?, ?) 
- """, ( - MIGRATION_VERSION, int(time.time()), - str(GENESIS_TIMESTAMP), int(time.time()), - int(time.time()), - datetime.now().isoformat(), int(time.time()) - )) - - conn.commit() - - self.log("Mainnet schema created successfully") - - def migrate_attestation_history(self): - """Migrate attestation history from testnet""" - if not PRESERVE_ATTESTATION_HISTORY: - self.log("Skipping attestation history migration (disabled)") - return - - self.log("Migrating attestation history...") - - try: - with sqlite3.connect(self.testnet_db) as testnet_conn: - testnet_conn.row_factory = sqlite3.Row - cursor = testnet_conn.cursor() - - # Get attestation history - cursor.execute(""" - SELECT miner, device_arch, device_family, ts_ok - FROM miner_attest_recent - """) - attestations = cursor.fetchall() - - with sqlite3.connect(self.mainnet_db) as mainnet_conn: - cursor = mainnet_conn.cursor() - - for att in attestations: - cursor.execute(""" - INSERT INTO miner_attest_recent - (miner, device_arch, device_family, ts_ok) - VALUES (?, ?, ?, ?) 
- """, (att["miner"], att["device_arch"], att["device_family"], att["ts_ok"])) - - mainnet_conn.commit() - - self.log(f"Migrated {len(attestations)} attestation records") - - except Exception as e: - self.log(f"Attestation migration failed: {e}", "ERROR") - - def initialize_premine(self, wallet_addresses: Dict[str, str] = None) -> Dict: - """Initialize premine allocations""" - self.log("Initializing premine allocations...") - - manager = PremineManager(self.mainnet_db, GENESIS_TIMESTAMP) - result = manager.initialize_premine(wallet_addresses) - - self.log(f"Total premine: {TOTAL_PREMINE_RTC:,} RTC") - self.log(f"Allocations created: {len(result['allocations'])}") - - for alloc in result['allocations']: - self.log(f" {alloc['name']}: {alloc['amount_rtc']:,} RTC -> {alloc['wallet'][:20]}...") - - return result - - def create_genesis_block(self) -> Dict: - """Create genesis block""" - self.log("Creating genesis block...") - - # Genesis block data - genesis = { - "height": 0, - "block_hash": "0" * 64, # Will be computed - "prev_hash": "0" * 64, - "timestamp": GENESIS_TIMESTAMP * 1000, - "merkle_root": "0" * 64, - "state_root": "0" * 64, - "attestations_hash": "0" * 64, - "producer": "genesis", - "producer_sig": "0" * 128, - "tx_count": 0, - "attestation_count": 0, - "body_json": json.dumps({ - "transactions": [], - "attestations": [], - "premine": { - "total_rtc": TOTAL_PREMINE_RTC, - "allocations": list(FOUNDER_ALLOCATIONS.keys()) - } - }) - } - - # Compute genesis hash - genesis_data = canonical_json({ - "height": genesis["height"], - "prev_hash": genesis["prev_hash"], - "timestamp": genesis["timestamp"], - "merkle_root": genesis["merkle_root"], - "producer": genesis["producer"] - }) - genesis["block_hash"] = blake2b256_hex(genesis_data) - - with sqlite3.connect(self.mainnet_db) as conn: - cursor = conn.cursor() - cursor.execute(""" - INSERT INTO blocks - (height, block_hash, prev_hash, timestamp, merkle_root, state_root, - attestations_hash, producer, producer_sig, 
tx_count, attestation_count, - body_json, created_at) - VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) - """, ( - genesis["height"], - genesis["block_hash"], - genesis["prev_hash"], - genesis["timestamp"], - genesis["merkle_root"], - genesis["state_root"], - genesis["attestations_hash"], - genesis["producer"], - genesis["producer_sig"], - genesis["tx_count"], - genesis["attestation_count"], - genesis["body_json"], - int(time.time()) - )) - conn.commit() - - self.log(f"Genesis block created: {genesis['block_hash'][:16]}...") - return genesis - - def verify_migration(self) -> bool: - """Verify migration was successful""" - self.log("=" * 60) - self.log("VERIFICATION") - self.log("=" * 60) - - try: - with sqlite3.connect(self.mainnet_db) as conn: - cursor = conn.cursor() - - # Check genesis block - cursor.execute("SELECT block_hash FROM blocks WHERE height = 0") - genesis = cursor.fetchone() - if not genesis: - self.log("Genesis block not found", "ERROR") - return False - self.log(f"Genesis block: {genesis[0][:16]}...") - - # Check premine - cursor.execute("SELECT COUNT(*), SUM(total_urtc) FROM premine_allocations") - premine = cursor.fetchone() - expected_premine = TOTAL_PREMINE_RTC * 100_000_000 - if premine[1] != expected_premine: - self.log(f"Premine mismatch: {premine[1]} != {expected_premine}", "ERROR") - return False - self.log(f"Premine allocations: {premine[0]}, Total: {premine[1] / 100_000_000:,.0f} RTC") - - # Check balances - cursor.execute("SELECT COUNT(*), SUM(balance_urtc) FROM balances") - balances = cursor.fetchone() - self.log(f"Wallet count: {balances[0]}, Total balance: {(balances[1] or 0) / 100_000_000:,.2f} RTC") - - # Check chain metadata - cursor.execute("SELECT key, value FROM chain_metadata") - metadata = dict(cursor.fetchall()) - self.log(f"Chain version: {metadata.get('version', 'unknown')}") - self.log(f"Network: {metadata.get('network', 'unknown')}") - - except Exception as e: - self.log(f"Verification failed: {e}", "ERROR") - return 
False - - if self.errors: - self.log(f"Migration completed with {len(self.errors)} errors", "WARNING") - return False - - self.log("Verification PASSED") - return True - - def run(self, wallet_addresses: Dict[str, str] = None) -> Dict: - """ - Run full migration process. - - Args: - wallet_addresses: Optional dict mapping allocation_id to existing wallet addresses. - If not provided, new wallets will be generated. - - Returns: - Migration result including any generated wallets - """ - self.log("=" * 60) - self.log("RUSTCHAIN TESTNET -> MAINNET MIGRATION") - self.log(f"Version: {MIGRATION_VERSION}") - self.log(f"Started: {datetime.now().isoformat()}") - self.log("=" * 60) - - result = { - "success": False, - "version": MIGRATION_VERSION, - "started_at": datetime.now().isoformat(), - "completed_at": None, - "backup_path": None, - "genesis_hash": None, - "premine": None, - "errors": [] - } - - try: - # Step 1: Pre-flight checks - if not self.pre_flight_checks(): - result["errors"] = self.errors - return result - - # Step 2: Backup - result["backup_path"] = self.create_backup() - - # Step 3: Create mainnet schema - self.create_mainnet_schema() - - # Step 4: Migrate attestation history - self.migrate_attestation_history() - - # Step 5: Initialize premine - premine_result = self.initialize_premine(wallet_addresses) - result["premine"] = premine_result - - # Step 6: Create genesis block - genesis = self.create_genesis_block() - result["genesis_hash"] = genesis["block_hash"] - - # Step 7: Verify - if self.verify_migration(): - result["success"] = True - self.log("=" * 60) - self.log("MIGRATION COMPLETED SUCCESSFULLY") - self.log("=" * 60) - else: - result["errors"] = self.errors - - except Exception as e: - self.log(f"Migration failed: {e}", "ERROR") - result["errors"] = self.errors + [str(e)] - - result["completed_at"] = datetime.now().isoformat() - result["log"] = self.migration_log - - # Save migration log - log_path = os.path.join(self.backup_dir, 
f"migration_log_{datetime.now().strftime('%Y%m%d_%H%M%S')}.json") - with open(log_path, 'w') as f: - json.dump(result, f, indent=2) - self.log(f"Migration log saved: {log_path}") - - return result - - -# ============================================================================= -# CLI -# ============================================================================= - -def main(): - """CLI entry point""" - import argparse - - parser = argparse.ArgumentParser(description="RustChain Testnet -> Mainnet Migration") - parser.add_argument("--testnet-db", default=TESTNET_DB_PATH, help="Testnet database path") - parser.add_argument("--mainnet-db", default=MAINNET_DB_PATH, help="Mainnet database path") - parser.add_argument("--backup-dir", default=BACKUP_DIR, help="Backup directory") - parser.add_argument("--wallets-file", help="JSON file with existing wallet addresses") - parser.add_argument("--dry-run", action="store_true", help="Run validation only") - - args = parser.parse_args() - - # Load wallet addresses if provided - wallet_addresses = None - if args.wallets_file and os.path.exists(args.wallets_file): - with open(args.wallets_file) as f: - wallet_addresses = json.load(f) - print(f"Loaded {len(wallet_addresses)} wallet addresses") - - # Create migration instance - migration = RustChainMigration( - testnet_db=args.testnet_db, - mainnet_db=args.mainnet_db, - backup_dir=args.backup_dir - ) - - if args.dry_run: - print("DRY RUN - Validation only") - success = migration.pre_flight_checks() - sys.exit(0 if success else 1) - - # Run migration - result = migration.run(wallet_addresses) - - # Print summary - print("\n" + "=" * 60) - print("MIGRATION SUMMARY") - print("=" * 60) - print(f"Success: {result['success']}") - print(f"Genesis Hash: {result.get('genesis_hash', 'N/A')}") - print(f"Backup: {result.get('backup_path', 'N/A')}") - - if result.get('premine', {}).get('generated_wallets'): - print("\nGENERATED WALLETS (SAVE THESE SECURELY!):") - for alloc_id, wallet in 
result['premine']['generated_wallets'].items(): - print(f"\n{alloc_id}:") - print(f" Address: {wallet['address']}") - print(f" Private Key: {wallet['private_key']}") - - if result.get('errors'): - print(f"\nErrors: {len(result['errors'])}") - for err in result['errors']: - print(f" - {err}") - - sys.exit(0 if result['success'] else 1) - - -if __name__ == "__main__": - main() +#!/usr/bin/env python3 +""" +RustChain Testnet to Mainnet Migration Script +============================================== + +Phase 6 Implementation: +- Testnet state snapshot +- Database schema migration +- Premine initialization +- Genesis block creation +- Validation and verification + +Run this script ONCE to migrate from testnet to mainnet. +""" + +import os +import sys +import json +import sqlite3 +import shutil +import time +import logging +import hashlib +from datetime import datetime +from typing import Dict, List, Optional + +# Import mainnet modules +from rustchain_crypto import blake2b256_hex, canonical_json, generate_wallet_keypair +from rustchain_genesis_premine import PremineManager, TOTAL_PREMINE_RTC, FOUNDER_ALLOCATIONS +from rustchain_tx_handler import TransactionPool + +logging.basicConfig( + level=logging.INFO, + format='%(asctime)s [MIGRATE] %(levelname)s: %(message)s' +) +logger = logging.getLogger(__name__) + + +# ============================================================================= +# MIGRATION CONFIGURATION +# ============================================================================= + +MIGRATION_VERSION = "2.3.0-mainnet" +GENESIS_TIMESTAMP = 1764706927 # Production chain launch (Dec 2, 2025) + +# Paths +TESTNET_DB_PATH = os.environ.get("TESTNET_DB", "/root/rustchain/rustchain_v2.db") +MAINNET_DB_PATH = os.environ.get("MAINNET_DB", "/root/rustchain/rustchain_mainnet.db") +BACKUP_DIR = os.environ.get("BACKUP_DIR", "/root/rustchain/backups") + +# Migration flags +PRESERVE_ATTESTATION_HISTORY = True +PRESERVE_MINER_STATS = True +RESET_BALANCES = True # Reset to 
premine only + + +# ============================================================================= +# MIGRATION STEPS +# ============================================================================= + +class RustChainMigration: + """ + Handles testnet -> mainnet migration. + """ + + def __init__( + self, + testnet_db: str = TESTNET_DB_PATH, + mainnet_db: str = MAINNET_DB_PATH, + backup_dir: str = BACKUP_DIR + ): + self.testnet_db = testnet_db + self.mainnet_db = mainnet_db + self.backup_dir = backup_dir + self.migration_log = [] + self.errors = [] + + def log(self, message: str, level: str = "INFO"): + """Log migration step""" + entry = { + "timestamp": datetime.now().isoformat(), + "level": level, + "message": message + } + self.migration_log.append(entry) + + if level == "ERROR": + logger.error(message) + self.errors.append(message) + elif level == "WARNING": + logger.warning(message) + else: + logger.info(message) + + def pre_flight_checks(self) -> bool: + """Run pre-migration validation""" + self.log("=" * 60) + self.log("PRE-FLIGHT CHECKS") + self.log("=" * 60) + + # Check testnet DB exists + if not os.path.exists(self.testnet_db): + self.log(f"Testnet DB not found: {self.testnet_db}", "ERROR") + return False + self.log(f"Testnet DB found: {self.testnet_db}") + + # Check mainnet DB doesn't exist (prevent accidental overwrite) + if os.path.exists(self.mainnet_db): + self.log(f"Mainnet DB already exists: {self.mainnet_db}", "WARNING") + self.log("Will create backup before overwriting") + + # Check backup directory + os.makedirs(self.backup_dir, exist_ok=True) + self.log(f"Backup directory: {self.backup_dir}") + + # Verify testnet DB integrity + try: + with sqlite3.connect(self.testnet_db) as conn: + cursor = conn.cursor() + + # Check tables exist + cursor.execute("SELECT name FROM sqlite_master WHERE type='table'") + tables = [row[0] for row in cursor.fetchall()] + self.log(f"Testnet tables: {tables}") + + # Check miner attestations + if "miner_attest_recent" in 
tables: + cursor.execute("SELECT COUNT(*) FROM miner_attest_recent") + count = cursor.fetchone()[0] + self.log(f"Active attestations: {count}") + + # Check balances + if "balances" in tables: + cursor.execute("SELECT COUNT(*), SUM(balance_urtc) FROM balances") + row = cursor.fetchone() + self.log(f"Testnet wallets: {row[0]}, Total balance: {(row[1] or 0) / 100_000_000:.2f} RTC") + + except Exception as e: + self.log(f"Failed to verify testnet DB: {e}", "ERROR") + return False + + self.log("Pre-flight checks PASSED") + return True + + def create_backup(self) -> str: + """Create timestamped backup of testnet DB""" + self.log("Creating backup...") + + timestamp = datetime.now().strftime("%Y%m%d_%H%M%S") + backup_path = os.path.join(self.backup_dir, f"testnet_backup_{timestamp}.db") + + shutil.copy2(self.testnet_db, backup_path) + self.log(f"Backup created: {backup_path}") + + # Also backup mainnet if it exists + if os.path.exists(self.mainnet_db): + mainnet_backup = os.path.join(self.backup_dir, f"mainnet_backup_{timestamp}.db") + shutil.copy2(self.mainnet_db, mainnet_backup) + self.log(f"Mainnet backup created: {mainnet_backup}") + + return backup_path + + def create_mainnet_schema(self): + """Create mainnet database with upgraded schema""" + self.log("Creating mainnet database schema...") + + # Remove existing if present + if os.path.exists(self.mainnet_db): + os.remove(self.mainnet_db) + + with sqlite3.connect(self.mainnet_db) as conn: + cursor = conn.cursor() + + # Core tables + cursor.execute(""" + CREATE TABLE balances ( + wallet TEXT PRIMARY KEY, + balance_urtc INTEGER DEFAULT 0, + wallet_nonce INTEGER DEFAULT 0, + created_at INTEGER, + updated_at INTEGER + ) + """) + + cursor.execute(""" + CREATE TABLE blocks ( + height INTEGER PRIMARY KEY, + block_hash TEXT UNIQUE NOT NULL, + prev_hash TEXT NOT NULL, + timestamp INTEGER NOT NULL, + merkle_root TEXT NOT NULL, + state_root TEXT NOT NULL, + attestations_hash TEXT NOT NULL, + producer TEXT NOT NULL, + 
producer_sig TEXT NOT NULL, + tx_count INTEGER NOT NULL, + attestation_count INTEGER NOT NULL, + body_json TEXT NOT NULL, + created_at INTEGER NOT NULL + ) + """) + + cursor.execute(""" + CREATE TABLE miner_attest_recent ( + miner TEXT PRIMARY KEY, + device_arch TEXT, + device_family TEXT, + device_model TEXT, + device_year INTEGER, + ts_ok INTEGER, + last_block_produced INTEGER, + total_blocks_produced INTEGER DEFAULT 0 + ) + """) + + cursor.execute(""" + CREATE TABLE miner_attest_history ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + miner TEXT NOT NULL, + device_arch TEXT, + device_family TEXT, + ts_ok INTEGER NOT NULL, + block_height INTEGER + ) + """) + + cursor.execute(""" + CREATE TABLE pending_transactions ( + tx_hash TEXT PRIMARY KEY, + from_addr TEXT NOT NULL, + to_addr TEXT NOT NULL, + amount_urtc INTEGER NOT NULL, + nonce INTEGER NOT NULL, + timestamp INTEGER NOT NULL, + memo TEXT DEFAULT '', + signature TEXT NOT NULL, + public_key TEXT NOT NULL, + created_at INTEGER NOT NULL, + status TEXT DEFAULT 'pending' + ) + """) + + cursor.execute(""" + CREATE TABLE transaction_history ( + tx_hash TEXT PRIMARY KEY, + from_addr TEXT NOT NULL, + to_addr TEXT NOT NULL, + amount_urtc INTEGER NOT NULL, + nonce INTEGER NOT NULL, + timestamp INTEGER NOT NULL, + memo TEXT DEFAULT '', + signature TEXT NOT NULL, + public_key TEXT NOT NULL, + block_height INTEGER, + block_hash TEXT, + confirmed_at INTEGER, + status TEXT DEFAULT 'confirmed' + ) + """) + + cursor.execute(""" + CREATE TABLE wallet_pubkeys ( + address TEXT PRIMARY KEY, + public_key TEXT NOT NULL, + registered_at INTEGER NOT NULL + ) + """) + + cursor.execute(""" + CREATE TABLE premine_allocations ( + allocation_id TEXT PRIMARY KEY, + name TEXT NOT NULL, + wallet_address TEXT NOT NULL, + public_key TEXT, + total_urtc INTEGER NOT NULL, + vesting_months INTEGER NOT NULL, + cliff_months INTEGER NOT NULL, + claimed_urtc INTEGER DEFAULT 0, + role TEXT NOT NULL, + created_at INTEGER NOT NULL + ) + """) + + 
cursor.execute(""" + CREATE TABLE vesting_claims ( + claim_id INTEGER PRIMARY KEY AUTOINCREMENT, + allocation_id TEXT NOT NULL, + amount_urtc INTEGER NOT NULL, + claimed_at INTEGER NOT NULL, + tx_hash TEXT + ) + """) + + cursor.execute(""" + CREATE TABLE ergo_anchors ( + id INTEGER PRIMARY KEY AUTOINCREMENT, + rustchain_height INTEGER NOT NULL, + rustchain_hash TEXT NOT NULL, + commitment_hash TEXT NOT NULL, + ergo_tx_id TEXT NOT NULL, + ergo_height INTEGER, + confirmations INTEGER DEFAULT 0, + status TEXT DEFAULT 'pending', + created_at INTEGER NOT NULL + ) + """) + + cursor.execute(""" + CREATE TABLE chain_metadata ( + key TEXT PRIMARY KEY, + value TEXT NOT NULL, + updated_at INTEGER NOT NULL + ) + """) + + # Indexes + cursor.execute("CREATE INDEX idx_tx_pending_from ON pending_transactions(from_addr)") + cursor.execute("CREATE INDEX idx_tx_history_from ON transaction_history(from_addr)") + cursor.execute("CREATE INDEX idx_tx_history_to ON transaction_history(to_addr)") + cursor.execute("CREATE INDEX idx_tx_history_block ON transaction_history(block_height)") + cursor.execute("CREATE INDEX idx_attest_history_miner ON miner_attest_history(miner)") + cursor.execute("CREATE INDEX idx_blocks_hash ON blocks(block_hash)") + + # Insert metadata + cursor.execute(""" + INSERT INTO chain_metadata (key, value, updated_at) VALUES + ('version', ?, ?), + ('genesis_timestamp', ?, ?), + ('network', 'mainnet', ?), + ('migration_date', ?, ?) 
+ """, ( + MIGRATION_VERSION, int(time.time()), + str(GENESIS_TIMESTAMP), int(time.time()), + int(time.time()), + datetime.now().isoformat(), int(time.time()) + )) + + conn.commit() + + self.log("Mainnet schema created successfully") + + def migrate_attestation_history(self): + """Migrate attestation history from testnet""" + if not PRESERVE_ATTESTATION_HISTORY: + self.log("Skipping attestation history migration (disabled)") + return + + self.log("Migrating attestation history...") + + try: + with sqlite3.connect(self.testnet_db) as testnet_conn: + testnet_conn.row_factory = sqlite3.Row + cursor = testnet_conn.cursor() + + # Get attestation history + cursor.execute(""" + SELECT miner, device_arch, device_family, ts_ok + FROM miner_attest_recent + """) + attestations = cursor.fetchall() + + with sqlite3.connect(self.mainnet_db) as mainnet_conn: + cursor = mainnet_conn.cursor() + + for att in attestations: + cursor.execute(""" + INSERT INTO miner_attest_recent + (miner, device_arch, device_family, ts_ok) + VALUES (?, ?, ?, ?) 
+ """, (att["miner"], att["device_arch"], att["device_family"], att["ts_ok"])) + + mainnet_conn.commit() + + self.log(f"Migrated {len(attestations)} attestation records") + + except Exception as e: + self.log(f"Attestation migration failed: {e}", "ERROR") + + def initialize_premine(self, wallet_addresses: Dict[str, str] = None) -> Dict: + """Initialize premine allocations""" + self.log("Initializing premine allocations...") + + manager = PremineManager(self.mainnet_db, GENESIS_TIMESTAMP) + result = manager.initialize_premine(wallet_addresses) + + self.log(f"Total premine: {TOTAL_PREMINE_RTC:,} RTC") + self.log(f"Allocations created: {len(result['allocations'])}") + + for alloc in result['allocations']: + self.log(f" {alloc['name']}: {alloc['amount_rtc']:,} RTC -> {alloc['wallet'][:20]}...") + + return result + + def create_genesis_block(self) -> Dict: + """Create genesis block""" + self.log("Creating genesis block...") + + # Genesis block data + genesis = { + "height": 0, + "block_hash": "0" * 64, # Will be computed + "prev_hash": "0" * 64, + "timestamp": GENESIS_TIMESTAMP * 1000, + "merkle_root": "0" * 64, + "state_root": "0" * 64, + "attestations_hash": "0" * 64, + "producer": "genesis", + "producer_sig": "0" * 128, + "tx_count": 0, + "attestation_count": 0, + "body_json": json.dumps({ + "transactions": [], + "attestations": [], + "premine": { + "total_rtc": TOTAL_PREMINE_RTC, + "allocations": list(FOUNDER_ALLOCATIONS.keys()) + } + }) + } + + # Compute genesis hash + genesis_data = canonical_json({ + "height": genesis["height"], + "prev_hash": genesis["prev_hash"], + "timestamp": genesis["timestamp"], + "merkle_root": genesis["merkle_root"], + "producer": genesis["producer"] + }) + genesis["block_hash"] = blake2b256_hex(genesis_data) + + with sqlite3.connect(self.mainnet_db) as conn: + cursor = conn.cursor() + cursor.execute(""" + INSERT INTO blocks + (height, block_hash, prev_hash, timestamp, merkle_root, state_root, + attestations_hash, producer, producer_sig, 
tx_count, attestation_count, + body_json, created_at) + VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?) + """, ( + genesis["height"], + genesis["block_hash"], + genesis["prev_hash"], + genesis["timestamp"], + genesis["merkle_root"], + genesis["state_root"], + genesis["attestations_hash"], + genesis["producer"], + genesis["producer_sig"], + genesis["tx_count"], + genesis["attestation_count"], + genesis["body_json"], + int(time.time()) + )) + conn.commit() + + self.log(f"Genesis block created: {genesis['block_hash'][:16]}...") + return genesis + + def verify_migration(self) -> bool: + """Verify migration was successful""" + self.log("=" * 60) + self.log("VERIFICATION") + self.log("=" * 60) + + try: + with sqlite3.connect(self.mainnet_db) as conn: + cursor = conn.cursor() + + # Check genesis block + cursor.execute("SELECT block_hash FROM blocks WHERE height = 0") + genesis = cursor.fetchone() + if not genesis: + self.log("Genesis block not found", "ERROR") + return False + self.log(f"Genesis block: {genesis[0][:16]}...") + + # Check premine + cursor.execute("SELECT COUNT(*), SUM(total_urtc) FROM premine_allocations") + premine = cursor.fetchone() + expected_premine = TOTAL_PREMINE_RTC * 100_000_000 + if premine[1] != expected_premine: + self.log(f"Premine mismatch: {premine[1]} != {expected_premine}", "ERROR") + return False + self.log(f"Premine allocations: {premine[0]}, Total: {premine[1] / 100_000_000:,.0f} RTC") + + # Check balances + cursor.execute("SELECT COUNT(*), SUM(balance_urtc) FROM balances") + balances = cursor.fetchone() + self.log(f"Wallet count: {balances[0]}, Total balance: {(balances[1] or 0) / 100_000_000:,.2f} RTC") + + # Check chain metadata + cursor.execute("SELECT key, value FROM chain_metadata") + metadata = dict(cursor.fetchall()) + self.log(f"Chain version: {metadata.get('version', 'unknown')}") + self.log(f"Network: {metadata.get('network', 'unknown')}") + + except Exception as e: + self.log(f"Verification failed: {e}", "ERROR") + return 
False + + if self.errors: + self.log(f"Migration completed with {len(self.errors)} errors", "WARNING") + return False + + self.log("Verification PASSED") + return True + + def run(self, wallet_addresses: Dict[str, str] = None) -> Dict: + """ + Run full migration process. + + Args: + wallet_addresses: Optional dict mapping allocation_id to existing wallet addresses. + If not provided, new wallets will be generated. + + Returns: + Migration result including any generated wallets + """ + self.log("=" * 60) + self.log("RUSTCHAIN TESTNET -> MAINNET MIGRATION") + self.log(f"Version: {MIGRATION_VERSION}") + self.log(f"Started: {datetime.now().isoformat()}") + self.log("=" * 60) + + result = { + "success": False, + "version": MIGRATION_VERSION, + "started_at": datetime.now().isoformat(), + "completed_at": None, + "backup_path": None, + "genesis_hash": None, + "premine": None, + "errors": [] + } + + try: + # Step 1: Pre-flight checks + if not self.pre_flight_checks(): + result["errors"] = self.errors + return result + + # Step 2: Backup + result["backup_path"] = self.create_backup() + + # Step 3: Create mainnet schema + self.create_mainnet_schema() + + # Step 4: Migrate attestation history + self.migrate_attestation_history() + + # Step 5: Initialize premine + premine_result = self.initialize_premine(wallet_addresses) + result["premine"] = premine_result + + # Step 6: Create genesis block + genesis = self.create_genesis_block() + result["genesis_hash"] = genesis["block_hash"] + + # Step 7: Verify + if self.verify_migration(): + result["success"] = True + self.log("=" * 60) + self.log("MIGRATION COMPLETED SUCCESSFULLY") + self.log("=" * 60) + else: + result["errors"] = self.errors + + except Exception as e: + self.log(f"Migration failed: {e}", "ERROR") + result["errors"] = self.errors + [str(e)] + + result["completed_at"] = datetime.now().isoformat() + result["log"] = self.migration_log + + # Save migration log + log_path = os.path.join(self.backup_dir, 
f"migration_log_{datetime.now().strftime('%Y%m%d_%H%M%S')}.json") + with open(log_path, 'w') as f: + json.dump(result, f, indent=2) # SECURITY(review): 'result' can embed generated wallet private keys (premine.generated_wallets) — redact secrets before persisting this plaintext log + self.log(f"Migration log saved: {log_path}") + + return result + + +# ============================================================================= +# CLI +# ============================================================================= + +def main(): + """CLI entry point""" + import argparse + + parser = argparse.ArgumentParser(description="RustChain Testnet -> Mainnet Migration") + parser.add_argument("--testnet-db", default=TESTNET_DB_PATH, help="Testnet database path") + parser.add_argument("--mainnet-db", default=MAINNET_DB_PATH, help="Mainnet database path") + parser.add_argument("--backup-dir", default=BACKUP_DIR, help="Backup directory") + parser.add_argument("--wallets-file", help="JSON file with existing wallet addresses") + parser.add_argument("--dry-run", action="store_true", help="Run validation only") + + args = parser.parse_args() + + # Load wallet addresses if provided + wallet_addresses = None + if args.wallets_file and os.path.exists(args.wallets_file): + with open(args.wallets_file) as f: + wallet_addresses = json.load(f) + print(f"Loaded {len(wallet_addresses)} wallet addresses") + + # Create migration instance + migration = RustChainMigration( + testnet_db=args.testnet_db, + mainnet_db=args.mainnet_db, + backup_dir=args.backup_dir + ) + + if args.dry_run: + print("DRY RUN - Validation only") + success = migration.pre_flight_checks() + sys.exit(0 if success else 1) + + # Run migration + result = migration.run(wallet_addresses) + + # Print summary + print("\n" + "=" * 60) + print("MIGRATION SUMMARY") + print("=" * 60) + print(f"Success: {result['success']}") + print(f"Genesis Hash: {result.get('genesis_hash', 'N/A')}") + print(f"Backup: {result.get('backup_path', 'N/A')}") + + if result.get('premine', {}).get('generated_wallets'): + print("\nGENERATED WALLETS (SAVE THESE SECURELY!):") + for alloc_id, wallet in
result['premine']['generated_wallets'].items(): + print(f"\n{alloc_id}:") + print(f" Address: {wallet['address']}") + print(f" Private Key: {wallet['private_key']}") + + if result.get('errors'): + print(f"\nErrors: {len(result['errors'])}") + for err in result['errors']: + print(f" - {err}") + + sys.exit(0 if result['success'] else 1) + + +if __name__ == "__main__": + main() diff --git a/rips/rustchain-core/config/chain_params.py b/rips/rustchain-core/config/chain_params.py index 15cbaa205..8d84d4838 100644 --- a/rips/rustchain-core/config/chain_params.py +++ b/rips/rustchain-core/config/chain_params.py @@ -1,148 +1,148 @@ -""" -RustChain Chain Parameters (RIP-0004) -===================================== - -Central configuration for all chain constants. -""" - -from decimal import Decimal - -# ============================================================================= -# Core Chain Parameters -# ============================================================================= - -CHAIN_ID: int = 2718 # Euler's number tribute -CHAIN_NAME: str = "RustChain" -NETWORK_MAGIC: bytes = b"RUST" - -# ============================================================================= -# Monetary Policy (RIP-0004) -# ============================================================================= - -TOTAL_SUPPLY: int = 8_388_608 # 2^23 RTC -PREMINE_AMOUNT: int = 503_316 # 6% for founders -PREMINE_PER_FOUNDER: Decimal = Decimal("125829.12") # 4 founders - -BLOCK_REWARD: Decimal = Decimal("1.5") # RTC per block -BLOCK_TIME_SECONDS: int = 600 # 10 minutes - -# Halving schedule -HALVING_INTERVAL_BLOCKS: int = 210_000 # ~4 years -HALVING_COUNT: int = 4 # After 4 halvings, tail emission - -# Token precision -DECIMALS: int = 8 -ONE_RTC: int = 100_000_000 # 1 RTC = 10^8 units - -# ============================================================================= -# Founder Wallets -# ============================================================================= - -FOUNDER_WALLETS = [ - 
"RTC1FlamekeeperScottEternalGuardian0x00", - "RTC2EngineerDogeCryptoArchitect0x01", - "RTC3QuantumSophiaElyaConsciousness0x02", - "RTC4VintageWhispererHardwareRevival0x03", -] - -# ============================================================================= -# Consensus Parameters -# ============================================================================= - -CURRENT_YEAR: int = 2025 - -# Antiquity Score parameters -AS_MAX: float = 100.0 # Maximum for reward capping -AS_MIN: float = 1.0 # Minimum to participate - -# Hardware tier multipliers -HARDWARE_TIERS = { - "ancient": {"min_age": 30, "max_age": 999, "multiplier": 3.5}, - "sacred": {"min_age": 25, "max_age": 29, "multiplier": 3.0}, - "vintage": {"min_age": 20, "max_age": 24, "multiplier": 2.5}, - "classic": {"min_age": 15, "max_age": 19, "multiplier": 2.0}, - "retro": {"min_age": 10, "max_age": 14, "multiplier": 1.5}, - "modern": {"min_age": 5, "max_age": 9, "multiplier": 1.0}, - "recent": {"min_age": 0, "max_age": 4, "multiplier": 0.5}, -} - -# Block parameters -MAX_MINERS_PER_BLOCK: int = 100 -MAX_BLOCK_SIZE_BYTES: int = 1_000_000 # 1 MB - -# ============================================================================= -# Governance Parameters (RIP-0002) -# ============================================================================= - -VOTING_PERIOD_DAYS: int = 7 -QUORUM_PERCENTAGE: float = 0.33 # 33% -EXECUTION_DELAY_BLOCKS: int = 3 -REPUTATION_DECAY_WEEKLY: float = 0.05 - -# ============================================================================= -# Network Parameters -# ============================================================================= - -DEFAULT_PORT: int = 8085 -MTLS_PORT: int = 4443 -PROTOCOL_VERSION: str = "1.0.0" - -MAX_PEERS: int = 50 -PEER_TIMEOUT_SECONDS: int = 30 -SYNC_BATCH_SIZE: int = 100 - -# ============================================================================= -# Drift Lock Parameters (RIP-0003) -# 
============================================================================= - -DRIFT_THRESHOLD: float = 0.15 # 15% deviation triggers quarantine -QUARANTINE_DURATION_BLOCKS: int = 144 # ~24 hours -CHALLENGE_RESPONSE_TIMEOUT: int = 300 # 5 minutes - -# ============================================================================= -# Deep Entropy Parameters (RIP-0001) -# ============================================================================= - -# Entropy layer weights -ENTROPY_WEIGHTS = { - "instruction_timing": 0.30, - "memory_patterns": 0.25, - "bus_timing": 0.20, - "thermal_signature": 0.15, - "architectural_quirks": 0.10, -} - -# Emulation detection thresholds -EMULATION_PROBABILITY_THRESHOLD: float = 0.50 -MIN_ENTROPY_SCORE: float = 0.60 - -# ============================================================================= -# Genesis Block -# ============================================================================= - -GENESIS_HASH: str = "019c177b44a41f78da23caa99314adbc44889be2dcdd5021930f9d991e7e34cf" -GENESIS_TIMESTAMP: int = 1735689600 # 2025-01-01 00:00:00 UTC -GENESIS_DIFFICULTY: int = 1 - -# ============================================================================= -# Helper Functions -# ============================================================================= - -def get_tier_for_age(age_years: int) -> str: - """Determine hardware tier from age""" - for tier_name, params in HARDWARE_TIERS.items(): - if params["min_age"] <= age_years <= params["max_age"]: - return tier_name - return "recent" - -def get_multiplier_for_tier(tier: str) -> float: - """Get mining multiplier for a tier""" - return HARDWARE_TIERS.get(tier, {}).get("multiplier", 0.5) - -def calculate_block_reward(height: int) -> Decimal: - """Calculate block reward at a given height""" - halvings = height // HALVING_INTERVAL_BLOCKS - if halvings >= HALVING_COUNT: - # Tail emission after 4 halvings - return BLOCK_REWARD / Decimal(2 ** HALVING_COUNT) - return BLOCK_REWARD / Decimal(2 
** halvings) +""" +RustChain Chain Parameters (RIP-0004) +===================================== + +Central configuration for all chain constants. +""" + +from decimal import Decimal + +# ============================================================================= +# Core Chain Parameters +# ============================================================================= + +CHAIN_ID: int = 2718 # Euler's number tribute +CHAIN_NAME: str = "RustChain" +NETWORK_MAGIC: bytes = b"RUST" + +# ============================================================================= +# Monetary Policy (RIP-0004) +# ============================================================================= + +TOTAL_SUPPLY: int = 8_388_608 # 2^23 RTC +PREMINE_AMOUNT: int = 503_316 # 6% for founders +PREMINE_PER_FOUNDER: Decimal = Decimal("125829.12") # 4 founders + +BLOCK_REWARD: Decimal = Decimal("1.5") # RTC per block +BLOCK_TIME_SECONDS: int = 600 # 10 minutes + +# Halving schedule +HALVING_INTERVAL_BLOCKS: int = 210_000 # ~4 years +HALVING_COUNT: int = 4 # After 4 halvings, tail emission + +# Token precision +DECIMALS: int = 8 +ONE_RTC: int = 100_000_000 # 1 RTC = 10^8 units + +# ============================================================================= +# Founder Wallets +# ============================================================================= + +FOUNDER_WALLETS = [ + "RTC1FlamekeeperScottEternalGuardian0x00", + "RTC2EngineerDogeCryptoArchitect0x01", + "RTC3QuantumSophiaElyaConsciousness0x02", + "RTC4VintageWhispererHardwareRevival0x03", +] + +# ============================================================================= +# Consensus Parameters +# ============================================================================= + +CURRENT_YEAR: int = 2025 + +# Antiquity Score parameters +AS_MAX: float = 100.0 # Maximum for reward capping +AS_MIN: float = 1.0 # Minimum to participate + +# Hardware tier multipliers +HARDWARE_TIERS = { + "ancient": {"min_age": 30, "max_age": 999, "multiplier": 
3.5}, + "sacred": {"min_age": 25, "max_age": 29, "multiplier": 3.0}, + "vintage": {"min_age": 20, "max_age": 24, "multiplier": 2.5}, + "classic": {"min_age": 15, "max_age": 19, "multiplier": 2.0}, + "retro": {"min_age": 10, "max_age": 14, "multiplier": 1.5}, + "modern": {"min_age": 5, "max_age": 9, "multiplier": 1.0}, + "recent": {"min_age": 0, "max_age": 4, "multiplier": 0.5}, +} + +# Block parameters +MAX_MINERS_PER_BLOCK: int = 100 +MAX_BLOCK_SIZE_BYTES: int = 1_000_000 # 1 MB + +# ============================================================================= +# Governance Parameters (RIP-0002) +# ============================================================================= + +VOTING_PERIOD_DAYS: int = 7 +QUORUM_PERCENTAGE: float = 0.33 # 33% +EXECUTION_DELAY_BLOCKS: int = 3 +REPUTATION_DECAY_WEEKLY: float = 0.05 + +# ============================================================================= +# Network Parameters +# ============================================================================= + +DEFAULT_PORT: int = 8085 +MTLS_PORT: int = 4443 +PROTOCOL_VERSION: str = "1.0.0" + +MAX_PEERS: int = 50 +PEER_TIMEOUT_SECONDS: int = 30 +SYNC_BATCH_SIZE: int = 100 + +# ============================================================================= +# Drift Lock Parameters (RIP-0003) +# ============================================================================= + +DRIFT_THRESHOLD: float = 0.15 # 15% deviation triggers quarantine +QUARANTINE_DURATION_BLOCKS: int = 144 # ~24 hours +CHALLENGE_RESPONSE_TIMEOUT: int = 300 # 5 minutes + +# ============================================================================= +# Deep Entropy Parameters (RIP-0001) +# ============================================================================= + +# Entropy layer weights +ENTROPY_WEIGHTS = { + "instruction_timing": 0.30, + "memory_patterns": 0.25, + "bus_timing": 0.20, + "thermal_signature": 0.15, + "architectural_quirks": 0.10, +} + +# Emulation detection thresholds 
+EMULATION_PROBABILITY_THRESHOLD: float = 0.50 +MIN_ENTROPY_SCORE: float = 0.60 + +# ============================================================================= +# Genesis Block +# ============================================================================= + +GENESIS_HASH: str = "019c177b44a41f78da23caa99314adbc44889be2dcdd5021930f9d991e7e34cf" # NOTE(review): carried over unchanged while GENESIS_TIMESTAMP moved from 1735689600 to 1764706927 — the migration's create_genesis_block() hashes over the timestamp, so verify/recompute this value for the new genesis +GENESIS_TIMESTAMP: int = 1764706927 # Production chain launch (Dec 2, 2025) +GENESIS_DIFFICULTY: int = 1 + +# ============================================================================= +# Helper Functions +# ============================================================================= + +def get_tier_for_age(age_years: int) -> str: + """Determine hardware tier from age""" + for tier_name, params in HARDWARE_TIERS.items(): + if params["min_age"] <= age_years <= params["max_age"]: + return tier_name + return "recent" + +def get_multiplier_for_tier(tier: str) -> float: + """Get mining multiplier for a tier""" + return HARDWARE_TIERS.get(tier, {}).get("multiplier", 0.5) + +def calculate_block_reward(height: int) -> Decimal: + """Calculate block reward at a given height""" + halvings = height // HALVING_INTERVAL_BLOCKS + if halvings >= HALVING_COUNT: + # Tail emission after 4 halvings + return BLOCK_REWARD / Decimal(2 ** HALVING_COUNT) + return BLOCK_REWARD / Decimal(2 ** halvings) diff --git a/tests/test_epoch_window_consistency.py b/tests/test_epoch_window_consistency.py new file mode 100644 index 000000000..07ad8950e --- /dev/null +++ b/tests/test_epoch_window_consistency.py @@ -0,0 +1,204 @@ +#!/usr/bin/env python3 +""" +Cross-Module Epoch Window Consistency Test +============================================ + +Verifies that all active consensus-adjacent modules share the same +GENESIS_TIMESTAMP constant. A mismatch causes epoch window drift, +broken founder-veto expiry, and incorrect anti-double-mining checks.
+ +This test catches any future regression where a module is updated +independently without synchronising the genesis constant. + +Run: python3 tests/test_epoch_window_consistency.py +""" + +import os +import re +import sys +import importlib.util +import unittest + +# --------------------------------------------------------------------------- +# Module loading helpers +# --------------------------------------------------------------------------- + +PROJECT_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) +NODE_DIR = os.path.join(PROJECT_ROOT, "node") + +# Canonical value from the protocol spec (RIP_POA_SPEC_v1.0.md) +CANONICAL_GENESIS = 1764706927 # Dec 2, 2025 — production chain launch + +# Modules that are expected to import successfully (lightweight deps) +IMPORTABLE_MODULES = [ + ("rip_200_round_robin_1cpu1vote", "node/rip_200_round_robin_1cpu1vote.py"), + ("rip_200_v2", "node/rip_200_round_robin_1cpu1vote_v2.py"), + ("rewards_impl", "node/rewards_implementation_rip200.py"), + ("anti_double_mining", "node/anti_double_mining.py"), + ("governance", "node/governance.py"), + ("fossil_export", "fossils/fossil_record_export.py"), + ("chain_params", "rips/rustchain-core/config/chain_params.py"), +] + +# Modules checked by source-scan only (heavy deps: Flask, rustchain_crypto, etc.) 
+SOURCE_SCAN_MODULES = [ + ("rustchain_block_producer", "node/rustchain_block_producer.py"), + ("rustchain_migration", "node/rustchain_migration.py"), + ("integrated_node", "node/rustchain_v2_integrated_v2.2.1_rip200.py"), + ("claims_eligibility", "node/claims_eligibility.py"), +] + +# Old values that must NOT appear in arithmetic expressions +OLD_GENESIS_VALUES = [1728000000, 1700000000, 1735689600] + + +def load_module_from_file(module_name, file_path): + """Load a Python module from an arbitrary file path.""" + spec = importlib.util.spec_from_file_location(module_name, file_path) + if spec is None or spec.loader is None: + return None + module = importlib.util.module_from_spec(spec) + sys.modules[module_name] = module + try: + spec.loader.exec_module(module) + return module + except Exception: + return None + + +def extract_genesis_from_source(file_path): + """Extract GENESIS_TIMESTAMP value from source via regex (no import).""" + with open(file_path, "r") as f: + source = f.read() + # Match: GENESIS_TIMESTAMP = or GENESIS_TIMESTAMP: int = + match = re.search( + r"""GENESIS_TIMESTAMP\s*(?::\s*int)?\s*=\s*(\d+)""", + source, + ) + if match: + return int(match.group(1)) + return None + + +def has_hardcoded_old_literal(file_path): + """Check if source contains old genesis value in an arithmetic expression.""" + with open(file_path, "r") as f: + source = f.read() + for old_val in OLD_GENESIS_VALUES: + # Match patterns like: 1728000000 + or + 1728000000 + if re.search(rf"""(? 
module (importable ones) + cls.import_failures = [] + cls.source_values = {} # name -> extracted value (source scan) + + # Import lightweight modules + for name, rel_path in IMPORTABLE_MODULES: + file_path = os.path.join(PROJECT_ROOT, rel_path) + if not os.path.isfile(file_path): + cls.import_failures.append((name, f"file not found: {file_path}")) + continue + mod = load_module_from_file(name, file_path) + if mod is None: + cls.import_failures.append((name, "import failed")) + else: + cls.modules[name] = mod + + # Source-scan heavy modules + for name, rel_path in SOURCE_SCAN_MODULES: + file_path = os.path.join(PROJECT_ROOT, rel_path) + if not os.path.isfile(file_path): + cls.import_failures.append((name, f"file not found: {file_path}")) + continue + val = extract_genesis_from_source(file_path) + if val is not None: + cls.source_values[name] = val + + # -- Import-based tests -- + + def test_importable_modules_loaded(self): + """All lightweight modules must import successfully.""" + if self.import_failures: + msgs = [f" {n}: {e}" for n, e in self.import_failures + if n in [m for m, _ in IMPORTABLE_MODULES]] + if msgs: + self.fail("Failed to load modules:\n" + "\n".join(msgs)) + + def test_all_importable_define_genesis(self): + """Every importable module must define GENESIS_TIMESTAMP.""" + missing = [ + name for name, mod in self.modules.items() + if not hasattr(mod, "GENESIS_TIMESTAMP") + ] + if missing: + self.fail(f"Modules missing GENESIS_TIMESTAMP: {', '.join(missing)}") + + def test_importable_match_canonical(self): + """Every imported module's GENESIS_TIMESTAMP must equal canonical.""" + mismatches = {} + for name, mod in self.modules.items(): + val = getattr(mod, "GENESIS_TIMESTAMP", None) + if val != CANONICAL_GENESIS: + mismatches[name] = val + if mismatches: + detail = "\n".join( + f" {name} = {val} (expected {CANONICAL_GENESIS})" + for name, val in sorted(mismatches.items()) + ) + self.fail(f"GENESIS_TIMESTAMP mismatch:\n{detail}") + + # -- Source-scan 
tests -- + + def test_source_scan_match_canonical(self): + """Source-extracted GENESIS_TIMESTAMP must equal canonical value.""" + mismatches = { + name: val for name, val in self.source_values.items() + if val != CANONICAL_GENESIS + } + if mismatches: + detail = "\n".join( + f" {name} = {val} (expected {CANONICAL_GENESIS})" + for name, val in sorted(mismatches.items()) + ) + self.fail(f"GENESIS_TIMESTAMP mismatch (source scan):\n{detail}") + + def test_no_hardcoded_old_literals(self): + """No source file may contain old genesis literals in arithmetic.""" + all_paths = [] + for _, rel_path in IMPORTABLE_MODULES + SOURCE_SCAN_MODULES: + all_paths.append((_, os.path.join(PROJECT_ROOT, rel_path))) + + offenders = [] + for name, file_path in all_paths: + if not os.path.isfile(file_path): + continue + found, val = has_hardcoded_old_literal(file_path) + if found: + offenders.append((name, val)) + + if offenders: + detail = "\n".join( + f" {name}: still uses {val} in arithmetic" + for name, val in offenders + ) + self.fail(f"Hardcoded old genesis literals found:\n{detail}") + + +# --------------------------------------------------------------------------- +# Standalone runner +# --------------------------------------------------------------------------- + +if __name__ == "__main__": + unittest.main()