Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
72 changes: 48 additions & 24 deletions main.py
Original file line number Diff line number Diff line change
Expand Up @@ -30,7 +30,7 @@

logger = logging.getLogger(__name__)

BURN_ADDRESS = "0" * 40
MAX_TRANSACTIONS_PER_BLOCK = 100


# ──────────────────────────────────────────────
Expand All @@ -49,7 +49,9 @@ def create_wallet():

def mine_and_process_block(chain, mempool, miner_pk):
"""Mine pending transactions into a new block."""
pending_txs = mempool.get_transactions_for_block()
pending_txs = mempool.get_transactions_for_block(
chain.state, max_count=MAX_TRANSACTIONS_PER_BLOCK
)
if not pending_txs:
logger.info("Mempool is empty — nothing to mine.")
return None
Expand All @@ -58,11 +60,13 @@ def mine_and_process_block(chain, mempool, miner_pk):
index=chain.last_block.index + 1,
previous_hash=chain.last_block.hash,
transactions=pending_txs,
miner=miner_pk,
)

mined_block = mine_block(block)

if chain.add_block(mined_block):
mempool.remove_transactions(pending_txs)
logger.info("✅ Block #%d mined and added (%d txs)", mined_block.index, len(pending_txs))
chain.state.credit_mining_reward(miner_pk)
return mined_block
Expand All @@ -83,45 +87,66 @@ async def handler(data):
payload = data.get("data")

if msg_type == "sync":
if not isinstance(payload, dict):
logger.warning("Received malformed sync payload")
return False
# Merge remote state into local state (for accounts we don't have yet)
remote_accounts = payload.get("accounts", {})
for addr, acc in remote_accounts.items():
if addr not in chain.state.accounts:
chain.state.accounts[addr] = acc
logger.info("🔄 Synced account %s... (balance=%d)", addr[:12], acc.get("balance", 0))
logger.info("🔄 State sync complete — %d accounts", len(chain.state.accounts))
return True

elif msg_type == "tx":
if not isinstance(payload, dict):
logger.warning("Received malformed tx payload")
return False
tx = Transaction(**payload)
if mempool.add_transaction(tx):
logger.info("📥 Received tx from %s... (amount=%s)", tx.sender[:8], tx.amount)
return True
return False

elif msg_type == "block":
txs_raw = payload.pop("transactions", [])
block_hash = payload.pop("hash", None)
if not isinstance(payload, dict):
logger.warning("Received malformed block payload")
return False
payload_data = dict(payload)

txs_raw = payload_data.get("transactions", [])
block_hash = payload_data.get("hash")
transactions = [Transaction(**t) for t in txs_raw]

block = Block(
index=payload["index"],
previous_hash=payload["previous_hash"],
index=payload_data["index"],
previous_hash=payload_data["previous_hash"],
transactions=transactions,
timestamp=payload.get("timestamp"),
difficulty=payload.get("difficulty"),
timestamp=payload_data.get("timestamp"),
difficulty=payload_data.get("difficulty"),
miner=payload_data.get("miner"),
)
block.nonce = payload.get("nonce", 0)
block.nonce = payload_data.get("nonce", 0)
block.hash = block_hash

if chain.add_block(block):
logger.info("📥 Received Block #%d — added to chain", block.index)

# Apply mining reward for the remote miner (burn address as placeholder)
miner = payload.get("miner", BURN_ADDRESS)
chain.state.credit_mining_reward(miner)
# Reward only when miner is authenticated as part of hashed block data.
if block.miner:
chain.state.credit_mining_reward(block.miner)
else:
logger.warning("Received block without authenticated miner; reward not credited")

# Drain matching txs from mempool so they aren't re-mined
mempool.get_transactions_for_block()
# Drop only confirmed transactions so higher nonces can remain queued.
mempool.remove_transactions(block.transactions)
return True
else:
logger.warning("📥 Received Block #%s — rejected", block.index)
return False

return False

return handler

Expand All @@ -147,7 +172,7 @@ async def handler(data):
"""


async def cli_loop(sk, pk, chain, mempool, network, nonce_counter):
async def cli_loop(sk, pk, chain, mempool, network):
"""Read commands from stdin asynchronously."""
loop = asyncio.get_event_loop()
print(HELP_TEXT)
Expand Down Expand Up @@ -179,18 +204,23 @@ async def cli_loop(sk, pk, chain, mempool, network, nonce_counter):
print(" Usage: send <receiver_address> <amount>")
continue
receiver = parts[1]
if len(receiver) != len(pk) or not re.fullmatch(r"[0-9a-fA-F]+", receiver):
print(" Receiver address must be a valid hex public key.")
continue
try:
amount = int(parts[2])
except ValueError:
print(" Amount must be an integer.")
continue
if amount <= 0:
print(" Amount must be greater than 0.")
continue

nonce = nonce_counter[0]
nonce = chain.state.get_account(pk)["nonce"]
tx = Transaction(sender=pk, receiver=receiver, amount=amount, nonce=nonce)
tx.sign(sk)

if mempool.add_transaction(tx):
nonce_counter[0] += 1
await network.broadcast_transaction(tx)
print(f" ✅ Tx sent: {amount} coins → {receiver[:12]}...")
else:
Expand All @@ -201,9 +231,6 @@ async def cli_loop(sk, pk, chain, mempool, network, nonce_counter):
mined = mine_and_process_block(chain, mempool, pk)
if mined:
await network.broadcast_block(mined)
# Sync local nonce from chain state
acc = chain.state.get_account(pk)
nonce_counter[0] = acc.get("nonce", 0)

# ── peers ──
elif cmd == "peers":
Expand Down Expand Up @@ -288,11 +315,8 @@ async def on_peer_connected(writer):
except ValueError:
logger.error("Invalid --connect format. Use host:port")

# Nonce counter kept as a mutable list so the CLI closure can mutate it
nonce_counter = [0]

try:
await cli_loop(sk, pk, chain, mempool, network, nonce_counter)
await cli_loop(sk, pk, chain, mempool, network)
finally:
await network.stop()

Expand Down
42 changes: 31 additions & 11 deletions minichain/block.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
import json
from typing import List, Optional
from .transaction import Transaction
from .serialization import canonical_json_hash

def _sha256(data: str) -> str:
return hashlib.sha256(data.encode()).hexdigest()
Expand All @@ -12,11 +13,8 @@ def _calculate_merkle_root(transactions: List[Transaction]) -> Optional[str]:
if not transactions:
return None

# Hash each transaction deterministically
tx_hashes = [
_sha256(json.dumps(tx.to_dict(), sort_keys=True))
for tx in transactions
]
# Keep legacy leaf format for compatibility with existing blocks.
tx_hashes = [_transaction_leaf(tx) for tx in transactions]

# Build Merkle tree
while len(tx_hashes) > 1:
Expand All @@ -33,6 +31,26 @@ def _calculate_merkle_root(transactions: List[Transaction]) -> Optional[str]:
return tx_hashes[0]


def _transaction_leaf(tx: Transaction) -> str:
"""Return a deterministic transaction leaf hash with compatibility fallback."""
# Prefer an explicit legacy-compatible leaf method if present.
if hasattr(tx, "get_leaf_digest") and callable(getattr(tx, "get_leaf_digest")):
value = tx.get_leaf_digest()
if isinstance(value, str):
return value
if hasattr(tx, "digest"):
value = getattr(tx, "digest")
if isinstance(value, str):
return value

# Legacy default used in prior versions.
if hasattr(tx, "to_dict") and callable(getattr(tx, "to_dict")):
return _sha256(json.dumps(tx.to_dict(), sort_keys=True))

# Final fallback for newer transaction shapes.
return tx.tx_id


class Block:
def __init__(
self,
Expand All @@ -41,6 +59,7 @@ def __init__(
transactions: Optional[List[Transaction]] = None,
timestamp: Optional[float] = None,
difficulty: Optional[int] = None,
miner: Optional[str] = None,
):
self.index = index
self.previous_hash = previous_hash
Expand All @@ -54,6 +73,7 @@ def __init__(
)

self.difficulty: Optional[int] = difficulty
self.miner: Optional[str] = miner
self.nonce: int = 0
self.hash: Optional[str] = None

Expand All @@ -64,14 +84,18 @@ def __init__(
# HEADER (used for mining)
# -------------------------
def to_header_dict(self):
return {
header = {
"index": self.index,
"previous_hash": self.previous_hash,
"merkle_root": self.merkle_root,
"timestamp": self.timestamp,
"difficulty": self.difficulty,
"nonce": self.nonce,
}
# Include miner only when present so old-format headers stay valid.
if self.miner is not None:
header["miner"] = self.miner
return header

# -------------------------
# BODY (transactions only)
Expand All @@ -97,8 +121,4 @@ def to_dict(self):
# HASH CALCULATION
# -------------------------
def compute_hash(self) -> str:
    """Hash the canonical JSON form of this block's header."""
    header = self.to_header_dict()
    return canonical_json_hash(header)
18 changes: 16 additions & 2 deletions minichain/contract.py
Original file line number Diff line number Diff line change
@@ -1,10 +1,16 @@
import logging
import multiprocessing
import ast
import os

import json # Moved to module-level import
logger = logging.getLogger(__name__)


def _allow_unrestricted_contracts() -> bool:
value = os.getenv("MINICHAIN_ALLOW_UNRESTRICTED_CONTRACTS", "")
return value.lower() in {"1", "true", "yes", "on"}

def _safe_exec_worker(code, globals_dict, context_dict, result_queue):
"""
Worker function to execute contract code in a separate process.
Expand All @@ -19,8 +25,16 @@ def _safe_exec_worker(code, globals_dict, context_dict, result_queue):
except ImportError:
logger.warning("Resource module not available. Contract will run without OS-level resource limits.")
except (OSError, ValueError) as e:
logger.error(f"Failed to set resource limits: {e}")
raise RuntimeError(f"Failed to set resource limits: {e}")
if _allow_unrestricted_contracts():
logger.warning(
"Failed to set resource limits but unsafe mode is enabled: %s",
e,
)
else:
raise RuntimeError(
"Failed to set contract resource limits; refusing to execute "
"without explicit MINICHAIN_ALLOW_UNRESTRICTED_CONTRACTS=1"
) from e

exec(code, globals_dict, context_dict)
# Return the updated storage
Expand Down
Loading