diff --git a/.github/workflows/onchain.yml b/.github/workflows/onchain.yml index 2d341333..6a1c27e9 100644 --- a/.github/workflows/onchain.yml +++ b/.github/workflows/onchain.yml @@ -23,7 +23,7 @@ jobs: strategy: matrix: os: ["ubuntu-latest", "windows-2025"] - type: ["etherscan", "sourcify"] + type: ["etherscan", "sourcify", "blockscout"] steps: - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6.0.2 with: diff --git a/.gitignore b/.gitignore index 4b3890f9..64493f5c 100644 --- a/.gitignore +++ b/.gitignore @@ -12,3 +12,5 @@ package-lock.json result env/ .coverage* +.env +crytic-export/ diff --git a/README.md b/README.md index 33cf6d59..ae71b4d0 100644 --- a/README.md +++ b/README.md @@ -14,6 +14,7 @@ Library to help smart contract compilation. It includes support for: - [Etherlime](https://github.com/LimeChain/etherlime) - [Sourcify](https://sourcify.dev/) - [Etherscan](https://etherscan.io/) (including several alt-chain explorers and testnets) +- [Blockscout](https://www.blockscout.com/) ([750+ chains](https://chains.blockscout.com/)) - [Truffle](https://truffleframework.com/) - [Waffle](https://github.com/EthWorks/Waffle) diff --git a/crytic_compile/cryticparser/cryticparser.py b/crytic_compile/cryticparser/cryticparser.py index 8c14884d..e77baac2 100755 --- a/crytic_compile/cryticparser/cryticparser.py +++ b/crytic_compile/cryticparser/cryticparser.py @@ -72,7 +72,7 @@ def init(parser: ArgumentParser) -> None: _init_brownie(parser) _init_dapp(parser) _init_etherlime(parser) - _init_etherscan(parser) + _init_explorer(parser) _init_waffle(parser) _init_npx(parser) _init_buidler(parser) @@ -299,30 +299,46 @@ def _init_etherlime(parser: ArgumentParser) -> None: ) -def _init_etherscan(parser: ArgumentParser) -> None: - """Init etherscan arguments +def _init_explorer(parser: ArgumentParser) -> None: + """Init block explorer arguments (Etherscan, Blockscout, Sourcify, etc.) 
Args: parser (ArgumentParser): argparser where the cli flags are added """ - group_etherscan = parser.add_argument_group("Etherscan options") - group_etherscan.add_argument( - "--etherscan-only-source-code", + group = parser.add_argument_group("Block explorer options") + group.add_argument( + "--explorer-only-source-code", help="Only compile if the source code is available.", action="store_true", - dest="etherscan_only_source_code", - default=DEFAULTS_FLAG_IN_CONFIG["etherscan_only_source_code"], + dest="explorer_only_source_code", + default=DEFAULTS_FLAG_IN_CONFIG["explorer_only_source_code"], ) - group_etherscan.add_argument( - "--etherscan-only-bytecode", + group.add_argument( + "--explorer-only-bytecode", help="Only looks for bytecode.", action="store_true", - dest="etherscan_only_bytecode", - default=DEFAULTS_FLAG_IN_CONFIG["etherscan_only_bytecode"], + dest="explorer_only_bytecode", + default=DEFAULTS_FLAG_IN_CONFIG["explorer_only_bytecode"], ) - group_etherscan.add_argument( + group.add_argument( + "--explorer-ignore", + help="Ignore block explorer platforms (Etherscan, Blockscout, etc.).", + action="store_true", + dest="explorer_ignore", + default=DEFAULTS_FLAG_IN_CONFIG["explorer_ignore"], + ) + + group.add_argument( + "--explorer-export-directory", + help="Directory in which to save contracts fetched from block explorers.", + action="store", + dest="explorer_export_dir", + default=DEFAULTS_FLAG_IN_CONFIG["explorer_export_directory"], + ) + + group.add_argument( "--etherscan-apikey", help="Etherscan API key.", action="store", @@ -330,20 +346,20 @@ def _init_etherscan(parser: ArgumentParser) -> None: default=DEFAULTS_FLAG_IN_CONFIG["etherscan_api_key"], ) - group_etherscan.add_argument( - "--avax-apikey", - help="Etherscan API key.", + group.add_argument( + "--blockscout-url", + help="Custom Blockscout explorer URL for chains not in the directory.", action="store", - dest="avax_api_key", - default=DEFAULTS_FLAG_IN_CONFIG["etherscan_api_key"], + 
dest="blockscout_url", + default=DEFAULTS_FLAG_IN_CONFIG["blockscout_url"], ) - group_etherscan.add_argument( - "--etherscan-export-directory", - help="Directory in which to save the analyzed contracts.", + group.add_argument( + "--avax-apikey", + help="Avalanche (Snowtrace) API key.", action="store", - dest="etherscan_export_dir", - default=DEFAULTS_FLAG_IN_CONFIG["etherscan_export_directory"], + dest="avax_api_key", + default=DEFAULTS_FLAG_IN_CONFIG["etherscan_api_key"], ) diff --git a/crytic_compile/cryticparser/defaults.py b/crytic_compile/cryticparser/defaults.py index f130c493..fdcb895b 100755 --- a/crytic_compile/cryticparser/defaults.py +++ b/crytic_compile/cryticparser/defaults.py @@ -27,10 +27,12 @@ "dapp_ignore_compile": False, "etherlime_ignore_compile": False, "etherlime_compile_arguments": None, - "etherscan_only_source_code": False, - "etherscan_only_bytecode": False, + "explorer_only_source_code": False, + "explorer_only_bytecode": False, + "explorer_ignore": False, + "explorer_export_directory": None, "etherscan_api_key": None, - "etherscan_export_directory": "etherscan-contracts", + "blockscout_url": None, "waffle_ignore_compile": False, "waffle_config_file": None, "npx_disable": False, diff --git a/crytic_compile/platform/all_platforms.py b/crytic_compile/platform/all_platforms.py index 5236c2a0..b234a54c 100644 --- a/crytic_compile/platform/all_platforms.py +++ b/crytic_compile/platform/all_platforms.py @@ -6,6 +6,7 @@ # crytic_compile.py uses dir(all_platforms) to find these classes __all__ = [ "Archive", + "Blockscout", "Brownie", "Buidler", "Dapp", @@ -24,6 +25,7 @@ ] from .archive import Archive +from .blockscout import Blockscout from .brownie import Brownie from .buidler import Buidler from .dapp import Dapp diff --git a/crytic_compile/platform/blockscout.py b/crytic_compile/platform/blockscout.py new file mode 100644 index 00000000..0970f5b2 --- /dev/null +++ b/crytic_compile/platform/blockscout.py @@ -0,0 +1,369 @@ +""" +Blockscout 
platform — fetches verified contracts from Blockscout-based explorers. +""" + +import json +import logging +import os +import re +import urllib.error +import urllib.parse +import urllib.request +from json.decoder import JSONDecodeError +from typing import TYPE_CHECKING, Any + +from crytic_compile.compilation_unit import CompilationUnit +from crytic_compile.compiler.compiler import CompilerVersion +from crytic_compile.platform import solc_standard_json +from crytic_compile.platform.abstract_platform import AbstractPlatform +from crytic_compile.platform.exceptions import InvalidCompilation +from crytic_compile.platform.explorer_utils import ( + EXPLORER_BASE_BYTECODE, + convert_version, + handle_bytecode, + handle_multiple_files, + handle_single_file, +) +from crytic_compile.platform.types import Type + +if TYPE_CHECKING: + from crytic_compile import CryticCompile + +LOGGER = logging.getLogger("CryticCompile") + +# Blockscout API endpoint — explorer_url is the full base URL +BLOCKSCOUT_BASE = "%s/api?module=contract&action=getsourcecode&address=%s" + +# Blockscout chain directory API +BLOCKSCOUT_CHAINS_URL = "https://chains.blockscout.com/api/chains" + +# Chains with Blockscout-compatible explorers not listed in the directory. +# Checked first so they cannot be shadowed by directory conflicts. +BLOCKSCOUT_EXTRA_CHAINS: dict[str, str] = { + "747": "https://evm.flowscan.io", # Flow + "98866": "https://explorer.plume.org", # Plume +} + +# Module-level cache: chain_id (str) -> explorer_url (str) +_blockscout_chains: dict[str, str] | None = None + + +def _fetch_blockscout_chains() -> dict[str, str]: + """Fetch the Blockscout chain directory and return a + chain_id -> explorer_url mapping. + + Results are cached after the first successful call. + + Returns: + Mapping of chain ID strings to explorer base URLs. 
+ """ + global _blockscout_chains # noqa: PLW0603 + if _blockscout_chains is not None: + return _blockscout_chains + + try: + req = urllib.request.Request( + BLOCKSCOUT_CHAINS_URL, + headers={"User-Agent": "crytic-compile/0"}, + ) + with urllib.request.urlopen(req, timeout=10) as response: + data = json.loads(response.read()) + except (urllib.error.URLError, json.JSONDecodeError, OSError) as e: + LOGGER.warning("Failed to fetch Blockscout chain list: %s", e) + _blockscout_chains = {} + return _blockscout_chains + + chains: dict[str, str] = {} + for chain_id, info in data.items(): + explorers = info.get("explorers", []) + if explorers: + url = explorers[0].get("url", "").rstrip("/") + if url: + chains[chain_id] = url + + # Extra chains take priority over the directory (avoids conflicts + # like chain 747 mapping to Alvey instead of Flow). + chains.update(BLOCKSCOUT_EXTRA_CHAINS) + + _blockscout_chains = chains + return _blockscout_chains + + +def _normalize_blockscout_result(result: dict[str, Any]) -> dict[str, Any]: + """Normalize a Blockscout API result to Etherscan field conventions. + + Blockscout differs from Etherscan in field names and value formats. + This converts them so the compilation pipeline can work unchanged. + + Args: + result: Raw result dict from a Blockscout getsourcecode response. + + Returns: + dict: Normalized result with Etherscan-compatible field names and values. 
+ """ + normalized = dict(result) + + # OptimizationUsed: "true"/"false" -> "1"/"0" + if "OptimizationUsed" in normalized: + normalized["OptimizationUsed"] = "1" if normalized["OptimizationUsed"] == "true" else "0" + + # OptimizationRuns (int) -> Runs (str) + if "OptimizationRuns" in normalized and "Runs" not in normalized: + normalized["Runs"] = str(normalized["OptimizationRuns"]) + + # IsProxy -> Proxy ("1"/"0") + Implementation + if "IsProxy" in normalized: + normalized["Proxy"] = "1" if normalized["IsProxy"] == "true" else "0" + if normalized["Proxy"] == "1": + normalized["Implementation"] = normalized.get("ImplementationAddress", "") + + # Reconstruct SourceCode as a multi-file JSON blob from FileName + AdditionalSources. + # Blockscout stores the main file in SourceCode with extras in AdditionalSources, + # while Etherscan encodes everything as {"sources": {filename: {content: ...}}} in SourceCode. + additional = normalized.get("AdditionalSources", []) + main_filename = normalized.get("FileName", "") + if additional or main_filename: + sources: dict[str, dict[str, str]] = {} + if main_filename and normalized.get("SourceCode"): + sources[main_filename] = {"content": normalized["SourceCode"]} + for src in additional: + # Blockscout uses "Filename" (lowercase n) in AdditionalSources entries + src_filename = src.get("Filename") or src.get("FileName", "") + src_code = src.get("SourceCode", "") + if src_filename and src_code: + sources[src_filename] = {"content": src_code} + settings = normalized.get("CompilerSettings", {}) + payload: dict[str, Any] = {"sources": sources} + if settings: + payload["settings"] = settings + normalized["SourceCode"] = json.dumps(payload) + + return normalized + + +class Blockscout(AbstractPlatform): + """ + Blockscout platform — fetches verified contracts from Blockscout-based explorers. 
+ """ + + NAME = "Blockscout" + PROJECT_URL = "https://www.blockscout.com/" + TYPE = Type.BLOCKSCOUT + + def compile(self, crytic_compile: "CryticCompile", **kwargs: str) -> None: + """Run the compilation. + + Args: + crytic_compile: Associated CryticCompile object. + **kwargs: optional arguments. Used "solc", "explorer_only_source_code", + "explorer_only_bytecode", "export_dir". + + Raises: + InvalidCompilation: if the explorer returned an error or results could not be parsed. + """ + target = self._target + match = re.match(r"^blockscout-(\d+):(0x[a-fA-F0-9]{40})$", target) + if not match: + raise InvalidCompilation(f"Invalid Blockscout target: {target}") + + chain_id = match.group(1) + addr = match.group(2) + prefix = f"blockscout-{chain_id}" + + custom_url = kwargs.get("blockscout_url") + if custom_url: + explorer_url = custom_url.rstrip("/") + else: + chains = _fetch_blockscout_chains() + if chain_id not in chains: + raise InvalidCompilation( + f"Chain {chain_id} not found in Blockscout " + f"chain list. Use --blockscout-url to " + f"specify a custom explorer URL, or see " + f"https://chains.blockscout.com/ for " + f"supported chains." 
+ ) + explorer_url = chains[chain_id] + explorer_host = urllib.parse.urlparse(explorer_url).netloc + + source_url = BLOCKSCOUT_BASE % (explorer_url, addr) + bytecode_url = EXPLORER_BASE_BYTECODE % (explorer_host, addr) + + only_source = kwargs.get("explorer_only_source_code", False) + only_bytecode = kwargs.get("explorer_only_bytecode", False) + + export_dir = kwargs.get("export_dir", "crytic-export") + export_dir = os.path.join( + export_dir, kwargs.get("explorer_export_dir") or "blockscout-contracts" + ) + + source_code: str = "" + result: dict[str, Any] = {} + contract_name: str = "" + + if not only_bytecode: + req = urllib.request.Request( + source_url, + headers={ + "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/98.0.4758.80 Safari/537.36 crytic-compile/0" + }, + ) + with urllib.request.urlopen(req) as response: + html = response.read() + + info = json.loads(html) + + if "message" not in info: + LOGGER.error("Incorrect Blockscout request") + raise InvalidCompilation("Incorrect Blockscout request " + source_url) + + if not info["message"].startswith("OK"): + LOGGER.error("Contract has no public source code") + raise InvalidCompilation("Contract has no public source code: " + source_url) + + if "result" not in info: + LOGGER.error("Contract has no public source code") + raise InvalidCompilation("Contract has no public source code: " + source_url) + + result = _normalize_blockscout_result(info["result"][0]) + + if "ABI" in result and "Contract source code not verified" in str(result["ABI"]): + LOGGER.error("Contract has no public source code") + raise InvalidCompilation("Contract has no public source code: " + source_url) + + # Assert to help mypy + assert isinstance(result["SourceCode"], str) + assert isinstance(result["ContractName"], str) + source_code = result["SourceCode"] + contract_name = result["ContractName"] + + if source_code == "" and not only_source: + LOGGER.info("Source code not available, try 
to fetch the bytecode only") + req = urllib.request.Request(bytecode_url, headers={"User-Agent": "Mozilla/5.0"}) + with urllib.request.urlopen(req) as response: + html = response.read() + handle_bytecode(crytic_compile, target, html) + return + + if source_code == "": + LOGGER.error("Contract has no public source code") + raise InvalidCompilation("Contract has no public source code: " + source_url) + + if not os.path.exists(export_dir): + os.makedirs(export_dir) + + # Assert to help mypy + assert isinstance(result["CompilerVersion"], str) + compiler_version = re.findall(r"\d+\.\d+\.\d+", convert_version(result["CompilerVersion"]))[ + 0 + ] + + evm_version: str | None = None + if "EVMVersion" in result: + assert isinstance(result["EVMVersion"], str) + evm_version = ( + result["EVMVersion"] + if result["EVMVersion"].lower() not in ("default", "") + else None + ) + + optimization_used: bool = result["OptimizationUsed"] == "1" + optimize_runs = None + if optimization_used: + optimize_runs = int(result["Runs"]) + + working_dir: str | None = None + remappings: list[str] | None = None + dict_source_code: dict | None = None + + try: + # Etherscan wraps multi-file source in double braces: {{ content }} + dict_source_code = json.loads(source_code[1:-1]) + assert isinstance(dict_source_code, dict) + filenames, working_dir, remappings = handle_multiple_files( + dict_source_code, addr, prefix, contract_name, export_dir + ) + except JSONDecodeError: + try: + # _normalize_blockscout_result produces a single-brace JSON: { content } + dict_source_code = json.loads(source_code) + assert isinstance(dict_source_code, dict) + filenames, working_dir, remappings = handle_multiple_files( + dict_source_code, addr, prefix, contract_name, export_dir + ) + except JSONDecodeError: + filenames = [ + handle_single_file(source_code, addr, prefix, contract_name, export_dir) + ] + + via_ir_enabled: bool | None = None + if isinstance(dict_source_code, dict): + via_ir_enabled = 
dict_source_code.get("settings", {}).get("viaIR", None) + + compilation_unit = CompilationUnit(crytic_compile, contract_name) + compilation_unit.compiler_version = CompilerVersion( + compiler=kwargs.get("solc", "solc"), + version=compiler_version, + optimized=optimization_used, + optimize_runs=optimize_runs, + ) + compilation_unit.compiler_version.look_for_installed_version() + + if result.get("Proxy") == "1" and result.get("Implementation"): + implementation = f"{prefix}:{result['Implementation']}" + compilation_unit.implementation_addresses.add(implementation) + + solc_standard_json.standalone_compile( + filenames, + compilation_unit, + working_dir=working_dir, + remappings=remappings, + evm_version=evm_version, + via_ir=via_ir_enabled, + ) + + metadata_config = { + "solc_remaps": remappings if remappings else {}, + "solc_solcs_select": compiler_version, + "solc_args": " ".join( + filter( + None, + [ + "--via-ir" if via_ir_enabled else "", + "--optimize --optimize-runs " + str(optimize_runs) if optimize_runs else "", + "--evm-version " + evm_version if evm_version else "", + ], + ) + ), + } + + with open( + os.path.join(working_dir if working_dir else export_dir, "crytic_compile.config.json"), + "w", + encoding="utf-8", + ) as f: + json.dump(metadata_config, f) + + def clean(self, **_kwargs: str) -> None: + pass + + @staticmethod + def is_supported(target: str, **kwargs: str) -> bool: + """Check if the target is a Blockscout-hosted contract. + + Args: + target: path/target string. + **kwargs: optional arguments. Used "explorer_ignore". + + Returns: + bool: True if the target matches blockscout-:0x
. + """ + if kwargs.get("explorer_ignore", False): + return False + return bool(re.match(r"^blockscout-\d+:0x[a-fA-F0-9]{40}$", target)) + + def is_dependency(self, path: str) -> bool: + return False + + def _guessed_tests(self) -> list[str]: + return [] diff --git a/crytic_compile/platform/etherscan.py b/crytic_compile/platform/etherscan.py index b32d0f72..dd9dda18 100644 --- a/crytic_compile/platform/etherscan.py +++ b/crytic_compile/platform/etherscan.py @@ -8,7 +8,6 @@ import re import urllib.request from json.decoder import JSONDecodeError -from pathlib import Path, PurePosixPath from typing import TYPE_CHECKING from crytic_compile.compilation_unit import CompilationUnit @@ -16,10 +15,14 @@ from crytic_compile.platform import solc_standard_json from crytic_compile.platform.abstract_platform import AbstractPlatform from crytic_compile.platform.exceptions import InvalidCompilation +from crytic_compile.platform.explorer_utils import ( + EXPLORER_BASE_BYTECODE, + convert_version, + handle_bytecode, + handle_multiple_files, + handle_single_file, +) from crytic_compile.platform.types import Type -from crytic_compile.utils.naming import Filename - -# Cycle dependency if TYPE_CHECKING: from crytic_compile import CryticCompile @@ -35,9 +38,6 @@ "https://api.etherscan.io/v2/api?chainid=%s&module=contract&action=getsourcecode&address=%s" ) -# Bytecode URL style (for scraping) -ETHERSCAN_BASE_BYTECODE = "https://%s/address/%s#code" - # v1 style scanners SUPPORTED_NETWORK_V1: dict[str, tuple[str, str]] = { # None at this time. 
External tracer instances not operated by Etherscan would be here @@ -106,6 +106,7 @@ "testnet.berachain": ("80069", "testnet.berascan.com"), "swellchain": ("1923", "swellchainscan.io"), "testnet.swellchain": ("1924", "sepolia.swellchainscan.io"), + "monad": ("143", "monadscan.com"), "testnet.monad": ("10143", "testnet.monadscan.com"), "hyperevm": ("999", "hyperevmscan.io"), "katana": ("747474", "katanascan.com"), @@ -139,142 +140,6 @@ def generate_supported_network_v2_list() -> None: print(results) -def _handle_bytecode(crytic_compile: "CryticCompile", target: str, result_b: bytes) -> None: - """Parse the bytecode and populate CryticCompile info - - Args: - crytic_compile (CryticCompile): Associate CryticCompile object - target (str): path to the target - result_b (bytes): text containing the bytecode - """ - - # There is no direct API to get the bytecode from etherscan - # The page changes from time to time, we use for now a simple parsing, it will not be robust - begin = """Search Algorithm">\nSimilar Contracts\n""" - begin += """
\n
0x"""
-    result = result_b.decode("utf8")
-    # Removing everything before the begin string
-    result = result[result.find(begin) + len(begin) :]
-    bytecode = result[: result.find("<")]
-
-    contract_name = f"Contract_{target}"
-
-    contract_filename = Filename(absolute="", relative="", short="", used="")
-
-    compilation_unit = CompilationUnit(crytic_compile, str(target))
-
-    source_unit = compilation_unit.create_source_unit(contract_filename)
-
-    source_unit.add_contract_name(contract_name)
-    compilation_unit.filename_to_contracts[contract_filename].add(contract_name)
-    source_unit.abis[contract_name] = {}
-    source_unit.bytecodes_init[contract_name] = bytecode
-    source_unit.bytecodes_runtime[contract_name] = ""
-    source_unit.srcmaps_init[contract_name] = []
-    source_unit.srcmaps_runtime[contract_name] = []
-
-    compilation_unit.compiler_version = CompilerVersion(
-        compiler="unknown", version="", optimized=False
-    )
-
-    crytic_compile.bytecode_only = True
-
-
-def _handle_single_file(
-    source_code: str, addr: str, prefix: str | None, contract_name: str, export_dir: str
-) -> str:
-    """Handle a result with a single file
-
-    Args:
-        source_code (str): source code
-        addr (str): contract address
-        prefix (Optional[str]): used to separate different chains
-        contract_name (str): contract name
-        export_dir (str): directory where the code will be saved
-
-    Returns:
-        str: filename containing the source code
-    """
-    if prefix:
-        filename = os.path.join(export_dir, f"{addr}{prefix}-{contract_name}.sol")
-    else:
-        filename = os.path.join(export_dir, f"{addr}-{contract_name}.sol")
-
-    with open(filename, "w", encoding="utf8") as file_desc:
-        file_desc.write(source_code)
-
-    return filename
-
-
-def _handle_multiple_files(
-    dict_source_code: dict, addr: str, prefix: str | None, contract_name: str, export_dir: str
-) -> tuple[list[str], str, list[str] | None]:
-    """Handle a result with a multiple files. Generate multiple Solidity files
-
-    Args:
-        dict_source_code (Dict): dict result from etherscan
-        addr (str): contract address
-        prefix (Optional[str]): used to separate different chains
-        contract_name (str): contract name
-        export_dir (str): directory where the code will be saved
-
-    Returns:
-        Tuple[List[str], str]: filesnames, directory, where target_filename is the main file
-
-    Raises:
-        IOError: if the path is outside of the allowed directory
-    """
-    if prefix:
-        directory = os.path.join(export_dir, f"{addr}{prefix}-{contract_name}")
-    else:
-        directory = os.path.join(export_dir, f"{addr}-{contract_name}")
-
-    if "sources" in dict_source_code:
-        # etherscan might return an object with a sources prop, which contains an object with contract names as keys
-        source_codes = dict_source_code["sources"]
-    else:
-        # or etherscan might return an object with contract names as keys
-        source_codes = dict_source_code
-
-    filtered_paths: list[str] = []
-    for filename, source_code in source_codes.items():
-        path_filename = PurePosixPath(filename)
-        # Only keep solidity files
-        if path_filename.suffix not in [".sol", ".vy"]:
-            continue
-
-        # https://etherscan.io/address/0x19bb64b80cbf61e61965b0e5c2560cc7364c6546#code has an import of erc721a/contracts/ERC721A.sol
-        # if the full path is lost then won't compile
-        if "contracts" == path_filename.parts[0] and not filename.startswith("@"):
-            path_filename = PurePosixPath(
-                *path_filename.parts[path_filename.parts.index("contracts") :]
-            )
-
-        # Convert "absolute" paths such as "/interfaces/IFoo.sol" into relative ones.
-        # This is needed due to the following behavior from pathlib.Path:
-        # > When several absolute paths are given, the last is taken as an anchor
-        # We need to make sure this is relative, so that Path(directory, ...) remains anchored to directory
-        if path_filename.is_absolute():
-            path_filename = PurePosixPath(*path_filename.parts[1:])
-
-        filtered_paths.append(path_filename.as_posix())
-        path_filename_disk = Path(directory, path_filename)
-
-        allowed_path = os.path.abspath(directory)
-        if os.path.commonpath((allowed_path, os.path.abspath(path_filename_disk))) != allowed_path:
-            raise OSError(
-                f"Path '{path_filename_disk}' is outside of the allowed directory: {allowed_path}"
-            )
-        if not os.path.exists(path_filename_disk.parent):
-            os.makedirs(path_filename_disk.parent)
-        with open(path_filename_disk, "w", encoding="utf8") as file_desc:
-            file_desc.write(source_code["content"])
-
-    remappings = dict_source_code.get("settings", {}).get("remappings", None)
-
-    return list(filtered_paths), directory, _sanitize_remappings(remappings, directory)
-
-
 class Etherscan(AbstractPlatform):
     """
     Etherscan platform
@@ -289,7 +154,7 @@ def compile(self, crytic_compile: "CryticCompile", **kwargs: str) -> None:
 
         Args:
             crytic_compile (CryticCompile): Associated CryticCompile object
-            **kwargs: optional arguments. Used "solc", "etherscan_only_source_code", "etherscan_only_bytecode",
+            **kwargs: optional arguments. Used "solc", "explorer_only_source_code", "explorer_only_bytecode", "explorer_export_dir",
                 "etherscan_api_key", "export_dir"
 
         Raises:
@@ -305,23 +170,23 @@ def compile(self, crytic_compile: "CryticCompile", **kwargs: str) -> None:
             prefix, addr = target.split(":", 2)
             chainid, prefix_bytecode = SUPPORTED_NETWORK_V2[prefix]
             etherscan_url = ETHERSCAN_BASE_V2 % (chainid, addr)
-            etherscan_bytecode_url = ETHERSCAN_BASE_BYTECODE % (prefix_bytecode, addr)
+            etherscan_bytecode_url = EXPLORER_BASE_BYTECODE % (prefix_bytecode, addr)
         elif target.startswith(tuple(SUPPORTED_NETWORK_V1)):
             api_key_required = 1
             prefix = SUPPORTED_NETWORK_V1[target[: target.find(":") + 1]][0]
             prefix_bytecode = SUPPORTED_NETWORK_V1[target[: target.find(":") + 1]][1]
             addr = target[target.find(":") + 1 :]
             etherscan_url = ETHERSCAN_BASE_V1 % (prefix, addr)
-            etherscan_bytecode_url = ETHERSCAN_BASE_BYTECODE % (prefix_bytecode, addr)
+            etherscan_bytecode_url = EXPLORER_BASE_BYTECODE % (prefix_bytecode, addr)
         else:
             api_key_required = 2
             etherscan_url = ETHERSCAN_BASE_V2 % ("1", target)
-            etherscan_bytecode_url = ETHERSCAN_BASE_BYTECODE % ("etherscan.io", target)
+            etherscan_bytecode_url = EXPLORER_BASE_BYTECODE % ("etherscan.io", target)
             addr = target
             prefix = None
 
-        only_source = kwargs.get("etherscan_only_source_code", False)
-        only_bytecode = kwargs.get("etherscan_only_bytecode", False)
+        only_source = kwargs.get("explorer_only_source_code", False)
+        only_bytecode = kwargs.get("explorer_only_bytecode", False)
 
         etherscan_api_key = kwargs.get("etherscan_api_key", None)
         if etherscan_api_key is None:
@@ -329,7 +194,7 @@ def compile(self, crytic_compile: "CryticCompile", **kwargs: str) -> None:
 
         export_dir = kwargs.get("export_dir", "crytic-export")
         export_dir = os.path.join(
-            export_dir, kwargs.get("etherscan_export_dir", "etherscan-contracts")
+            export_dir, kwargs.get("explorer_export_dir") or "etherscan-contracts"
         )
 
         if api_key_required == 2 and etherscan_api_key:
@@ -403,7 +268,7 @@ def compile(self, crytic_compile: "CryticCompile", **kwargs: str) -> None:
             with urllib.request.urlopen(req) as response:
                 html = response.read()
 
-            _handle_bytecode(crytic_compile, target, html)
+            handle_bytecode(crytic_compile, target, html)
             return
 
         if source_code == "":
@@ -416,9 +281,9 @@ def compile(self, crytic_compile: "CryticCompile", **kwargs: str) -> None:
         # Assert to help mypy
         assert isinstance(result["CompilerVersion"], str)
 
-        compiler_version = re.findall(
-            r"\d+\.\d+\.\d+", _convert_version(result["CompilerVersion"])
-        )[0]
+        compiler_version = re.findall(r"\d+\.\d+\.\d+", convert_version(result["CompilerVersion"]))[
+            0
+        ]
 
         # etherscan can report "default" which is not a valid EVM version
         evm_version: str | None = None
@@ -440,7 +305,7 @@ def compile(self, crytic_compile: "CryticCompile", **kwargs: str) -> None:
             # etherscan might return an object with two curly braces, {{ content }}
             dict_source_code = json.loads(source_code[1:-1])
             assert isinstance(dict_source_code, dict)
-            filenames, working_dir, remappings = _handle_multiple_files(
+            filenames, working_dir, remappings = handle_multiple_files(
                 dict_source_code, addr, prefix, contract_name, export_dir
             )
         except JSONDecodeError:
@@ -448,12 +313,12 @@ def compile(self, crytic_compile: "CryticCompile", **kwargs: str) -> None:
                 # or etherscan might return an object with single curly braces, { content }
                 dict_source_code = json.loads(source_code)
                 assert isinstance(dict_source_code, dict)
-                filenames, working_dir, remappings = _handle_multiple_files(
+                filenames, working_dir, remappings = handle_multiple_files(
                     dict_source_code, addr, prefix, contract_name, export_dir
                 )
             except JSONDecodeError:
                 filenames = [
-                    _handle_single_file(source_code, addr, prefix, contract_name, export_dir)
+                    handle_single_file(source_code, addr, prefix, contract_name, export_dir)
                 ]
 
         # viaIR is not exposed on the top level JSON offered by etherscan, so we need to inspect the settings
@@ -518,13 +383,12 @@ def is_supported(target: str, **kwargs: str) -> bool:
 
         Args:
             target (str): path to the target
-            **kwargs: optional arguments. Used "etherscan_ignore"
+            **kwargs: optional arguments. Used "explorer_ignore"
 
         Returns:
             bool: True if the target is a etherscan project
         """
-        etherscan_ignore = kwargs.get("etherscan_ignore", False)
-        if etherscan_ignore:
+        if kwargs.get("explorer_ignore", False):
             return False
         if target.startswith(tuple(SUPPORTED_NETWORK)):
             target = target[target.find(":") + 1 :]
@@ -548,58 +412,3 @@ def _guessed_tests(self) -> list[str]:
             List[str]: The guessed unit tests commands
         """
         return []
-
-
-def _convert_version(version: str) -> str:
-    """Convert the compiler version
-
-    Args:
-        version (str): original version
-
-    Returns:
-        str: converted version
-    """
-    if "+" in version:
-        return version[1 : version.find("+")]
-    return version[1:]
-
-
-def _sanitize_remappings(remappings: list[str] | None, allowed_directory: str) -> list[str] | None:
-    """Sanitize a list of remappings
-
-    Args:
-        remappings: (Optional[List[str]]): a list of remappings
-        allowed_directory: the allowed base directory for remaps
-
-    Returns:
-        Optional[List[str]]: a list of sanitized remappings
-    """
-
-    if remappings is None:
-        return remappings
-
-    allowed_path = os.path.abspath(allowed_directory)
-
-    remappings_clean: list[str] = []
-    for r in remappings:
-        split = r.split("=", 2)
-        if len(split) != 2:
-            LOGGER.warning("Invalid remapping %s", r)
-            continue
-
-        origin, dest = split[0], PurePosixPath(split[1])
-
-        # if path is absolute, relativize it
-        if dest.is_absolute():
-            dest = PurePosixPath(*dest.parts[1:])
-
-        dest_disk = Path(allowed_directory, dest)
-
-        if os.path.commonpath((allowed_path, os.path.abspath(dest_disk))) != allowed_path:
-            LOGGER.warning("Remapping %s=%s is potentially unsafe, skipping", origin, dest)
-            continue
-
-        # always use a trailing slash for the destination
-        remappings_clean.append(f"{origin}={str(dest / '_')[:-1]}")
-
-    return remappings_clean
diff --git a/crytic_compile/platform/explorer_utils.py b/crytic_compile/platform/explorer_utils.py
new file mode 100644
index 00000000..78d3a2be
--- /dev/null
+++ b/crytic_compile/platform/explorer_utils.py
@@ -0,0 +1,203 @@
+"""Shared utilities for block explorer platforms (Etherscan, Blockscout)."""
+
+import logging
+import os
+from pathlib import Path, PurePosixPath
+from typing import TYPE_CHECKING
+
+from crytic_compile.compilation_unit import CompilationUnit
+from crytic_compile.compiler.compiler import CompilerVersion
+from crytic_compile.utils.naming import Filename
+
+if TYPE_CHECKING:
+    from crytic_compile import CryticCompile
+
+LOGGER = logging.getLogger("CryticCompile")
+
+# Block explorer address page URL — used to scrape bytecode when source is unavailable.
+# Both Etherscan and Blockscout use this URL pattern.
+EXPLORER_BASE_BYTECODE = "https://%s/address/%s#code"
+
+
def convert_version(version: str) -> str:
    """Convert the compiler version string from explorer format to a bare semver.

    Explorers report versions such as "v0.8.20+commit.a1b79de6"; the solc
    tooling expects just "0.8.20".

    Args:
        version (str): original version, e.g. "v0.8.20+commit.a1b79de6"

    Returns:
        str: version without the leading "v" or the "+commit..." suffix
    """
    # Strip the commit metadata first, then the optional "v" prefix.
    # (Slicing version[1:] unconditionally, as before, corrupted versions
    # that did not start with "v".)
    bare = version.split("+", 1)[0]
    return bare.removeprefix("v")
+
+
def handle_bytecode(crytic_compile: "CryticCompile", target: str, result_b: bytes) -> None:
    """Parse the bytecode scraped from an explorer page and populate CryticCompile.

    Creates a bytecode-only compilation unit: no source files, empty ABI, and
    only the init bytecode scraped from the explorer's address page.

    Args:
        crytic_compile (CryticCompile): Associated CryticCompile object.
        target (str): path to the target.
        result_b (bytes): raw HTML containing the bytecode.
    """
    # There is no direct API to get the bytecode from block explorers.
    # The page changes from time to time; this simple parsing is not guaranteed to be robust.
    # NOTE(review): the HTML markers below were garbled in the original text (tags
    # stripped); restored from the historical Etherscan "Similar Contracts" page
    # layout — TODO confirm against a live explorer page.
    begin = """Search Algorithm">\nSimilar Contracts\n"""
    begin += """<div id='dividcode'>\n<pre class='wordwrap' style='height: 15pc; overflow: auto'>0x"""
    result = result_b.decode("utf8")
    # If the marker is absent, find() returns -1 and the extracted "bytecode" is
    # garbage; this scrape is best-effort by design.
    result = result[result.find(begin) + len(begin) :]
    bytecode = result[: result.find("<")]

    contract_name = f"Contract_{target}"
    # Placeholder filename: there is no source file for bytecode-only targets.
    contract_filename = Filename(absolute="", relative="", short="", used="")

    compilation_unit = CompilationUnit(crytic_compile, str(target))
    source_unit = compilation_unit.create_source_unit(contract_filename)

    source_unit.add_contract_name(contract_name)
    compilation_unit.filename_to_contracts[contract_filename].add(contract_name)
    source_unit.abis[contract_name] = {}
    source_unit.bytecodes_init[contract_name] = bytecode
    source_unit.bytecodes_runtime[contract_name] = ""
    source_unit.srcmaps_init[contract_name] = []
    source_unit.srcmaps_runtime[contract_name] = []

    # The compiler is unknown when only scraped bytecode is available.
    compilation_unit.compiler_version = CompilerVersion(
        compiler="unknown", version="", optimized=False
    )

    crytic_compile.bytecode_only = True
+
+
+def handle_single_file(
+    source_code: str, addr: str, prefix: str | None, contract_name: str, export_dir: str
+) -> str:
+    """Write a single-file contract to disk and return the filename.
+
+    Args:
+        source_code (str): source code.
+        addr (str): contract address.
+        prefix (Optional[str]): chain prefix, used to disambiguate filenames.
+        contract_name (str): contract name.
+        export_dir (str): directory where the file will be written.
+
+    Returns:
+        str: path to the written file.
+    """
+    if prefix:
+        filename = os.path.join(export_dir, f"{addr}{prefix}-{contract_name}.sol")
+    else:
+        filename = os.path.join(export_dir, f"{addr}-{contract_name}.sol")
+
+    with open(filename, "w", encoding="utf8") as file_desc:
+        file_desc.write(source_code)
+
+    return filename
+
+
def handle_multiple_files(
    dict_source_code: dict, addr: str, prefix: str | None, contract_name: str, export_dir: str
) -> tuple[list[str], str, list[str] | None]:
    """Write a multi-file contract to disk and return the filenames, working dir, and remappings.

    Args:
        dict_source_code (dict): parsed source object from an explorer API response.
        addr (str): contract address.
        prefix (Optional[str]): chain prefix, used to disambiguate directories.
        contract_name (str): contract name.
        export_dir (str): base directory where files will be written.

    Returns:
        Tuple[List[str], str, Optional[List[str]]]: filenames, working directory, remappings.

    Raises:
        OSError: if a source path would escape the working directory.
    """
    if prefix:
        directory = os.path.join(export_dir, f"{addr}{prefix}-{contract_name}")
    else:
        directory = os.path.join(export_dir, f"{addr}-{contract_name}")

    if "sources" in dict_source_code:
        # explorer may return {"sources": {filename: {content: ...}, ...}}
        source_codes = dict_source_code["sources"]
    else:
        # or directly {filename: {content: ...}, ...}
        source_codes = dict_source_code

    # Loop-invariant: the sandbox root every extracted file must stay under.
    allowed_path = os.path.abspath(directory)

    filtered_paths: list[str] = []
    for filename, source_code in source_codes.items():
        path_filename = PurePosixPath(filename)
        # Only Solidity/Vyper sources are compiled; skip metadata, licenses, etc.
        if path_filename.suffix not in [".sol", ".vy"]:
            continue

        # https://etherscan.io/address/0x19bb64b80cbf61e61965b0e5c2560cc7364c6546#code has an import of erc721a/contracts/ERC721A.sol
        # if the full path is lost then won't compile
        # NOTE(review): with parts[0] == "contracts", index("contracts") is 0, so
        # this rebuild is currently a no-op that preserves the full path — confirm
        # whether stripping was ever intended here.
        if "contracts" == path_filename.parts[0] and not filename.startswith("@"):
            path_filename = PurePosixPath(
                *path_filename.parts[path_filename.parts.index("contracts") :]
            )

        # Convert "absolute" paths such as "/interfaces/IFoo.sol" into relative ones.
        # This is needed due to the following behavior from pathlib.Path:
        # > When several absolute paths are given, the last is taken as an anchor
        # We need to make sure this is relative, so that Path(directory, ...) remains anchored to directory
        if path_filename.is_absolute():
            path_filename = PurePosixPath(*path_filename.parts[1:])

        filtered_paths.append(path_filename.as_posix())
        path_filename_disk = Path(directory, path_filename)

        # Defense against path traversal (e.g. "../../etc/passwd" as a source name).
        if os.path.commonpath((allowed_path, os.path.abspath(path_filename_disk))) != allowed_path:
            raise OSError(
                f"Path '{path_filename_disk}' is outside of the allowed directory: {allowed_path}"
            )
        # exist_ok avoids the race between an exists() check and makedirs().
        os.makedirs(path_filename_disk.parent, exist_ok=True)
        with open(path_filename_disk, "w", encoding="utf8") as file_desc:
            file_desc.write(source_code["content"])

    remappings = dict_source_code.get("settings", {}).get("remappings", None)

    return filtered_paths, directory, sanitize_remappings(remappings, directory)
+
+
+def sanitize_remappings(remappings: list[str] | None, allowed_directory: str) -> list[str] | None:
+    """Sanitize a list of remappings, rejecting any that escape the allowed directory.
+
+    Args:
+        remappings: (Optional[List[str]]): a list of remappings.
+        allowed_directory: the allowed base directory for remap destinations.
+
+    Returns:
+        Optional[List[str]]: a list of sanitized remappings.
+    """
+    if remappings is None:
+        return remappings
+
+    allowed_path = os.path.abspath(allowed_directory)
+
+    remappings_clean: list[str] = []
+    for r in remappings:
+        split = r.split("=", 2)
+        if len(split) != 2:
+            LOGGER.warning("Invalid remapping %s", r)
+            continue
+
+        origin, dest = split[0], PurePosixPath(split[1])
+
+        if dest.is_absolute():
+            dest = PurePosixPath(*dest.parts[1:])
+
+        dest_disk = Path(allowed_directory, dest)
+
+        if os.path.commonpath((allowed_path, os.path.abspath(dest_disk))) != allowed_path:
+            LOGGER.warning("Remapping %s=%s is potentially unsafe, skipping", origin, dest)
+            continue
+
+        # always use a trailing slash for the destination
+        remappings_clean.append(f"{origin}={str(dest / '_')[:-1]}")
+
+    return remappings_clean
diff --git a/crytic_compile/platform/sourcify.py b/crytic_compile/platform/sourcify.py
index 35ae59ae..0567042f 100644
--- a/crytic_compile/platform/sourcify.py
+++ b/crytic_compile/platform/sourcify.py
@@ -20,8 +20,8 @@
 from crytic_compile.compiler.compiler import CompilerVersion
 from crytic_compile.platform import solc_standard_json
 from crytic_compile.platform.abstract_platform import AbstractPlatform
-from crytic_compile.platform.etherscan import _sanitize_remappings
 from crytic_compile.platform.exceptions import InvalidCompilation
+from crytic_compile.platform.explorer_utils import sanitize_remappings
 from crytic_compile.platform.types import Type
 
 if TYPE_CHECKING:
@@ -223,7 +223,7 @@ def _write_config_file(working_dir: str, compiler_version: str, settings: dict[s
         solc_args.append(f"--evm-version {evm_version}")
 
     metadata_config: dict[str, Any] = {
-        "solc_remaps": _sanitize_remappings(remappings, working_dir) if remappings else {},
+        "solc_remaps": sanitize_remappings(remappings, working_dir) if remappings else {},
         "solc_solcs_select": compiler_version,
         "solc_args": " ".join(solc_args),
     }
@@ -233,6 +233,83 @@ def _write_config_file(working_dir: str, compiler_version: str, settings: dict[s
         json.dump(metadata_config, f)
 
 
def try_compile_from_sourcify(
    crytic_compile: "CryticCompile", chain_id: str, addr: str, export_dir: str, **kwargs: str
) -> bool:
    """Try to compile a contract via Sourcify, returning False if it is not verified there.

    Fetches the verified source bundle from Sourcify, writes it under
    `export_dir/sourcify-contracts`, and runs a solc standard-json compilation
    into a new compilation unit attached to `crytic_compile`.

    Args:
        crytic_compile: Associated CryticCompile object.
        chain_id: Chain ID (decimal string).
        addr: Contract address.
        export_dir: Base export directory.
        **kwargs: Passed through to CompilerVersion (e.g. "solc").

    Returns:
        bool: True if the contract was found and compiled via Sourcify; False if it is
            not verified there, has no sources, or the fetch failed for any reason.

    Raises:
        InvalidCompilation: If the compiler version reported by Sourcify cannot be parsed.
    """
    try:
        data = _fetch_sourcify_data(chain_id, addr)
    except InvalidCompilation:
        # Contract not on Sourcify, or chain not indexed — fall back to the caller's explorer.
        # NOTE(review): this swallows *every* fetch error (including transient HTTP
        # failures), not only "not verified" responses — confirm this best-effort
        # behavior is intended.
        return False

    # A verified entry with no sources cannot be compiled; treat it as "not found".
    sources = data.get("sources", {})
    if not sources:
        return False

    sourcify_export = os.path.join(export_dir, "sourcify-contracts")
    if not os.path.exists(sourcify_export):
        os.makedirs(sourcify_export)

    working_dir, filenames = _write_source_files(sources, addr, chain_id, sourcify_export)

    compilation = data.get("compilation", {})
    compiler_version_str = compilation.get("compilerVersion", "")
    # Sourcify reports versions like "0.8.20+commit.a1b79de6"; extract the bare semver.
    version_match = re.search(r"(\d+\.\d+\.\d+)", compiler_version_str)
    if not version_match:
        raise InvalidCompilation(f"Could not parse compiler version from: {compiler_version_str}")
    compiler_version = version_match.group(1)

    settings = compilation.get("compilerSettings", {})
    optimizer = settings.get("optimizer", {})
    optimization_used = optimizer.get("enabled", False)
    # Drop remappings that would escape the working directory; None when nothing survives.
    remappings = sanitize_remappings(settings.get("remappings", []), working_dir) or None

    compilation_unit = CompilationUnit(crytic_compile, compilation.get("name", "Contract"))
    compilation_unit.compiler_version = CompilerVersion(
        compiler=kwargs.get("solc", "solc"),
        version=compiler_version,
        optimized=optimization_used,
        optimize_runs=optimizer.get("runs") if optimization_used else None,
    )
    compilation_unit.compiler_version.look_for_installed_version()

    # Record implementation addresses when Sourcify identifies the target as a proxy,
    # so callers can follow up on the implementation contracts.
    proxy_resolution = data.get("proxyResolution")
    if proxy_resolution and proxy_resolution.get("isProxy"):
        for impl in proxy_resolution.get("implementations", []):
            impl_addr = impl.get("address")
            if impl_addr:
                compilation_unit.implementation_addresses.add(
                    f"sourcify-{chain_id}:{_to_checksum_address(impl_addr)}"
                )

    solc_standard_json.standalone_compile(
        filenames,
        compilation_unit,
        working_dir=working_dir,
        remappings=remappings,
        evm_version=settings.get("evmVersion"),
        via_ir=settings.get("viaIR"),
    )

    _write_config_file(working_dir, compiler_version, settings)
    return True
+
+
 class Sourcify(AbstractPlatform):
     """
     Sourcify platform - fetches verified contracts from sourcify.dev
@@ -287,7 +364,7 @@ def compile(self, crytic_compile: "CryticCompile", **kwargs: str) -> None:
         settings = compilation.get("compilerSettings", {})
         optimizer = settings.get("optimizer", {})
         optimization_used = optimizer.get("enabled", False)
-        remappings = _sanitize_remappings(settings.get("remappings", []), working_dir) or None
+        remappings = sanitize_remappings(settings.get("remappings", []), working_dir) or None
 
         # Create and configure compilation unit
         compilation_unit = CompilationUnit(crytic_compile, compilation.get("name", "Contract"))
diff --git a/crytic_compile/platform/types.py b/crytic_compile/platform/types.py
index 2d992d4a..962be0a9 100644
--- a/crytic_compile/platform/types.py
+++ b/crytic_compile/platform/types.py
@@ -25,6 +25,7 @@ class Type(IntEnum):
     HARDHAT = 11
     FOUNDRY = 12
     SOURCIFY = 13
+    BLOCKSCOUT = 14
 
     STANDARD = 100
     ARCHIVE = 101
@@ -68,6 +69,8 @@ def __str__(self) -> str:
             return "Foundry"
         if self == Type.SOURCIFY:
             return "Sourcify"
+        if self == Type.BLOCKSCOUT:
+            return "Blockscout"
         raise ValueError
 
     def priority(self) -> int:
@@ -89,4 +92,7 @@ def priority(self) -> int:
         if self in [Type.TRUFFLE, Type.WAFFLE]:
             return 300
 
+        # All explorer-based platforms (ETHERSCAN, SOURCIFY, BLOCKSCOUT) and others
+        # default to 1000. Detection order among them is determined by is_supported()
+        # prefix matching, so they don't conflict.
         return 1000
diff --git a/scripts/ci_test_blockscout.sh b/scripts/ci_test_blockscout.sh
new file mode 100755
index 00000000..412dcdc2
--- /dev/null
+++ b/scripts/ci_test_blockscout.sh
@@ -0,0 +1,23 @@
+#!/usr/bin/env bash
+set -euo pipefail
+
+### Test Blockscout integration (no API key required)
+# Target format: blockscout-<chain_id>:0x<address>
+
+TARGETS=(
+    "blockscout-747:0xd3bF53DAC106A0290B0483EcBC89d40FcC961f3e"
+    "blockscout-57073:0x4200000000000000000000000000000000000006"
+    "blockscout-1088:0xDeadDeAddeAddEAddeadDEaDDEAdDeaDDeAD0000"
+    "blockscout-98866:0x4052ACe931bbc647193D23e3442f8688A5845A18"
+    "blockscout-1514:0x1514000000000000000000000000000000000000"
+)
+
+for target in "${TARGETS[@]}"; do
+    echo "::group::Blockscout $target"
+    if ! crytic-compile "$target" --compile-remove-metadata
+    then
+        echo "Blockscout $target test failed"
+        exit 255
+    fi
+    echo "::endgroup::"
+done
diff --git a/tests/test_blockscout.py b/tests/test_blockscout.py
new file mode 100644
index 00000000..8bdf0701
--- /dev/null
+++ b/tests/test_blockscout.py
@@ -0,0 +1,26 @@
+"""Tests for Blockscout platform chain support."""
+
+import pathlib
+
+import pytest
+
+from crytic_compile import CryticCompile
+
+# One verified contract address per Blockscout chain ID.
+BLOCKSCOUT_TEST_CONTRACTS: dict[str, str] = {
+    "747": "0xd3bF53DAC106A0290B0483EcBC89d40FcC961f3e",  # Flow: WFLOW
+    "57073": "0x4200000000000000000000000000000000000006",  # Ink: WETH
+    "1088": "0xDeadDeAddeAddEAddeadDEaDDEAdDeaDDeAD0000",  # Metis: MVM_Coinbase
+    "98866": "0x4052ACe931bbc647193D23e3442f8688A5845A18",  # Plume: LendRewards
+    "1514": "0x1514000000000000000000000000000000000000",  # Story: WIP
+}
+
+
+@pytest.mark.parametrize("chain_id", sorted(BLOCKSCOUT_TEST_CONTRACTS.keys()))
+def test_blockscout_chain(chain_id: str, tmp_path: pathlib.Path) -> None:
+    """Verify that each Blockscout network can fetch and compile a known contract."""
+    addr = BLOCKSCOUT_TEST_CONTRACTS[chain_id]
+    target = f"blockscout-{chain_id}:{addr}"
+
+    cc = CryticCompile(target, export_dir=str(tmp_path))
+    assert cc.compilation_units, f"No compilation units produced for {target}"
+ +TARGETS=( + "blockscout-747:0xd3bF53DAC106A0290B0483EcBC89d40FcC961f3e" + "blockscout-57073:0x4200000000000000000000000000000000000006" + "blockscout-1088:0xDeadDeAddeAddEAddeadDEaDDEAdDeaDDeAD0000" + "blockscout-98866:0x4052ACe931bbc647193D23e3442f8688A5845A18" + "blockscout-1514:0x1514000000000000000000000000000000000000" +) + +for target in "${TARGETS[@]}"; do + echo "::group::Blockscout $target" + if ! crytic-compile "$target" --compile-remove-metadata + then + echo "Blockscout $target test failed" + exit 255 + fi + echo "::endgroup::" +done diff --git a/tests/test_blockscout.py b/tests/test_blockscout.py new file mode 100644 index 00000000..8bdf0701 --- /dev/null +++ b/tests/test_blockscout.py @@ -0,0 +1,26 @@ +"""Tests for Blockscout platform chain support.""" + +import pathlib + +import pytest + +from crytic_compile import CryticCompile + +# One verified contract address per Blockscout chain ID. +BLOCKSCOUT_TEST_CONTRACTS: dict[str, str] = { + "747": "0xd3bF53DAC106A0290B0483EcBC89d40FcC961f3e", # Flow: WFLOW + "57073": "0x4200000000000000000000000000000000000006", # Ink: WETH + "1088": "0xDeadDeAddeAddEAddeadDEaDDEAdDeaDDeAD0000", # Metis: MVM_Coinbase + "98866": "0x4052ACe931bbc647193D23e3442f8688A5845A18", # Plume: LendRewards + "1514": "0x1514000000000000000000000000000000000000", # Story: WIP +} + + +@pytest.mark.parametrize("chain_id", sorted(BLOCKSCOUT_TEST_CONTRACTS.keys())) +def test_blockscout_chain(chain_id: str, tmp_path: pathlib.Path) -> None: + """Verify that each Blockscout network can fetch and compile a known contract.""" + addr = BLOCKSCOUT_TEST_CONTRACTS[chain_id] + target = f"blockscout-{chain_id}:{addr}" + + cc = CryticCompile(target, export_dir=str(tmp_path)) + assert cc.compilation_units, f"No compilation units produced for {target}"