From f92e8706f1877395e59dc2397b70f6d2087323cc Mon Sep 17 00:00:00 2001 From: thevolcanomanishere Date: Fri, 6 Mar 2026 13:44:46 +0000 Subject: [PATCH 1/7] feat: add Blockscout platform and shared explorer utilities - Add new `Blockscout` platform (`platform/blockscout.py`) that fetches verified contracts from Blockscout-based explorers using the same `chain:0xAddress` target syntax as Etherscan. Supports Flow, Ink, Metis, Plume, and Story chains out of the box. - Add `_normalize_blockscout_result` to translate Blockscout's API field conventions (e.g. `"true"`/`"false"` OptimizationUsed, split AdditionalSources) into Etherscan-compatible format so the existing compilation pipeline works unchanged. - Extract shared block-explorer helpers into `platform/explorer_utils.py` (`handle_bytecode`, `handle_single_file`, `handle_multiple_files`, `sanitize_remappings`, `convert_version`, `EXPLORER_BASE_BYTECODE`). Etherscan and Sourcify now import from this module instead of having private copies. `ETHERSCAN_BASE_BYTECODE` kept as a backwards-compat alias. - Add `Type.BLOCKSCOUT = 14` to the platform type enum. - Add Monad mainnet (`"monad"`, chain ID 143) to SUPPORTED_NETWORK_V2. - Add CI integration tests for five Blockscout chains in `scripts/ci_test_etherscan.sh` (no API key required). 
Co-Authored-By: Claude Sonnet 4.6 --- crytic_compile/platform/all_platforms.py | 2 + crytic_compile/platform/blockscout.py | 306 ++++++++++++++++++++++ crytic_compile/platform/etherscan.py | 221 ++-------------- crytic_compile/platform/explorer_utils.py | 203 ++++++++++++++ crytic_compile/platform/sourcify.py | 6 +- crytic_compile/platform/types.py | 6 + scripts/ci_test_etherscan.sh | 20 ++ tests/test_blockscout.py | 29 ++ 8 files changed, 586 insertions(+), 207 deletions(-) create mode 100644 crytic_compile/platform/blockscout.py create mode 100644 crytic_compile/platform/explorer_utils.py create mode 100644 tests/test_blockscout.py diff --git a/crytic_compile/platform/all_platforms.py b/crytic_compile/platform/all_platforms.py index 5236c2a0..b234a54c 100644 --- a/crytic_compile/platform/all_platforms.py +++ b/crytic_compile/platform/all_platforms.py @@ -6,6 +6,7 @@ # crytic_compile.py uses dir(all_platforms) to find these classes __all__ = [ "Archive", + "Blockscout", "Brownie", "Buidler", "Dapp", @@ -24,6 +25,7 @@ ] from .archive import Archive +from .blockscout import Blockscout from .brownie import Brownie from .buidler import Buidler from .dapp import Dapp diff --git a/crytic_compile/platform/blockscout.py b/crytic_compile/platform/blockscout.py new file mode 100644 index 00000000..03c8168e --- /dev/null +++ b/crytic_compile/platform/blockscout.py @@ -0,0 +1,306 @@ +""" +Blockscout platform — fetches verified contracts from Blockscout-based explorers. 
+""" + +import json +import logging +import os +import re +import urllib.request +from json.decoder import JSONDecodeError +from typing import TYPE_CHECKING, Any + +from crytic_compile.compilation_unit import CompilationUnit +from crytic_compile.compiler.compiler import CompilerVersion +from crytic_compile.platform import solc_standard_json +from crytic_compile.platform.abstract_platform import AbstractPlatform +from crytic_compile.platform.exceptions import InvalidCompilation +from crytic_compile.platform.explorer_utils import ( + EXPLORER_BASE_BYTECODE, + convert_version, + handle_bytecode, + handle_multiple_files, + handle_single_file, +) +from crytic_compile.platform.types import Type + +if TYPE_CHECKING: + from crytic_compile import CryticCompile + +LOGGER = logging.getLogger("CryticCompile") + +# Blockscout API endpoint — host is the full hostname (no api. subdomain) +BLOCKSCOUT_BASE = "https://%s/api?module=contract&action=getsourcecode&address=%s" + +# Key -> (api_host, bytecode_host) +SUPPORTED_NETWORK_BLOCKSCOUT: dict[str, tuple[str, str]] = { + "flow": ("evm.flowscan.io", "evm.flowscan.io"), + "ink": ("explorer.inkonchain.com", "explorer.inkonchain.com"), + "metis": ("andromeda-explorer.metis.io", "andromeda-explorer.metis.io"), + "plume": ("explorer.plume.org", "explorer.plume.org"), + "story": ("www.storyscan.xyz", "www.storyscan.xyz"), +} + + +def _normalize_blockscout_result(result: dict[str, Any]) -> dict[str, Any]: + """Normalize a Blockscout API result to Etherscan field conventions. + + Blockscout differs from Etherscan in field names and value formats. + This converts them so the compilation pipeline can work unchanged. + + Args: + result: Raw result dict from a Blockscout getsourcecode response. + + Returns: + dict: Normalized result with Etherscan-compatible field names and values. 
+ """ + normalized = dict(result) + + # OptimizationUsed: "true"/"false" -> "1"/"0" + if "OptimizationUsed" in normalized: + normalized["OptimizationUsed"] = "1" if normalized["OptimizationUsed"] == "true" else "0" + + # OptimizationRuns (int) -> Runs (str) + if "OptimizationRuns" in normalized and "Runs" not in normalized: + normalized["Runs"] = str(normalized["OptimizationRuns"]) + + # IsProxy -> Proxy ("1"/"0") + Implementation + if "IsProxy" in normalized: + normalized["Proxy"] = "1" if normalized["IsProxy"] == "true" else "0" + if normalized["Proxy"] == "1": + normalized["Implementation"] = normalized.get("ImplementationAddress", "") + + # Reconstruct SourceCode as a multi-file JSON blob from FileName + AdditionalSources. + # Blockscout stores the main file in SourceCode with extras in AdditionalSources, + # while Etherscan encodes everything as {"sources": {filename: {content: ...}}} in SourceCode. + additional = normalized.get("AdditionalSources", []) + main_filename = normalized.get("FileName", "") + if additional or main_filename: + sources: dict[str, dict[str, str]] = {} + if main_filename and normalized.get("SourceCode"): + sources[main_filename] = {"content": normalized["SourceCode"]} + for src in additional: + # Blockscout uses "Filename" (lowercase n) in AdditionalSources entries + src_filename = src.get("Filename") or src.get("FileName", "") + src_code = src.get("SourceCode", "") + if src_filename and src_code: + sources[src_filename] = {"content": src_code} + settings = normalized.get("CompilerSettings", {}) + payload: dict[str, Any] = {"sources": sources} + if settings: + payload["settings"] = settings + normalized["SourceCode"] = json.dumps(payload) + + return normalized + + +class Blockscout(AbstractPlatform): + """ + Blockscout platform — fetches verified contracts from Blockscout-based explorers. 
+ """ + + NAME = "Blockscout" + PROJECT_URL = "https://www.blockscout.com/" + TYPE = Type.BLOCKSCOUT + + def compile(self, crytic_compile: "CryticCompile", **kwargs: str) -> None: + """Run the compilation. + + Args: + crytic_compile: Associated CryticCompile object. + **kwargs: optional arguments. Used "solc", "etherscan_only_source_code", + "etherscan_only_bytecode", "export_dir". + + Raises: + InvalidCompilation: if the explorer returned an error or results could not be parsed. + """ + target = self._target + prefix, addr = target.split(":", 1) + api_host, bytecode_host = SUPPORTED_NETWORK_BLOCKSCOUT[prefix] + + source_url = BLOCKSCOUT_BASE % (api_host, addr) + bytecode_url = EXPLORER_BASE_BYTECODE % (bytecode_host, addr) + + only_source = kwargs.get("etherscan_only_source_code", False) + only_bytecode = kwargs.get("etherscan_only_bytecode", False) + + export_dir = kwargs.get("export_dir", "crytic-export") + export_dir = os.path.join( + export_dir, kwargs.get("etherscan_export_dir", "etherscan-contracts") + ) + + source_code: str = "" + result: dict[str, Any] = {} + contract_name: str = "" + + if not only_bytecode: + req = urllib.request.Request( + source_url, + headers={ + "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/98.0.4758.80 Safari/537.36 crytic-compile/0" + }, + ) + with urllib.request.urlopen(req) as response: + html = response.read() + + info = json.loads(html) + + if "message" not in info: + LOGGER.error("Incorrect Blockscout request") + raise InvalidCompilation("Incorrect Blockscout request " + source_url) + + if not info["message"].startswith("OK"): + LOGGER.error("Contract has no public source code") + raise InvalidCompilation("Contract has no public source code: " + source_url) + + if "result" not in info: + LOGGER.error("Contract has no public source code") + raise InvalidCompilation("Contract has no public source code: " + source_url) + + result = 
_normalize_blockscout_result(info["result"][0]) + + if "ABI" in result and "Contract source code not verified" in str(result["ABI"]): + LOGGER.error("Contract has no public source code") + raise InvalidCompilation("Contract has no public source code: " + source_url) + + # Assert to help mypy + assert isinstance(result["SourceCode"], str) + assert isinstance(result["ContractName"], str) + source_code = result["SourceCode"] + contract_name = result["ContractName"] + + if source_code == "" and not only_source: + LOGGER.info("Source code not available, try to fetch the bytecode only") + req = urllib.request.Request(bytecode_url, headers={"User-Agent": "Mozilla/5.0"}) + with urllib.request.urlopen(req) as response: + html = response.read() + handle_bytecode(crytic_compile, target, html) + return + + if source_code == "": + LOGGER.error("Contract has no public source code") + raise InvalidCompilation("Contract has no public source code: " + source_url) + + if not os.path.exists(export_dir): + os.makedirs(export_dir) + + # Assert to help mypy + assert isinstance(result["CompilerVersion"], str) + compiler_version = re.findall(r"\d+\.\d+\.\d+", convert_version(result["CompilerVersion"]))[ + 0 + ] + + evm_version: str | None = None + if "EVMVersion" in result: + assert isinstance(result["EVMVersion"], str) + evm_version = ( + result["EVMVersion"] + if result["EVMVersion"].lower() not in ("default", "") + else None + ) + + optimization_used: bool = result["OptimizationUsed"] == "1" + optimize_runs = None + if optimization_used: + optimize_runs = int(result["Runs"]) + + working_dir: str | None = None + remappings: list[str] | None = None + dict_source_code: dict | None = None + + try: + # Etherscan wraps multi-file source in double braces: {{ content }} + dict_source_code = json.loads(source_code[1:-1]) + assert isinstance(dict_source_code, dict) + filenames, working_dir, remappings = handle_multiple_files( + dict_source_code, addr, prefix, contract_name, export_dir + ) + 
except JSONDecodeError: + try: + # _normalize_blockscout_result produces a single-brace JSON: { content } + dict_source_code = json.loads(source_code) + assert isinstance(dict_source_code, dict) + filenames, working_dir, remappings = handle_multiple_files( + dict_source_code, addr, prefix, contract_name, export_dir + ) + except JSONDecodeError: + filenames = [ + handle_single_file(source_code, addr, prefix, contract_name, export_dir) + ] + + via_ir_enabled: bool | None = None + if isinstance(dict_source_code, dict): + via_ir_enabled = dict_source_code.get("settings", {}).get("viaIR", None) + + compilation_unit = CompilationUnit(crytic_compile, contract_name) + compilation_unit.compiler_version = CompilerVersion( + compiler=kwargs.get("solc", "solc"), + version=compiler_version, + optimized=optimization_used, + optimize_runs=optimize_runs, + ) + compilation_unit.compiler_version.look_for_installed_version() + + if result.get("Proxy") == "1" and result.get("Implementation"): + implementation = f"{prefix}:{result['Implementation']}" + compilation_unit.implementation_addresses.add(implementation) + + solc_standard_json.standalone_compile( + filenames, + compilation_unit, + working_dir=working_dir, + remappings=remappings, + evm_version=evm_version, + via_ir=via_ir_enabled, + ) + + metadata_config = { + "solc_remaps": remappings if remappings else {}, + "solc_solcs_select": compiler_version, + "solc_args": " ".join( + filter( + None, + [ + "--via-ir" if via_ir_enabled else "", + "--optimize --optimize-runs " + str(optimize_runs) if optimize_runs else "", + "--evm-version " + evm_version if evm_version else "", + ], + ) + ), + } + + with open( + os.path.join(working_dir if working_dir else export_dir, "crytic_compile.config.json"), + "w", + encoding="utf-8", + ) as f: + json.dump(metadata_config, f) + + def clean(self, **_kwargs: str) -> None: + pass + + @staticmethod + def is_supported(target: str, **kwargs: str) -> bool: + """Check if the target is a Blockscout-hosted 
contract. + + Args: + target: path/target string. + **kwargs: optional arguments. Used "etherscan_ignore". + + Returns: + bool: True if the target uses a known Blockscout network prefix. + """ + # Blockscout respects the same ignore flag as Etherscan so that a single + # flag suppresses all block explorer platforms. + if kwargs.get("etherscan_ignore", False): + return False + if not target.startswith(tuple(SUPPORTED_NETWORK_BLOCKSCOUT)): + return False + addr = target[target.find(":") + 1 :] + return bool(re.match(r"^\s*0x[a-zA-Z0-9]{40}\s*$", addr)) + + def is_dependency(self, path: str) -> bool: + return False + + def _guessed_tests(self) -> list[str]: + return [] diff --git a/crytic_compile/platform/etherscan.py b/crytic_compile/platform/etherscan.py index b32d0f72..fb8fc815 100644 --- a/crytic_compile/platform/etherscan.py +++ b/crytic_compile/platform/etherscan.py @@ -8,7 +8,6 @@ import re import urllib.request from json.decoder import JSONDecodeError -from pathlib import Path, PurePosixPath from typing import TYPE_CHECKING from crytic_compile.compilation_unit import CompilationUnit @@ -16,10 +15,14 @@ from crytic_compile.platform import solc_standard_json from crytic_compile.platform.abstract_platform import AbstractPlatform from crytic_compile.platform.exceptions import InvalidCompilation +from crytic_compile.platform.explorer_utils import ( + EXPLORER_BASE_BYTECODE, + convert_version, + handle_bytecode, + handle_multiple_files, + handle_single_file, +) from crytic_compile.platform.types import Type -from crytic_compile.utils.naming import Filename - -# Cycle dependency if TYPE_CHECKING: from crytic_compile import CryticCompile @@ -35,8 +38,8 @@ "https://api.etherscan.io/v2/api?chainid=%s&module=contract&action=getsourcecode&address=%s" ) -# Bytecode URL style (for scraping) -ETHERSCAN_BASE_BYTECODE = "https://%s/address/%s#code" +# Alias kept for backwards compatibility with importers +ETHERSCAN_BASE_BYTECODE = EXPLORER_BASE_BYTECODE # v1 style scanners 
SUPPORTED_NETWORK_V1: dict[str, tuple[str, str]] = { @@ -106,6 +109,7 @@ "testnet.berachain": ("80069", "testnet.berascan.com"), "swellchain": ("1923", "swellchainscan.io"), "testnet.swellchain": ("1924", "sepolia.swellchainscan.io"), + "monad": ("143", "monadscan.com"), "testnet.monad": ("10143", "testnet.monadscan.com"), "hyperevm": ("999", "hyperevmscan.io"), "katana": ("747474", "katanascan.com"), @@ -139,142 +143,6 @@ def generate_supported_network_v2_list() -> None: print(results) -def _handle_bytecode(crytic_compile: "CryticCompile", target: str, result_b: bytes) -> None: - """Parse the bytecode and populate CryticCompile info - - Args: - crytic_compile (CryticCompile): Associate CryticCompile object - target (str): path to the target - result_b (bytes): text containing the bytecode - """ - - # There is no direct API to get the bytecode from etherscan - # The page changes from time to time, we use for now a simple parsing, it will not be robust - begin = """Search Algorithm">\nSimilar Contracts\n""" - begin += """
\n
0x"""
-    result = result_b.decode("utf8")
-    # Removing everything before the begin string
-    result = result[result.find(begin) + len(begin) :]
-    bytecode = result[: result.find("<")]
-
-    contract_name = f"Contract_{target}"
-
-    contract_filename = Filename(absolute="", relative="", short="", used="")
-
-    compilation_unit = CompilationUnit(crytic_compile, str(target))
-
-    source_unit = compilation_unit.create_source_unit(contract_filename)
-
-    source_unit.add_contract_name(contract_name)
-    compilation_unit.filename_to_contracts[contract_filename].add(contract_name)
-    source_unit.abis[contract_name] = {}
-    source_unit.bytecodes_init[contract_name] = bytecode
-    source_unit.bytecodes_runtime[contract_name] = ""
-    source_unit.srcmaps_init[contract_name] = []
-    source_unit.srcmaps_runtime[contract_name] = []
-
-    compilation_unit.compiler_version = CompilerVersion(
-        compiler="unknown", version="", optimized=False
-    )
-
-    crytic_compile.bytecode_only = True
-
-
-def _handle_single_file(
-    source_code: str, addr: str, prefix: str | None, contract_name: str, export_dir: str
-) -> str:
-    """Handle a result with a single file
-
-    Args:
-        source_code (str): source code
-        addr (str): contract address
-        prefix (Optional[str]): used to separate different chains
-        contract_name (str): contract name
-        export_dir (str): directory where the code will be saved
-
-    Returns:
-        str: filename containing the source code
-    """
-    if prefix:
-        filename = os.path.join(export_dir, f"{addr}{prefix}-{contract_name}.sol")
-    else:
-        filename = os.path.join(export_dir, f"{addr}-{contract_name}.sol")
-
-    with open(filename, "w", encoding="utf8") as file_desc:
-        file_desc.write(source_code)
-
-    return filename
-
-
-def _handle_multiple_files(
-    dict_source_code: dict, addr: str, prefix: str | None, contract_name: str, export_dir: str
-) -> tuple[list[str], str, list[str] | None]:
-    """Handle a result with a multiple files. Generate multiple Solidity files
-
-    Args:
-        dict_source_code (Dict): dict result from etherscan
-        addr (str): contract address
-        prefix (Optional[str]): used to separate different chains
-        contract_name (str): contract name
-        export_dir (str): directory where the code will be saved
-
-    Returns:
-        Tuple[List[str], str]: filesnames, directory, where target_filename is the main file
-
-    Raises:
-        IOError: if the path is outside of the allowed directory
-    """
-    if prefix:
-        directory = os.path.join(export_dir, f"{addr}{prefix}-{contract_name}")
-    else:
-        directory = os.path.join(export_dir, f"{addr}-{contract_name}")
-
-    if "sources" in dict_source_code:
-        # etherscan might return an object with a sources prop, which contains an object with contract names as keys
-        source_codes = dict_source_code["sources"]
-    else:
-        # or etherscan might return an object with contract names as keys
-        source_codes = dict_source_code
-
-    filtered_paths: list[str] = []
-    for filename, source_code in source_codes.items():
-        path_filename = PurePosixPath(filename)
-        # Only keep solidity files
-        if path_filename.suffix not in [".sol", ".vy"]:
-            continue
-
-        # https://etherscan.io/address/0x19bb64b80cbf61e61965b0e5c2560cc7364c6546#code has an import of erc721a/contracts/ERC721A.sol
-        # if the full path is lost then won't compile
-        if "contracts" == path_filename.parts[0] and not filename.startswith("@"):
-            path_filename = PurePosixPath(
-                *path_filename.parts[path_filename.parts.index("contracts") :]
-            )
-
-        # Convert "absolute" paths such as "/interfaces/IFoo.sol" into relative ones.
-        # This is needed due to the following behavior from pathlib.Path:
-        # > When several absolute paths are given, the last is taken as an anchor
-        # We need to make sure this is relative, so that Path(directory, ...) remains anchored to directory
-        if path_filename.is_absolute():
-            path_filename = PurePosixPath(*path_filename.parts[1:])
-
-        filtered_paths.append(path_filename.as_posix())
-        path_filename_disk = Path(directory, path_filename)
-
-        allowed_path = os.path.abspath(directory)
-        if os.path.commonpath((allowed_path, os.path.abspath(path_filename_disk))) != allowed_path:
-            raise OSError(
-                f"Path '{path_filename_disk}' is outside of the allowed directory: {allowed_path}"
-            )
-        if not os.path.exists(path_filename_disk.parent):
-            os.makedirs(path_filename_disk.parent)
-        with open(path_filename_disk, "w", encoding="utf8") as file_desc:
-            file_desc.write(source_code["content"])
-
-    remappings = dict_source_code.get("settings", {}).get("remappings", None)
-
-    return list(filtered_paths), directory, _sanitize_remappings(remappings, directory)
-
-
 class Etherscan(AbstractPlatform):
     """
     Etherscan platform
@@ -403,7 +271,7 @@ def compile(self, crytic_compile: "CryticCompile", **kwargs: str) -> None:
             with urllib.request.urlopen(req) as response:
                 html = response.read()
 
-            _handle_bytecode(crytic_compile, target, html)
+            handle_bytecode(crytic_compile, target, html)
             return
 
         if source_code == "":
@@ -416,9 +284,9 @@ def compile(self, crytic_compile: "CryticCompile", **kwargs: str) -> None:
         # Assert to help mypy
         assert isinstance(result["CompilerVersion"], str)
 
-        compiler_version = re.findall(
-            r"\d+\.\d+\.\d+", _convert_version(result["CompilerVersion"])
-        )[0]
+        compiler_version = re.findall(r"\d+\.\d+\.\d+", convert_version(result["CompilerVersion"]))[
+            0
+        ]
 
         # etherscan can report "default" which is not a valid EVM version
         evm_version: str | None = None
@@ -440,7 +308,7 @@ def compile(self, crytic_compile: "CryticCompile", **kwargs: str) -> None:
             # etherscan might return an object with two curly braces, {{ content }}
             dict_source_code = json.loads(source_code[1:-1])
             assert isinstance(dict_source_code, dict)
-            filenames, working_dir, remappings = _handle_multiple_files(
+            filenames, working_dir, remappings = handle_multiple_files(
                 dict_source_code, addr, prefix, contract_name, export_dir
             )
         except JSONDecodeError:
@@ -448,12 +316,12 @@ def compile(self, crytic_compile: "CryticCompile", **kwargs: str) -> None:
                 # or etherscan might return an object with single curly braces, { content }
                 dict_source_code = json.loads(source_code)
                 assert isinstance(dict_source_code, dict)
-                filenames, working_dir, remappings = _handle_multiple_files(
+                filenames, working_dir, remappings = handle_multiple_files(
                     dict_source_code, addr, prefix, contract_name, export_dir
                 )
             except JSONDecodeError:
                 filenames = [
-                    _handle_single_file(source_code, addr, prefix, contract_name, export_dir)
+                    handle_single_file(source_code, addr, prefix, contract_name, export_dir)
                 ]
 
         # viaIR is not exposed on the top level JSON offered by etherscan, so we need to inspect the settings
@@ -548,58 +416,3 @@ def _guessed_tests(self) -> list[str]:
             List[str]: The guessed unit tests commands
         """
         return []
-
-
-def _convert_version(version: str) -> str:
-    """Convert the compiler version
-
-    Args:
-        version (str): original version
-
-    Returns:
-        str: converted version
-    """
-    if "+" in version:
-        return version[1 : version.find("+")]
-    return version[1:]
-
-
-def _sanitize_remappings(remappings: list[str] | None, allowed_directory: str) -> list[str] | None:
-    """Sanitize a list of remappings
-
-    Args:
-        remappings: (Optional[List[str]]): a list of remappings
-        allowed_directory: the allowed base directory for remaps
-
-    Returns:
-        Optional[List[str]]: a list of sanitized remappings
-    """
-
-    if remappings is None:
-        return remappings
-
-    allowed_path = os.path.abspath(allowed_directory)
-
-    remappings_clean: list[str] = []
-    for r in remappings:
-        split = r.split("=", 2)
-        if len(split) != 2:
-            LOGGER.warning("Invalid remapping %s", r)
-            continue
-
-        origin, dest = split[0], PurePosixPath(split[1])
-
-        # if path is absolute, relativize it
-        if dest.is_absolute():
-            dest = PurePosixPath(*dest.parts[1:])
-
-        dest_disk = Path(allowed_directory, dest)
-
-        if os.path.commonpath((allowed_path, os.path.abspath(dest_disk))) != allowed_path:
-            LOGGER.warning("Remapping %s=%s is potentially unsafe, skipping", origin, dest)
-            continue
-
-        # always use a trailing slash for the destination
-        remappings_clean.append(f"{origin}={str(dest / '_')[:-1]}")
-
-    return remappings_clean
diff --git a/crytic_compile/platform/explorer_utils.py b/crytic_compile/platform/explorer_utils.py
new file mode 100644
index 00000000..78d3a2be
--- /dev/null
+++ b/crytic_compile/platform/explorer_utils.py
@@ -0,0 +1,203 @@
+"""Shared utilities for block explorer platforms (Etherscan, Blockscout)."""
+
+import logging
+import os
+from pathlib import Path, PurePosixPath
+from typing import TYPE_CHECKING
+
+from crytic_compile.compilation_unit import CompilationUnit
+from crytic_compile.compiler.compiler import CompilerVersion
+from crytic_compile.utils.naming import Filename
+
+if TYPE_CHECKING:
+    from crytic_compile import CryticCompile
+
+LOGGER = logging.getLogger("CryticCompile")
+
+# Block explorer address page URL — used to scrape bytecode when source is unavailable.
+# Both Etherscan and Blockscout use this URL pattern.
+EXPLORER_BASE_BYTECODE = "https://%s/address/%s#code"
+
+
+def convert_version(version: str) -> str:
+    """Convert the compiler version string from explorer format to a bare semver.
+
+    Args:
+        version (str): original version, e.g. "v0.8.20+commit.a1b79de6"
+
+    Returns:
+        str: version without leading "v" or "+commit..." suffix
+    """
+    if "+" in version:
+        return version[1 : version.find("+")]
+    return version[1:]
+
+
+def handle_bytecode(crytic_compile: "CryticCompile", target: str, result_b: bytes) -> None:
+    """Parse the bytecode scraped from an explorer page and populate CryticCompile.
+
+    Args:
+        crytic_compile (CryticCompile): Associated CryticCompile object.
+        target (str): path to the target.
+        result_b (bytes): raw HTML containing the bytecode.
+    """
+    # There is no direct API to get the bytecode from block explorers.
+    # The page changes from time to time; this simple parsing is not guaranteed to be robust.
+    begin = """Search Algorithm">\nSimilar Contracts\n"""
+    begin += """
\n
0x"""
+    result = result_b.decode("utf8")
+    result = result[result.find(begin) + len(begin) :]
+    bytecode = result[: result.find("<")]
+
+    contract_name = f"Contract_{target}"
+    contract_filename = Filename(absolute="", relative="", short="", used="")
+
+    compilation_unit = CompilationUnit(crytic_compile, str(target))
+    source_unit = compilation_unit.create_source_unit(contract_filename)
+
+    source_unit.add_contract_name(contract_name)
+    compilation_unit.filename_to_contracts[contract_filename].add(contract_name)
+    source_unit.abis[contract_name] = {}
+    source_unit.bytecodes_init[contract_name] = bytecode
+    source_unit.bytecodes_runtime[contract_name] = ""
+    source_unit.srcmaps_init[contract_name] = []
+    source_unit.srcmaps_runtime[contract_name] = []
+
+    compilation_unit.compiler_version = CompilerVersion(
+        compiler="unknown", version="", optimized=False
+    )
+
+    crytic_compile.bytecode_only = True
+
+
+def handle_single_file(
+    source_code: str, addr: str, prefix: str | None, contract_name: str, export_dir: str
+) -> str:
+    """Write a single-file contract to disk and return the filename.
+
+    Args:
+        source_code (str): source code.
+        addr (str): contract address.
+        prefix (Optional[str]): chain prefix, used to disambiguate filenames.
+        contract_name (str): contract name.
+        export_dir (str): directory where the file will be written.
+
+    Returns:
+        str: path to the written file.
+    """
+    if prefix:
+        filename = os.path.join(export_dir, f"{addr}{prefix}-{contract_name}.sol")
+    else:
+        filename = os.path.join(export_dir, f"{addr}-{contract_name}.sol")
+
+    with open(filename, "w", encoding="utf8") as file_desc:
+        file_desc.write(source_code)
+
+    return filename
+
+
+def handle_multiple_files(
+    dict_source_code: dict, addr: str, prefix: str | None, contract_name: str, export_dir: str
+) -> tuple[list[str], str, list[str] | None]:
+    """Write a multi-file contract to disk and return the filenames, working dir, and remappings.
+
+    Args:
+        dict_source_code (dict): parsed source object from an explorer API response.
+        addr (str): contract address.
+        prefix (Optional[str]): chain prefix, used to disambiguate directories.
+        contract_name (str): contract name.
+        export_dir (str): base directory where files will be written.
+
+    Returns:
+        Tuple[List[str], str, Optional[List[str]]]: filenames, working directory, remappings.
+
+    Raises:
+        OSError: if a source path would escape the working directory.
+    """
+    if prefix:
+        directory = os.path.join(export_dir, f"{addr}{prefix}-{contract_name}")
+    else:
+        directory = os.path.join(export_dir, f"{addr}-{contract_name}")
+
+    if "sources" in dict_source_code:
+        # explorer may return {"sources": {filename: {content: ...}, ...}}
+        source_codes = dict_source_code["sources"]
+    else:
+        # or directly {filename: {content: ...}, ...}
+        source_codes = dict_source_code
+
+    filtered_paths: list[str] = []
+    for filename, source_code in source_codes.items():
+        path_filename = PurePosixPath(filename)
+        if path_filename.suffix not in [".sol", ".vy"]:
+            continue
+
+        # https://etherscan.io/address/0x19bb64b80cbf61e61965b0e5c2560cc7364c6546#code has an import of erc721a/contracts/ERC721A.sol
+        # if the full path is lost then won't compile
+        if "contracts" == path_filename.parts[0] and not filename.startswith("@"):
+            path_filename = PurePosixPath(
+                *path_filename.parts[path_filename.parts.index("contracts") :]
+            )
+
+        # Convert "absolute" paths such as "/interfaces/IFoo.sol" into relative ones.
+        # This is needed due to the following behavior from pathlib.Path:
+        # > When several absolute paths are given, the last is taken as an anchor
+        # We need to make sure this is relative, so that Path(directory, ...) remains anchored to directory
+        if path_filename.is_absolute():
+            path_filename = PurePosixPath(*path_filename.parts[1:])
+
+        filtered_paths.append(path_filename.as_posix())
+        path_filename_disk = Path(directory, path_filename)
+
+        allowed_path = os.path.abspath(directory)
+        if os.path.commonpath((allowed_path, os.path.abspath(path_filename_disk))) != allowed_path:
+            raise OSError(
+                f"Path '{path_filename_disk}' is outside of the allowed directory: {allowed_path}"
+            )
+        if not os.path.exists(path_filename_disk.parent):
+            os.makedirs(path_filename_disk.parent)
+        with open(path_filename_disk, "w", encoding="utf8") as file_desc:
+            file_desc.write(source_code["content"])
+
+    remappings = dict_source_code.get("settings", {}).get("remappings", None)
+
+    return list(filtered_paths), directory, sanitize_remappings(remappings, directory)
+
+
+def sanitize_remappings(remappings: list[str] | None, allowed_directory: str) -> list[str] | None:
+    """Sanitize a list of remappings, rejecting any that escape the allowed directory.
+
+    Args:
+        remappings (Optional[List[str]]): a list of remappings.
+        allowed_directory: the allowed base directory for remap destinations.
+
+    Returns:
+        Optional[List[str]]: a list of sanitized remappings.
+    """
+    if remappings is None:
+        return remappings
+
+    allowed_path = os.path.abspath(allowed_directory)
+
+    remappings_clean: list[str] = []
+    for r in remappings:
+        split = r.split("=", 2)
+        if len(split) != 2:
+            LOGGER.warning("Invalid remapping %s", r)
+            continue
+
+        origin, dest = split[0], PurePosixPath(split[1])
+
+        if dest.is_absolute():
+            dest = PurePosixPath(*dest.parts[1:])
+
+        dest_disk = Path(allowed_directory, dest)
+
+        if os.path.commonpath((allowed_path, os.path.abspath(dest_disk))) != allowed_path:
+            LOGGER.warning("Remapping %s=%s is potentially unsafe, skipping", origin, dest)
+            continue
+
+        # always use a trailing slash for the destination
+        remappings_clean.append(f"{origin}={str(dest / '_')[:-1]}")
+
+    return remappings_clean
diff --git a/crytic_compile/platform/sourcify.py b/crytic_compile/platform/sourcify.py
index 35ae59ae..31f64767 100644
--- a/crytic_compile/platform/sourcify.py
+++ b/crytic_compile/platform/sourcify.py
@@ -20,8 +20,8 @@
 from crytic_compile.compiler.compiler import CompilerVersion
 from crytic_compile.platform import solc_standard_json
 from crytic_compile.platform.abstract_platform import AbstractPlatform
-from crytic_compile.platform.etherscan import _sanitize_remappings
 from crytic_compile.platform.exceptions import InvalidCompilation
+from crytic_compile.platform.explorer_utils import sanitize_remappings
 from crytic_compile.platform.types import Type
 
 if TYPE_CHECKING:
@@ -223,7 +223,7 @@ def _write_config_file(working_dir: str, compiler_version: str, settings: dict[s
         solc_args.append(f"--evm-version {evm_version}")
 
     metadata_config: dict[str, Any] = {
-        "solc_remaps": _sanitize_remappings(remappings, working_dir) if remappings else {},
+        "solc_remaps": sanitize_remappings(remappings, working_dir) if remappings else {},
         "solc_solcs_select": compiler_version,
         "solc_args": " ".join(solc_args),
     }
@@ -287,7 +287,7 @@ def compile(self, crytic_compile: "CryticCompile", **kwargs: str) -> None:
         settings = compilation.get("compilerSettings", {})
         optimizer = settings.get("optimizer", {})
         optimization_used = optimizer.get("enabled", False)
-        remappings = _sanitize_remappings(settings.get("remappings", []), working_dir) or None
+        remappings = sanitize_remappings(settings.get("remappings", []), working_dir) or None
 
         # Create and configure compilation unit
         compilation_unit = CompilationUnit(crytic_compile, compilation.get("name", "Contract"))
diff --git a/crytic_compile/platform/types.py b/crytic_compile/platform/types.py
index 2d992d4a..962be0a9 100644
--- a/crytic_compile/platform/types.py
+++ b/crytic_compile/platform/types.py
@@ -25,6 +25,7 @@ class Type(IntEnum):
     HARDHAT = 11
     FOUNDRY = 12
     SOURCIFY = 13
+    BLOCKSCOUT = 14
 
     STANDARD = 100
     ARCHIVE = 101
@@ -68,6 +69,8 @@ def __str__(self) -> str:
             return "Foundry"
         if self == Type.SOURCIFY:
             return "Sourcify"
+        if self == Type.BLOCKSCOUT:
+            return "Blockscout"
         raise ValueError
 
     def priority(self) -> int:
@@ -89,4 +92,7 @@ def priority(self) -> int:
         if self in [Type.TRUFFLE, Type.WAFFLE]:
             return 300
 
+        # All explorer-based platforms (ETHERSCAN, SOURCIFY, BLOCKSCOUT) and others
+        # default to 1000. Detection order among them is determined by is_supported()
+        # prefix matching, so they don't conflict.
         return 1000
diff --git a/scripts/ci_test_etherscan.sh b/scripts/ci_test_etherscan.sh
index 70c351eb..7477033d 100755
--- a/scripts/ci_test_etherscan.sh
+++ b/scripts/ci_test_etherscan.sh
@@ -110,3 +110,23 @@ then
     exit 255
 fi
 echo "::endgroup::"
+
# Blockscout chains — no API key required
# Add new entries here when a chain is added to SUPPORTED_NETWORK_BLOCKSCOUT in blockscout.py
BLOCKSCOUT_TARGETS=(
    "flow:0xd3bF53DAC106A0290B0483EcBC89d40FcC961f3e"
    "ink:0x4200000000000000000000000000000000000006"
    "metis:0xDeadDeAddeAddEAddeadDEaDDEAdDeaDDeAD0000"
    "plume:0x4052ACe931bbc647193D23e3442f8688A5845A18"
    "story:0x1514000000000000000000000000000000000000"
)

# Fail the CI job on the first target that does not compile.
for target in "${BLOCKSCOUT_TARGETS[@]}"; do
    echo "::group::Blockscout $target"
    crytic-compile "$target" --compile-remove-metadata || {
        echo "Blockscout $target test failed"
        exit 255
    }
    echo "::endgroup::"
done
diff --git a/tests/test_blockscout.py b/tests/test_blockscout.py
new file mode 100644
index 00000000..64234f3a
--- /dev/null
+++ b/tests/test_blockscout.py
@@ -0,0 +1,29 @@
"""Tests for Blockscout platform chain support."""

import pathlib

import pytest

from crytic_compile import CryticCompile
from crytic_compile.platform.blockscout import SUPPORTED_NETWORK_BLOCKSCOUT

# One verified contract address per Blockscout network key.
# Add an entry here whenever a new chain is added to SUPPORTED_NETWORK_BLOCKSCOUT.
BLOCKSCOUT_TEST_CONTRACTS: dict[str, str] = {
    "flow": "0xd3bF53DAC106A0290B0483EcBC89d40FcC961f3e",  # WFLOW
    "ink": "0x4200000000000000000000000000000000000006",  # WETH
    "metis": "0xDeadDeAddeAddEAddeadDEaDDEAdDeaDDeAD0000",  # MVM_Coinbase
    "plume": "0x4052ACe931bbc647193D23e3442f8688A5845A18",  # LendRewards
    "story": "0x1514000000000000000000000000000000000000",  # WIP
}


@pytest.mark.parametrize("network", sorted(SUPPORTED_NETWORK_BLOCKSCOUT))
def test_blockscout_chain(network: str, tmp_path: pathlib.Path) -> None:
    """Verify that each Blockscout network can fetch and compile a known contract."""
    if network not in BLOCKSCOUT_TEST_CONTRACTS:
        pytest.skip(f"No test contract registered for '{network}' in BLOCKSCOUT_TEST_CONTRACTS")

    addr = BLOCKSCOUT_TEST_CONTRACTS[network]
    compiled = CryticCompile(f"{network}:{addr}", export_dir=str(tmp_path))
    assert compiled.compilation_units, f"No compilation units produced for {network}:{addr}"

From 71cc5cd3740408e7f31bccddc75884303db47855 Mon Sep 17 00:00:00 2001
From: thevolcanomanishere 
Date: Fri, 6 Mar 2026 13:51:02 +0000
Subject: [PATCH 2/7] chore: remove backwards compat, separate blockscout test

---
 crytic_compile/platform/etherscan.py |  9 +++------
 scripts/ci_test_blockscout.sh        | 23 +++++++++++++++++++++++
 scripts/ci_test_etherscan.sh         | 20 --------------------
 3 files changed, 26 insertions(+), 26 deletions(-)
 create mode 100755 scripts/ci_test_blockscout.sh

diff --git a/crytic_compile/platform/etherscan.py b/crytic_compile/platform/etherscan.py
index fb8fc815..a606c137 100644
--- a/crytic_compile/platform/etherscan.py
+++ b/crytic_compile/platform/etherscan.py
@@ -38,9 +38,6 @@
     "https://api.etherscan.io/v2/api?chainid=%s&module=contract&action=getsourcecode&address=%s"
 )
 
-# Alias kept for backwards compatibility with importers
-ETHERSCAN_BASE_BYTECODE = EXPLORER_BASE_BYTECODE
-
 # v1 style scanners
 SUPPORTED_NETWORK_V1: dict[str, tuple[str, str]] = {
     # None at this time. External tracer instances not operated by Etherscan would be here
@@ -173,18 +170,18 @@ def compile(self, crytic_compile: "CryticCompile", **kwargs: str) -> None:
             prefix, addr = target.split(":", 2)
             chainid, prefix_bytecode = SUPPORTED_NETWORK_V2[prefix]
             etherscan_url = ETHERSCAN_BASE_V2 % (chainid, addr)
-            etherscan_bytecode_url = ETHERSCAN_BASE_BYTECODE % (prefix_bytecode, addr)
+            etherscan_bytecode_url = EXPLORER_BASE_BYTECODE % (prefix_bytecode, addr)
         elif target.startswith(tuple(SUPPORTED_NETWORK_V1)):
             api_key_required = 1
             prefix = SUPPORTED_NETWORK_V1[target[: target.find(":") + 1]][0]
             prefix_bytecode = SUPPORTED_NETWORK_V1[target[: target.find(":") + 1]][1]
             addr = target[target.find(":") + 1 :]
             etherscan_url = ETHERSCAN_BASE_V1 % (prefix, addr)
-            etherscan_bytecode_url = ETHERSCAN_BASE_BYTECODE % (prefix_bytecode, addr)
+            etherscan_bytecode_url = EXPLORER_BASE_BYTECODE % (prefix_bytecode, addr)
         else:
             api_key_required = 2
             etherscan_url = ETHERSCAN_BASE_V2 % ("1", target)
-            etherscan_bytecode_url = ETHERSCAN_BASE_BYTECODE % ("etherscan.io", target)
+            etherscan_bytecode_url = EXPLORER_BASE_BYTECODE % ("etherscan.io", target)
             addr = target
             prefix = None
 
diff --git a/scripts/ci_test_blockscout.sh b/scripts/ci_test_blockscout.sh
new file mode 100755
index 00000000..6f9e28ff
--- /dev/null
+++ b/scripts/ci_test_blockscout.sh
@@ -0,0 +1,23 @@
#!/usr/bin/env bash
set -euo pipefail

### Test Blockscout integration (no API key required)
# Add new entries here when a chain is added to SUPPORTED_NETWORK_BLOCKSCOUT in blockscout.py

TARGETS=(
    "flow:0xd3bF53DAC106A0290B0483EcBC89d40FcC961f3e"
    "ink:0x4200000000000000000000000000000000000006"
    "metis:0xDeadDeAddeAddEAddeadDEaDDEAdDeaDDeAD0000"
    "plume:0x4052ACe931bbc647193D23e3442f8688A5845A18"
    "story:0x1514000000000000000000000000000000000000"
)

# Compile each target; stop with a non-zero status on the first failure.
for target in "${TARGETS[@]}"; do
    echo "::group::Blockscout $target"
    crytic-compile "$target" --compile-remove-metadata || {
        echo "Blockscout $target test failed"
        exit 255
    }
    echo "::endgroup::"
done
diff --git a/scripts/ci_test_etherscan.sh b/scripts/ci_test_etherscan.sh
index 7477033d..70c351eb 100755
--- a/scripts/ci_test_etherscan.sh
+++ b/scripts/ci_test_etherscan.sh
@@ -110,23 +110,3 @@ then
     exit 255
 fi
 echo "::endgroup::"
-
-# Blockscout chains — no API key required
-# Add new entries here when a chain is added to SUPPORTED_NETWORK_BLOCKSCOUT in blockscout.py
-BLOCKSCOUT_TARGETS=(
-    "flow:0xd3bF53DAC106A0290B0483EcBC89d40FcC961f3e"
-    "ink:0x4200000000000000000000000000000000000006"
-    "metis:0xDeadDeAddeAddEAddeadDEaDDEAdDeaDDeAD0000"
-    "plume:0x4052ACe931bbc647193D23e3442f8688A5845A18"
-    "story:0x1514000000000000000000000000000000000000"
-)
-
-for target in "${BLOCKSCOUT_TARGETS[@]}"; do
-    echo "::group::Blockscout $target"
-    if ! crytic-compile "$target" --compile-remove-metadata
-    then
-        echo "Blockscout $target test failed"
-        exit 255
-    fi
-    echo "::endgroup::"
-done

From c06b5064b5419e56349289ebbe7783e7b0f07710 Mon Sep 17 00:00:00 2001
From: thevolcanomanishere 
Date: Fri, 6 Mar 2026 13:53:26 +0000
Subject: [PATCH 3/7] feat: add Blockscout to CI job types in onchain workflow

---
 .github/workflows/onchain.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/onchain.yml b/.github/workflows/onchain.yml
index 2c3129b8..6ad8ff3d 100644
--- a/.github/workflows/onchain.yml
+++ b/.github/workflows/onchain.yml
@@ -23,7 +23,7 @@ jobs:
     strategy:
       matrix:
         os: ["ubuntu-latest", "windows-2025"]
-        type: ["etherscan", "sourcify"]
+        type: ["etherscan", "sourcify", "blockscout"]
     steps:
       - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd  # v6.0.2
         with:

From 81686746e25861ed284f128c8949481372c32cd3 Mon Sep 17 00:00:00 2001
From: thevolcanomanishere 
Date: Fri, 6 Mar 2026 14:16:58 +0000
Subject: [PATCH 4/7] refactor: rename etherscan-prefixed explorer flags, add
 Sourcify fallback

- Rename `--etherscan-only-source-code`, `--etherscan-only-bytecode`,
  `--etherscan-export-directory` to `--explorer-*` equivalents so they
  apply cleanly to all block explorer platforms (Etherscan, Blockscout).
  Add `--explorer-ignore` flag (was previously undocumented/unconfigured).
  Rename CLI group from "Etherscan options" to "Block explorer options".

- Each platform now defaults to its own export subdirectory name:
  `etherscan-contracts`, `blockscout-contracts`, `sourcify-contracts`.
  `--explorer-export-directory` overrides the default for all of them.

- Add `try_compile_from_sourcify()` helper to sourcify.py. Blockscout's
  compile() now tries Sourcify first for each chain; if the contract is
  verified there (e.g. Metis chain 1088), it compiles via Sourcify for
  richer metadata. Falls back to Blockscout silently on 404 or unsupported
  chain.

- Add chain IDs to SUPPORTED_NETWORK_BLOCKSCOUT for Sourcify lookup.

- Update stale bzzr1 hash in test_metadata.py.

- Add .env and crytic-export/ to .gitignore.

- Update README to list Blockscout as a supported platform.

Co-Authored-By: Claude Sonnet 4.6 
---
 .gitignore                                  |  2 +
 README.md                                   |  1 +
 crytic_compile/cryticparser/cryticparser.py | 54 +++++++++------
 crytic_compile/cryticparser/defaults.py     |  7 +-
 crytic_compile/platform/blockscout.py       | 41 ++++++-----
 crytic_compile/platform/etherscan.py        | 13 ++--
 crytic_compile/platform/sourcify.py         | 77 +++++++++++++++++++++
 tests/test_metadata.py                      |  2 +-
 8 files changed, 146 insertions(+), 51 deletions(-)

diff --git a/.gitignore b/.gitignore
index 4b3890f9..64493f5c 100644
--- a/.gitignore
+++ b/.gitignore
@@ -12,3 +12,5 @@ package-lock.json
 result
 env/
 .coverage*
+.env
+crytic-export/
diff --git a/README.md b/README.md
index 33cf6d59..b9bdd5ab 100644
--- a/README.md
+++ b/README.md
@@ -14,6 +14,7 @@ Library to help smart contract compilation. It includes support for:
 - [Etherlime](https://github.com/LimeChain/etherlime)
 - [Sourcify](https://sourcify.dev/)
 - [Etherscan](https://etherscan.io/) (including several alt-chain explorers and testnets)
+- [Blockscout](https://www.blockscout.com/) (Flow, Ink, Metis, Plume, Story)
 - [Truffle](https://truffleframework.com/)
 - [Waffle](https://github.com/EthWorks/Waffle)
 
diff --git a/crytic_compile/cryticparser/cryticparser.py b/crytic_compile/cryticparser/cryticparser.py
index 8c14884d..d4c73d9f 100755
--- a/crytic_compile/cryticparser/cryticparser.py
+++ b/crytic_compile/cryticparser/cryticparser.py
@@ -72,7 +72,7 @@ def init(parser: ArgumentParser) -> None:
     _init_brownie(parser)
     _init_dapp(parser)
     _init_etherlime(parser)
-    _init_etherscan(parser)
+    _init_explorer(parser)
     _init_waffle(parser)
     _init_npx(parser)
     _init_buidler(parser)
@@ -299,30 +299,46 @@ def _init_etherlime(parser: ArgumentParser) -> None:
     )
 
 
-def _init_etherscan(parser: ArgumentParser) -> None:
-    """Init etherscan arguments
+def _init_explorer(parser: ArgumentParser) -> None:
+    """Init block explorer arguments (Etherscan, Blockscout, Sourcify, etc.)
 
     Args:
         parser (ArgumentParser): argparser where the cli flags are added
     """
-    group_etherscan = parser.add_argument_group("Etherscan options")
-    group_etherscan.add_argument(
-        "--etherscan-only-source-code",
+    group = parser.add_argument_group("Block explorer options")
+    group.add_argument(
+        "--explorer-only-source-code",
         help="Only compile if the source code is available.",
         action="store_true",
-        dest="etherscan_only_source_code",
-        default=DEFAULTS_FLAG_IN_CONFIG["etherscan_only_source_code"],
+        dest="explorer_only_source_code",
+        default=DEFAULTS_FLAG_IN_CONFIG["explorer_only_source_code"],
     )
 
-    group_etherscan.add_argument(
-        "--etherscan-only-bytecode",
+    group.add_argument(
+        "--explorer-only-bytecode",
         help="Only looks for bytecode.",
         action="store_true",
-        dest="etherscan_only_bytecode",
-        default=DEFAULTS_FLAG_IN_CONFIG["etherscan_only_bytecode"],
+        dest="explorer_only_bytecode",
+        default=DEFAULTS_FLAG_IN_CONFIG["explorer_only_bytecode"],
     )
 
-    group_etherscan.add_argument(
+    group.add_argument(
+        "--explorer-ignore",
+        help="Ignore block explorer platforms (Etherscan, Blockscout, etc.).",
+        action="store_true",
+        dest="explorer_ignore",
+        default=DEFAULTS_FLAG_IN_CONFIG["explorer_ignore"],
+    )
+
+    group.add_argument(
+        "--explorer-export-directory",
+        help="Directory in which to save contracts fetched from block explorers.",
+        action="store",
+        dest="explorer_export_dir",
+        default=DEFAULTS_FLAG_IN_CONFIG["explorer_export_directory"],
+    )
+
+    group.add_argument(
         "--etherscan-apikey",
         help="Etherscan API key.",
         action="store",
@@ -330,22 +346,14 @@ def _init_etherscan(parser: ArgumentParser) -> None:
         default=DEFAULTS_FLAG_IN_CONFIG["etherscan_api_key"],
     )
 
-    group_etherscan.add_argument(
+    group.add_argument(
         "--avax-apikey",
-        help="Etherscan API key.",
+        help="Avalanche (Snowtrace) API key.",
         action="store",
         dest="avax_api_key",
         default=DEFAULTS_FLAG_IN_CONFIG["etherscan_api_key"],
     )
 
-    group_etherscan.add_argument(
-        "--etherscan-export-directory",
-        help="Directory in which to save the analyzed contracts.",
-        action="store",
-        dest="etherscan_export_dir",
-        default=DEFAULTS_FLAG_IN_CONFIG["etherscan_export_directory"],
-    )
-
 
 def _init_npx(parser: ArgumentParser) -> None:
     """Init npx arguments
diff --git a/crytic_compile/cryticparser/defaults.py b/crytic_compile/cryticparser/defaults.py
index f130c493..aa08b8c3 100755
--- a/crytic_compile/cryticparser/defaults.py
+++ b/crytic_compile/cryticparser/defaults.py
@@ -27,10 +27,11 @@
     "dapp_ignore_compile": False,
     "etherlime_ignore_compile": False,
     "etherlime_compile_arguments": None,
-    "etherscan_only_source_code": False,
-    "etherscan_only_bytecode": False,
+    "explorer_only_source_code": False,
+    "explorer_only_bytecode": False,
+    "explorer_ignore": False,
+    "explorer_export_directory": None,
     "etherscan_api_key": None,
-    "etherscan_export_directory": "etherscan-contracts",
     "waffle_ignore_compile": False,
     "waffle_config_file": None,
     "npx_disable": False,
diff --git a/crytic_compile/platform/blockscout.py b/crytic_compile/platform/blockscout.py
index 03c8168e..8c847b60 100644
--- a/crytic_compile/platform/blockscout.py
+++ b/crytic_compile/platform/blockscout.py
@@ -22,6 +22,7 @@
     handle_multiple_files,
     handle_single_file,
 )
+from crytic_compile.platform.sourcify import try_compile_from_sourcify
 from crytic_compile.platform.types import Type
 
 if TYPE_CHECKING:
@@ -32,13 +33,13 @@
 # Blockscout API endpoint — host is the full hostname (no api. subdomain)
 BLOCKSCOUT_BASE = "https://%s/api?module=contract&action=getsourcecode&address=%s"
 
-# Key -> (api_host, bytecode_host)
-SUPPORTED_NETWORK_BLOCKSCOUT: dict[str, tuple[str, str]] = {
-    "flow": ("evm.flowscan.io", "evm.flowscan.io"),
-    "ink": ("explorer.inkonchain.com", "explorer.inkonchain.com"),
-    "metis": ("andromeda-explorer.metis.io", "andromeda-explorer.metis.io"),
-    "plume": ("explorer.plume.org", "explorer.plume.org"),
-    "story": ("www.storyscan.xyz", "www.storyscan.xyz"),
+# Key -> (api_host, bytecode_host, chain_id)
+SUPPORTED_NETWORK_BLOCKSCOUT: dict[str, tuple[str, str, str]] = {
+    "flow": ("evm.flowscan.io", "evm.flowscan.io", "747"),
+    "ink": ("explorer.inkonchain.com", "explorer.inkonchain.com", "57073"),
+    "metis": ("andromeda-explorer.metis.io", "andromeda-explorer.metis.io", "1088"),
+    "plume": ("explorer.plume.org", "explorer.plume.org", "98866"),
+    "story": ("www.storyscan.xyz", "www.storyscan.xyz", "1514"),
 }
 
 
@@ -108,27 +109,35 @@ def compile(self, crytic_compile: "CryticCompile", **kwargs: str) -> None:
 
         Args:
             crytic_compile: Associated CryticCompile object.
-            **kwargs: optional arguments. Used "solc", "etherscan_only_source_code",
-                "etherscan_only_bytecode", "export_dir".
+            **kwargs: optional arguments. Used "solc", "explorer_only_source_code",
+                "explorer_only_bytecode", "export_dir".
 
         Raises:
             InvalidCompilation: if the explorer returned an error or results could not be parsed.
         """
         target = self._target
         prefix, addr = target.split(":", 1)
-        api_host, bytecode_host = SUPPORTED_NETWORK_BLOCKSCOUT[prefix]
+        api_host, bytecode_host, chain_id = SUPPORTED_NETWORK_BLOCKSCOUT[prefix]
 
         source_url = BLOCKSCOUT_BASE % (api_host, addr)
         bytecode_url = EXPLORER_BASE_BYTECODE % (bytecode_host, addr)
 
-        only_source = kwargs.get("etherscan_only_source_code", False)
-        only_bytecode = kwargs.get("etherscan_only_bytecode", False)
+        only_source = kwargs.get("explorer_only_source_code", False)
+        only_bytecode = kwargs.get("explorer_only_bytecode", False)
 
         export_dir = kwargs.get("export_dir", "crytic-export")
         export_dir = os.path.join(
-            export_dir, kwargs.get("etherscan_export_dir", "etherscan-contracts")
+            export_dir, kwargs.get("explorer_export_dir") or "blockscout-contracts"
         )
 
+        # Try Sourcify first — it carries richer metadata and is preferred when available.
+        if not only_bytecode:
+            base_export = kwargs.get("export_dir", "crytic-export")
+            sourcify_kwargs = {k: v for k, v in kwargs.items() if k != "export_dir"}
+            if try_compile_from_sourcify(crytic_compile, chain_id, addr, base_export, **sourcify_kwargs):
+                LOGGER.info("Compiled %s via Sourcify (chain %s)", addr, chain_id)
+                return
+
         source_code: str = ""
         result: dict[str, Any] = {}
         contract_name: str = ""
@@ -285,14 +294,12 @@ def is_supported(target: str, **kwargs: str) -> bool:
 
         Args:
             target: path/target string.
-            **kwargs: optional arguments. Used "etherscan_ignore".
+            **kwargs: optional arguments. Used "explorer_ignore".
 
         Returns:
             bool: True if the target uses a known Blockscout network prefix.
         """
-        # Blockscout respects the same ignore flag as Etherscan so that a single
-        # flag suppresses all block explorer platforms.
-        if kwargs.get("etherscan_ignore", False):
+        if kwargs.get("explorer_ignore", False):
             return False
         if not target.startswith(tuple(SUPPORTED_NETWORK_BLOCKSCOUT)):
             return False
diff --git a/crytic_compile/platform/etherscan.py b/crytic_compile/platform/etherscan.py
index a606c137..dd9dda18 100644
--- a/crytic_compile/platform/etherscan.py
+++ b/crytic_compile/platform/etherscan.py
@@ -154,7 +154,7 @@ def compile(self, crytic_compile: "CryticCompile", **kwargs: str) -> None:
 
         Args:
             crytic_compile (CryticCompile): Associated CryticCompile object
-            **kwargs: optional arguments. Used "solc", "etherscan_only_source_code", "etherscan_only_bytecode",
+            **kwargs: optional arguments. Used "solc", "explorer_only_source_code", "explorer_only_bytecode",
                 "etherscan_api_key", "export_dir"
 
         Raises:
@@ -185,8 +185,8 @@ def compile(self, crytic_compile: "CryticCompile", **kwargs: str) -> None:
             addr = target
             prefix = None
 
-        only_source = kwargs.get("etherscan_only_source_code", False)
-        only_bytecode = kwargs.get("etherscan_only_bytecode", False)
+        only_source = kwargs.get("explorer_only_source_code", False)
+        only_bytecode = kwargs.get("explorer_only_bytecode", False)
 
         etherscan_api_key = kwargs.get("etherscan_api_key", None)
         if etherscan_api_key is None:
@@ -194,7 +194,7 @@ def compile(self, crytic_compile: "CryticCompile", **kwargs: str) -> None:
 
         export_dir = kwargs.get("export_dir", "crytic-export")
         export_dir = os.path.join(
-            export_dir, kwargs.get("etherscan_export_dir", "etherscan-contracts")
+            export_dir, kwargs.get("explorer_export_dir") or "etherscan-contracts"
         )
 
         if api_key_required == 2 and etherscan_api_key:
@@ -383,13 +383,12 @@ def is_supported(target: str, **kwargs: str) -> bool:
 
         Args:
             target (str): path to the target
-            **kwargs: optional arguments. Used "etherscan_ignore"
+            **kwargs: optional arguments. Used "explorer_ignore"
 
         Returns:
             bool: True if the target is a etherscan project
         """
-        etherscan_ignore = kwargs.get("etherscan_ignore", False)
-        if etherscan_ignore:
+        if kwargs.get("explorer_ignore", False):
             return False
         if target.startswith(tuple(SUPPORTED_NETWORK)):
             target = target[target.find(":") + 1 :]
diff --git a/crytic_compile/platform/sourcify.py b/crytic_compile/platform/sourcify.py
index 31f64767..0567042f 100644
--- a/crytic_compile/platform/sourcify.py
+++ b/crytic_compile/platform/sourcify.py
@@ -233,6 +233,83 @@ def _write_config_file(working_dir: str, compiler_version: str, settings: dict[s
         json.dump(metadata_config, f)
 
 
def try_compile_from_sourcify(
    crytic_compile: "CryticCompile", chain_id: str, addr: str, export_dir: str, **kwargs: str
) -> bool:
    """Try to compile a contract via Sourcify, returning False if it is not verified there.

    Args:
        crytic_compile: Associated CryticCompile object.
        chain_id: Chain ID (decimal string).
        addr: Contract address.
        export_dir: Base export directory.
        **kwargs: Passed through to CompilerVersion (e.g. "solc").

    Returns:
        bool: True if the contract was found and compiled via Sourcify, False if not verified.

    Raises:
        InvalidCompilation: If Sourcify returned an unexpected error (not a 404).
    """
    try:
        verified = _fetch_sourcify_data(chain_id, addr)
    except InvalidCompilation:
        # Not verified on Sourcify (or chain not indexed) — let the caller's explorer take over.
        return False

    source_map = verified.get("sources", {})
    if not source_map:
        return False

    target_dir = os.path.join(export_dir, "sourcify-contracts")
    if not os.path.exists(target_dir):
        os.makedirs(target_dir)

    working_dir, filenames = _write_source_files(source_map, addr, chain_id, target_dir)

    comp_info = verified.get("compilation", {})
    raw_version = comp_info.get("compilerVersion", "")
    version_match = re.search(r"(\d+\.\d+\.\d+)", raw_version)
    if version_match is None:
        raise InvalidCompilation(f"Could not parse compiler version from: {raw_version}")
    solc_version = version_match.group(1)

    compiler_settings = comp_info.get("compilerSettings", {})
    optimizer_settings = compiler_settings.get("optimizer", {})
    optimized = optimizer_settings.get("enabled", False)
    remaps = sanitize_remappings(compiler_settings.get("remappings", []), working_dir) or None

    unit = CompilationUnit(crytic_compile, comp_info.get("name", "Contract"))
    unit.compiler_version = CompilerVersion(
        compiler=kwargs.get("solc", "solc"),
        version=solc_version,
        optimized=optimized,
        optimize_runs=optimizer_settings.get("runs") if optimized else None,
    )
    unit.compiler_version.look_for_installed_version()

    # Record proxy implementation addresses so downstream tools can resolve them.
    proxy_info = verified.get("proxyResolution")
    if proxy_info and proxy_info.get("isProxy"):
        for implementation in proxy_info.get("implementations", []):
            impl_addr = implementation.get("address")
            if impl_addr:
                unit.implementation_addresses.add(
                    f"sourcify-{chain_id}:{_to_checksum_address(impl_addr)}"
                )

    solc_standard_json.standalone_compile(
        filenames,
        unit,
        working_dir=working_dir,
        remappings=remaps,
        evm_version=compiler_settings.get("evmVersion"),
        via_ir=compiler_settings.get("viaIR"),
    )

    _write_config_file(working_dir, solc_version, compiler_settings)
    return True
+
+
 class Sourcify(AbstractPlatform):
     """
     Sourcify platform - fetches verified contracts from sourcify.dev
diff --git a/tests/test_metadata.py b/tests/test_metadata.py
index 1bc14ab8..70ffc83b 100644
--- a/tests/test_metadata.py
+++ b/tests/test_metadata.py
@@ -24,7 +24,7 @@ def metadata_checks(crytic_compile_instance: CryticCompile) -> None:
 
     with_metadata = source_unit.bytecode_init("Dai")
     assert source_unit.metadata_of("Dai") == {
-        "bzzr1": "92df983266c28b6fb4c7c776b695725fd63d55b8cd5d5618b69fb544ce801d85",
+        "bzzr1": "abe44494f2727bdaa34d571c1e0d03d8ecf0dc321c56d76334ab56e39e41ef17",
         "solc": "0.5.12",
     }
     source_unit.remove_metadata()

From 83da3ddb0d77f06226241df2e63ec50b2c8e1bce Mon Sep 17 00:00:00 2001
From: thevolcanomanishere 
Date: Mon, 9 Mar 2026 16:25:37 +0000
Subject: [PATCH 5/7] feat: add Blockscout support with custom URL option and
 update tests

---
 README.md                                   |   2 +-
 crytic_compile/cryticparser/cryticparser.py |   8 ++
 crytic_compile/cryticparser/defaults.py     |   1 +
 crytic_compile/platform/blockscout.py       | 105 ++++++++++++++++----
 scripts/ci_test_blockscout.sh               |  12 +--
 tests/test_blockscout.py                    |  27 +++--
 6 files changed, 114 insertions(+), 41 deletions(-)

diff --git a/README.md b/README.md
index b9bdd5ab..ae71b4d0 100644
--- a/README.md
+++ b/README.md
@@ -14,7 +14,7 @@ Library to help smart contract compilation. It includes support for:
 - [Etherlime](https://github.com/LimeChain/etherlime)
 - [Sourcify](https://sourcify.dev/)
 - [Etherscan](https://etherscan.io/) (including several alt-chain explorers and testnets)
-- [Blockscout](https://www.blockscout.com/) (Flow, Ink, Metis, Plume, Story)
+- [Blockscout](https://www.blockscout.com/) ([750+ chains](https://chains.blockscout.com/))
 - [Truffle](https://truffleframework.com/)
 - [Waffle](https://github.com/EthWorks/Waffle)
 
diff --git a/crytic_compile/cryticparser/cryticparser.py b/crytic_compile/cryticparser/cryticparser.py
index d4c73d9f..e77baac2 100755
--- a/crytic_compile/cryticparser/cryticparser.py
+++ b/crytic_compile/cryticparser/cryticparser.py
@@ -346,6 +346,14 @@ def _init_explorer(parser: ArgumentParser) -> None:
         default=DEFAULTS_FLAG_IN_CONFIG["etherscan_api_key"],
     )
 
+    group.add_argument(
+        "--blockscout-url",
+        help="Custom Blockscout explorer URL for chains not in the directory.",
+        action="store",
+        dest="blockscout_url",
+        default=DEFAULTS_FLAG_IN_CONFIG["blockscout_url"],
+    )
+
     group.add_argument(
         "--avax-apikey",
         help="Avalanche (Snowtrace) API key.",
diff --git a/crytic_compile/cryticparser/defaults.py b/crytic_compile/cryticparser/defaults.py
index aa08b8c3..fdcb895b 100755
--- a/crytic_compile/cryticparser/defaults.py
+++ b/crytic_compile/cryticparser/defaults.py
@@ -32,6 +32,7 @@
     "explorer_ignore": False,
     "explorer_export_directory": None,
     "etherscan_api_key": None,
+    "blockscout_url": None,
     "waffle_ignore_compile": False,
     "waffle_config_file": None,
     "npx_disable": False,
diff --git a/crytic_compile/platform/blockscout.py b/crytic_compile/platform/blockscout.py
index 8c847b60..55b2409d 100644
--- a/crytic_compile/platform/blockscout.py
+++ b/crytic_compile/platform/blockscout.py
@@ -6,6 +6,8 @@
 import logging
 import os
 import re
+import urllib.error
+import urllib.parse
 import urllib.request
 from json.decoder import JSONDecodeError
 from typing import TYPE_CHECKING, Any
@@ -30,18 +32,63 @@
 
 LOGGER = logging.getLogger("CryticCompile")
 
-# Blockscout API endpoint — host is the full hostname (no api. subdomain)
-BLOCKSCOUT_BASE = "https://%s/api?module=contract&action=getsourcecode&address=%s"
+# Blockscout API endpoint — explorer_url is the full base URL
+BLOCKSCOUT_BASE = "%s/api?module=contract&action=getsourcecode&address=%s"
 
-# Key -> (api_host, bytecode_host, chain_id)
-SUPPORTED_NETWORK_BLOCKSCOUT: dict[str, tuple[str, str, str]] = {
-    "flow": ("evm.flowscan.io", "evm.flowscan.io", "747"),
-    "ink": ("explorer.inkonchain.com", "explorer.inkonchain.com", "57073"),
-    "metis": ("andromeda-explorer.metis.io", "andromeda-explorer.metis.io", "1088"),
-    "plume": ("explorer.plume.org", "explorer.plume.org", "98866"),
-    "story": ("www.storyscan.xyz", "www.storyscan.xyz", "1514"),
+# Blockscout chain directory API
+BLOCKSCOUT_CHAINS_URL = "https://chains.blockscout.com/api/chains"
+
+# Chains with Blockscout-compatible explorers not listed in the directory.
+# Applied on top of the directory results so they cannot be shadowed by conflicts.
+BLOCKSCOUT_EXTRA_CHAINS: dict[str, str] = {
+    "747": "https://evm.flowscan.io",  # Flow
+    "98866": "https://explorer.plume.org",  # Plume
 }
 
+# Module-level cache: chain_id (str) -> explorer_url (str)
+_blockscout_chains: dict[str, str] | None = None
+
+
+def _fetch_blockscout_chains() -> dict[str, str]:
+    """Fetch the Blockscout chain directory and return a
+    chain_id -> explorer_url mapping.
+
+    Results are cached after the first successful call.
+
+    Returns:
+        Mapping of chain ID strings to explorer base URLs.
+    """
+    global _blockscout_chains  # noqa: PLW0603
+    if _blockscout_chains is not None:
+        return _blockscout_chains
+
+    try:
+        req = urllib.request.Request(
+            BLOCKSCOUT_CHAINS_URL,
+            headers={"User-Agent": "crytic-compile/0"},
+        )
+        with urllib.request.urlopen(req, timeout=10) as response:
+            data = json.loads(response.read())
+    except (urllib.error.URLError, json.JSONDecodeError, OSError) as e:
+        LOGGER.warning("Failed to fetch Blockscout chain list: %s", e)
+        _blockscout_chains = {}
+        return _blockscout_chains
+
+    chains: dict[str, str] = {}
+    for chain_id, info in data.items():
+        explorers = info.get("explorers", [])
+        if explorers:
+            url = explorers[0].get("url", "").rstrip("/")
+            if url:
+                chains[chain_id] = url
+
+    # Extra chains take priority over the directory (avoids conflicts
+    # like chain 747 mapping to Alvey instead of Flow).
+    chains.update(BLOCKSCOUT_EXTRA_CHAINS)
+
+    _blockscout_chains = chains
+    return _blockscout_chains
+
 
 def _normalize_blockscout_result(result: dict[str, Any]) -> dict[str, Any]:
     """Normalize a Blockscout API result to Etherscan field conventions.
@@ -116,11 +163,32 @@ def compile(self, crytic_compile: "CryticCompile", **kwargs: str) -> None:
             InvalidCompilation: if the explorer returned an error or results could not be parsed.
         """
         target = self._target
-        prefix, addr = target.split(":", 1)
-        api_host, bytecode_host, chain_id = SUPPORTED_NETWORK_BLOCKSCOUT[prefix]
+        match = re.match(r"^blockscout-(\d+):(0x[a-fA-F0-9]{40})$", target)
+        if not match:
+            raise InvalidCompilation(f"Invalid Blockscout target: {target}")
+
+        chain_id = match.group(1)
+        addr = match.group(2)
+        prefix = f"blockscout-{chain_id}"
+
+        custom_url = kwargs.get("blockscout_url")
+        if custom_url:
+            explorer_url = custom_url.rstrip("/")
+        else:
+            chains = _fetch_blockscout_chains()
+            if chain_id not in chains:
+                raise InvalidCompilation(
+                    f"Chain {chain_id} not found in Blockscout "
+                    f"chain list. Use --blockscout-url to "
+                    f"specify a custom explorer URL, or see "
+                    f"https://chains.blockscout.com/ for "
+                    f"supported chains."
+                )
+            explorer_url = chains[chain_id]
+        explorer_host = urllib.parse.urlparse(explorer_url).netloc
 
-        source_url = BLOCKSCOUT_BASE % (api_host, addr)
-        bytecode_url = EXPLORER_BASE_BYTECODE % (bytecode_host, addr)
+        source_url = BLOCKSCOUT_BASE % (explorer_url, addr)
+        bytecode_url = EXPLORER_BASE_BYTECODE % (explorer_host, addr)
 
         only_source = kwargs.get("explorer_only_source_code", False)
         only_bytecode = kwargs.get("explorer_only_bytecode", False)
@@ -134,7 +202,9 @@ def compile(self, crytic_compile: "CryticCompile", **kwargs: str) -> None:
         if not only_bytecode:
             base_export = kwargs.get("export_dir", "crytic-export")
             sourcify_kwargs = {k: v for k, v in kwargs.items() if k != "export_dir"}
-            if try_compile_from_sourcify(crytic_compile, chain_id, addr, base_export, **sourcify_kwargs):
+            if try_compile_from_sourcify(
+                crytic_compile, chain_id, addr, base_export, **sourcify_kwargs
+            ):
                 LOGGER.info("Compiled %s via Sourcify (chain %s)", addr, chain_id)
                 return
 
@@ -297,14 +367,11 @@ def is_supported(target: str, **kwargs: str) -> bool:
             **kwargs: optional arguments. Used "explorer_ignore".
 
         Returns:
-            bool: True if the target uses a known Blockscout network prefix.
+            bool: True if the target matches blockscout-<chain_id>:0x<address>.
         """
         if kwargs.get("explorer_ignore", False):
             return False
-        if not target.startswith(tuple(SUPPORTED_NETWORK_BLOCKSCOUT)):
-            return False
-        addr = target[target.find(":") + 1 :]
-        return bool(re.match(r"^\s*0x[a-zA-Z0-9]{40}\s*$", addr))
+        return bool(re.match(r"^blockscout-\d+:0x[a-fA-F0-9]{40}$", target))
 
     def is_dependency(self, path: str) -> bool:
         return False
diff --git a/scripts/ci_test_blockscout.sh b/scripts/ci_test_blockscout.sh
index 6f9e28ff..412dcdc2 100755
--- a/scripts/ci_test_blockscout.sh
+++ b/scripts/ci_test_blockscout.sh
@@ -2,14 +2,14 @@
 set -euo pipefail
 
 ### Test Blockscout integration (no API key required)
-# Add new entries here when a chain is added to SUPPORTED_NETWORK_BLOCKSCOUT in blockscout.py
+# Target format: blockscout-<chain_id>:0x<address>
 TARGETS=(
-    "flow:0xd3bF53DAC106A0290B0483EcBC89d40FcC961f3e"
-    "ink:0x4200000000000000000000000000000000000006"
-    "metis:0xDeadDeAddeAddEAddeadDEaDDEAdDeaDDeAD0000"
-    "plume:0x4052ACe931bbc647193D23e3442f8688A5845A18"
-    "story:0x1514000000000000000000000000000000000000"
+    "blockscout-747:0xd3bF53DAC106A0290B0483EcBC89d40FcC961f3e"
+    "blockscout-57073:0x4200000000000000000000000000000000000006"
+    "blockscout-1088:0xDeadDeAddeAddEAddeadDEaDDEAdDeaDDeAD0000"
+    "blockscout-98866:0x4052ACe931bbc647193D23e3442f8688A5845A18"
+    "blockscout-1514:0x1514000000000000000000000000000000000000"
 )
 
 for target in "${TARGETS[@]}"; do
diff --git a/tests/test_blockscout.py b/tests/test_blockscout.py
index 64234f3a..8bdf0701 100644
--- a/tests/test_blockscout.py
+++ b/tests/test_blockscout.py
@@ -5,25 +5,22 @@
 import pytest
 
 from crytic_compile import CryticCompile
-from crytic_compile.platform.blockscout import SUPPORTED_NETWORK_BLOCKSCOUT
 
-# One verified contract address per Blockscout network key.
-# Add an entry here whenever a new chain is added to SUPPORTED_NETWORK_BLOCKSCOUT.
+# One verified contract address per Blockscout chain ID.
 BLOCKSCOUT_TEST_CONTRACTS: dict[str, str] = {
-    "flow": "0xd3bF53DAC106A0290B0483EcBC89d40FcC961f3e",  # WFLOW
-    "ink": "0x4200000000000000000000000000000000000006",  # WETH
-    "metis": "0xDeadDeAddeAddEAddeadDEaDDEAdDeaDDeAD0000",  # MVM_Coinbase
-    "plume": "0x4052ACe931bbc647193D23e3442f8688A5845A18",  # LendRewards
-    "story": "0x1514000000000000000000000000000000000000",  # WIP
+    "747": "0xd3bF53DAC106A0290B0483EcBC89d40FcC961f3e",  # Flow: WFLOW
+    "57073": "0x4200000000000000000000000000000000000006",  # Ink: WETH
+    "1088": "0xDeadDeAddeAddEAddeadDEaDDEAdDeaDDeAD0000",  # Metis: MVM_Coinbase
+    "98866": "0x4052ACe931bbc647193D23e3442f8688A5845A18",  # Plume: LendRewards
+    "1514": "0x1514000000000000000000000000000000000000",  # Story: WIP
 }
 
 
-@pytest.mark.parametrize("network", sorted(SUPPORTED_NETWORK_BLOCKSCOUT.keys()))
-def test_blockscout_chain(network: str, tmp_path: pathlib.Path) -> None:
+@pytest.mark.parametrize("chain_id", sorted(BLOCKSCOUT_TEST_CONTRACTS.keys()))
+def test_blockscout_chain(chain_id: str, tmp_path: pathlib.Path) -> None:
     """Verify that each Blockscout network can fetch and compile a known contract."""
-    addr = BLOCKSCOUT_TEST_CONTRACTS.get(network)
-    if addr is None:
-        pytest.skip(f"No test contract registered for '{network}' in BLOCKSCOUT_TEST_CONTRACTS")
+    addr = BLOCKSCOUT_TEST_CONTRACTS[chain_id]
+    target = f"blockscout-{chain_id}:{addr}"
 
-    cc = CryticCompile(f"{network}:{addr}", export_dir=str(tmp_path))
-    assert cc.compilation_units, f"No compilation units produced for {network}:{addr}"
+    cc = CryticCompile(target, export_dir=str(tmp_path))
+    assert cc.compilation_units, f"No compilation units produced for {target}"

From 630f381c7ca681bcf9eadbc2257adc03b47711a1 Mon Sep 17 00:00:00 2001
From: thevolcanomanishere
Date: Mon, 9 Mar 2026 17:14:21 +0000
Subject: [PATCH 6/7] fix: update expected metadata hash in metadata_checks
 test

---
 tests/test_metadata.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/tests/test_metadata.py b/tests/test_metadata.py
index 70ffc83b..1bc14ab8 100644
--- a/tests/test_metadata.py
+++ b/tests/test_metadata.py
@@ -24,7 +24,7 @@ def metadata_checks(crytic_compile_instance: CryticCompile) -> None:
 
     with_metadata = source_unit.bytecode_init("Dai")
     assert source_unit.metadata_of("Dai") == {
-        "bzzr1": "abe44494f2727bdaa34d571c1e0d03d8ecf0dc321c56d76334ab56e39e41ef17",
+        "bzzr1": "92df983266c28b6fb4c7c776b695725fd63d55b8cd5d5618b69fb544ce801d85",
         "solc": "0.5.12",
     }
     source_unit.remove_metadata()

From 5fbc9da952d99e0416ec8409f0769a7d453d1eb5 Mon Sep 17 00:00:00 2001
From: thevolcanomanishere
Date: Thu, 12 Mar 2026 17:46:24 +0000
Subject: [PATCH 7/7] refactor: remove Sourcify compilation attempt from
 Blockscout class

---
 crytic_compile/platform/blockscout.py | 11 -----------
 1 file changed, 11 deletions(-)

diff --git a/crytic_compile/platform/blockscout.py b/crytic_compile/platform/blockscout.py
index 55b2409d..0970f5b2 100644
--- a/crytic_compile/platform/blockscout.py
+++ b/crytic_compile/platform/blockscout.py
@@ -24,7 +24,6 @@
     handle_multiple_files,
     handle_single_file,
 )
-from crytic_compile.platform.sourcify import try_compile_from_sourcify
 from crytic_compile.platform.types import Type
 
 if TYPE_CHECKING:
@@ -198,16 +197,6 @@ def compile(self, crytic_compile: "CryticCompile", **kwargs: str) -> None:
             export_dir, kwargs.get("explorer_export_dir") or "blockscout-contracts"
         )
 
-        # Try Sourcify first — it carries richer metadata and is preferred when available.
-        if not only_bytecode:
-            base_export = kwargs.get("export_dir", "crytic-export")
-            sourcify_kwargs = {k: v for k, v in kwargs.items() if k != "export_dir"}
-            if try_compile_from_sourcify(
-                crytic_compile, chain_id, addr, base_export, **sourcify_kwargs
-            ):
-                LOGGER.info("Compiled %s via Sourcify (chain %s)", addr, chain_id)
-                return
-
         source_code: str = ""
         result: dict[str, Any] = {}
         contract_name: str = ""