Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
65 changes: 41 additions & 24 deletions node_cli/core/schains.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,7 @@
from node_cli.utils.helper import (
error_exit,
get_request,
is_btrfs_subvolume,
read_json,
run_cmd,
safe_load_yml,
Expand Down Expand Up @@ -253,35 +254,51 @@ def ensure_schain_volume(schain: str, schain_type: str, env_type: str) -> None:
logger.warning('Volume %s already exists', schain)


def cleanup_datadir_for_single_chain_node(
def cleanup_datadir_content(datadir_path: str) -> None:
    """Remove everything inside *datadir_path*, including btrfs snapshots.

    Order of operations:
      1. delete regular files/folders at the top level, skipping 'snapshots';
      2. delete each entry under snapshots/*/* (btrfs subvolumes via
         ``rm_btrfs_subvolume``, plain files/folders via os/shutil);
      3. remove the now-empty 'snapshots' directory itself.

    Does nothing (beyond logging) if *datadir_path* does not exist.
    """
    logger.info('Removing regular folders of %s', datadir_path)
    # BUG FIX: the previous pattern f'{datadir_path}/[!snapshots]*' used a glob
    # character class ("any name NOT starting with one of s,n,a,p,h,o,t"),
    # not a name exclusion — entries like 'abc' were wrongly skipped while a
    # hypothetical 'xsnapshots' would be deleted. Exclude by exact name instead.
    for path in glob.glob(os.path.join(datadir_path, '*')):
        if os.path.basename(path) == 'snapshots':
            continue
        if os.path.isfile(path):
            logger.debug('Deleting file in datadir: %s', path)
            os.remove(path)
        elif os.path.isdir(path):
            logger.debug('Deleting folder in datadir: %s', path)
            shutil.rmtree(path)

    logger.info('Removing subvolumes of %s', datadir_path)
    subvolumes_pattern = f'{datadir_path}/snapshots/*/*'
    for path in glob.glob(subvolumes_pattern):
        if is_btrfs_subvolume(path):
            logger.debug('Deleting subvolume %s', path)
            rm_btrfs_subvolume(path)
        elif os.path.isfile(path):
            logger.debug('Deleting file in snapshots directory: %s', path)
            os.remove(path)
        elif os.path.isdir(path):
            logger.debug('Deleting folder in snapshots directory %s', path)
            shutil.rmtree(path)

    # ignore_errors keeps this a no-op when 'snapshots' never existed
    shutil.rmtree(os.path.join(datadir_path, 'snapshots'), ignore_errors=True)


def cleanup_no_lvm_datadir(
chain_name: str = '', base_path: str = SCHAINS_MNT_DIR_SINGLE_CHAIN
) -> None:
if not chain_name:
if chain_name:
folders = [chain_name]
else:
folders = [f for f in os.listdir(base_path) if os.path.isdir(os.path.join(base_path, f))]
if not folders:
raise NoDataDirForChainError(
f'No data directory found in {base_path}. '
'Please check the path or specify a chain name.'
)
chain_name = folders[0]
base_path = os.path.join(base_path, chain_name)
regular_folders_pattern = f'{base_path}/[!snapshots]*'
logger.info('Removing regular folders')
for filepath in glob.glob(regular_folders_pattern):
if os.path.isdir(filepath):
logger.debug('Removing recursively %s', filepath)
shutil.rmtree(filepath)
if os.path.isfile(filepath):
os.remove(filepath)

logger.info('Removing subvolumes')
subvolumes_pattern = f'{base_path}/snapshots/*/*'
for filepath in glob.glob(subvolumes_pattern):
logger.debug('Deleting subvolume %s', filepath)
if os.path.isdir(filepath):
rm_btrfs_subvolume(filepath)
else:
os.remove(filepath)
logger.info('Cleaning up snapshots folder')
if os.path.isdir(base_path):
shutil.rmtree(base_path)
for folder_name in folders:
folder_path = os.path.join(base_path, folder_name)
if folder_name != 'shared-space':
logger.info('Removing datadir content for %s', folder_path)
cleanup_datadir_content(folder_path)
logger.info('Removing datadir content for %s', folder_path)
if os.path.isdir(folder_path):
shutil.rmtree(folder_path)
1 change: 0 additions & 1 deletion node_cli/fair/fair_node.py
Original file line number Diff line number Diff line change
Expand Up @@ -129,7 +129,6 @@ def request_repair(snapshot_from: str = '') -> None:
print(TEXTS['fair']['node']['repair']['repair_requested'])


@check_inited
@check_user
def cleanup() -> None:
env = compose_node_env(SKALE_DIR_ENV_FILEPATH, save=False, node_type=NodeType.FAIR)
Expand Down
4 changes: 2 additions & 2 deletions node_cli/operations/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,7 @@
from node_cli.core.node_options import NodeOptions
from node_cli.core.resources import init_shared_space_volume, update_resource_allocation
from node_cli.core.schains import (
cleanup_datadir_for_single_chain_node,
cleanup_no_lvm_datadir,
update_node_cli_schain_status,
)
from node_cli.operations.common import configure_filebeat, configure_flask, unpack_backup_archive
Expand Down Expand Up @@ -425,6 +425,6 @@ def restore(env, backup_path, node_type: NodeType, config_only=False):

def cleanup_sync(env, schain_name: str) -> None:
turn_off(env, node_type=NodeType.SYNC)
cleanup_datadir_for_single_chain_node(schain_name=schain_name)
cleanup_no_lvm_datadir(schain_name=schain_name)
rm_dir(GLOBAL_SKALE_DIR)
rm_dir(SKALE_DIR)
10 changes: 6 additions & 4 deletions node_cli/operations/fair.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@
from node_cli.configs import (
CONTAINER_CONFIG_PATH,
GLOBAL_SKALE_DIR,
NFTABLES_CHAIN_FOLDER_PATH,
SKALE_DIR,
)
from node_cli.core.checks import CheckType
Expand All @@ -35,9 +36,9 @@
from node_cli.core.host import ensure_btrfs_kernel_module_autoloaded, link_env_file, prepare_host
from node_cli.core.nftables import configure_nftables
from node_cli.core.nginx import generate_nginx_config
from node_cli.core.schains import cleanup_datadir_for_single_chain_node
from node_cli.migrations.fair.from_boot import migrate_nftables_from_boot
from node_cli.core.schains import cleanup_no_lvm_datadir
from node_cli.fair.record.chain_record import migrate_chain_record
from node_cli.migrations.fair.from_boot import migrate_nftables_from_boot
from node_cli.operations.base import checked_host, turn_off
from node_cli.operations.common import configure_filebeat, configure_flask, unpack_backup_archive
from node_cli.operations.config_repo import (
Expand All @@ -55,7 +56,7 @@
remove_dynamic_containers,
wait_for_container,
)
from node_cli.utils.helper import rm_dir, str_to_bool
from node_cli.utils.helper import cleanup_dir_content, rm_dir, str_to_bool
from node_cli.utils.meta import FairCliMetaManager
from node_cli.utils.print_formatters import print_failed_requirements_checks

Expand Down Expand Up @@ -241,7 +242,8 @@ def restore_fair(env, backup_path, config_only=False):

def cleanup(env) -> None:
turn_off(env, node_type=NodeType.FAIR)
cleanup_datadir_for_single_chain_node()
cleanup_no_lvm_datadir()
rm_dir(GLOBAL_SKALE_DIR)
rm_dir(SKALE_DIR)
cleanup_dir_content(NFTABLES_CHAIN_FOLDER_PATH)
cleanup_docker_configuration()
20 changes: 20 additions & 0 deletions node_cli/utils/helper.py
Original file line number Diff line number Diff line change
Expand Up @@ -324,6 +324,17 @@ def rm_dir(folder: str) -> None:
logger.info(f"{folder} doesn't exist, skipping...")


def cleanup_dir_content(folder: str) -> None:
    """Delete every entry inside *folder* while keeping the folder itself.

    Files and symlinks are unlinked; subdirectories are removed recursively.
    Silently does nothing when *folder* does not exist.
    """
    if not os.path.exists(folder):
        return
    logger.info('Removing contents of %s', folder)
    for entry in os.listdir(folder):
        entry_path = os.path.join(folder, entry)
        if os.path.isfile(entry_path) or os.path.islink(entry_path):
            os.unlink(entry_path)
        elif os.path.isdir(entry_path):
            shutil.rmtree(entry_path)


def safe_mkdir(path: str, print_res: bool = False) -> None:
if os.path.exists(path):
logger.debug(f'Directory {path} already exists')
Expand Down Expand Up @@ -402,3 +413,12 @@ def get_ssh_port(ssh_service_name='ssh'):

def is_contract_address(value: str) -> bool:
return bool(re.fullmatch(r'0x[a-fA-F0-9]{40}', value))


def is_btrfs_subvolume(path: str) -> bool:
    """Check if the given path is a Btrfs subvolume."""
    try:
        result = run_cmd(['btrfs', 'subvolume', 'show', path], check_code=False)
    except subprocess.CalledProcessError:
        # run_cmd is invoked with check_code=False, so this is defensive only
        return False
    # 'btrfs subvolume show' exits 0 only for an actual subvolume
    return result.returncode == 0
Empty file modified scripts/build.sh
100644 → 100755
Empty file.
4 changes: 2 additions & 2 deletions tests/core/core_schains_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@

import freezegun

from node_cli.core.schains import cleanup_datadir_for_single_chain_node, toggle_schain_repair_mode
from node_cli.core.schains import cleanup_no_lvm_datadir, toggle_schain_repair_mode
from node_cli.utils.helper import read_json
from tests.helper import CURRENT_DATETIME, CURRENT_TIMESTAMP

Expand Down Expand Up @@ -81,5 +81,5 @@ def test_cleanup_sync_datadir(tmp_sync_datadir):
hash_path.touch()

with mock.patch('node_cli.core.schains.rm_btrfs_subvolume'):
cleanup_datadir_for_single_chain_node(schain_name, base_path=tmp_sync_datadir)
cleanup_no_lvm_datadir(schain_name, base_path=tmp_sync_datadir)
assert not os.path.isdir(base_folder)
Loading