Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
271 changes: 271 additions & 0 deletions astrbot/core/utils/storage_cleaner.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,271 @@
from __future__ import annotations

import os
from collections.abc import Iterable, Mapping
from dataclasses import dataclass
from pathlib import Path

from astrbot import logger
from astrbot.core.utils.astrbot_path import get_astrbot_data_path, get_astrbot_temp_path


@dataclass(frozen=True)
class LogFileConfig:
    """Resolved configuration for one log file managed by AstrBot.

    Attributes:
        path: Absolute, resolved path of the log file.
        enabled: Whether the corresponding logger is currently enabled;
            enabled log files are truncated instead of deleted during cleanup.
    """

    # Absolute path of the log file on disk.
    path: Path
    # True when the logger writing to this file is active.
    enabled: bool

class StorageCleaner:
    """Report and reclaim disk space used by AstrBot logs and caches.

    Two concrete cleanup targets are supported:

    * ``logs`` -- the ``logs/`` directory under the data dir, plus rotated
      siblings of the log files configured via ``log_file_path`` /
      ``trace_log_path``. Files backing a currently *enabled* logger are
      truncated rather than deleted so open logging handlers keep writing
      to a valid file descriptor.
    * ``cache`` -- everything under the temp dir, plus a handful of known
      cache files inside the data dir.

    The pseudo-target ``"all"`` runs both.
    """

    TARGET_LOGS = "logs"
    TARGET_CACHE = "cache"
    # "all" expands to both concrete targets inside cleanup().
    VALID_TARGETS = {TARGET_LOGS, TARGET_CACHE, "all"}

    def __init__(
        self,
        config: Mapping[str, object],
        *,
        data_dir: Path | None = None,
        temp_dir: Path | None = None,
    ) -> None:
        """Create a cleaner.

        Args:
            config: AstrBot configuration mapping; only the log-related keys
                (``log_file_path``, ``log_file_enable``, ``trace_log_path``,
                ``trace_log_enable``) are consulted.
            data_dir: Override for the data directory (defaults to
                ``get_astrbot_data_path()``); mainly useful in tests.
            temp_dir: Override for the temp directory (defaults to
                ``get_astrbot_temp_path()``); mainly useful in tests.
        """
        self._config = config
        self._data_dir = data_dir or Path(get_astrbot_data_path())
        self._temp_dir = temp_dir or Path(get_astrbot_temp_path())

    def get_status(self) -> dict:
        """Return current size and file-count information for both targets."""
        logs = self._build_status(self.TARGET_LOGS)
        cache = self._build_status(self.TARGET_CACHE)
        return {
            self.TARGET_LOGS: logs,
            self.TARGET_CACHE: cache,
            "total_bytes": logs["size_bytes"] + cache["size_bytes"],
        }

    def cleanup(self, target: str = "all") -> dict:
        """Clean ``target`` (``"logs"``, ``"cache"`` or ``"all"``).

        Returns:
            A dict with per-target results, aggregate counters
            (``removed_bytes``, ``processed_files``, ``deleted_files``,
            ``truncated_files``, ``failed_files``) and the post-cleanup
            ``status``.

        Raises:
            ValueError: If ``target`` is not one of ``VALID_TARGETS``.
        """
        normalized_target = (target or "all").strip().lower()
        if normalized_target not in self.VALID_TARGETS:
            raise ValueError(f"Unsupported cleanup target: {target}")

        targets = (
            [self.TARGET_LOGS, self.TARGET_CACHE]
            if normalized_target == "all"
            else [normalized_target]
        )
        results: dict[str, dict] = {}
        aggregates = {
            "removed_bytes": 0,
            "processed_files": 0,
            "deleted_files": 0,
            "truncated_files": 0,
            "failed_files": 0,
        }

        for target_name in targets:
            result = self._cleanup_target(target_name)
            results[target_name] = result
            for key in aggregates:
                aggregates[key] += result[key]

        # Re-scan so the caller gets the state *after* cleanup.
        status = self.get_status()

        return {
            "target": normalized_target,
            "results": results,
            **aggregates,
            "status": status,
        }

    def _build_status(self, target: str) -> dict:
        """Summarize size, file count and primary path for one target."""
        if target == self.TARGET_LOGS:
            files = self._collect_log_files()
            primary_path = self._data_dir / "logs"
        elif target == self.TARGET_CACHE:
            files = self._collect_cache_files()
            primary_path = self._temp_dir
        else:
            raise ValueError(f"Unsupported cleanup target: {target}")

        size_bytes, file_count = self._summarize_files(files)
        return {
            "size_bytes": size_bytes,
            "file_count": file_count,
            "path": str(primary_path),
            "exists": primary_path.exists(),
        }

    def _cleanup_target(self, target: str) -> dict:
        """Delete (or truncate) every file belonging to ``target``.

        Active log files are truncated instead of removed; cache files are
        always removed. Per-file failures are logged and counted, never
        raised.
        """
        if target == self.TARGET_LOGS:
            files = self._collect_log_files()
            active_log_files = self._active_log_files()
        elif target == self.TARGET_CACHE:
            files = self._collect_cache_files()
            active_log_files = set()
        else:
            raise ValueError(f"Unsupported cleanup target: {target}")

        removed_bytes = 0
        deleted_files = 0
        truncated_files = 0
        failed_files = 0

        # Sorted for deterministic processing order (helps log readability).
        for file_path in sorted(files):
            if not file_path.exists():
                continue

            try:
                size = file_path.stat().st_size
            except OSError as exc:
                # File may vanish between exists() and stat(); count and move on.
                logger.warning("Failed to stat %s before cleanup: %s", file_path, exc)
                failed_files += 1
                continue

            try:
                if file_path in active_log_files:
                    # Truncate so an open logging handler keeps a valid
                    # file descriptor instead of writing into a deleted file.
                    file_path.write_bytes(b"")
                    truncated_files += 1
                else:
                    file_path.unlink()
                    deleted_files += 1
                # Both truncation and deletion reclaim the file's bytes.
                removed_bytes += size
            except OSError as exc:
                logger.warning("Failed to clean %s: %s", file_path, exc)
                failed_files += 1

        if target == self.TARGET_CACHE:
            # Prune now-empty subdirectories, then make sure the temp root
            # itself still exists for future writers.
            self._cleanup_empty_dirs(self._temp_dir)
            self._temp_dir.mkdir(parents=True, exist_ok=True)

        logger.info(
            "Storage cleanup finished: target=%s removed_bytes=%s deleted_files=%s truncated_files=%s failed_files=%s",
            target,
            removed_bytes,
            deleted_files,
            truncated_files,
            failed_files,
        )

        return {
            "removed_bytes": removed_bytes,
            "processed_files": deleted_files + truncated_files,
            "deleted_files": deleted_files,
            "truncated_files": truncated_files,
            "failed_files": failed_files,
        }

    def _collect_log_files(self) -> set[Path]:
        """All log files: the logs/ dir plus rotated families of configured paths."""
        files = set(self._iter_files(self._data_dir / "logs"))
        for log_path in self._configured_log_paths():
            files.update(self._iter_log_family_files(log_path))
        return files

    def _collect_cache_files(self) -> set[Path]:
        """All cache files: temp dir contents plus known cache files in data dir."""
        files = set(self._iter_files(self._temp_dir))
        files.update(self._data_dir.glob("plugins_custom_*.json"))

        for extra_file in (
            self._data_dir / "plugins.json",
            self._data_dir / "sandbox_skills_cache.json",
        ):
            if extra_file.is_file():
                files.add(extra_file)

        return files

    def _log_file_configs(self) -> list[LogFileConfig]:
        """Resolve the two configurable log files (main log and trace log)."""
        return [
            LogFileConfig(
                path=self._resolve_log_path(
                    self._get_optional_str("log_file_path"),
                    default_relative_path="logs/astrbot.log",
                ),
                enabled=self._get_bool("log_file_enable", False),
            ),
            LogFileConfig(
                path=self._resolve_log_path(
                    self._get_optional_str("trace_log_path"),
                    default_relative_path="logs/astrbot.trace.log",
                ),
                enabled=self._get_bool("trace_log_enable", False),
            ),
        ]

    def _get_optional_str(self, key: str) -> str | None:
        """Read a config value, returning it only if it is a string."""
        value = self._config.get(key)
        return value if isinstance(value, str) else None

    def _get_bool(self, key: str, default: bool = False) -> bool:
        """Read a config value, returning ``default`` unless it is a real bool."""
        value = self._config.get(key, default)
        return value if isinstance(value, bool) else default

    def _configured_log_paths(self) -> set[Path]:
        """Paths of all configured log files, enabled or not."""
        return {config.path for config in self._log_file_configs()}

    def _active_log_files(self) -> set[Path]:
        """Paths of log files whose logger is enabled (truncate, don't delete)."""
        return {config.path for config in self._log_file_configs() if config.enabled}

    def _resolve_log_path(
        self,
        configured_path: str | None,
        *,
        default_relative_path: str,
    ) -> Path:
        """Resolve a configured log path; relative paths are anchored at data dir."""
        path_value = configured_path or default_relative_path
        path = Path(path_value)
        if path.is_absolute():
            return path.resolve()
        return (self._data_dir / path).resolve()

    def _iter_log_family_files(self, log_path: Path) -> set[Path]:
        """Return ``log_path`` plus its rotated siblings.

        Rotated files are matched as ``<stem>.*<suffix>`` in the same
        directory, e.g. ``astrbot.2024-01-01.log`` for ``astrbot.log``.
        """
        files: set[Path] = set()
        parent_dir = log_path.parent
        if log_path.is_file():
            files.add(log_path)
        if not parent_dir.exists():
            return files

        suffix = log_path.suffix
        stem = log_path.stem if suffix else log_path.name
        pattern = f"{stem}.*{suffix}" if suffix else f"{stem}.*"

        for candidate in parent_dir.glob(pattern):
            if candidate.is_file() and candidate != log_path:
                files.add(candidate)

        return files

    @staticmethod
    def _iter_files(path: Path) -> Iterable[Path]:
        """Yield every regular file at or below ``path``; missing paths yield nothing."""
        if path.is_file():
            yield path
            return
        if not path.exists():
            return
        for child in path.rglob("*"):
            if child.is_file():
                yield child

    @staticmethod
    def _summarize_files(files: Iterable[Path]) -> tuple[int, int]:
        """Return ``(total_size_bytes, file_count)`` for the given files.

        Files that disappear or fail to stat mid-scan are skipped silently
        (logged at debug level).
        """
        total_size = 0
        file_count = 0
        for file_path in files:
            if not file_path.exists() or not file_path.is_file():
                continue
            try:
                total_size += file_path.stat().st_size
                file_count += 1
            except OSError as exc:
                logger.debug("Skip %s during storage scan: %s", file_path, exc)
        return total_size, file_count

    @staticmethod
    def _cleanup_empty_dirs(root_dir: Path) -> None:
        """Remove empty subdirectories below ``root_dir`` (the root is kept).

        Walks bottom-up so parents emptied by child removal get removed too;
        ``rmdir`` on a non-empty dir raises OSError, which is ignored.
        """
        if not root_dir.exists():
            return
        for dirpath, _dirnames, _filenames in os.walk(root_dir, topdown=False):
            path = Path(dirpath)
            if path == root_dir:
                continue
            try:
                path.rmdir()
            except OSError:
                continue
30 changes: 30 additions & 0 deletions astrbot/dashboard/routes/stat.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
import asyncio
import os
import re
import threading
Expand All @@ -17,6 +18,7 @@
from astrbot.core.db.migration.helper import check_migration_needed_v4
from astrbot.core.utils.astrbot_path import get_astrbot_path
from astrbot.core.utils.io import get_dashboard_version
from astrbot.core.utils.storage_cleaner import StorageCleaner
from astrbot.core.utils.version_comparator import VersionComparator

from .route import Response, Route, RouteContext
Expand All @@ -39,10 +41,13 @@ def __init__(
"/stat/changelog": ("GET", self.get_changelog),
"/stat/changelog/list": ("GET", self.list_changelog_versions),
"/stat/first-notice": ("GET", self.get_first_notice),
"/stat/storage": ("GET", self.get_storage_status),
"/stat/storage/cleanup": ("POST", self.cleanup_storage),
}
self.db_helper = db_helper
self.register_routes()
self.core_lifecycle = core_lifecycle
self.storage_cleaner = StorageCleaner(self.config)

async def restart_core(self):
if DEMO_MODE:
Expand Down Expand Up @@ -89,6 +94,31 @@ async def get_version(self):
async def get_start_time(self):
return Response().ok({"start_time": self.core_lifecycle.start_time}).__dict__

async def get_storage_status(self):
try:
status = await asyncio.to_thread(self.storage_cleaner.get_status)
return Response().ok(status).__dict__
except Exception:
logger.error("获取存储占用失败", exc_info=True)
return (
Response().error("获取存储占用失败,请查看后端日志了解详情。").__dict__
)

async def cleanup_storage(self):
try:
data = await request.get_json(silent=True)
target = "all"
if isinstance(data, dict):
target = str(data.get("target", "all"))

result = await asyncio.to_thread(self.storage_cleaner.cleanup, target)
return Response().ok(result).__dict__
except ValueError as e:
return Response().error(str(e)).__dict__
except Exception:
logger.error("清理存储失败", exc_info=True)
return Response().error("清理存储失败,请查看后端日志了解详情。").__dict__

async def get_stat(self):
offset_sec = request.args.get("offset_sec", 86400)
offset_sec = int(offset_sec)
Expand Down
18 changes: 17 additions & 1 deletion dashboard/src/assets/mdi-subset/materialdesignicons-subset.css
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
/* Auto-generated MDI subset – 231 icons */
/* Auto-generated MDI subset – 235 icons */
/* Do not edit manually. Run: pnpm run subset-icons */

@font-face {
Expand Down Expand Up @@ -112,6 +112,10 @@
content: "\F09D1";
}

.mdi-broom::before {
content: "\F00E2";
}

.mdi-bug::before {
content: "\F00E4";
}
Expand Down Expand Up @@ -300,6 +304,10 @@
content: "\F1640";
}

.mdi-database-refresh-outline::before {
content: "\F1634";
}

.mdi-delete::before {
content: "\F01B4";
}
Expand All @@ -308,6 +316,10 @@
content: "\F09E7";
}

.mdi-delete-sweep-outline::before {
content: "\F0C62";
}

.mdi-dots-hexagon::before {
content: "\F15FF";
}
Expand Down Expand Up @@ -728,6 +740,10 @@
content: "\F0A66";
}

.mdi-qrcode::before {
content: "\F0432";
}

.mdi-refresh::before {
content: "\F0450";
}
Expand Down
Binary file not shown.
Binary file not shown.
Loading
Loading