diff --git a/README.md b/README.md index 42066ef..a6f4ad3 100644 --- a/README.md +++ b/README.md @@ -5,7 +5,7 @@
- Build modular Python services with plug-and-play plugins, clear team boundaries, and transparent API integration. + 🚀 Build scalable Python services with plugins — like FastAPI + Ray, but modular by design.
diff --git a/src/framex/config.py b/src/framex/config.py
index 64ba37b..1973212 100644
--- a/src/framex/config.py
+++ b/src/framex/config.py
@@ -55,6 +55,7 @@ class TestConfig(BaseModel):
class OauthConfig(BaseModel):
+ provider: str = ""
client_id: str = ""
client_secret: str = ""
authorization_url: str = ""
@@ -84,6 +85,73 @@ def model_post_init(self, context: Any) -> None:
self.jwt_secret = secrets.token_urlsafe(32)
class RepositoryProviderAuthConfig(BaseModel):
    """Credentials used when calling a repository hosting provider's API.

    `build_headers` turns the configured token into HTTP request headers.
    """

    token: str = ""
    token_header: str = "Authorization"  # noqa
    token_scheme: str = "Bearer"  # noqa

    def build_headers(self) -> dict[str, str]:
        """Return auth headers for the configured token (empty when no token is set)."""
        if not self.token:
            return {}
        value = f"{self.token_scheme} {self.token}" if self.token_scheme else self.token
        return {self.token_header: value}
+
+
class GitLabRepositoryAuthEndpointConfig(RepositoryProviderAuthConfig):
    """Auth settings for one GitLab host, optionally scoped to a path prefix."""

    host: str
    path_prefix: str = ""
    token_header: str = "PRIVATE-TOKEN"  # noqa
    token_scheme: str = ""

    @property
    def normalized_path_prefix(self) -> str:
        """Path prefix with a guaranteed leading slash ("" when unset)."""
        prefix = self.path_prefix
        if not prefix:
            return ""
        return prefix if prefix.startswith("/") else f"/{prefix}"

    def matches(self, host: str, path: str) -> bool:
        """Whether this endpoint applies to the given host and URL path."""
        if host.lower() != self.host.lower():
            return False
        prefix = self.normalized_path_prefix
        if not prefix:
            # Host-wide endpoint: matches every path on the host.
            return True
        return path == prefix or path.startswith(f"{prefix}/")
+
+
class GitLabRepositoryAuthConfig(RepositoryProviderAuthConfig):
    """GitLab auth: a default token plus optional per-host/per-path endpoints."""

    token_header: str = "PRIVATE-TOKEN"  # noqa
    token_scheme: str = ""
    endpoints: list[GitLabRepositoryAuthEndpointConfig] = Field(default_factory=list)

    def configured_hosts(self) -> set[str]:
        """All hosts (lower-cased) that have an explicit endpoint entry."""
        return {endpoint.host.lower() for endpoint in self.endpoints}

    def resolve_endpoint(self, host: str, path: str) -> GitLabRepositoryAuthEndpointConfig | None:
        """Most specific endpoint matching host/path (longest path prefix wins)."""
        best: GitLabRepositoryAuthEndpointConfig | None = None
        best_len = -1
        for endpoint in self.endpoints:
            if not endpoint.matches(host, path):
                continue
            prefix_len = len(endpoint.normalized_path_prefix)
            if prefix_len > best_len:
                best, best_len = endpoint, prefix_len
        return best

    def build_headers_for_url(self, host: str, path: str) -> dict[str, str]:
        """Headers for the matched endpoint, falling back to the default token."""
        endpoint = self.resolve_endpoint(host, path)
        return endpoint.build_headers() if endpoint else self.build_headers()
+
+
class RepositoryAuthConfig(BaseModel):
    """Per-provider repository API credentials."""

    # GitHub uses a single token; GitLab additionally supports per-host endpoints.
    github: RepositoryProviderAuthConfig = Field(default_factory=RepositoryProviderAuthConfig)
    gitlab: GitLabRepositoryAuthConfig = Field(default_factory=GitLabRepositoryAuthConfig)


class RepositoryConfig(BaseModel):
    """Settings for talking to source-code hosting providers."""

    auth: RepositoryAuthConfig = Field(default_factory=RepositoryAuthConfig)


class DocsConfig(BaseModel):
    """Settings for the docs endpoints."""

    # Files that may be inlined into the plugin-config docs page.
    embedded_config_file_whitelist: list[str] = Field(default_factory=list)
+
+
class AuthConfig(BaseModel):
oauth: OauthConfig | None = Field(default=None)
rules: dict[str, list[str]] = Field(default_factory=dict)
@@ -131,8 +199,10 @@ class Settings(BaseSettings):
load_builtin_plugins: list[str] = Field(default_factory=list)
test: TestConfig = Field(default_factory=TestConfig)
+ docs: DocsConfig = Field(default_factory=DocsConfig)
sentry: SentryConfig = Field(default_factory=SentryConfig)
auth: AuthConfig = Field(default_factory=AuthConfig)
+ repository: RepositoryConfig = Field(default_factory=RepositoryConfig)
model_config = SettingsConfigDict(
# `.env.prod` takes priority over `.env`
diff --git a/src/framex/driver/application.py b/src/framex/driver/application.py
index 1c730ea..13b5e76 100644
--- a/src/framex/driver/application.py
+++ b/src/framex/driver/application.py
@@ -5,6 +5,7 @@
from collections.abc import Callable
from contextlib import asynccontextmanager
from datetime import UTC, datetime
+from pathlib import Path
from typing import Annotated, Any
from zoneinfo import ZoneInfo
@@ -23,8 +24,21 @@
from framex.config import settings
from framex.consts import API_PRE_STR, DOCS_URL, OPENAPI_URL, PROJECT_NAME, REDOC_URL, VERSION
-from framex.driver.auth import authenticate, oauth_callback
-from framex.utils import build_swagger_ui_html, format_uptime, safe_error_message
+from framex.driver.auth import authenticate, get_auth_payload, oauth_callback
+from framex.plugin import get_plugin
+from framex.repository import (
+ can_access_repository,
+ get_latest_repository_version,
+ has_newer_release_version,
+ is_private_repository,
+)
+from framex.utils import (
+ build_plugin_config_html,
+ build_swagger_ui_html,
+ collect_embedded_config_files,
+ format_uptime,
+ safe_error_message,
+)
FRAME_START_TIME = datetime.now(tz=UTC)
SHANGHAI_TZ = ZoneInfo("Asia/Shanghai")
@@ -109,6 +123,69 @@ async def _on_start(deployment: Any) -> None:
async def get_documentation(_: Annotated[str, Depends(authenticate)]) -> HTMLResponse:
return build_swagger_ui_html(openapi_url=OPENAPI_URL, title="FrameX Docs")
+ @application.get("/docs/plugin-config", include_in_schema=False)
+ async def get_plugin_config_documentation(
+ request: Request,
+ plugin: str,
+ _: Annotated[str, Depends(authenticate)],
+ ) -> HTMLResponse:
+ if not settings.auth.oauth:
+ raise HTTPException(
+ status_code=status.HTTP_403_FORBIDDEN, detail="Plugin config documentation requires auth"
+ )
+
+ loaded_plugin = get_plugin(plugin)
+ auth_payload = get_auth_payload(request)
+ repo_url = (
+ loaded_plugin.metadata.url if loaded_plugin is not None and loaded_plugin.metadata is not None else ""
+ )
+
+ if not repo_url or auth_payload is None:
+ raise HTTPException(status_code=status.HTTP_403_FORBIDDEN, detail=f"Repository access denied: {plugin}")
+
+ repository_is_private = is_private_repository(repo_url)
+ if repository_is_private is not False:
+ access_result = can_access_repository(
+ repo_url,
+ auth_payload.get("oauth_provider"),
+ auth_payload.get("oauth_access_token"),
+ )
+ if access_result is not True:
+ raise HTTPException(
+ status_code=status.HTTP_403_FORBIDDEN, detail=f"Repository access denied: {plugin}"
+ )
+
+ loaded_config = loaded_plugin.config.model_dump() if loaded_plugin and loaded_plugin.config else None
+ config_data = loaded_config or settings.plugins.get(plugin) # type: ignore
+ if config_data is None:
+ raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=f"Plugin config not found: {plugin}")
+
+ return build_plugin_config_html(
+ config_data,
+ collect_embedded_config_files(
+ config_data,
+ workspace_root=Path.cwd().resolve(),
+ whitelist=settings.docs.embedded_config_file_whitelist,
+ ),
+ )
+
+ @application.get("/docs/plugin-release", include_in_schema=False)
+ async def get_plugin_release_documentation(
+ plugin: str,
+ _: Annotated[str, Depends(authenticate)],
+ ) -> dict[str, Any]:
+ loaded_plugin = get_plugin(plugin)
+ if loaded_plugin is None or loaded_plugin.metadata is None:
+ raise HTTPException(status_code=status.HTTP_404_NOT_FOUND, detail=f"Plugin not found: {plugin}")
+
+ current_version = loaded_plugin.metadata.version
+ current_version = current_version if current_version.startswith("v") else f"v{current_version}"
+ repo_url = loaded_plugin.metadata.url
+ latest_version = get_latest_repository_version(repo_url)
+ if not latest_version or not has_newer_release_version(current_version, latest_version):
+ return {"has_update": False, "latest_version": None, "repo_url": repo_url}
+ return {"has_update": True, "latest_version": latest_version, "repo_url": repo_url}
+
@application.get(REDOC_URL, include_in_schema=False)
async def get_redoc_documentation(_: Annotated[str, Depends(authenticate)]) -> HTMLResponse:
return get_redoc_html(openapi_url=OPENAPI_URL, title="FrameX Redoc")
diff --git a/src/framex/driver/auth.py b/src/framex/driver/auth.py
index 4199c73..0f3d7ee 100644
--- a/src/framex/driver/auth.py
+++ b/src/framex/driver/auth.py
@@ -1,4 +1,6 @@
from datetime import UTC, datetime, timedelta
+from secrets import token_urlsafe
+from typing import Any
import httpx
import jwt
@@ -11,57 +13,97 @@
from framex.consts import AUTH_COOKIE_NAME, DOCS_URL
api_key_header = APIKeyHeader(name="Authorization", auto_error=False)
+SESSION_LIFETIME = timedelta(hours=24)
+_AUTH_SESSIONS: dict[str, dict[str, Any]] = {}
-def create_jwt(payload: dict) -> str:
+def _now_utc() -> datetime:
+ return datetime.now(UTC)
+
+
def _purge_expired_sessions(now_utc: datetime | None = None) -> None:
    """Drop every server-side session whose expiry is at or before the cutoff."""
    cutoff = now_utc if now_utc is not None else _now_utc()
    # A missing "expires_at" counts as expired (it defaults to the cutoff itself).
    stale = [sid for sid, data in _AUTH_SESSIONS.items() if data.get("expires_at", cutoff) <= cutoff]
    for sid in stale:
        _AUTH_SESSIONS.pop(sid, None)
+
+
def create_jwt(payload: dict[str, Any]) -> str:
    """Sign `payload` as a JWT with iat/exp claims added (24h session lifetime).

    Raises RuntimeError when OAuth is not configured.
    """
    if not settings.auth.oauth:
        raise RuntimeError("OAuth not configured")
    issued_at = _now_utc()
    claims = dict(payload)
    claims["iat"] = int(issued_at.timestamp())
    claims["exp"] = int((issued_at + SESSION_LIFETIME).timestamp())
    return jwt.encode(claims, settings.auth.oauth.jwt_secret, algorithm=settings.auth.oauth.jwt_algorithm)
- payload.update(
- {
- "iat": int(now_utc.timestamp()),
- "exp": int((now_utc + timedelta(hours=24)).timestamp()),
- }
- )
- return jwt.encode(payload, settings.auth.oauth.jwt_secret, algorithm=settings.auth.oauth.jwt_algorithm)
def create_auth_session(session_payload: dict[str, Any]) -> str:
    """Store a server-side copy of `session_payload` and return its session id."""
    created_at = _now_utc()
    session_id = token_urlsafe(32)
    _AUTH_SESSIONS[session_id] = {**session_payload, "expires_at": created_at + SESSION_LIFETIME}
    # Opportunistically evict expired sessions so the store cannot grow unbounded.
    _purge_expired_sessions(created_at)
    return session_id
-def auth_jwt(request: Request) -> bool:
- if not settings.auth.oauth:
- return False
- token = request.cookies.get(AUTH_COOKIE_NAME)
- if not token:
- return False
def decode_auth_token(token: str | None) -> dict[str, Any] | None:
    """Validate a JWT and join it with its server-side session payload.

    Returns the merged claims (without "expires_at"), or None when the token,
    its signature, or the referenced session is missing or expired.
    """
    if not settings.auth.oauth or not token:
        return None
    try:
        claims = jwt.decode(
            token,
            settings.auth.oauth.jwt_secret,
            algorithms=[settings.auth.oauth.jwt_algorithm],
        )
    except (jwt.InvalidTokenError, jwt.ExpiredSignatureError):
        return None
    if not isinstance(claims, dict):
        return None

    session_id = claims.get("session_id")
    if not isinstance(session_id, str) or not session_id:
        return None

    now_utc = _now_utc()
    _purge_expired_sessions(now_utc)
    session_payload = _AUTH_SESSIONS.get(session_id)
    if session_payload is None:
        return None

    expires_at = session_payload.get("expires_at")
    if not isinstance(expires_at, datetime) or expires_at <= now_utc:
        # Defensive: the purge above should already have removed this entry.
        _AUTH_SESSIONS.pop(session_id, None)
        return None

    merged = dict(claims)
    merged.update({key: value for key, value in session_payload.items() if key != "expires_at"})
    return merged
+
+
def get_auth_payload(request: Request) -> dict[str, Any] | None:
    """Merged auth payload for the request's auth cookie, or None."""
    token = request.cookies.get(AUTH_COOKIE_NAME)
    return decode_auth_token(token)


def auth_jwt(request: Request) -> bool:
    """True when the request carries a valid, unexpired auth session."""
    return get_auth_payload(request) is not None
+
+
+def authenticate(request: Request, api_key: str | None = Depends(api_key_header)) -> None:
+ if settings.auth.oauth:
+ if get_auth_payload(request) is not None:
+ return
if api_key and api_key in (settings.auth.get_auth_keys(request.url.path) or []):
return
@@ -74,7 +116,7 @@ def authenticate(request: Request, api_key: str | None = Depends(api_key_header)
f"?client_id={settings.auth.oauth.client_id}"
"&response_type=code"
f"&redirect_uri={settings.auth.oauth.call_back_url}"
- "&scope=read_user"
+ "&scope=read_user%20read_api"
)
},
)
@@ -116,12 +158,23 @@ async def oauth_callback(code: str) -> Response:
"message": f"Welcome {username}",
"username": username,
"email": user_resp.get("email"),
+ "oauth_provider": settings.auth.oauth.provider,
+ "oauth_access_token": auth_token,
}
+ session_id = create_auth_session(user_info)
res = RedirectResponse(url=DOCS_URL, status_code=status.HTTP_302_FOUND)
res.set_cookie(
AUTH_COOKIE_NAME,
- create_jwt(user_info),
+ create_jwt(
+ {
+ "message": user_info["message"],
+ "username": username,
+ "email": user_info["email"],
+ "oauth_provider": settings.auth.oauth.provider,
+ "session_id": session_id,
+ }
+ ),
httponly=True,
samesite="lax",
)
diff --git a/src/framex/plugin/on.py b/src/framex/plugin/on.py
index d418a1f..a4c9880 100644
--- a/src/framex/plugin/on.py
+++ b/src/framex/plugin/on.py
@@ -57,6 +57,7 @@ def decorator(cls: type) -> type:
version,
plugin.module.__plugin_meta__.description,
plugin.module.__plugin_meta__.url,
+ plugin.name,
)
plugin_apis.append(
diff --git a/src/framex/plugins/proxy/__init__.py b/src/framex/plugins/proxy/__init__.py
index dfe2c63..78d1aa2 100644
--- a/src/framex/plugins/proxy/__init__.py
+++ b/src/framex/plugins/proxy/__init__.py
@@ -174,6 +174,7 @@ async def _parse_openai_docs(self, url: str) -> None:
f"v{__plugin_meta__.version}",
__plugin_meta__.description,
__plugin_meta__.url,
+ __plugin_meta__.name,
)
await adapter.call_func(
plugin_api,
diff --git a/src/framex/repository/__init__.py b/src/framex/repository/__init__.py
new file mode 100644
index 0000000..3cbd378
--- /dev/null
+++ b/src/framex/repository/__init__.py
@@ -0,0 +1,13 @@
+from .versioning import (
+ can_access_repository,
+ get_latest_repository_version,
+ has_newer_release_version,
+ is_private_repository,
+)
+
+__all__ = [
+ "can_access_repository",
+ "get_latest_repository_version",
+ "has_newer_release_version",
+ "is_private_repository",
+]
diff --git a/src/framex/repository/providers/__init__.py b/src/framex/repository/providers/__init__.py
new file mode 100644
index 0000000..e724da5
--- /dev/null
+++ b/src/framex/repository/providers/__init__.py
@@ -0,0 +1,17 @@
+"""Repository hosting providers used by version lookup."""
+
+from .base import RepositoryVersionProvider
+from .github import GITHUB_PROVIDER
+from .gitlab import GITLAB_PROVIDER
+
+REPOSITORY_VERSION_PROVIDERS: tuple[RepositoryVersionProvider, ...] = (
+ GITHUB_PROVIDER,
+ GITLAB_PROVIDER,
+)
+
+__all__ = [
+ "GITHUB_PROVIDER",
+ "GITLAB_PROVIDER",
+ "REPOSITORY_VERSION_PROVIDERS",
+ "RepositoryVersionProvider",
+]
diff --git a/src/framex/repository/providers/base.py b/src/framex/repository/providers/base.py
new file mode 100644
index 0000000..b351484
--- /dev/null
+++ b/src/framex/repository/providers/base.py
@@ -0,0 +1,93 @@
+"""Base types and shared helpers for repository version providers."""
+
+from abc import ABC, abstractmethod
+from typing import Any
+from urllib.parse import ParseResult
+
+import httpx
+
+DEFAULT_HTTP_TIMEOUT = 2.0
+
+
class RepositoryVersionProvider(ABC):
    """Abstract interface for repository hosting providers."""

    name: str

    @abstractmethod
    def matches(self, parsed_url: ParseResult) -> bool:
        """Return whether this provider can handle the parsed repository URL."""

    @abstractmethod
    def get_latest_version(self, parsed_url: ParseResult) -> str | None:
        """Return the latest published version for the repository URL."""

    def has_repository_access(self, parsed_url: ParseResult, access_token: str) -> bool:
        """Return whether the given user token can access the repository URL."""
        raise NotImplementedError("RepositoryVersionProvider does not implement access checking")

    def is_public_repository(self, parsed_url: ParseResult) -> bool | None:
        """Return whether the repository is publicly accessible without authentication."""
        raise NotImplementedError("RepositoryVersionProvider does not implement public repository checking")

    @staticmethod
    def extract_repository_parts(parsed_url: ParseResult) -> list[str]:
        """Split a repository path into normalized URL parts (trailing ".git" stripped)."""
        segments = [segment for segment in parsed_url.path.split("/") if segment]
        if not segments:
            return []
        *head, tail = segments
        return [*head, tail.removesuffix(".git")]

    @staticmethod
    def fetch_json(url: str, headers: dict[str, str] | None = None) -> dict[str, Any] | None:
        """GET `url` and return its JSON object body; None on any failure."""
        try:
            with httpx.Client(timeout=DEFAULT_HTTP_TIMEOUT, headers=headers) as client:
                response = client.get(url, follow_redirects=True)
        except httpx.HTTPError:
            return None
        if response.status_code != 200:
            return None
        try:
            body = response.json()
        except ValueError:
            return None
        # Only dict payloads are meaningful to callers (release objects).
        return body if isinstance(body, dict) else None

    @staticmethod
    def can_fetch(
        url: str,
        headers: dict[str, str] | None = None,
        *,
        follow_redirects: bool = True,
    ) -> bool:
        """Return whether GET `url` answers 200 within the default timeout."""
        try:
            with httpx.Client(timeout=DEFAULT_HTTP_TIMEOUT, headers=headers) as client:
                response = client.get(url, follow_redirects=follow_redirects)
        except httpx.HTTPError:
            return False
        return response.status_code == 200

    @staticmethod
    def extract_version(payload: dict[str, Any] | None) -> str | None:
        """Read a version-like string ("tag_name" or "name") from a release payload."""
        if payload is None:
            return None
        candidate = payload.get("tag_name") or payload.get("name")
        if not isinstance(candidate, str):
            return None
        candidate = candidate.strip()
        return candidate or None
diff --git a/src/framex/repository/providers/github.py b/src/framex/repository/providers/github.py
new file mode 100644
index 0000000..c8ebe78
--- /dev/null
+++ b/src/framex/repository/providers/github.py
@@ -0,0 +1,63 @@
+"""GitHub repository version provider."""
+
+from urllib.parse import ParseResult
+
+from framex.config import settings
+
+from .base import RepositoryVersionProvider
+
+GITHUB_HOSTS = frozenset({"github.com", "www.github.com"})
+GITHUB_API_HEADERS = {
+ "Accept": "application/vnd.github+json",
+ "User-Agent": "framex-docs",
+}
+
+
class GitHubRepositoryVersionProvider(RepositoryVersionProvider):
    """Resolve latest release versions for GitHub repositories."""

    name = "github"

    def matches(self, parsed_url: ParseResult) -> bool:
        """Handle only github.com / www.github.com URLs."""
        return parsed_url.netloc in GITHUB_HOSTS

    def get_latest_version(self, parsed_url: ParseResult) -> str | None:
        """Latest release tag via the GitHub "releases/latest" API, or None."""
        owner_repo = self._extract_owner_and_repository(parsed_url)
        if owner_repo is None:
            return None
        owner, repo = owner_repo
        headers = {**GITHUB_API_HEADERS, **settings.repository.auth.github.build_headers()}
        payload = self.fetch_json(f"https://api.github.com/repos/{owner}/{repo}/releases/latest", headers=headers)
        return self.extract_version(payload)

    def is_public_repository(self, parsed_url: ParseResult) -> bool | None:
        """True when the repo metadata is readable without credentials."""
        owner_repo = self._extract_owner_and_repository(parsed_url)
        if owner_repo is None:
            return None
        owner, repo = owner_repo
        return self.can_fetch(f"https://api.github.com/repos/{owner}/{repo}", headers=GITHUB_API_HEADERS)

    def has_repository_access(self, parsed_url: ParseResult, access_token: str) -> bool:
        """True when `access_token` can read the repo metadata."""
        owner_repo = self._extract_owner_and_repository(parsed_url)
        if owner_repo is None or not access_token:
            return False
        owner, repo = owner_repo
        headers = {**GITHUB_API_HEADERS, "Authorization": f"Bearer {access_token}"}
        return self.can_fetch(f"https://api.github.com/repos/{owner}/{repo}", headers=headers)

    def _extract_owner_and_repository(self, parsed_url: ParseResult) -> tuple[str, str] | None:
        """First two path segments as (owner, repo), or None when absent."""
        parts = self.extract_repository_parts(parsed_url)
        return (parts[0], parts[1]) if len(parts) >= 2 else None


GITHUB_PROVIDER = GitHubRepositoryVersionProvider()

__all__ = ["GITHUB_PROVIDER", "GitHubRepositoryVersionProvider"]
diff --git a/src/framex/repository/providers/gitlab.py b/src/framex/repository/providers/gitlab.py
new file mode 100644
index 0000000..c0f85d3
--- /dev/null
+++ b/src/framex/repository/providers/gitlab.py
@@ -0,0 +1,91 @@
+"""GitLab repository version provider."""
+
+from urllib.parse import ParseResult, quote
+
+from framex.config import settings
+
+from .base import RepositoryVersionProvider
+
+GITLAB_PRIMARY_HOST = "gitlab.com"
+GITLAB_HOST_SUFFIX = ".gitlab.com"
+GITLAB_RESERVED_PATH_MARKERS = {"-", "tree", "blob", "raw", "commits", "branches", "tags", "merge_requests"}
+
+
class GitLabRepositoryVersionProvider(RepositoryVersionProvider):
    """Resolve latest release versions for GitLab repositories."""

    name = "gitlab"

    def matches(self, parsed_url: ParseResult) -> bool:
        """Handle gitlab.com, *.gitlab.com, and any explicitly configured host."""
        host = parsed_url.netloc.lower()
        if host == GITLAB_PRIMARY_HOST or host.endswith(GITLAB_HOST_SUFFIX):
            return True
        return host in settings.repository.auth.gitlab.configured_hosts()

    def get_latest_version(self, parsed_url: ParseResult) -> str | None:
        """Latest release via GitLab's "releases/permalink/latest" API, or None."""
        headers = settings.repository.auth.gitlab.build_headers_for_url(parsed_url.netloc, parsed_url.path) or None
        project_path = self._resolve_project_path(parsed_url, headers=headers, require_fetch=False)
        if project_path is None:
            return None
        payload = self.fetch_json(self._build_release_api_url(parsed_url, project_path), headers=headers)
        return self.extract_version(payload)

    def is_public_repository(self, parsed_url: ParseResult) -> bool | None:
        """True when the project's web page answers 200 without redirecting."""
        return self.can_fetch(self._build_repository_web_url(parsed_url), follow_redirects=False)

    def has_repository_access(self, parsed_url: ParseResult, access_token: str) -> bool:
        """True when `access_token` resolves the project via the API."""
        if not access_token:
            return False
        bearer_headers = {"Authorization": f"Bearer {access_token}"}
        return self._resolve_project_path(parsed_url, headers=bearer_headers, require_fetch=True) is not None

    def _resolve_project_path(
        self,
        parsed_url: ParseResult,
        headers: dict[str, str] | None = None,
        require_fetch: bool = True,
    ) -> str | None:
        """Pick the project path ("group/sub/repo") among nested-group candidates.

        GitLab allows nested groups, so the project boundary in a URL path is
        ambiguous; candidates are probed longest-first against the projects API.
        """
        candidates = self._iter_project_path_candidates(parsed_url)
        if not candidates:
            return None
        # A single candidate needs no probe unless the caller demands a real check.
        if not require_fetch and len(candidates) == 1:
            return candidates[0]
        for candidate in candidates:
            if self.can_fetch(self._build_project_api_url(parsed_url, candidate), headers=headers):
                return candidate
        return None

    def _iter_project_path_candidates(self, parsed_url: ParseResult) -> list[str]:
        """All plausible project paths, longest first, stopping at reserved segments."""
        parts = self.extract_repository_parts(parsed_url)
        if len(parts) < 2:
            return []
        max_length = len(parts)
        for index, part in enumerate(parts):
            if part in GITLAB_RESERVED_PATH_MARKERS:
                max_length = index
                break
        if max_length < 2:
            return []
        return ["/".join(parts[:length]) for length in range(max_length, 1, -1)]

    def _build_project_api_url(self, parsed_url: ParseResult, project_path: str) -> str:
        """Projects API URL with the path URL-encoded as a single project id."""
        return f"{parsed_url.scheme}://{parsed_url.netloc}/api/v4/projects/{quote(project_path, safe='')}"

    def _build_release_api_url(self, parsed_url: ParseResult, project_path: str) -> str:
        """Permalink URL for the latest release of the project."""
        encoded = quote(project_path, safe="")
        return f"{parsed_url.scheme}://{parsed_url.netloc}/api/v4/projects/{encoded}/releases/permalink/latest"

    def _build_repository_web_url(self, parsed_url: ParseResult) -> str:
        """Repository URL with params/query/fragment stripped."""
        return parsed_url._replace(params="", query="", fragment="").geturl()


GITLAB_PROVIDER = GitLabRepositoryVersionProvider()

__all__ = ["GITLAB_PROVIDER", "GitLabRepositoryVersionProvider"]
diff --git a/src/framex/repository/versioning.py b/src/framex/repository/versioning.py
new file mode 100644
index 0000000..53c2c33
--- /dev/null
+++ b/src/framex/repository/versioning.py
@@ -0,0 +1,68 @@
+"""Repository version lookup entrypoints."""
+
+import re
+from functools import lru_cache
+from urllib.parse import ParseResult, urlparse
+
+from framex.repository.providers.base import RepositoryVersionProvider
+
+from .providers import REPOSITORY_VERSION_PROVIDERS
+
+VERSION_PATTERN = re.compile(r"\d+(?:\.\d+)*")
+
+
def _get_provider_for_url(repo_url: str) -> tuple[RepositoryVersionProvider | None, ParseResult]:
    """Parse `repo_url` and return the first provider claiming it (or None)."""
    parsed_url = urlparse(repo_url)
    for candidate in REPOSITORY_VERSION_PROVIDERS:
        if candidate.matches(parsed_url):
            return candidate, parsed_url
    return None, parsed_url
+
+
# Latest-version lookups are cached, but only within a coarse time bucket so a
# long-running process eventually observes new releases. (The previous
# unbounded-lifetime lru_cache pinned the first answer for the process lifetime.)
_VERSION_CACHE_TTL_SECONDS = 600


@lru_cache(maxsize=128)
def _lookup_latest_version(repo_url: str, _ttl_bucket: int) -> str | None:
    """Uncached provider lookup; `_ttl_bucket` only partitions the cache by time."""
    provider, parsed_url = _get_provider_for_url(repo_url)
    if provider is None:
        return None
    return provider.get_latest_version(parsed_url)


def get_latest_repository_version(repo_url: str) -> str | None:
    """Return the latest published version for `repo_url`, or None.

    Results (including failed lookups) are cached for up to
    `_VERSION_CACHE_TTL_SECONDS` to avoid hammering provider APIs.
    """
    import time  # local import: keeps the module's import block untouched

    return _lookup_latest_version(repo_url, int(time.monotonic() // _VERSION_CACHE_TTL_SECONDS))
+
+
def is_private_repository(repo_url: str) -> bool | None:
    """True/False for known providers; None when visibility cannot be determined."""
    provider, parsed_url = _get_provider_for_url(repo_url)
    if provider is None:
        return None
    is_public = provider.is_public_repository(parsed_url)
    return None if is_public is None else not is_public
+
+
def can_access_repository(repo_url: str, provider_name: str | None, access_token: str | None) -> bool | None:
    """Check token access; None = not applicable, False = denied, True = allowed."""
    provider, parsed_url = _get_provider_for_url(repo_url)
    if provider is None:
        return None
    # The token only makes sense for the provider it was issued by.
    if not provider_name or provider_name != provider.name:
        return None
    return provider.has_repository_access(parsed_url, access_token) if access_token else False
+
+
def has_newer_release_version(current_version: str, latest_version: str) -> bool:
    """True when `latest_version` is strictly newer than `current_version`.

    Versions without a parseable numeric part compare as "not newer".
    """
    current_parts = _normalize_version(current_version)
    latest_parts = _normalize_version(latest_version)
    if current_parts is None or latest_parts is None:
        return False
    # Zero-pad to equal length so e.g. "1.2" and "1.2.0" compare as equal.
    width = max(len(current_parts), len(latest_parts))
    padded_current = current_parts + (0,) * (width - len(current_parts))
    padded_latest = latest_parts + (0,) * (width - len(latest_parts))
    return padded_latest > padded_current


def _normalize_version(version: str) -> tuple[int, ...] | None:
    """First dotted-number run in `version` as an int tuple (None when absent)."""
    found = VERSION_PATTERN.search(version)
    if found is None:
        return None
    return tuple(int(piece) for piece in found.group(0).split("."))
diff --git a/src/framex/utils/__init__.py b/src/framex/utils/__init__.py
new file mode 100644
index 0000000..05c8324
--- /dev/null
+++ b/src/framex/utils/__init__.py
@@ -0,0 +1,41 @@
+from .cache import cache_decode, cache_encode
+from .common import (
+ StreamEnventType,
+ escape_tag,
+ extract_method_params,
+ format_uptime,
+ make_stream_event,
+ path_to_module_name,
+ plugin_to_deployment_name,
+ safe_error_message,
+ shorten_str,
+)
+from .config_docs import (
+ build_plugin_config_html,
+ collect_embedded_config_files,
+ mask_sensitive_config_data,
+ mask_sensitive_config_text,
+ mask_sensitive_embedded_config_content,
+)
+from .docs import build_plugin_description, build_swagger_ui_html
+
+__all__ = [
+ "StreamEnventType",
+ "build_plugin_config_html",
+ "build_plugin_description",
+ "build_swagger_ui_html",
+ "cache_decode",
+ "cache_encode",
+ "collect_embedded_config_files",
+ "escape_tag",
+ "extract_method_params",
+ "format_uptime",
+ "make_stream_event",
+ "mask_sensitive_config_data",
+ "mask_sensitive_config_text",
+ "mask_sensitive_embedded_config_content",
+ "path_to_module_name",
+ "plugin_to_deployment_name",
+ "safe_error_message",
+ "shorten_str",
+]
diff --git a/src/framex/utils/cache.py b/src/framex/utils/cache.py
new file mode 100644
index 0000000..fd4a9e1
--- /dev/null
+++ b/src/framex/utils/cache.py
@@ -0,0 +1,80 @@
+import base64
+import importlib
+import json
+import zlib
+from datetime import datetime
+from enum import Enum
+from itertools import cycle
+from typing import Any
+
+
def xor_crypt(data: bytes, key: str = "01234567890abcdefghijklmnopqrstuvwxyz") -> bytes:
    """Symmetric XOR obfuscation with a repeating key (NOT cryptographic security)."""
    return bytes(a ^ b for a, b in zip(data, cycle(key.encode())))


def cache_encode(data: Any) -> str:
    """Serialize `data` (JSON -> zlib -> XOR -> base64) into an ASCII string.

    Objects with a `__dict__` are stored as "dynamic_obj" records so
    `cache_decode` can re-import and rebuild them; datetimes become ISO
    strings and Enum members their underlying value.
    """

    def transform(obj: Any) -> Any:
        # Concrete value types must be checked BEFORE the generic __dict__
        # branch: Enum members DO have a __dict__ (holding _name_/_value_),
        # so the old ordering serialized enums as empty "dynamic_obj" records.
        if isinstance(obj, datetime):
            return obj.isoformat()
        if isinstance(obj, Enum):
            return transform(obj.value)
        if isinstance(obj, list):
            return [transform(i) for i in obj]
        if isinstance(obj, dict):
            return {k: transform(v) for k, v in obj.items()}
        if hasattr(obj, "__dict__"):
            raw_attributes = {k: transform(v) for k, v in obj.__dict__.items() if not k.startswith("_")}
            return {
                "__type__": "dynamic_obj",
                "__module__": obj.__class__.__module__,
                "__class__": obj.__class__.__name__,
                "data": raw_attributes,
            }
        return obj

    json_str = json.dumps(transform(data), ensure_ascii=False)
    compressed = zlib.compress(json_str.encode("utf-8"))
    encrypted = xor_crypt(compressed)
    return base64.b64encode(encrypted).decode("ascii")


def cache_decode(res: Any) -> Any:
    """Invert `cache_encode`, tolerating plain or partially-encoded input.

    Repeatedly unwraps base64+zlib+XOR layers and JSON strings until a
    non-string value emerges, then rebuilds "dynamic_obj" records.

    NOTE(review): rebuilding imports an arbitrary module/class named in the
    payload — only feed this data produced by `cache_encode` from trusted code.
    """
    current = res
    while isinstance(current, str):
        try:
            decoded_bytes = base64.b64decode(current, validate=True)
            current = zlib.decompress(xor_crypt(decoded_bytes)).decode("utf-8")
        except Exception:
            try:
                temp = json.loads(current)
                if temp == current:
                    break
                current = temp
            except Exception:
                break

    def restore_models(item: Any) -> Any:
        if isinstance(item, list):
            return [restore_models(i) for i in item]

        if isinstance(item, dict):
            if item.get("__type__") == "dynamic_obj":
                # Restore attributes first so the fallback below can reuse them
                # even when the original class cannot be imported/constructed,
                # and so a malformed record (missing "data") cannot raise.
                cleaned_data = {k: restore_models(v) for k, v in item.get("data", {}).items()}
                try:
                    module = importlib.import_module(item["__module__"])
                    cls = getattr(module, item["__class__"])
                    if hasattr(cls, "model_validate"):
                        return cls.model_validate(cleaned_data)
                    return cls(**cleaned_data)
                except Exception:
                    from types import SimpleNamespace

                    return SimpleNamespace(**cleaned_data)

            return {k: restore_models(v) for k, v in item.items()}

        return item

    return restore_models(current)
diff --git a/src/framex/utils/common.py b/src/framex/utils/common.py
new file mode 100644
index 0000000..66cf271
--- /dev/null
+++ b/src/framex/utils/common.py
@@ -0,0 +1,86 @@
+import inspect
+import json
+import re
+from collections.abc import Callable
+from datetime import timedelta
+from enum import StrEnum
+from pathlib import Path
+from typing import Any
+
+from pydantic import BaseModel
+
+
def plugin_to_deployment_name(plugin_name: str, obj_name: str) -> str:
    """Compose the deployment name as "<plugin>.<object>"."""
    return ".".join((plugin_name, obj_name))
+
+
def path_to_module_name(path: Path) -> str:
    """Convert a file path under the current working directory to a dotted module name.

    An "__init__" file resolves to its package; a leading "src." is stripped.
    """
    rel_path = path.resolve().relative_to(Path.cwd().resolve())
    segments = list(rel_path.parts[:-1])
    if rel_path.stem != "__init__":
        segments.append(rel_path.stem)
    return ".".join(segments).removeprefix("src.")
+
+
+def escape_tag(s: str) -> str:
+ """Escape Referenced Config: {html.escape(file_path)}
\n {html.escape(file_content)}\n Plugin Config (TOML)
+ {escaped_toml}
+