diff --git a/README.md b/README.md index e7f27f0..0532746 100644 --- a/README.md +++ b/README.md @@ -56,43 +56,34 @@ phi login ## Quick start -### Single-sequence / single-structure jobs +### Try the tutorial -```bash -# Structure prediction (ESMFold) -phi folding --fasta sequences.fasta - -# Complex structure prediction (AlphaFold2 multimer) -phi complex_folding --fasta binder_target.fasta +The fastest way to get started — downloads five example PD-L1 binder structures +and walks you through the full pipeline: -# Sequence design via inverse folding (ProteinMPNN) -phi inverse_folding --pdb design.pdb --num-sequences 20 +```bash +phi tutorial ``` -### Batch scoring workflow +This fetches the example files, prints step-by-step instructions, and leaves +you ready to run `phi filter`. + +### Scoring your own structures ```bash -# 1. Upload a directory of PDB/CIF files +# 1. Upload PDB/CIF files phi upload ./designs/ -# Output: -# dataset_id d7c3a1b2-... -# Dashboard: https://design.dynotx.com/dashboard/datasets/d7c3a1b2-... -# Run a job against this dataset: -# phi folding --dataset-id d7c3a1b2-... -# phi complex_folding --dataset-id d7c3a1b2-... -# phi inverse_folding --dataset-id d7c3a1b2-... -# phi filter --dataset-id d7c3a1b2-... --preset default --wait - -# 2. Run the full filter pipeline (inverse folding → folding → complex folding → score) -phi filter --dataset-id d7c3a1b2-... --preset default --wait +# 2. Run the full filter pipeline +phi filter --preset default --wait -# 3. Download results (structures, scores CSV, raw score JSONs) +# 3. View scores and download results +phi scores phi download --out ./results/ ``` -After each command, `phi` prints the active dataset and job IDs and a link to -the dashboard: +After each command, `phi` prints the active dataset and a link to the +dashboard: ``` Active: dataset [d7c3a1b2-...] · job [cb4553f5-...] @@ -105,6 +96,7 @@ Dashboard: https://design.dynotx.com/dashboard/datasets/d7c3a1b2-... 
| Command | Alias | Description | |---|---|---| +| `phi tutorial` | — | Download example structures and print a step-by-step walkthrough | | `phi login` | — | Verify API key and print identity | | `phi upload` | — | Upload PDB/CIF files or a directory | | `phi fetch` | — | Download a structure from RCSB PDB or AlphaFold DB, crop, and optionally upload | diff --git a/src/phi/cli.py b/src/phi/cli.py index 2145e24..80d7440 100755 --- a/src/phi/cli.py +++ b/src/phi/cli.py @@ -16,6 +16,7 @@ ) from phi.commands.research import cmd_notes, cmd_research from phi.commands.structure import cmd_fetch +from phi.commands.tutorial import cmd_tutorial from phi.config import _load_state from phi.display import _C_BLUE, _die, console from phi.parser import build_parser @@ -23,6 +24,7 @@ COMMANDS = { "login": cmd_login, + "tutorial": cmd_tutorial, "upload": cmd_upload, "ingest-session": cmd_ingest_session, "datasets": cmd_datasets, diff --git a/src/phi/commands/models.py b/src/phi/commands/models.py index 5233147..34be5b3 100644 --- a/src/phi/commands/models.py +++ b/src/phi/commands/models.py @@ -1,4 +1,7 @@ +from __future__ import annotations + import argparse +import os from pathlib import Path from phi.api import _require_key, _submit @@ -6,6 +9,13 @@ from phi.download import _download_job, _read_fasta from phi.polling import _poll +_DESIGN_ENABLED = os.environ.get("DYNO_ENABLE_DESIGN", "").lower() in ("1", "true", "yes") + + +def _require_design_flag() -> None: + if not _DESIGN_ENABLED: + _die("This command is not yet available.") + def _run_model_job(job_type: str, params: dict, args: argparse.Namespace) -> None: from phi.config import POLL_INTERVAL as _INTERVAL @@ -92,6 +102,7 @@ def cmd_boltz(args: argparse.Namespace) -> None: def cmd_rfdiffusion3(args: argparse.Namespace) -> None: + _require_design_flag() params: dict = { "num_designs": args.num_designs, "inference_steps": args.steps, @@ -129,6 +140,7 @@ def cmd_rfdiffusion3(args: argparse.Namespace) -> None: def 
cmd_boltzgen(args: argparse.Namespace) -> None: + _require_design_flag() params: dict = { "protocol": args.protocol, "num_designs": args.num_designs, diff --git a/src/phi/commands/research.py b/src/phi/commands/research.py index c356c0f..7f0c47e 100644 --- a/src/phi/commands/research.py +++ b/src/phi/commands/research.py @@ -111,8 +111,6 @@ def cmd_notes(args: argparse.Namespace) -> None: return content: str = data.get("content") or "" - gcs_url: str | None = data.get("gcs_url") - gcs_uri: str | None = data.get("gcs_uri") if args.out: out = Path(args.out) @@ -124,8 +122,6 @@ def cmd_notes(args: argparse.Namespace) -> None: dest.parent.mkdir(parents=True, exist_ok=True) dest.write_text(content, encoding="utf-8") console.print(f"[{_C_SAND}]Notes saved[/] → {dest}") - if gcs_url: - console.print(f"[dim]Download URL:[/] {gcs_url}") return if args.json: @@ -141,5 +137,3 @@ def cmd_notes(args: argparse.Namespace) -> None: padding=(1, 2), ) ) - if gcs_uri: - console.print(f"[dim]Storage URI:[/] {gcs_uri}") diff --git a/src/phi/commands/structure.py b/src/phi/commands/structure.py index aeca492..5685713 100644 --- a/src/phi/commands/structure.py +++ b/src/phi/commands/structure.py @@ -153,14 +153,12 @@ def _fetch_and_upload( ("Source ", source_url), ("File ", str(out_path)), ("Dataset ", dataset_id), - ("GCS URI ", gcs_uri), ]: console.print(f" [bold]{label}[/bold] {value}") console.print() console.print(" [dim]Next steps:[/dim]") - console.print( - f" [dim] phi design --target-pdb-gcs {gcs_uri} --hotspots --num-designs 50[/dim]" - ) + console.print(f" [dim] phi upload {out_path}[/dim]") + console.print(" [dim] phi filter --preset default --wait[/dim]") console.rule() diff --git a/src/phi/commands/tutorial.py b/src/phi/commands/tutorial.py new file mode 100644 index 0000000..2fd3f4b --- /dev/null +++ b/src/phi/commands/tutorial.py @@ -0,0 +1,88 @@ +from __future__ import annotations + +import argparse +import urllib.request +from 
pathlib import Path + +from phi.api import _request +from phi.config import _save_state, _ssl_context +from phi.display import _C_BLUE, _C_SAND, _die, console + + +def cmd_tutorial(args: argparse.Namespace) -> None: + out = Path(args.out) + + # ── 1. Fetch manifest (standard Clerk JWT auth, same as all endpoints) ─── + console.print("[dim]Fetching tutorial dataset …[/]") + try: + manifest = _request("GET", "/tutorial") + except Exception as exc: + _die( + f"Could not reach the tutorial endpoint: {exc}\n" + " Check your connection and API key, then try again." + ) + + files: list[dict] = manifest.get("files", []) + dataset_id: str | None = manifest.get("dataset_id") + message: str | None = manifest.get("message") + + if not files: + _die("No tutorial files returned by the API.") + + # ── 2. Download each file (plain HTTP — signed URLs are self-authenticating) + out.mkdir(parents=True, exist_ok=True) + console.print(f" Downloading {len(files)} file(s) to [{_C_BLUE}]{out}/[/] …\n") + + for entry in files: + filename: str = entry["filename"] + url: str = entry["url"] + dest = out / filename + dest.parent.mkdir(parents=True, exist_ok=True) + try: + req = urllib.request.Request(url) + with urllib.request.urlopen(req, context=_ssl_context()) as resp: + dest.write_bytes(resp.read()) + console.print(f" [bold {_C_SAND}]✓[/] {filename}") + except Exception as exc: + _die(f"Failed to download {filename}: {exc}") + + # ── 3. Cache dataset_id so phi filter needs zero extra flags ───────────── + if dataset_id: + _save_state({"dataset_id": dataset_id}) + console.print( + f"\n[dim]dataset_id [{_C_BLUE}]{dataset_id}[/] cached — " + f"run [bold]phi filter[/] to start scoring.[/]" + ) + + # ── 4. 
Print step-by-step guide ────────────────────────────────────────── + if message: + console.print(f"\n[dim]{message}[/]") + + if dataset_id: + upload_step = "[dim] (skipped — dataset already ready)[/]" + else: + upload_step = f" [{_C_SAND}]phi upload {out}/[/]" + + console.print(f""" +[bold]── Tutorial: PD-L1 binder scoring pipeline ──────────────────[/] + +You have {len(files)} example binder structures in [{_C_BLUE}]{out}/[/]. + +[bold]Step 1 — Upload[/] +{upload_step} + +[bold]Step 2 — Run the filter pipeline[/] + [{_C_SAND}]phi filter --preset default --wait[/] + + Runs: ProteinMPNN → ESMFold → AlphaFold2 → score + Typical runtime: 10–30 min for {len(files)} structures. + +[bold]Step 3 — View scores[/] + [{_C_SAND}]phi scores[/] + +[bold]Step 4 — Download results[/] + [{_C_SAND}]phi download --out ./results[/] + +[bold]Dashboard[/] + [{_C_BLUE}]https://design.dynotx.com/dashboard[/] +""") diff --git a/src/phi/display.py b/src/phi/display.py index bc0b2bd..424410e 100644 --- a/src/phi/display.py +++ b/src/phi/display.py @@ -272,7 +272,8 @@ def _print_status(s: dict) -> None: table.add_column("filename", style=_C_BLUE, no_wrap=True) table.add_column("type", style="dim") for f in files[:10]: - fname = f.get("filename") or f.get("gcs_url", "?") + raw = f.get("filename") or f.get("gcs_url", "?") + fname = raw.split("/")[-1] if raw.startswith("gs://") else raw ftype = f.get("artifact_type", "") table.add_row(fname, ftype) if len(files) > 10: diff --git a/src/phi/parser.py b/src/phi/parser.py index c0b04ba..2111784 100644 --- a/src/phi/parser.py +++ b/src/phi/parser.py @@ -4,15 +4,16 @@ from phi.config import _FILTER_PRESETS, POLL_INTERVAL _CLI_EPILOG = """\ +Quick start: + phi tutorial # download example structures + print step-by-step guide + phi filter --preset default --wait + phi scores + phi download --out ./results + Fetch and prepare target structures: phi fetch --pdb 4ZQK --chain A --residues 56-290 --out target.pdb phi fetch --uniprot Q9NZQ7 
--trim-low-confidence 70 --upload -Design (backbone generation): - phi design --target-pdb target.pdb --hotspots A45,A67 --num-designs 50 - phi design --length 80 --num-designs 20 - phi boltzgen --yaml design.yaml --protocol protein-anything --num-designs 10 - Validation (fold + score): phi esmfold --fasta sequences.fasta phi alphafold --fasta complex.fasta @@ -20,10 +21,10 @@ phi esm2 --fasta sequences.fasta phi boltz --fasta complex.fasta -Batch filter pipeline (100-50,000 designs): - phi upload --dir ./designs/ --file-type pdb - phi filter --dataset-id --preset default --wait - phi download --out ./results +Batch filter pipeline: + phi upload ./designs/ + phi filter --preset default --wait + phi download --out ./results Dataset management: phi datasets # list your datasets @@ -253,6 +254,17 @@ def build_parser() -> argparse.ArgumentParser: ) sub = root.add_subparsers(dest="command", required=True) + p = sub.add_parser( + "tutorial", + help="Download example structures and print a step-by-step scoring walkthrough", + ) + p.add_argument( + "--out", + metavar="DIR", + default="examples", + help="Directory to download example files into (default: ./examples)", + ) + p = sub.add_parser("login", help="Verify API key and print connection + identity details") p.add_argument("--json", action="store_true")