diff --git a/README.md b/README.md index ae91ecfbaf..2f169cfe97 100644 --- a/README.md +++ b/README.md @@ -86,10 +86,47 @@ Option 1: Install in a virtualenv ```sh -uv venv && . .venv/bin/activate -uv pip install 'dimos[base,unitree]' -# replay recorded data to test that the system is working -# IMPORTANT: First replay run will show a black rerun window while 2.4 GB downloads from LFS +# Clone the repository +git clone --branch dev --single-branch https://github.com/dimensionalOS/dimos.git +cd dimos + +# Create and activate virtual environment +python3 -m venv venv +source venv/bin/activate + +sudo apt install portaudio19-dev python3-pyaudio + +# Install LFS +sudo apt install git-lfs +git lfs install + +# Install torch and torchvision if not already installed +# Example CUDA 11.8, PyTorch 2.0.1 (replace with your required PyTorch version if different) +pip install torch==2.0.1 torchvision torchaudio --index-url https://download.pytorch.org/whl/cu118 +``` + +#### Install dependencies +```bash +# CPU only (recommended to attempt first) +uv sync --extra cpu --extra dev + +# CUDA install +uv sync --extra cuda --extra dev + +# Jetson Jetpack 6.2 (Python 3.10 only) +uv sync --extra jetson-jp6-cuda126 --extra dev + +# Copy and configure environment variables +cp default.env .env +``` + +#### Test the install +```bash +pytest -s dimos/ +``` + +#### Test Dimensional with a replay UnitreeGo2 stream (no robot required) +```bash dimos --replay run unitree-go2 ``` diff --git a/bin/test b/bin/test new file mode 100755 index 0000000000..46956da72c --- /dev/null +++ b/bin/test @@ -0,0 +1,384 @@ +#!/usr/bin/env -S deno run --allow-all --no-lock +import { $ } from "https://esm.sh/@jsr/david__dax@0.43.2/mod.ts" // see: https://github.com/dsherret/dax +import { Console, green, red, cyan, yellow } from "https://deno.land/x/quickr@0.8.13/main/console.js" +import { FileSystem, glob } from "https://deno.land/x/quickr@0.8.13/main/file_system.js" + +const projectDir = await 
FileSystem.walkUpUntil(".git") +const cachePath = `${projectDir}/.cache.ignore/test_cache.json` +await FileSystem.ensureFolderExists(FileSystem.dirname(cachePath)) + +function printHelp() { + console.log(`Usage: run/tests [options] [marker1,marker2 ...] + +Options: + --help, -h Show this help + --all Enable all markers + --without Comma-separated markers to disable (applied after --all) + --only-failed Run only tests that failed in the last cache + --timeout-ms Kill a test process after n milliseconds (wall time) + --refresh-test-list Rebuild cached test list + +Markers can be provided as comma-separated values or repeated args.`) +} + +function unique(list) { + return [...new Set(list)] +} + +function parseCliMarkers(args) { + const markers = [] + for (const arg of args) { + for (const part of arg.split(",")) { + const trimmed = part.trim() + if (trimmed.length > 0) markers.push(trimmed) + } + } + return unique(markers) +} + +function parseArgs(args) { + const markers = [] + const without = [] + let enableAll = false + let onlyFailed = false + let timeoutMs = null + let refreshTestList = false + for (let i = 0; i < args.length; i += 1) { + const arg = args[i] + if (arg === "--help" || arg === "-h") { + printHelp() + Deno.exit(0) + } + if (arg === "--all") { + enableAll = true + continue + } + if (arg === "--only-failed") { + onlyFailed = true + continue + } + if (arg === "--refresh-test-list") { + refreshTestList = true + continue + } + if (arg === "--timeout-ms") { + const value = args[i + 1] + if (!value) { + console.error("Missing value for --timeout-ms") + Deno.exit(2) + } + const parsed = Number.parseInt(value, 10) + if (!Number.isFinite(parsed) || parsed <= 0) { + console.error(`Invalid value for --timeout-ms: ${value}`) + Deno.exit(2) + } + timeoutMs = parsed + i += 1 + continue + } + if (arg.startsWith("--timeout-ms=")) { + const value = arg.slice("--timeout-ms=".length) + const parsed = Number.parseInt(value, 10) + if (!Number.isFinite(parsed) || parsed 
<= 0) { + console.error(`Invalid value for --timeout-ms: ${value}`) + Deno.exit(2) + } + timeoutMs = parsed + continue + } + if (arg === "--without") { + const value = args[i + 1] + if (!value) { + console.error("Missing value for --without") + Deno.exit(2) + } + without.push(...parseCliMarkers([value])) + i += 1 + continue + } + if (arg.startsWith("--without=")) { + without.push(...parseCliMarkers([arg.slice("--without=".length)])) + continue + } + markers.push(...parseCliMarkers([arg])) + } + return { + markers: unique(markers), + without: unique(without), + enableAll, + onlyFailed, + timeoutMs, + refreshTestList, + } +} + +function parseAvailableMarkers(output) { + const markers = [] + const cleaned = output.replace( + // eslint-disable-next-line no-control-regex + /[\u001b\u009b][[()#;?]*(?:[0-9]{1,4}(?:;[0-9]{0,4})*)?[0-9A-ORZcf-nqry=><]/g, + "", + ) + const re = /^@pytest\.mark\.([A-Za-z0-9_]+):/gm + let match + while ((match = re.exec(cleaned)) !== null) { + markers.push(match[1]) + } + return unique(markers) +} + +async function loadCache(path) { + try { + const raw = await Deno.readTextFile(path) + const parsed = JSON.parse(raw) + if (parsed && typeof parsed === "object" && parsed.tests && typeof parsed.tests === "object") { + return parsed + } + } catch { + // ignore missing or invalid cache + } + return { tests: {} } +} + +async function saveCache(path, cache) { + const json = JSON.stringify(cache, null, 2) + await Deno.writeTextFile(path, json + "\n") +} + +function getEffectiveMarkers(itemMarkers, availableMarkerSet) { + const filtered = [] + for (const marker of itemMarkers) { + if (availableMarkerSet.has(marker)) filtered.push(marker) + } + return unique(filtered) +} + +function buildCollectorScript() { + return `import contextlib +import io +import json +import pytest + +class Collector: + def __init__(self): + self.items = [] + + def pytest_collection_finish(self, session): + for item in session.items: + markers = sorted({m.name for m in 
item.iter_markers()}) + self.items.append({\"nodeid\": item.nodeid, \"markers\": markers}) + +collector = Collector() + +buf = io.StringIO() +with contextlib.redirect_stdout(buf), contextlib.redirect_stderr(buf): + ret = pytest.main([\"--collect-only\", \"-q\", \"--disable-warnings\", \"--continue-on-collection-errors\", \"-p\", \"no:warnings\", \"-o\", \"addopts=\"], plugins=[collector]) + +print(json.dumps({\"items\": collector.items, \"retcode\": ret, \"output\": buf.getvalue()})) +` +} + +async function collectTests(pythonCmd) { + const scriptPath = await Deno.makeTempFile({ suffix: ".py" }) + try { + await Deno.writeTextFile(scriptPath, buildCollectorScript()) + const collectRaw = await $`${pythonCmd} ${scriptPath}`.text() + return JSON.parse(collectRaw) + } finally { + await Deno.remove(scriptPath).catch(() => {}) + } +} + +async function runSingleTest(pythonCmd, nodeid) { + const start = performance.now() + let code = 0 + let output = "" + try { + const timeoutSeconds = timeoutMs ? Math.max(timeoutMs / 1000, 0.001) : null + if (timeoutSeconds && timeoutCmd) { + output = await $`${timeoutCmd} ${timeoutSeconds} ${pythonCmd} -m pytest -o ${"addopts=-v -p no:warnings -ra --color=yes"} -r s --maxfail=1 ${nodeid}`.text() + } else { + output = await $`${pythonCmd} -m pytest -o ${"addopts=-v -p no:warnings -ra --color=yes"} -r s --maxfail=1 ${nodeid}`.text() + } + } catch (err) { + code = typeof err?.code === "number" ? err.code : 1 + output = `${err?.stdout ?? ""}${err?.stderr ?? 
""}` + } + const wallDurationSeconds = (performance.now() - start) / 1000 + let durationSeconds = wallDurationSeconds + const durationMatch = [...output.matchAll(/\bin ([0-9.]+)s\b/g)].pop() + if (durationMatch && durationMatch[1]) { + const parsed = Number.parseFloat(durationMatch[1]) + if (Number.isFinite(parsed)) { + durationSeconds = parsed + } + } + let status = "pass" + if (code !== 0) { + status = "fail" + } else if (/\bSKIPPED\b/i.test(output) || /\bskipped\b/i.test(output)) { + status = "skip" + } + return { code, durationSeconds, wallDurationSeconds, status } +} + +const pythonCmd = "python" + +async function findTimeoutCommand() { + try { + await $`command -v timeout`.quiet() + return "timeout" + } catch { + // ignore + } + try { + await $`command -v gtimeout`.quiet() + return "gtimeout" + } catch { + // ignore + } + return null +} + +const markerOutput = await $`${pythonCmd} -m pytest --markers`.text() +const availableMarkers = parseAvailableMarkers(markerOutput) +const availableMarkerSet = new Set(availableMarkers) + +console.log("Available markers:") +for (const marker of availableMarkers) { + console.log(`- ${marker}`) +} + +const { markers: requestedMarkers, without: withoutMarkers, enableAll, onlyFailed, timeoutMs, refreshTestList } = parseArgs( + Deno.args, +) +const timeoutCmd = await findTimeoutCommand() +if (timeoutMs && !timeoutCmd) { + console.error("Warning: --timeout-ms provided but no timeout/gtimeout command found; running without a timeout.") +} +const enabledMarkerSet = new Set(requestedMarkers) +if (enableAll) { + for (const marker of availableMarkers) enabledMarkerSet.add(marker) +} +for (const marker of withoutMarkers) enabledMarkerSet.delete(marker) +const enabledMarkers = [...enabledMarkerSet] +console.log("") +console.log(`Enabled markers: ${enabledMarkers.length > 0 ? 
enabledMarkers.join(", ") : "(none)"}`) +if (withoutMarkers.length > 0) { + console.log(`Disabled markers: ${withoutMarkers.join(", ")}`) +} +if (onlyFailed) { + console.log("Only failed tests: enabled") +} +console.log("") + +const cache = await loadCache(cachePath) + +let items = cache.test_list +if (!items || refreshTestList) { + console.log("Getting all test names (takes a second to scan the entire project)") + const collection = await collectTests(pythonCmd) + if (collection.retcode !== 0) { + if (collection.output && collection.output.trim().length > 0) { + const trimmed = collection.output.trim() + const maxChars = 8000 + const sliced = trimmed.length > maxChars ? trimmed.slice(trimmed.length - maxChars) : trimmed + console.error(sliced) + } + if (!collection.items || collection.items.length === 0) { + console.error("Test collection failed.") + Deno.exit(collection.retcode) + } + console.error("Test collection had errors; continuing with collected tests.") + } + items = collection.items || [] + cache.test_list = items + await saveCache(cachePath, cache) +} else { + console.log("Using cached test list, use --refresh-test-list if new tests have been added.") +} + +console.log("") +console.log(`Collected ${items.length} tests:`) +for (const item of items) { + console.log(item.nodeid) +} + +const selectedTests = [] +for (const item of items) { + const effectiveMarkers = getEffectiveMarkers(item.markers || [], availableMarkerSet) + const hasMarkers = effectiveMarkers.length > 0 + const enabledByMarkers = effectiveMarkers.some((marker) => enabledMarkerSet.has(marker)) + if (!hasMarkers || enabledByMarkers) { + selectedTests.push({ nodeid: item.nodeid, markers: effectiveMarkers }) + } +} + +console.log("") +console.log(`Selected ${selectedTests.length} tests.`) + +let filteredTests = selectedTests +if (onlyFailed) { + filteredTests = selectedTests.filter((test) => cache.tests[test.nodeid]?.status === "fail") + console.log(`Only-failed selection: 
${filteredTests.length} tests.`) +} + +const unknown = [] +const known = [] +for (const test of filteredTests) { + const cached = cache.tests[test.nodeid] + const lastSuccessWall = cached?.last_success_wall_duration + const wallDuration = cached?.wall_duration_seconds + const lastSuccessDuration = cached?.last_success_duration + if (typeof lastSuccessWall === "number" && Number.isFinite(lastSuccessWall)) { + known.push({ ...test, durationSeconds: lastSuccessWall }) + } else if (typeof wallDuration === "number" && Number.isFinite(wallDuration)) { + known.push({ ...test, durationSeconds: wallDuration }) + } else if (typeof lastSuccessDuration === "number" && Number.isFinite(lastSuccessDuration)) { + known.push({ ...test, durationSeconds: lastSuccessDuration }) + } else if (cached && typeof cached.duration_seconds === "number" && Number.isFinite(cached.duration_seconds)) { + known.push({ ...test, durationSeconds: cached.duration_seconds }) + } else { + unknown.push(test) + } +} + +known.sort((a, b) => a.durationSeconds - b.durationSeconds) +const runQueue = [...unknown, ...known] + +for (const test of runQueue) { + console.log("") + Console.write(`Running ${test.nodeid}...`) + const { code, durationSeconds, wallDurationSeconds, status } = await runSingleTest(pythonCmd, test.nodeid) + const durationSecondsFixed = Number(durationSeconds.toFixed(3)) + const wallDurationSecondsFixed = Number(wallDurationSeconds.toFixed(3)) + cache.tests[test.nodeid] = { + duration_seconds: durationSecondsFixed, + wall_duration_seconds: wallDurationSecondsFixed, + status, + last_run: new Date().toISOString(), + last_success_duration: + status === "pass" ? durationSecondsFixed : cache.tests[test.nodeid]?.last_success_duration, + last_success_wall_duration: + status === "pass" ? 
wallDurationSecondsFixed : cache.tests[test.nodeid]?.last_success_wall_duration, + markers: test.markers, + } + await saveCache(cachePath, cache) + Console.write(` [${cyan(durationSecondsFixed)}s `) + if (status === "pass") { + Console.write(` ${green("✓")} ]`) + } else if (status === "fail") { + Console.write(` ${red("✗")} ]`) + } else if (status === "skip") { + Console.write(` ${yellow("-")} ]`) + } +} + +const failures = Object.entries(cache.tests).filter(([, entry]) => entry.status === "fail").length +console.log("") +if (failures > 0) { + console.log(`Completed with ${failures} failing test(s). ${red("✗")}`) + Deno.exit(1) +} else { + console.log(`Completed with 0 failing tests! ${green("✓")}`) +} diff --git a/dimos/hardware/sensors/camera/test_webcam.py b/dimos/hardware/sensors/camera/test_webcam.py index e40a73acc9..1db173988c 100644 --- a/dimos/hardware/sensors/camera/test_webcam.py +++ b/dimos/hardware/sensors/camera/test_webcam.py @@ -32,6 +32,7 @@ def dimos(): @pytest.mark.tool +@pytest.mark.neverending def test_streaming_single(dimos) -> None: camera = dimos.deploy( CameraModule, diff --git a/pyproject.toml b/pyproject.toml index e6b542a65e..d4cf843512 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -42,6 +42,7 @@ dependencies = [ "pydantic", "python-dotenv", "annotation-protocol>=1.4.0", + "lazy_loader", # Multiprocess "dask[complete]==2025.5.1", @@ -51,7 +52,9 @@ dependencies = [ "colorlog==6.9.0", # Core Msgs "opencv-python", - "open3d", + # open3d-unofficial-arm works on non-arm as well (falls back to open3d) + "open3d-unofficial-arm; platform_machine == 'aarch64' and platform_system == 'Linux'", + "open3d>=0.18.0; platform_machine != 'aarch64' or platform_system != 'Linux'", # CLI "pydantic-settings>=2.11.0,<3", "textual==3.7.1", @@ -125,9 +128,9 @@ visualization = [ ] agents = [ - "langchain>=1,<1.2.4", + "langchain==1.2.3", "langchain-chroma>=1,<2", - "langchain-core>=1,<2", + "langchain-core==1.2.3", "langchain-openai>=1,<2", 
"langchain-text-splitters>=1,<2", "langchain-huggingface>=1,<2", @@ -206,11 +209,23 @@ cpu = [ cuda = [ "cupy-cuda12x==13.6.0", "nvidia-nvimgcodec-cu12[all]", - "onnxruntime-gpu>=1.17.1", # Only versions supporting both cuda11 and cuda12 + # Exclude on aarch64 Linux where jetson-jp6-cuda126 extra provides Jetson-specific wheels + "onnxruntime-gpu>=1.17.1; platform_machine != 'aarch64' or sys_platform != 'linux'", "ctransformers[cuda]==0.2.27", "mmengine>=0.10.3", "mmcv>=2.1.0", - "xformers>=0.0.20", + "xformers>=0.0.20; platform_machine != 'aarch64' or sys_platform != 'linux'", + + # Detic GPU stack + "mss", + "dataclasses", + "ftfy", + "regex", + "fasttext", + "lvis", + "nltk", + "clip", + "detectron2", ] dev = [ @@ -418,3 +433,13 @@ default-groups = [] [tool.uv.sources] contact-graspnet-pytorch = { git = "https://github.com/dimensionalOS/contact_graspnet_pytorch.git" } +detectron2 = { git = "https://github.com/facebookresearch/detectron2.git", tag = "v0.6" } +torch = { index = "jetson-jp6-cuda126", marker = "platform_machine == 'aarch64' and sys_platform == 'linux'" } +torchvision = { index = "jetson-jp6-cuda126", marker = "platform_machine == 'aarch64' and sys_platform == 'linux'" } +onnxruntime-gpu = { index = "jetson-jp6-cuda126", marker = "platform_machine == 'aarch64' and sys_platform == 'linux'" } +xformers = { index = "jetson-jp6-cuda126", marker = "platform_machine == 'aarch64' and sys_platform == 'linux'" } + +[[tool.uv.index]] +name = "jetson-jp6-cuda126" +url = "https://pypi.jetson-ai-lab.io/jp6/cu126" +explicit = true diff --git a/uv.lock b/uv.lock index 75f839303f..1820fd013f 100644 --- a/uv.lock +++ b/uv.lock @@ -2020,9 +2020,9 @@ requires-dist = [ { name = "hydra-core", marker = "extra == 'perception'", specifier = ">=1.3.0" }, { name = "ipykernel", marker = "extra == 'misc'" }, { name = "kaleido", marker = "extra == 'manipulation'", specifier = ">=0.2.1" }, - { name = "langchain", marker = "extra == 'agents'", specifier = ">=1,<1.2.4" }, + { name 
= "langchain", marker = "extra == 'agents'", specifier = "==1.2.3" }, { name = "langchain-chroma", marker = "extra == 'agents'", specifier = ">=1,<2" }, - { name = "langchain-core", marker = "extra == 'agents'", specifier = ">=1,<2" }, + { name = "langchain-core", marker = "extra == 'agents'", specifier = "==1.2.3" }, { name = "langchain-huggingface", marker = "extra == 'agents'", specifier = ">=1,<2" }, { name = "langchain-ollama", marker = "extra == 'agents'", specifier = ">=1,<2" }, { name = "langchain-openai", marker = "extra == 'agents'", specifier = ">=1,<2" }, @@ -3982,7 +3982,7 @@ wheels = [ [[package]] name = "langchain-core" -version = "1.2.6" +version = "1.2.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "jsonpatch" }, @@ -3994,9 +3994,9 @@ dependencies = [ { name = "typing-extensions" }, { name = "uuid-utils" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/b9/ce/ba5ed5ea6df22965b2893c2ed28ebb456204962723d408904c4acfa5e942/langchain_core-1.2.6.tar.gz", hash = "sha256:b4e7841dd7f8690375aa07c54739178dc2c635147d475e0c2955bf82a1afa498", size = 833343, upload-time = "2026-01-02T21:35:44.749Z" } +sdist = { url = "https://files.pythonhosted.org/packages/70/ea/8380184b287da43d3d2556475b985cf3e27569e9d8bbe33195600a98cabb/langchain_core-1.2.3.tar.gz", hash = "sha256:61f5197aa101cd5605879ef37f2b0ac56c079974d94d347849b8d4fe18949746", size = 803567, upload-time = "2025-12-18T20:13:10.574Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/6f/40/0655892c245d8fbe6bca6d673ab5927e5c3ab7be143de40b52289a0663bc/langchain_core-1.2.6-py3-none-any.whl", hash = "sha256:aa6ed954b4b1f4504937fe75fdf674317027e9a91ba7a97558b0de3dc8004e34", size = 489096, upload-time = "2026-01-02T21:35:43.391Z" }, + { url = "https://files.pythonhosted.org/packages/f0/57/cfc1d12e273d33d16bab7ce9a135244e6f5677a92a5a99e69a61b22b7d93/langchain_core-1.2.3-py3-none-any.whl", hash = 
"sha256:c3501cf0219daf67a0ae23f6d6bdf3b41ab695efd8f0f3070a566e368b8c3dc7", size = 476384, upload-time = "2025-12-18T20:13:08.998Z" }, ] [[package]] @@ -4028,16 +4028,16 @@ wheels = [ [[package]] name = "langchain-openai" -version = "1.1.7" +version = "1.1.6" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "langchain-core" }, { name = "openai" }, { name = "tiktoken" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/38/b7/30bfc4d1b658a9ee524bcce3b0b2ec9c45a11c853a13c4f0c9da9882784b/langchain_openai-1.1.7.tar.gz", hash = "sha256:f5ec31961ed24777548b63a5fe313548bc6e0eb9730d6552b8c6418765254c81", size = 1039134, upload-time = "2026-01-07T19:44:59.728Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ae/67/228dc28b4498ea16422577013b5bb4ba35a1b99f8be975d6747c7a9f7e6a/langchain_openai-1.1.6.tar.gz", hash = "sha256:e306612654330ae36fb6bbe36db91c98534312afade19e140c3061fe4208dac8", size = 1038310, upload-time = "2025-12-18T17:58:52.84Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/64/a1/50e7596aca775d8c3883eceeaf47489fac26c57c1abe243c00174f715a8a/langchain_openai-1.1.7-py3-none-any.whl", hash = "sha256:34e9cd686aac1a120d6472804422792bf8080a2103b5d21ee450c9e42d053815", size = 84753, upload-time = "2026-01-07T19:44:58.629Z" }, + { url = "https://files.pythonhosted.org/packages/db/5b/1f6521df83c1a8e8d3f52351883b59683e179c0aa1bec75d0a77a394c9e7/langchain_openai-1.1.6-py3-none-any.whl", hash = "sha256:c42d04a67a85cee1d994afe400800d2b09ebf714721345f0b651eb06a02c3948", size = 84701, upload-time = "2025-12-18T17:58:51.527Z" }, ] [[package]]