Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
41 commits
Select commit Hold shift + click to select a range
6cd48bf
json cleanup, tests reformat
leshy May 27, 2025
ba8eab1
check gitconfig before test
leshy May 28, 2025
1cd5915
set-safe-directory on the github action level
leshy May 28, 2025
932d720
workflow fix
leshy May 28, 2025
d3c2ac7
improved tests, verifying workflow config
leshy May 28, 2025
6fa0928
workflow config fix
leshy May 28, 2025
f7e9096
brute forcing config
leshy May 28, 2025
8fbec9a
another fix attempt
leshy May 28, 2025
e0d4e51
dif hashes need to be sorted
leshy May 28, 2025
b418179
testing in-ci pre-commit run
leshy May 28, 2025
01071ce
added pre-commit-config
leshy May 28, 2025
1d51629
forcing CI code cleanup
leshy May 28, 2025
b8f6d92
double pre-commit
leshy May 28, 2025
c24d11b
permissions added for auto-commits
leshy May 28, 2025
f35ed84
CI code cleanup
leshy May 28, 2025
1efa924
attempting to trigger docker builds after pre-commit
leshy May 28, 2025
bf23841
Merge branch 'env/cleanup' of github.com:dimensionalOS/dimos into env…
leshy May 28, 2025
437a7c6
workflow typo
leshy May 28, 2025
f8c1bae
workflow pytest reference fix
leshy May 28, 2025
b850fba
code cleanup needs permissions to call docker build
leshy May 28, 2025
759fe4f
pre-commit hooks in dev container
leshy May 28, 2025
95854ae
Auto-compress test data: test_file.txt
leshy May 28, 2025
ef8fe73
Auto-compress test data: test_file.txt
leshy May 28, 2025
9224f74
lfs hook fixes
leshy May 28, 2025
97319cf
lfs hook fixes 2
leshy May 28, 2025
20866bd
triggering rebuild 2
leshy May 28, 2025
5d6a960
final cleanup of the lfs script
leshy May 28, 2025
1b4f155
removed temp test files
leshy May 28, 2025
226eaaf
CI code cleanup
leshy May 28, 2025
64824ac
cleanup
leshy May 28, 2025
adfaed3
CI code cleanup
leshy May 28, 2025
ec8e3ea
pre-commit doesn't push to LFS, it just checks
leshy May 28, 2025
4a7aed7
null glob fix
leshy May 28, 2025
bf88749
slightly nicer lfs check output
leshy May 28, 2025
2d8049c
small workflow naming fixes
leshy May 28, 2025
2de8581
better lfs_check output
leshy May 28, 2025
022a5bf
renaming actions for better UI view
leshy May 28, 2025
0592c67
even shorter naming
leshy May 28, 2025
adeb3f7
checking explicit action naming
leshy May 28, 2025
1bcb9bd
decoupling workflows
leshy May 28, 2025
0c23a15
re-coupling workflows
leshy May 28, 2025
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .devcontainer/devcontainer.json
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@
"containerEnv": {
"PYTHONPATH": "${localEnv:PYTHONPATH}:/workspaces/dimos"
},
"postCreateCommand": "git config --global --add safe.directory /workspaces/dimos",
"postCreateCommand": "git config --global --add safe.directory /workspaces/dimos && cd /workspaces/dimos && pre-commit install",
"settings": {
"notebook.formatOnSave.enabled": true,
"notebook.codeActionsOnSave": {
Expand Down
33 changes: 33 additions & 0 deletions .github/workflows/cleanup.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
name: cleanup
on: push

# The auto-commit step pushes cleaned-up code back to the branch, so this
# workflow needs contents: write; the called docker workflow publishes
# images, hence packages: write.
permissions:
  contents: write
  packages: write
  pull-requests: read

jobs:
  pre-commit:
    runs-on: ubuntu-latest
    steps:
      # v4 for consistency with the checkout version used by tests.yml.
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v5

      # First pass: auto-fixing hooks (formatters) report "failure" when
      # they modify files, even though the tree is clean afterwards.
      - name: Run pre-commit
        id: pre-commit-first
        uses: pre-commit/action@v3.0.1
        continue-on-error: true

      # Second pass runs on the already-fixed tree; a failure here means a
      # hook found a real, non-auto-fixable problem, and the job should fail.
      - name: Re-run pre-commit if failed initially
        id: pre-commit-retry
        if: steps.pre-commit-first.outcome == 'failure'
        uses: pre-commit/action@v3.0.1
        continue-on-error: false

      # Push whatever the hooks changed back to the branch.
      - name: Commit code changes
        uses: stefanzweifel/git-auto-commit-action@v5
        with:
          commit_message: "CI code cleanup"

  # Docker builds run only after the cleanup commit so they see the final tree.
  docker:
    needs: [pre-commit]
    uses: ./.github/workflows/docker.yml
34 changes: 17 additions & 17 deletions .github/workflows/docker.yml
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
name: docker-tree
on: push
name: docker
on: workflow_call

permissions:
contents: read
Expand Down Expand Up @@ -56,7 +56,7 @@ jobs:
echo "branch tag determined: ${branch_tag}"
echo branch_tag="${branch_tag}" >> "$GITHUB_OUTPUT"

build-ros:
ros:
needs: [check-changes]
if: needs.check-changes.outputs.ros == 'true'
uses: ./.github/workflows/_docker-build-template.yml
Expand All @@ -66,21 +66,21 @@ jobs:

# just a debugger
inspect-needs:
needs: [check-changes, build-ros]
needs: [check-changes, ros]
runs-on: dimos-runner-ubuntu-2204
if: always()
steps:
- run: |
echo '${{ toJSON(needs) }}'

build-python:
needs: [check-changes, build-ros]
python:
needs: [check-changes, ros]
if: |
${{
always() && !cancelled() &&
needs.check-changes.result == 'success' &&
((needs.build-ros.result == 'success') ||
(needs.build-ros.result == 'skipped' &&
((needs.ros.result == 'success') ||
(needs.ros.result == 'skipped' &&
needs.check-changes.outputs.python == 'true'))
}}
uses: ./.github/workflows/_docker-build-template.yml
Expand All @@ -89,14 +89,14 @@ jobs:
target: base-ros-python
freespace: true

build-dev:
needs: [check-changes, build-python]
dev:
needs: [check-changes, python]
if: |
${{
always() && !cancelled() &&
needs.check-changes.result == 'success' &&
((needs.build-python.result == 'success') ||
(needs.build-python.result == 'skipped' &&
((needs.python.result == 'success') ||
(needs.python.result == 'skipped' &&
needs.check-changes.outputs.dev == 'true'))
}}
uses: ./.github/workflows/_docker-build-template.yml
Expand All @@ -105,15 +105,15 @@ jobs:
target: dev

run-tests:
needs: [check-changes, build-dev]
needs: [check-changes, dev]
if: |
${{
always() && !cancelled() &&
needs.check-changes.result == 'success' &&
((needs.build-dev.result == 'success') ||
(needs.build-dev.result == 'skipped' &&
((needs.dev.result == 'success') ||
(needs.dev.result == 'skipped' &&
needs.check-changes.outputs.tests == 'true'))
}}
uses: ./.github/workflows/pytest.yml
uses: ./.github/workflows/tests.yml
with:
branch-tag: ${{ needs.build-dev.result != 'success' && 'dev' || needs.check-changes.outputs.branch-tag }}
branch-tag: ${{ needs.dev.result != 'success' && 'dev' || needs.check-changes.outputs.branch-tag }}
4 changes: 3 additions & 1 deletion .github/workflows/pytest.yml → .github/workflows/tests.yml
Original file line number Diff line number Diff line change
@@ -1,4 +1,4 @@
name: testing
name: tests

on:
workflow_call:
Expand All @@ -21,6 +21,8 @@ jobs:

steps:
- uses: actions/checkout@v4

- name: Run tests
run: |
git config --global --add safe.directory '*'
/entrypoint.sh bash -c "pytest"
4 changes: 3 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -22,5 +22,7 @@ assets/agent/memory.txt
tests/data/*
!tests/data/.lfs/

# node modules (for dev tooling)
# node env (used by devcontainers cli)
node_modules
package.json
package-lock.json
33 changes: 33 additions & 0 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
@@ -0,0 +1,33 @@
# pre-commit hook configuration (https://pre-commit.com).
# Hooks run at commit time unless a hook overrides `stages` below.
default_stages: [pre-commit]
# Vendored model code and deprecated code are excluded from all hooks.
exclude: (dimos/models/.*)|(deprecated)
repos:
  - repo: https://github.com/astral-sh/ruff-pre-commit
    rev: v0.11.11
    hooks:
      # NOTE(review): ruff's lint pass is disabled — only formatting runs.
      # Confirm whether ruff-check should be re-enabled.
      #- id: ruff-check
      #  args: [--fix]
      - id: ruff-format
        stages: [pre-commit]
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v4.6.0
    hooks:
      - id: check-case-conflict
      # NOTE(review): this hook runs only on pre-push (not pre-commit) due to
      # the `stages` override — confirm this is intentional.
      - id: trailing-whitespace
        language: python
        types: [text]
        stages: [pre-push]
      - id: check-json
      - id: check-toml
      - id: check-yaml
      - id: pretty-format-json
        name: format json
        args: [ --autofix, --no-sort-keys ]

  - repo: local
    hooks:
      # Fails the commit when uncompressed test data exists under tests/data
      # (see bin/lfs_check and its companion bin/lfs_push).
      - id: lfs_check
        name: LFS data
        always_run: true
        pass_filenames: false
        entry: bin/lfs_check
        language: script
42 changes: 42 additions & 0 deletions bin/lfs_check
Original file line number Diff line number Diff line change
@@ -0,0 +1,42 @@
#!/bin/bash
# Pre-commit hook: fail when tests/data contains directories that have not
# yet been compressed into tests/data/.lfs/<name>.tar.gz (see bin/lfs_push).

RED='\033[0;31m'
GREEN='\033[0;32m'
NC='\033[0m'

# Operate from the repository root so the relative paths below resolve,
# and bail out rather than scanning the wrong directory if cd fails.
ROOT=$(git rev-parse --show-toplevel)
cd "$ROOT" || exit 1

new_data=()

# Enable nullglob to make globs expand to nothing when not matching
shopt -s nullglob

# Iterate through all directories in tests/data
for dir_path in tests/data/*; do

    # Extract directory name
    dir_name=$(basename "$dir_path")

    # Skip .lfs directory if it exists (it holds the archives themselves)
    [ "$dir_name" = ".lfs" ] && continue

    # Define compressed file path
    compressed_file="tests/data/.lfs/${dir_name}.tar.gz"

    # Already archived — nothing to report for this entry
    if [ -f "$compressed_file" ]; then
        continue
    fi

    new_data+=("$dir_name")
done

if [ ${#new_data[@]} -gt 0 ]; then
    echo -e "${RED}✗${NC} New test data detected at /tests/data:"
    echo -e "  ${GREEN}${new_data[@]}${NC}"
    echo -e "\nEither delete or run ${GREEN}./bin/lfs_push${NC}"
    echo -e "(lfs_push will compress the files into /tests/data/.lfs/, upload to LFS, and add them to your commit)"
    exit 1
fi
98 changes: 98 additions & 0 deletions bin/lfs_push
Original file line number Diff line number Diff line change
@@ -0,0 +1,98 @@
#!/bin/bash
# Compresses directories in tests/data/* into tests/data/.lfs/dirname.tar.gz
# Pushes to LFS

set -e

# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m' # No Color

# Operate from the repository root so the relative paths below resolve,
# and bail out rather than archiving from the wrong directory if cd fails.
ROOT=$(git rev-parse --show-toplevel)
cd "$ROOT" || exit 1

# Check if tests/data exists
if [ ! -d "tests/data" ]; then
    echo -e "${YELLOW}No tests/data directory found, skipping compression.${NC}"
    exit 0
fi

# Track the directories we compress so we can report and upload afterwards
compressed_dirs=()

# nullglob makes the glob expand to nothing when tests/data is empty —
# without it the loop would receive the literal string "tests/data/*"
# (the previous non-empty-string test did not catch that case).
shopt -s nullglob

# Iterate through all directories in tests/data
for dir_path in tests/data/*; do
    # Only directories are candidate data sets
    [ -d "$dir_path" ] || continue

    # Extract directory name
    dir_name=$(basename "$dir_path")

    # Skip .lfs directory if it exists (it holds the archives themselves)
    [ "$dir_name" = ".lfs" ] && continue

    # Define compressed file path
    compressed_file="tests/data/.lfs/${dir_name}.tar.gz"

    # Check if compressed file already exists
    if [ -f "$compressed_file" ]; then
        continue
    fi

    echo -e "  ${YELLOW}Compressing${NC} $dir_path -> $compressed_file"

    # Show directory size before compression
    dir_size=$(du -sh "$dir_path" | cut -f1)
    echo -e "  Data size: ${YELLOW}$dir_size${NC}"

    # Create compressed archive, excluding common temp/OS junk files.
    # tar runs as an `if` condition so a failure reaches the error branch:
    # under `set -e` a bare tar call would abort the script before the old
    # `[ $? -eq 0 ]` check could ever run.
    if tar -czf "$compressed_file" \
        --exclude='*.tmp' \
        --exclude='*.temp' \
        --exclude='.DS_Store' \
        --exclude='Thumbs.db' \
        --checkpoint=1000 \
        --checkpoint-action=dot \
        -C "tests/data" \
        "$dir_name"; then
        # Show compressed file size
        compressed_size=$(du -sh "$compressed_file" | cut -f1)
        echo -e "  ${GREEN}✓${NC} Successfully compressed $dir_name (${GREEN}$dir_size${NC} → ${GREEN}$compressed_size${NC})"
        compressed_dirs+=("$dir_name")

        # Stage the archive so it lands in the user's next commit
        git add "$compressed_file"

        echo -e "  ${GREEN}✓${NC} git-add $compressed_file"
    else
        echo -e "  ${RED}✗${NC} Failed to compress $dir_name"
        exit 1
    fi
done

if [ ${#compressed_dirs[@]} -gt 0 ]; then
    echo -e "${GREEN}✓${NC} Compressed file references added. Uploading..."
    git lfs push origin "$(git branch --show-current)"
    echo -e "${GREEN}✓${NC} Uploaded to LFS"
else
    echo -e "${GREEN}✓${NC} No test data to compress"
fi

31 changes: 25 additions & 6 deletions dimos/utils/test_testing.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,12 +12,15 @@ def test_pull_file():

# delete decompressed test file if it exists
if test_file_decompressed.exists():
test_file_compressed.unlink()
test_file_decompressed.unlink()

# delete lfs archive file if it exists
if test_file_compressed.exists():
test_file_compressed.unlink()

assert not test_file_compressed.exists()
assert not test_file_decompressed.exists()

# pull the lfs file reference from git
env = os.environ.copy()
env["GIT_LFS_SKIP_SMUDGE"] = "1"
Expand All @@ -31,7 +34,7 @@ def test_pull_file():

# ensure we have a pointer file from git (small ASCII text file)
assert test_file_compressed.exists()
test_file_compressed.stat().st_size < 200
assert test_file_compressed.stat().st_size < 200

# trigger a data file pull
assert testing.testData(test_file_name) == test_file_decompressed
Expand All @@ -42,12 +45,18 @@ def test_pull_file():

# validate hashes
with test_file_compressed.open("rb") as f:
assert test_file_compressed.stat().st_size > 200
compressed_sha256 = hashlib.sha256(f.read()).hexdigest()
assert compressed_sha256 == "cdfd708d66e6dd5072ed7636fc10fb97754f8d14e3acd6c3553663e27fc96065"
assert (
compressed_sha256 == "b8cf30439b41033ccb04b09b9fc8388d18fb544d55b85c155dbf85700b9e7603"
)

with test_file_decompressed.open("rb") as f:
decompressed_sha256 = hashlib.sha256(f.read()).hexdigest()
assert decompressed_sha256 == "55d451dde49b05e3ad386fdd4ae9e9378884b8905bff1ca8aaea7d039ff42ddd"
assert (
decompressed_sha256
== "55d451dde49b05e3ad386fdd4ae9e9378884b8905bff1ca8aaea7d039ff42ddd"
)


def test_pull_dir():
Expand Down Expand Up @@ -79,13 +88,23 @@ def test_pull_dir():

# ensure we have a pointer file from git (small ASCII text file)
assert test_dir_compressed.exists()
test_dir_compressed.stat().st_size < 200
assert test_dir_compressed.stat().st_size < 200

# trigger a data file pull
assert testing.testData(test_dir_name) == test_dir_decompressed
assert test_dir_compressed.stat().st_size > 200

# validate data is received
assert test_dir_compressed.exists()
assert test_dir_decompressed.exists()

assert len(list(test_dir_decompressed.iterdir())) == 2
for [file, expected_hash] in zip(
sorted(test_dir_decompressed.iterdir()),
[
"6c3aaa9a79853ea4a7453c7db22820980ceb55035777f7460d05a0fa77b3b1b3",
"456cc2c23f4ffa713b4e0c0d97143c27e48bbe6ef44341197b31ce84b3650e74",
],
):
with file.open("rb") as f:
sha256 = hashlib.sha256(f.read()).hexdigest()
assert sha256 == expected_hash
Loading