diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml
new file mode 100644
index 0000000..a8e733f
--- /dev/null
+++ b/.github/workflows/ci.yml
@@ -0,0 +1,35 @@
+name: CI
+
+on:
+  push:
+    branches: [ main ]
+  pull_request:
+
+jobs:
+  tests:
+    runs-on: ubuntu-latest
+
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v3
+
+      - name: Set up Python
+        uses: actions/setup-python@v4
+        with:
+          python-version: '3.10'
+
+      - name: Install dependencies
+        run: |
+          python -m pip install --upgrade pip
+          pip install torch --index-url https://download.pytorch.org/whl/cpu
+          pip install numpy pandas scikit-learn
+          pip install -r requirements-dev.txt
+
+      - name: Ruff (tests only)
+        run: ruff check tests
+
+      - name: Black (tests only)
+        run: black --check tests
+
+      - name: Run tests
+        run: pytest
diff --git a/README.md b/README.md
index 0ac9b0d..1c85829 100644
--- a/README.md
+++ b/README.md
@@ -245,3 +245,21 @@ Repository, çeşitli transformer varyantları için performans karşılaştırmaları
 - Model optimizasyon teknikleri analizi
 
 Detaylı sonuçlar için `Architecture/` dizinini ve generate edilen PNG dosyalarını kontrol edin.
+
+## ✅ Test ve Kod Kalitesi
+
+Hafif testleri ve kod kalite kontrollerini çalıştırmak için isteğe bağlı geliştirme bağımlılıklarını yükleyin:
+
+```bash
+pip install -r requirements-dev.txt
+```
+
+Ardından aşağıdaki komutları çalıştırabilirsiniz:
+
+```bash
+pytest
+ruff check tests
+black --check tests
+```
+
+Sürekli entegrasyon iş akışı bu kontrolleri otomatik olarak yürütür.
diff --git a/pytest.ini b/pytest.ini
new file mode 100644
index 0000000..5ee6477
--- /dev/null
+++ b/pytest.ini
@@ -0,0 +1,2 @@
+[pytest]
+testpaths = tests
diff --git a/requirements-dev.txt b/requirements-dev.txt
index 5a5074e..8f19fe0 100644
--- a/requirements-dev.txt
+++ b/requirements-dev.txt
@@ -1,5 +1,4 @@
-# Development and Training Utilities
-jupyter>=1.0.0
-notebook>=6.5.0
-fairscale>=0.4.0
-deepspeed>=0.9.0
+# Optional development dependencies
+pytest>=7.4.0
+black>=23.9.0
+ruff>=0.1.9
diff --git a/tests/test_rope_modules.py b/tests/test_rope_modules.py
new file mode 100644
index 0000000..78ca9be
--- /dev/null
+++ b/tests/test_rope_modules.py
@@ -0,0 +1,53 @@
+import importlib.util
+from pathlib import Path
+
+import pytest
+
+torch = pytest.importorskip("torch")
+
+
+def load_rope_module(unique_name: str):
+    base_dir = Path(__file__).resolve().parents[1]
+    module_path = base_dir / "Architecture" / "partial-rope-full-rope.py"
+    spec = importlib.util.spec_from_file_location(unique_name, module_path)
+    module = importlib.util.module_from_spec(spec)
+    assert spec.loader is not None
+    spec.loader.exec_module(module)
+    return module
+
+
+def test_partial_rope_preserves_non_rotary_dimensions():
+    module = load_rope_module("rope_partial_module")
+    dim = 12
+    partial_factor = 0.5
+    rope = module.PartialRoPE(dim=dim, partial_rotary_factor=partial_factor)
+
+    torch.manual_seed(0)
+    q = torch.randn(1, 2, 3, dim)
+    k = torch.randn(1, 2, 3, dim)
+
+    q_embed, k_embed = rope(q.clone(), k.clone())
+
+    assert q_embed.shape == q.shape
+    assert k_embed.shape == k.shape
+
+    rotary_dim = int(dim * partial_factor)
+    assert torch.allclose(q_embed[..., rotary_dim:], q[..., rotary_dim:])
+    assert torch.allclose(k_embed[..., rotary_dim:], k[..., rotary_dim:])
+
+
+def test_attention_with_rope_output_shape():
+    module = load_rope_module("rope_attention_module")
+    dim = 16
+    num_heads = 4
+    head_dim = dim // num_heads
+    rope = module.FullRoPE(dim=head_dim)
+    attention = module.AttentionWithRoPE(dim=dim, num_heads=num_heads, rope_module=rope)
+
+    torch.manual_seed(1)
+    dummy_input = torch.randn(2, 5, dim)
+
+    output = attention(dummy_input)
+
+    assert output.shape == dummy_input.shape
+    assert torch.isfinite(output).all()
diff --git a/tests/test_time_series_transformer.py b/tests/test_time_series_transformer.py
new file mode 100644
index 0000000..bc170c6
--- /dev/null
+++ b/tests/test_time_series_transformer.py
@@ -0,0 +1,66 @@
+import importlib.util
+import sys
+from pathlib import Path
+
+import numpy as np
+import pandas as pd
+import pytest
+
+torch = pytest.importorskip("torch")
+
+
+def load_time_series_module(unique_name: str):
+    base_dir = Path(__file__).resolve().parents[1]
+    module_path = base_dir / "Time series - Transformers" / "train.py"
+    spec = importlib.util.spec_from_file_location(unique_name, module_path)
+    module = importlib.util.module_from_spec(spec)
+    assert spec.loader is not None
+    spec.loader.exec_module(module)
+    return module
+
+
+def test_time_series_transformer_forward_output_shape():
+    module = load_time_series_module("time_series_train_forward")
+    model = module.TimeSeriesTransformer(
+        input_dim=3, d_model=12, n_heads=3, num_layers=1
+    )
+
+    dummy_batch = torch.randn(2, 5, 3)
+    output = model(dummy_batch)
+
+    assert output.shape == (2, 1)
+    assert torch.isfinite(output).all()
+
+
+def test_train_creates_model_artifact(tmp_path, monkeypatch):
+    module = load_time_series_module("time_series_train_exec")
+
+    data = pd.DataFrame({"value": np.linspace(0, 1, 40)})
+    data_path = tmp_path / "synthetic.csv"
+    data.to_csv(data_path, index=False)
+
+    model_path = tmp_path / "model.pth"
+
+    monkeypatch.setattr(
+        sys,
+        "argv",
+        [
+            "train.py",
+            "--data",
+            str(data_path),
+            "--seq_length",
+            "5",
+            "--batch_size",
+            "4",
+            "--epochs",
+            "1",
+            "--lr",
+            "0.01",
+            "--model_path",
+            str(model_path),
+        ],
+    )
+
+    module.train()
+
+    assert model_path.exists()