Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
35 changes: 35 additions & 0 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,35 @@
# Continuous-integration workflow: lint the test suite and run pytest on
# every push to main and on every pull request.
name: CI

on:
  push:
    branches: [ main ]
  pull_request:

jobs:
  tests:
    runs-on: ubuntu-latest

    steps:
      - name: Checkout repository
        uses: actions/checkout@v3

      - name: Set up Python
        uses: actions/setup-python@v4
        with:
          python-version: '3.10'

      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          # CPU-only torch wheel keeps the install small and avoids CUDA downloads.
          pip install torch --index-url https://download.pytorch.org/whl/cpu
          pip install numpy pandas scikit-learn
          # Runtime requirements must be installed too: the tests load project
          # modules (e.g. Architecture/partial-rope-full-rope.py) that import
          # packages such as transformers declared in requirements.txt, and
          # pytest would otherwise fail with ModuleNotFoundError.
          pip install -r requirements.txt
          pip install -r requirements-dev.txt
Comment on lines +21 to +26
Copy link
Copy Markdown

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

P1 Badge Install runtime requirements before running tests

The workflow only installs torch, numpy, pandas, scikit‑learn and the dev tools, but the added tests import modules such as Architecture/partial-rope-full-rope.py, which executes from transformers import AutoTokenizer and other dependencies that live in requirements.txt. Because those packages aren’t installed here, pytest will raise ModuleNotFoundError before any test runs, causing the CI job to fail deterministically. Install the regular project requirements (or otherwise skip the tests when optional dependencies are missing) before executing linting and pytest.

Useful? React with 👍 / 👎.


- name: Ruff (tests only)
run: ruff check tests

- name: Black (tests only)
run: black --check tests

- name: Run tests
run: pytest
18 changes: 18 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -245,3 +245,21 @@ Repository, çeşitli transformer varyantları için performans karşılaştırm
- Model optimizasyon teknikleri analizi

Detaylı sonuçlar için `Architecture/` dizinini ve generate edilen PNG dosyalarını kontrol edin.

## ✅ Test ve Kod Kalitesi

Hafif testleri ve kod kalite kontrollerini çalıştırmak için isteğe bağlı geliştirme bağımlılıklarını yükleyin:

```bash
pip install -r requirements-dev.txt
```

Ardından aşağıdaki komutları çalıştırabilirsiniz:

```bash
pytest
ruff check tests
black --check tests
```

Sürekli entegrasyon iş akışı bu kontrolleri otomatik olarak yürütür.
2 changes: 2 additions & 0 deletions pytest.ini
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
[pytest]
# Restrict test collection to the tests/ directory so pytest does not try to
# import the top-level scripts (which pull in heavy optional dependencies).
testpaths = tests
9 changes: 4 additions & 5 deletions requirements-dev.txt
Original file line number Diff line number Diff line change
@@ -1,5 +1,4 @@
# Development and Training Utilities
jupyter>=1.0.0
notebook>=6.5.0
fairscale>=0.4.0
deepspeed>=0.9.0
# Optional development dependencies
pytest>=7.4.0
black>=23.9.0
ruff>=0.1.9
53 changes: 53 additions & 0 deletions tests/test_rope_modules.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,53 @@
import importlib.util
from pathlib import Path

import pytest

torch = pytest.importorskip("torch")


def load_rope_module(unique_name: str):
    """Load ``Architecture/partial-rope-full-rope.py`` as a Python module.

    The file name contains hyphens, so it cannot be imported with a normal
    ``import`` statement; it is loaded directly from its path instead.

    Args:
        unique_name: Name to give the loaded module. Use a distinct name per
            test so each test executes a fresh copy of the module.

    Returns:
        The freshly executed module object.
    """
    base_dir = Path(__file__).resolve().parents[1]
    module_path = base_dir / "Architecture" / "partial-rope-full-rope.py"
    spec = importlib.util.spec_from_file_location(unique_name, module_path)
    # spec is None when the target file is missing; fail with a clear message
    # instead of an opaque AttributeError from module_from_spec(None).
    assert spec is not None and spec.loader is not None, f"cannot load {module_path}"
    module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(module)
    return module


def test_partial_rope_preserves_non_rotary_dimensions():
    """The pass-through half of a partial-RoPE embedding must stay untouched."""
    module = load_rope_module("rope_partial_module")
    dim, partial_factor = 12, 0.5
    rope = module.PartialRoPE(dim=dim, partial_rotary_factor=partial_factor)

    torch.manual_seed(0)
    q = torch.randn(1, 2, 3, dim)
    k = torch.randn(1, 2, 3, dim)

    q_out, k_out = rope(q.clone(), k.clone())

    # The embedding is shape-preserving.
    assert q_out.shape == q.shape
    assert k_out.shape == k.shape

    # Only the first `rotary_dim` channels are rotated; the remaining
    # channels must pass through unchanged.
    rotary_dim = int(dim * partial_factor)
    for original, embedded in ((q, q_out), (k, k_out)):
        assert torch.allclose(embedded[..., rotary_dim:], original[..., rotary_dim:])


def test_attention_with_rope_output_shape():
    """Attention with full RoPE maps (B, T, D) inputs to (B, T, D) outputs."""
    module = load_rope_module("rope_attention_module")
    embed_dim, num_heads = 16, 4

    rope = module.FullRoPE(dim=embed_dim // num_heads)
    attention = module.AttentionWithRoPE(
        dim=embed_dim, num_heads=num_heads, rope_module=rope
    )

    torch.manual_seed(1)
    batch = torch.randn(2, 5, embed_dim)

    result = attention(batch)

    # Attention is shape-preserving and must not emit NaN/inf values.
    assert result.shape == batch.shape
    assert torch.isfinite(result).all()
66 changes: 66 additions & 0 deletions tests/test_time_series_transformer.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,66 @@
import importlib.util
import sys
from pathlib import Path

import numpy as np
import pandas as pd
import pytest

torch = pytest.importorskip("torch")


def load_time_series_module(unique_name: str):
    """Load ``Time series - Transformers/train.py`` as a Python module.

    The directory name contains spaces, so the script cannot be imported via a
    package path; it is loaded directly from the file instead.

    Args:
        unique_name: Name to give the loaded module. Use a distinct name per
            test so each test executes a fresh copy of the module.

    Returns:
        The freshly executed module object.
    """
    base_dir = Path(__file__).resolve().parents[1]
    module_path = base_dir / "Time series - Transformers" / "train.py"
    spec = importlib.util.spec_from_file_location(unique_name, module_path)
    # spec is None when the target file is missing; fail with a clear message
    # instead of an opaque AttributeError from module_from_spec(None).
    assert spec is not None and spec.loader is not None, f"cannot load {module_path}"
    module = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(module)
    return module


def test_time_series_transformer_forward_output_shape():
    """A (batch, seq, features) batch yields one finite prediction per sample."""
    module = load_time_series_module("time_series_train_forward")

    model = module.TimeSeriesTransformer(
        input_dim=3, d_model=12, n_heads=3, num_layers=1
    )

    batch = torch.randn(2, 5, 3)
    prediction = model(batch)

    # One scalar prediction per batch element, with no NaN/inf values.
    assert prediction.shape == (2, 1)
    assert torch.isfinite(prediction).all()


def test_train_creates_model_artifact(tmp_path, monkeypatch):
    """Running the training entry point end-to-end writes the model file."""
    module = load_time_series_module("time_series_train_exec")

    # Tiny synthetic series: 40 evenly spaced points in [0, 1], enough for a
    # one-epoch smoke run with a short sequence length.
    csv_path = tmp_path / "synthetic.csv"
    pd.DataFrame({"value": np.linspace(0, 1, 40)}).to_csv(csv_path, index=False)

    artifact = tmp_path / "model.pth"

    # train() reads its configuration from the command line, so fake argv.
    cli_args = [
        "train.py",
        "--data", str(csv_path),
        "--seq_length", "5",
        "--batch_size", "4",
        "--epochs", "1",
        "--lr", "0.01",
        "--model_path", str(artifact),
    ]
    monkeypatch.setattr(sys, "argv", cli_args)

    module.train()

    assert artifact.exists()
Loading