Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
264 changes: 264 additions & 0 deletions backend/app/crud/isl_verse_markers_crud.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,264 @@
"""CRUD operations for the ISL verse markers API."""
from typing import List, Dict, Any
from sqlalchemy.orm import Session
from sqlalchemy.exc import SQLAlchemyError
import db_models
from custom_exceptions import NotAvailableException, AlreadyExistsException,UnprocessableException
from dependencies import logger


def _ensure_verse_zero(markers):
if not any(str(m["verse"]) == "0" for m in markers):
markers.insert(0, {"verse": 0, "time": "00:00:00:00"})
return markers


def update_verse_markers(
    db_session: Session,
    isl_bible_id: int,
    markers: List[Dict[str, Any]],
):
    """Update the stored verse markers for the given ISL Bible ID.

    Runs the same validations as the bulk-create path (strictly increasing
    timestamps, verses present in clean_bible for the video's book/chapter)
    so the update path can no longer persist invalid marker data.

    Raises:
        NotAvailableException: if the ISL video or its marker row is missing.
        UnprocessableException: if timestamps are out of order or a verse
            does not exist for the video's book/chapter.
    """
    logger.info("Updating ISL verse markers")

    isl_bible_rec = db_session.query(db_models.IslVideo).filter_by(
        id=isl_bible_id
    ).first()

    if not isl_bible_rec:
        logger.error("ISL Bible %s not found", isl_bible_id)
        raise NotAvailableException(
            detail=f"ISL Bible {isl_bible_id} not found"
        )

    record = db_session.query(db_models.IslVerseMarkers).filter_by(
        isl_video_id=isl_bible_id
    ).first()

    if not record:
        logger.error("Verse markers not found for ISL Bible %s", isl_bible_id)
        raise NotAvailableException(
            detail=f"Verse markers not found for ISL Bible {isl_bible_id}"
        )

    markers = _ensure_verse_zero(markers)

    # Validate exactly like add_verse_markers_bulk so create and update
    # enforce the same invariants (review finding: updates previously
    # bypassed verse validation against clean_bible).
    _validate_timestamp_order(markers)
    _validate_marker_verses(db_session, isl_bible_rec, markers)

    record.verse_markers_json = markers

    db_session.commit()
    db_session.refresh(record)

    return record

def get_verse_markers(
    db_session: Session,
    isl_bible_id: int,
):
    """Retrieves verse markers for the given islbible id"""
    markers_query = db_session.query(db_models.IslVerseMarkers)
    record = markers_query.filter_by(isl_video_id=isl_bible_id).first()

    if record is None:
        logger.error("Verse markers not found for ISL Bible %s", isl_bible_id)
        raise NotAvailableException(
            detail=f"Verse markers not found for ISL Bible {isl_bible_id}"
        )

    return record
def _build_bulk_delete_response(deleted_ids, errors):
"""Build consistent bulk delete response structure."""
return {
"data": {"deletedCount": len(deleted_ids),
"deletedIds": deleted_ids,
"errors": errors if errors else None,
},
"all_failed": len(deleted_ids) == 0 and len(errors) > 0,
"has_errors": len(errors) > 0,
}

def delete_verse_markers_bulk(
    db_session: Session,
    isl_bible_ids: List[int],
):
    """Deletes verse markers for the given ISL Bible IDs.

    Best-effort per ID: each delete is committed in its own transaction so
    one failure cannot abort (or roll back) the others, and failures are
    collected into the response instead of raising.

    Returns the structure built by _build_bulk_delete_response.
    """
    logger.info("Deleting ISL verse markers")
    deleted_ids = []
    errors = []

    for isl_id in isl_bible_ids:
        try:
            record = db_session.query(db_models.IslVerseMarkers).filter_by(
                isl_video_id=isl_id
            ).first()

            if not record:
                logger.error("Verse markers not found for ISL Bible %s", isl_id)
                errors.append(f"Verse markers not found for ISL Bible {isl_id}")
                continue

            db_session.delete(record)
            # Commit per ID so deleted_ids only ever reports rows that were
            # actually removed, even if a later ID fails.
            db_session.commit()
            deleted_ids.append(isl_id)

        except SQLAlchemyError as exc:
            # Roll back so a failed flush/commit does not leave the session
            # in a failed-transaction state (PendingRollbackError on the
            # next operation), which previously turned a partial-failure
            # response into a 500.
            db_session.rollback()
            logger.error("Error deleting ISL Bible %s: %s", isl_id, exc)
            errors.append(f"Error deleting ISL Bible {isl_id}: {exc}")

    return _build_bulk_delete_response(deleted_ids, errors)



def get_all_verse_markers(db_session: Session):
    """Get all verse markers without isl bible id"""
    markers_query = db_session.query(db_models.IslVerseMarkers)
    return markers_query.all()

def _timestamp_to_frames(timestamp: str) -> int:
hours, minutes, seconds, frames = map(int, timestamp.split(":"))
return (((hours * 60) + minutes) * 60 + seconds) * 100 + frames

def _validate_marker_verses(db_session, isl_bible, markers):
    """
    Validate that every marker's verse exists for the video's book/chapter.

    Accepts plain verse values (int or numeric string) and range markers of
    the form "12_13". Verse 0 is the intro marker and is always allowed.

    Raises:
        UnprocessableException: for a verse absent from clean_bible, or for
            an inverted range (start > end).
    """
    book_id = isl_bible.book_id
    chapter = isl_bible.chapter

    # Allow intro chapter
    if chapter == 0:
        return

    valid_verses = {
        int(row.verse)
        for row in db_session.query(db_models.CleanBible).filter_by(
            book_id=book_id,
            chapter=chapter
        ).all()
    }

    for marker in markers:
        verse = marker["verse"]

        # Intro marker. Match _ensure_verse_zero, which treats both int 0
        # and the string "0" as the intro marker (the old `verse == 0`
        # check wrongly rejected the string form).
        if str(verse) == "0":
            continue

        if isinstance(verse, str) and "_" in verse:
            start, end = map(int, verse.split("_"))

            # An inverted range (e.g. "13_12") would otherwise validate
            # vacuously because range(start, end + 1) is empty.
            if start > end:
                raise UnprocessableException(
                    detail=f"Invalid verse {verse} for chapter {chapter}"
                )

            for verse_number in range(start, end + 1):
                if verse_number not in valid_verses:
                    raise UnprocessableException(
                        detail=f"Invalid verse {verse_number} for chapter {chapter}"
                    )

        else:
            if int(verse) not in valid_verses:
                raise UnprocessableException(
                    detail=f"Invalid verse {verse} for chapter {chapter}"
                )

def _validate_timestamp_order(markers):
    """Ensure marker timecodes are strictly increasing; raise otherwise."""
    frame_values = [_timestamp_to_frames(marker["time"]) for marker in markers]

    for earlier, later in zip(frame_values, frame_values[1:]):
        if later <= earlier:
            raise UnprocessableException(
                detail="timestamps must be in increasing order"
            )

def add_verse_markers_bulk(
    db_session: Session,
    payload: Dict[int, List[Dict[str, Any]]],
):
    """
    Bulk create verse markers.

    Each payload key is an ISL video id; each value is that video's marker
    list. Every batch entry is validated (video exists, no markers yet,
    timestamps strictly increasing, verses present in clean_bible) before
    the single commit at the end, so the whole payload is all-or-nothing at
    the database level. A mid-loop exception leaves earlier `add()` calls
    pending in the session — presumably the caller/API layer rolls back;
    TODO confirm.

    Payload format:
    {
        1: [
            {
                "verse": 0,
                "time": "00:00:00:00"
            }
        ],
        2: [
            {
                "verse": "12_13",
                "time": "00:01:20:10"
            }
        ]
    }

    Raises:
        NotAvailableException: when an ISL video id does not exist.
        AlreadyExistsException: when markers already exist for a video.
        UnprocessableException: from the timestamp/verse validations.
    """

    logger.info("Adding bulk ISL verse markers")

    created_records = []

    for isl_bible_id, markers in payload.items():
        # Normalise: markers may arrive as pydantic models or plain dicts.
        markers = [
            m.model_dump() if hasattr(m, "model_dump") else m
            for m in markers]
        # Validate ISL video exists
        isl_video = (
            db_session.query(db_models.IslVideo)
            .filter_by(id=isl_bible_id)
            .first()
        )

        if not isl_video:
            logger.error(
                "ISL Video %s not found",
                isl_bible_id
            )
            raise NotAvailableException(
                detail=f"ISL Video {isl_bible_id} not found"
            )

        # Check existing verse markers (one marker row per video)
        existing = (
            db_session.query(db_models.IslVerseMarkers)
            .filter_by(isl_video_id=isl_bible_id)
            .first()
        )

        if existing:
            logger.error(
                "Verse markers already exist for ISL Video %s",
                isl_bible_id
            )
            raise AlreadyExistsException(
                detail=(
                    f"Verse markers already exist "
                    f"for ISL Video {isl_bible_id}"
                )
            )

        # Ensure verse 0 (intro marker) exists
        markers = _ensure_verse_zero(markers)

        # Validate timestamp order
        _validate_timestamp_order(markers)

        # Validate verses against clean_bible
        # chapter 0 allowed
        _validate_marker_verses(db_session,isl_video,markers)

        record = db_models.IslVerseMarkers(
            isl_video_id=isl_bible_id,
            verse_markers_json=markers
        )

        # Staged only; committed once after the whole batch validates.
        db_session.add(record)

        created_records.append({
            "id": isl_bible_id,
            "markers": markers
        })

    db_session.commit()

    return created_records
11 changes: 10 additions & 1 deletion backend/app/db_models.py
Original file line number Diff line number Diff line change
Expand Up @@ -309,4 +309,13 @@ class M2MClient(Base):
client_secret_hash = Column(String, nullable=False)
name = Column(String, nullable=False)
is_active = Column(Boolean, nullable=False, default=True)
created_at = Column(DateTime(timezone=True), nullable=False, default=utcnow)
created_at = Column(DateTime(timezone=True), nullable=False, default=utcnow)

class IslVerseMarkers(Base):
    """Corresponds to table isl_verse_marker in vachan DB(postgres)"""
    __tablename__ = "isl_verse_marker"

    # Surrogate primary key.
    id = Column(Integer, primary_key=True, index=True)
    # One marker row per ISL video; row is removed when the video is
    # deleted (ondelete="CASCADE"), and unique=True enforces the 1:1.
    isl_video_id = Column(Integer, ForeignKey("isl_video.id", ondelete="CASCADE"),
                          nullable=False, unique=True)
    # JSONB list of marker objects — presumably
    # [{"verse": ..., "time": "HH:MM:SS:FF"}, ...]; confirm against the
    # CRUD layer that writes this column.
    verse_markers_json = Column(JSONB, nullable=False)
2 changes: 2 additions & 0 deletions backend/app/main.py
Original file line number Diff line number Diff line change
Expand Up @@ -32,6 +32,7 @@
from router.structural import router as structural_router
from router.m2m_auth import router as m2m_auth_router
from router.content_songs import router as content_songs_router
from router.isl_verse_markers import router as isl_verse_marker_router


init_db()
Expand Down Expand Up @@ -190,3 +191,4 @@ async def root():
app.include_router(structural_router)
app.include_router(m2m_auth_router)
app.include_router(content_songs_router)
app.include_router(isl_verse_marker_router)
Loading
Loading