From d4532e593ba959c2a8d7e522e6af655b894f5054 Mon Sep 17 00:00:00 2001 From: Rory Byrne Date: Tue, 10 Feb 2026 13:03:16 +0000 Subject: [PATCH 1/3] feat: implement deposition upload workflow with semantics domain MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Add new semantics domain (ontologies + schemas) and enhance the deposition domain with conventions, spreadsheet-based metadata entry, file management, and the validation return-to-draft pipeline. New domains and aggregates: - Ontology: immutable versioned term collections with hierarchy support - Schema: typed field definitions referencing ontologies - Convention: submission templates bundling schema + file requirements Deposition enhancements: - Full lifecycle: create → metadata → files → submit → return-to-draft - Spreadsheet template generation with ontology-backed dropdowns - Spreadsheet upload with structural validation - File upload with SHA-256 checksums and convention enforcement - Domain events for all deposition mutations Infrastructure: - OBO Graphs parser for ontology import from external sources - Postgres repositories for ontology, schema, convention - Cross-domain reader adapters (SchemaReader, OntologyReader) - LocalFileStorageAdapter with atomic writes - OpenpyxlSpreadsheetAdapter for .xlsx generation and parsing - REST routes: /ontologies, /schemas, /conventions, /depositions - Database migration for new tables Closes #30 396 unit tests passing. --- Justfile | 7 + .../versions/add_deposition_tables.py | 108 ++++ .../ontologies/biological-sex.obographs.json | 82 +++ server/ontologies/license.obographs.json | 122 +++++ server/osa/application/api/rest/app.py | 8 + .../application/api/v1/routes/conventions.py | 46 ++ .../application/api/v1/routes/depositions.py | 164 ++++++ .../application/api/v1/routes/ontologies.py | 59 +++ .../osa/application/api/v1/routes/schemas.py | 46 ++ server/osa/application/di.py | 4 + .../osa/domain/deposition/command/create.py | 20 +- .../deposition/command/create_convention.py | 50 ++ .../domain/deposition/command/delete_files.py | 34 +- .../osa/domain/deposition/command/submit.py | 24 +- .../osa/domain/deposition/command/update.py | 20 +- .../osa/domain/deposition/command/upload.py | 23 +- .../deposition/command/upload_spreadsheet.py | 46 ++ server/osa/domain/deposition/event/created.py | 12 + .../domain/deposition/event/file_deleted.py | 10 + .../domain/deposition/event/file_uploaded.py | 12 + .../deposition/event/metadata_updated.py | 12 + .../handler}/__init__.py | 0 .../deposition/handler/return_to_draft.py | 27 + .../osa/domain/deposition/model/aggregate.py | 59 ++- .../osa/domain/deposition/model/convention.py | 18 + server/osa/domain/deposition/model/value.py | 16 +- server/osa/domain/deposition/port/__init__.py | 4 +- .../deposition/port/convention_repository.py | 24 + .../domain/deposition/port/ontology_reader.py | 15 + .../osa/domain/deposition/port/repository.py | 25 +- .../domain/deposition/port/schema_reader.py | 18 + .../osa/domain/deposition/port/spreadsheet.py | 40 ++ server/osa/domain/deposition/port/storage.py | 38 +- .../domain/deposition/query/download_file.py | 35 ++ .../deposition/query/download_template.py | 52 ++ .../domain/deposition/query/get_convention.py | 39 ++ .../domain/deposition/query/get_deposition.py | 44 ++ .../deposition/query/list_conventions.py | 44 ++ .../deposition/query/list_depositions.py | 54 ++ .../domain/deposition/service/convention.py | 57 +++ .../domain/deposition/service/deposition.py | 202 +++++++- 
.../osa/domain/deposition/util/di/provider.py | 70 ++- .../{schema/adapter => semantics}/__init__.py | 0 .../{schema => semantics}/command/__init__.py | 0 .../semantics/command/create_ontology.py | 67 +++ .../domain/semantics/command/create_schema.py | 41 ++ .../semantics/command/import_ontology.py | 44 ++ .../{schema => semantics}/event/__init__.py | 0 .../model => semantics/handler}/__init__.py | 0 .../port => semantics/model}/__init__.py | 0 server/osa/domain/semantics/model/ontology.py | 36 ++ server/osa/domain/semantics/model/schema.py | 23 + server/osa/domain/semantics/model/value.py | 80 +++ .../query => semantics/port}/__init__.py | 0 .../domain/semantics/port/ontology_fetcher.py | 13 + .../semantics/port/ontology_repository.py | 24 + .../semantics/port/schema_repository.py | 24 + .../service => semantics/query}/__init__.py | 0 .../domain/semantics/query/get_ontology.py | 34 ++ .../osa/domain/semantics/query/get_schema.py | 32 ++ .../domain/semantics/query/list_ontologies.py | 44 ++ .../domain/semantics/query/list_schemas.py | 42 ++ .../osa/domain/semantics/service/__init__.py | 0 .../osa/domain/semantics/service/ontology.py | 63 +++ server/osa/domain/semantics/service/schema.py | 61 +++ server/osa/domain/semantics/util/__init__.py | 0 .../osa/domain/semantics/util/di/__init__.py | 0 .../osa/domain/semantics/util/di/provider.py | 53 ++ server/osa/domain/semantics/util/obographs.py | 91 ++++ server/osa/domain/shared/model/srn.py | 12 +- server/osa/domain/shared/model/validator.py | 8 + .../validation/event/validation_failed.py | 10 + .../validation/handler/validate_deposition.py | 39 +- server/osa/infrastructure/event/di.py | 3 + server/osa/infrastructure/http/__init__.py | 1 + server/osa/infrastructure/http/di.py | 35 ++ .../infrastructure/http/ontology_fetcher.py | 17 + .../persistence/adapter/readers.py | 84 +++ .../persistence/adapter/spreadsheet.py | 134 +++++ .../persistence/adapter/storage.py | 108 ++-- server/osa/infrastructure/persistence/di.py | 43 ++ .../persistence/mappers/deposition.py | 25 +- .../persistence/repository/convention.py | 68 +++ .../persistence/repository/deposition.py | 55 +- .../persistence/repository/ontology.py | 109 ++++ .../persistence/repository/schema.py | 60 +++ .../osa/infrastructure/persistence/tables.py | 72 ++- server/pyproject.toml | 2 + .../unit/domain/auth/test_handler_configs.py | 121 ++++- .../unit/domain/deposition/test_convention.py | 72 +++ .../deposition/test_convention_service.py | 149 ++++++ .../unit/domain/deposition/test_deposition.py | 132 +++++ .../deposition/test_deposition_service.py | 477 ++++++++++++++++++ .../domain/deposition/test_event_chain.py | 105 ++++ .../domain/deposition/test_return_to_draft.py | 64 +++ .../domain/deposition/test_spreadsheet.py | 178 +++++++ .../tests/unit/domain/semantics/__init__.py | 0 .../domain/semantics/test_import_ontology.py | 137 +++++ .../domain/semantics/test_obographs_parser.py | 295 +++++++++++ .../unit/domain/semantics/test_ontology.py | 101 ++++ .../domain/semantics/test_ontology_service.py | 171 +++++++ .../unit/domain/semantics/test_schema.py | 118 +++++ .../domain/semantics/test_schema_service.py | 177 +++++++ .../http/test_ontology_fetcher.py | 54 ++ server/uv.lock | 35 ++ 105 files changed, 5660 insertions(+), 173 deletions(-) create mode 100644 server/migrations/versions/add_deposition_tables.py create mode 100644 server/ontologies/biological-sex.obographs.json create mode 100644 server/ontologies/license.obographs.json create mode 100644 
server/osa/application/api/v1/routes/conventions.py create mode 100644 server/osa/application/api/v1/routes/depositions.py create mode 100644 server/osa/application/api/v1/routes/ontologies.py create mode 100644 server/osa/application/api/v1/routes/schemas.py create mode 100644 server/osa/domain/deposition/command/create_convention.py create mode 100644 server/osa/domain/deposition/command/upload_spreadsheet.py create mode 100644 server/osa/domain/deposition/event/created.py create mode 100644 server/osa/domain/deposition/event/file_deleted.py create mode 100644 server/osa/domain/deposition/event/file_uploaded.py create mode 100644 server/osa/domain/deposition/event/metadata_updated.py rename server/osa/domain/{schema => deposition/handler}/__init__.py (100%) create mode 100644 server/osa/domain/deposition/handler/return_to_draft.py create mode 100644 server/osa/domain/deposition/model/convention.py create mode 100644 server/osa/domain/deposition/port/convention_repository.py create mode 100644 server/osa/domain/deposition/port/ontology_reader.py create mode 100644 server/osa/domain/deposition/port/schema_reader.py create mode 100644 server/osa/domain/deposition/port/spreadsheet.py create mode 100644 server/osa/domain/deposition/query/download_file.py create mode 100644 server/osa/domain/deposition/query/download_template.py create mode 100644 server/osa/domain/deposition/query/get_convention.py create mode 100644 server/osa/domain/deposition/query/get_deposition.py create mode 100644 server/osa/domain/deposition/query/list_conventions.py create mode 100644 server/osa/domain/deposition/query/list_depositions.py create mode 100644 server/osa/domain/deposition/service/convention.py rename server/osa/domain/{schema/adapter => semantics}/__init__.py (100%) rename server/osa/domain/{schema => semantics}/command/__init__.py (100%) create mode 100644 server/osa/domain/semantics/command/create_ontology.py create mode 100644 server/osa/domain/semantics/command/create_schema.py create mode 100644 server/osa/domain/semantics/command/import_ontology.py rename server/osa/domain/{schema => semantics}/event/__init__.py (100%) rename server/osa/domain/{schema/model => semantics/handler}/__init__.py (100%) rename server/osa/domain/{schema/port => semantics/model}/__init__.py (100%) create mode 100644 server/osa/domain/semantics/model/ontology.py create mode 100644 server/osa/domain/semantics/model/schema.py create mode 100644 server/osa/domain/semantics/model/value.py rename server/osa/domain/{schema/query => semantics/port}/__init__.py (100%) create mode 100644 server/osa/domain/semantics/port/ontology_fetcher.py create mode 100644 server/osa/domain/semantics/port/ontology_repository.py create mode 100644 server/osa/domain/semantics/port/schema_repository.py rename server/osa/domain/{schema/service => semantics/query}/__init__.py (100%) create mode 100644 server/osa/domain/semantics/query/get_ontology.py create mode 100644 server/osa/domain/semantics/query/get_schema.py create mode 100644 server/osa/domain/semantics/query/list_ontologies.py create mode 100644 server/osa/domain/semantics/query/list_schemas.py create mode 100644 server/osa/domain/semantics/service/__init__.py create mode 100644 server/osa/domain/semantics/service/ontology.py create mode 100644 server/osa/domain/semantics/service/schema.py create mode 100644 server/osa/domain/semantics/util/__init__.py create mode 100644 server/osa/domain/semantics/util/di/__init__.py create mode 100644 server/osa/domain/semantics/util/di/provider.py create 
mode 100644 server/osa/domain/semantics/util/obographs.py create mode 100644 server/osa/domain/shared/model/validator.py create mode 100644 server/osa/domain/validation/event/validation_failed.py create mode 100644 server/osa/infrastructure/http/__init__.py create mode 100644 server/osa/infrastructure/http/di.py create mode 100644 server/osa/infrastructure/http/ontology_fetcher.py create mode 100644 server/osa/infrastructure/persistence/adapter/readers.py create mode 100644 server/osa/infrastructure/persistence/adapter/spreadsheet.py create mode 100644 server/osa/infrastructure/persistence/repository/convention.py create mode 100644 server/osa/infrastructure/persistence/repository/ontology.py create mode 100644 server/osa/infrastructure/persistence/repository/schema.py create mode 100644 server/tests/unit/domain/deposition/test_convention.py create mode 100644 server/tests/unit/domain/deposition/test_convention_service.py create mode 100644 server/tests/unit/domain/deposition/test_deposition.py create mode 100644 server/tests/unit/domain/deposition/test_deposition_service.py create mode 100644 server/tests/unit/domain/deposition/test_event_chain.py create mode 100644 server/tests/unit/domain/deposition/test_return_to_draft.py create mode 100644 server/tests/unit/domain/deposition/test_spreadsheet.py create mode 100644 server/tests/unit/domain/semantics/__init__.py create mode 100644 server/tests/unit/domain/semantics/test_import_ontology.py create mode 100644 server/tests/unit/domain/semantics/test_obographs_parser.py create mode 100644 server/tests/unit/domain/semantics/test_ontology.py create mode 100644 server/tests/unit/domain/semantics/test_ontology_service.py create mode 100644 server/tests/unit/domain/semantics/test_schema.py create mode 100644 server/tests/unit/domain/semantics/test_schema_service.py create mode 100644 server/tests/unit/infrastructure/http/test_ontology_fetcher.py diff --git a/Justfile b/Justfile index d750323..4365a96 100644 --- a/Justfile +++ b/Justfile @@ -58,6 +58,13 @@ dev-down: open-ui: open http://localhost:8080 +# === Code Quality === + +# Lint all code (server + web) +lint: + cd server && just lint + cd web && pnpm lint + # === Individual Service Development === # Run server independently (requires database) diff --git a/server/migrations/versions/add_deposition_tables.py b/server/migrations/versions/add_deposition_tables.py new file mode 100644 index 0000000..f0eacb7 --- /dev/null +++ b/server/migrations/versions/add_deposition_tables.py @@ -0,0 +1,108 @@ +"""add_deposition_tables + +Add ontologies, ontology_terms, schemas, conventions tables. +Alter depositions: add convention_srn, drop provenance. + +Revision ID: add_deposition_tables +Revises: add_authorization +Create Date: 2026-02-08 + +""" + +from typing import Sequence, Union + +import sqlalchemy as sa +from alembic import op + +# revision identifiers, used by Alembic. 
+revision: str = "add_deposition_tables" +down_revision: Union[str, Sequence[str], None] = "add_authorization" +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + """Add semantics/convention tables and update depositions.""" + # ONTOLOGIES + op.create_table( + "ontologies", + sa.Column("srn", sa.String(), nullable=False), + sa.Column("title", sa.String(255), nullable=False), + sa.Column("description", sa.Text(), nullable=True), + sa.Column("created_at", sa.DateTime(timezone=True), nullable=False), + sa.PrimaryKeyConstraint("srn"), + ) + + # ONTOLOGY TERMS + op.create_table( + "ontology_terms", + sa.Column("id", sa.String(), nullable=False), + sa.Column("ontology_srn", sa.String(), nullable=False), + sa.Column("term_id", sa.String(255), nullable=False), + sa.Column("label", sa.String(255), nullable=False), + sa.Column("synonyms", sa.JSON(), nullable=False), + sa.Column("parent_ids", sa.JSON(), nullable=False), + sa.Column("definition", sa.Text(), nullable=True), + sa.Column("deprecated", sa.Boolean(), nullable=False, server_default="false"), + sa.PrimaryKeyConstraint("id"), + sa.ForeignKeyConstraint( + ["ontology_srn"], + ["ontologies.srn"], + ondelete="CASCADE", + ), + sa.UniqueConstraint("ontology_srn", "term_id", name="uq_ontology_term"), + ) + op.create_index("idx_ontology_terms_ontology_srn", "ontology_terms", ["ontology_srn"]) + + # SCHEMAS + op.create_table( + "schemas", + sa.Column("srn", sa.String(), nullable=False), + sa.Column("title", sa.String(255), nullable=False), + sa.Column("fields", sa.JSON(), nullable=False), + sa.Column("created_at", sa.DateTime(timezone=True), nullable=False), + sa.PrimaryKeyConstraint("srn"), + ) + + # CONVENTIONS + op.create_table( + "conventions", + sa.Column("srn", sa.String(), nullable=False), + sa.Column("title", sa.String(255), nullable=False), + sa.Column("description", sa.Text(), nullable=True), + sa.Column("schema_srn", sa.String(), nullable=False), + sa.Column("file_requirements", sa.JSON(), nullable=False), + sa.Column("validator_refs", sa.JSON(), nullable=False), + sa.Column("created_at", sa.DateTime(timezone=True), nullable=False), + sa.PrimaryKeyConstraint("srn"), + ) + + # ALTER DEPOSITIONS: add convention_srn, drop provenance + op.add_column( + "depositions", + sa.Column("convention_srn", sa.String(), nullable=True), + ) + op.drop_column("depositions", "provenance") + + +def downgrade() -> None: + """Reverse: restore depositions, drop new tables.""" + # DEPOSITIONS: re-add provenance, drop convention_srn + op.add_column( + "depositions", + sa.Column("provenance", sa.JSON(), nullable=False, server_default="{}"), + ) + op.drop_column("depositions", "convention_srn") + + # CONVENTIONS + op.drop_table("conventions") + + # SCHEMAS + op.drop_table("schemas") + + # ONTOLOGY TERMS + op.drop_index("idx_ontology_terms_ontology_srn", table_name="ontology_terms") + op.drop_table("ontology_terms") + + # ONTOLOGIES + op.drop_table("ontologies") diff --git a/server/ontologies/biological-sex.obographs.json b/server/ontologies/biological-sex.obographs.json new file mode 100644 index 0000000..bab8b7d --- /dev/null +++ b/server/ontologies/biological-sex.obographs.json @@ -0,0 +1,82 @@ +{ + "graphs": [ + { + "id": "http://purl.obolibrary.org/obo/osa/biological-sex.owl", + "lbl": "Biological Sex", + "meta": { + "version": "1.0.0", + "definition": { + "val": "An ontology of biological sex categories for scientific data annotation." 
+ } + }, + "nodes": [ + { + "id": "OSAO:0000001", + "lbl": "biological sex", + "type": "CLASS", + "meta": { + "definition": { + "val": "The biological sex of an organism, determined by chromosomal, gonadal, and anatomical characteristics." + } + } + }, + { + "id": "OSAO:0000002", + "lbl": "female", + "type": "CLASS", + "meta": { + "definition": { + "val": "An organism that produces ova or has XX sex chromosomes." + }, + "synonyms": [ + { "val": "F" } + ] + } + }, + { + "id": "OSAO:0000003", + "lbl": "male", + "type": "CLASS", + "meta": { + "definition": { + "val": "An organism that produces spermatozoa or has XY sex chromosomes." + }, + "synonyms": [ + { "val": "M" } + ] + } + }, + { + "id": "OSAO:0000004", + "lbl": "intersex", + "type": "CLASS", + "meta": { + "definition": { + "val": "An organism with sex characteristics that do not fit typical definitions of male or female." + } + } + }, + { + "id": "OSAO:0000005", + "lbl": "unknown sex", + "type": "CLASS", + "meta": { + "definition": { + "val": "The biological sex of the organism has not been determined." + }, + "synonyms": [ + { "val": "undetermined" }, + { "val": "not recorded" } + ] + } + } + ], + "edges": [ + { "sub": "OSAO:0000002", "pred": "is_a", "obj": "OSAO:0000001" }, + { "sub": "OSAO:0000003", "pred": "is_a", "obj": "OSAO:0000001" }, + { "sub": "OSAO:0000004", "pred": "is_a", "obj": "OSAO:0000001" }, + { "sub": "OSAO:0000005", "pred": "is_a", "obj": "OSAO:0000001" } + ] + } + ] +} diff --git a/server/ontologies/license.obographs.json b/server/ontologies/license.obographs.json new file mode 100644 index 0000000..1a34b48 --- /dev/null +++ b/server/ontologies/license.obographs.json @@ -0,0 +1,122 @@ +{ + "graphs": [ + { + "id": "http://purl.obolibrary.org/obo/osa/license.owl", + "lbl": "License", + "meta": { + "version": "1.0.0", + "definition": { + "val": "An ontology of common open-access and open-source license types for scientific data." + } + }, + "nodes": [ + { + "id": "OSAO:1000001", + "lbl": "license", + "type": "CLASS", + "meta": { + "definition": { + "val": "A legal instrument governing the use and redistribution of a creative work or dataset." + } + } + }, + { + "id": "OSAO:1000002", + "lbl": "CC0 1.0", + "type": "CLASS", + "meta": { + "definition": { + "val": "Creative Commons Zero v1.0 Universal — public domain dedication." + }, + "synonyms": [ + { "val": "CC0" }, + { "val": "Public Domain" } + ], + "xrefs": [ + { "val": "SPDX:CC0-1.0" } + ] + } + }, + { + "id": "OSAO:1000003", + "lbl": "CC BY 4.0", + "type": "CLASS", + "meta": { + "definition": { + "val": "Creative Commons Attribution 4.0 International — requires attribution." + }, + "synonyms": [ + { "val": "CC-BY" } + ], + "xrefs": [ + { "val": "SPDX:CC-BY-4.0" } + ] + } + }, + { + "id": "OSAO:1000004", + "lbl": "CC BY-SA 4.0", + "type": "CLASS", + "meta": { + "definition": { + "val": "Creative Commons Attribution-ShareAlike 4.0 International — requires attribution and share-alike." + }, + "xrefs": [ + { "val": "SPDX:CC-BY-SA-4.0" } + ] + } + }, + { + "id": "OSAO:1000005", + "lbl": "CC BY-NC 4.0", + "type": "CLASS", + "meta": { + "definition": { + "val": "Creative Commons Attribution-NonCommercial 4.0 International — requires attribution, non-commercial use." + }, + "xrefs": [ + { "val": "SPDX:CC-BY-NC-4.0" } + ] + } + }, + { + "id": "OSAO:1000006", + "lbl": "CC BY-NC-SA 4.0", + "type": "CLASS", + "meta": { + "definition": { + "val": "Creative Commons Attribution-NonCommercial-ShareAlike 4.0 International." 
+ }, + "xrefs": [ + { "val": "SPDX:CC-BY-NC-SA-4.0" } + ] + } + }, + { + "id": "OSAO:1000007", + "lbl": "MIT License", + "type": "CLASS", + "meta": { + "definition": { + "val": "A permissive open-source license with minimal restrictions on reuse." + }, + "synonyms": [ + { "val": "MIT" } + ], + "xrefs": [ + { "val": "SPDX:MIT" } + ] + } + } + ], + "edges": [ + { "sub": "OSAO:1000002", "pred": "is_a", "obj": "OSAO:1000001" }, + { "sub": "OSAO:1000003", "pred": "is_a", "obj": "OSAO:1000001" }, + { "sub": "OSAO:1000004", "pred": "is_a", "obj": "OSAO:1000001" }, + { "sub": "OSAO:1000005", "pred": "is_a", "obj": "OSAO:1000001" }, + { "sub": "OSAO:1000006", "pred": "is_a", "obj": "OSAO:1000001" }, + { "sub": "OSAO:1000007", "pred": "is_a", "obj": "OSAO:1000001" } + ] + } + ] +} diff --git a/server/osa/application/api/rest/app.py b/server/osa/application/api/rest/app.py index e9d6fef..8bbb6f2 100644 --- a/server/osa/application/api/rest/app.py +++ b/server/osa/application/api/rest/app.py @@ -9,9 +9,13 @@ from osa.application.api.v1.routes import ( admin, auth, + conventions, + depositions, events, health, + ontologies, records, + schemas, search, stats, validation, @@ -78,6 +82,10 @@ def create_app() -> FastAPI: app_instance.include_router(records.router, prefix="/api/v1") app_instance.include_router(search.router, prefix="/api/v1") app_instance.include_router(stats.router, prefix="/api/v1") + app_instance.include_router(ontologies.router, prefix="/api/v1") + app_instance.include_router(schemas.router, prefix="/api/v1") + app_instance.include_router(conventions.router, prefix="/api/v1") + app_instance.include_router(depositions.router, prefix="/api/v1") app_instance.include_router(validation.router, prefix="/api/v1") # Global OSA error handler - maps domain and infrastructure errors to HTTP responses diff --git a/server/osa/application/api/v1/routes/conventions.py b/server/osa/application/api/v1/routes/conventions.py new file mode 100644 index 0000000..cdea2bd --- /dev/null +++ b/server/osa/application/api/v1/routes/conventions.py @@ -0,0 +1,46 @@ +"""Convention REST routes.""" + +from dishka.integrations.fastapi import DishkaRoute, FromDishka +from fastapi import APIRouter + +from osa.domain.deposition.command.create_convention import ( + CreateConvention, + CreateConventionHandler, + ConventionCreated, +) +from osa.domain.deposition.query.get_convention import ( + GetConvention, + GetConventionHandler, + ConventionDetail, +) +from osa.domain.deposition.query.list_conventions import ( + ListConventions, + ListConventionsHandler, + ConventionList, +) +from osa.domain.shared.model.srn import ConventionSRN + +router = APIRouter(prefix="/conventions", tags=["Conventions"], route_class=DishkaRoute) + + +@router.post("", response_model=ConventionCreated, status_code=201) +async def create_convention( + body: CreateConvention, + handler: FromDishka[CreateConventionHandler], +) -> ConventionCreated: + return await handler.run(body) + + +@router.get("/{srn:path}", response_model=ConventionDetail) +async def get_convention( + srn: str, + handler: FromDishka[GetConventionHandler], +) -> ConventionDetail: + return await handler.run(GetConvention(srn=ConventionSRN.parse(srn))) + + +@router.get("", response_model=ConventionList) +async def list_conventions( + handler: FromDishka[ListConventionsHandler], +) -> ConventionList: + return await handler.run(ListConventions()) diff --git a/server/osa/application/api/v1/routes/depositions.py b/server/osa/application/api/v1/routes/depositions.py new file mode 100644 
index 0000000..fb1cab2 --- /dev/null +++ b/server/osa/application/api/v1/routes/depositions.py @@ -0,0 +1,164 @@ +"""Deposition REST routes.""" + +from typing import Any + +from dishka.integrations.fastapi import DishkaRoute, FromDishka +from fastapi import APIRouter, UploadFile +from fastapi.responses import StreamingResponse + +from osa.domain.deposition.command.create import ( + CreateDeposition, + CreateDepositionHandler, + DepositionCreated, +) +from osa.domain.deposition.command.delete_files import ( + DeleteFile, + DeleteFileHandler, + FileDeleted, +) +from osa.domain.deposition.command.submit import ( + DepositionSubmitted, + SubmitDeposition, + SubmitDepositionHandler, +) +from osa.domain.deposition.command.update import ( + MetadataUpdated, + UpdateMetadata, + UpdateMetadataHandler, +) +from osa.domain.deposition.command.upload import ( + FileUploaded, + UploadFile as UploadFileCommand, + UploadFileHandler, +) +from osa.domain.deposition.command.upload_spreadsheet import ( + SpreadsheetUploaded, + UploadSpreadsheet, + UploadSpreadsheetHandler, +) +from osa.domain.deposition.query.download_file import ( + DownloadFile, + DownloadFileHandler, +) +from osa.domain.deposition.query.download_template import ( + DownloadTemplate, + DownloadTemplateHandler, +) +from osa.domain.deposition.query.get_deposition import ( + DepositionDetail, + GetDeposition, + GetDepositionHandler, +) +from osa.domain.deposition.query.list_depositions import ( + DepositionList, + ListDepositions, + ListDepositionsHandler, +) +from osa.domain.shared.model.srn import DepositionSRN + +router = APIRouter(prefix="/depositions", tags=["Depositions"], route_class=DishkaRoute) + + +@router.post("", response_model=DepositionCreated, status_code=201) +async def create_deposition( + body: CreateDeposition, + handler: FromDishka[CreateDepositionHandler], +) -> DepositionCreated: + return await handler.run(body) + + +@router.get("", response_model=DepositionList) +async def list_depositions( + handler: FromDishka[ListDepositionsHandler], +) -> DepositionList: + return await handler.run(ListDepositions()) + + +@router.get("/{srn:path}/template") +async def download_template( + srn: str, + handler: FromDishka[GetDepositionHandler], + template_handler: FromDishka[DownloadTemplateHandler], +) -> StreamingResponse: + dep = await handler.run(GetDeposition(srn=DepositionSRN.parse(srn))) + result = await template_handler.run(DownloadTemplate(convention_srn=dep.convention_srn)) + return StreamingResponse( + iter([result.content]), + media_type="application/vnd.openxmlformats-officedocument.spreadsheetml.sheet", + headers={"Content-Disposition": f'attachment; filename="{result.filename}"'}, + ) + + +@router.post("/{srn:path}/spreadsheet", response_model=SpreadsheetUploaded) +async def upload_spreadsheet( + srn: str, + file: UploadFile, + handler: FromDishka[UploadSpreadsheetHandler], +) -> SpreadsheetUploaded: + content = await file.read() + return await handler.run(UploadSpreadsheet(srn=DepositionSRN.parse(srn), content=content)) + + +@router.post("/{srn:path}/files", response_model=FileUploaded) +async def upload_file( + srn: str, + file: UploadFile, + handler: FromDishka[UploadFileHandler], +) -> FileUploaded: + content = await file.read() + return await handler.run( + UploadFileCommand( + srn=DepositionSRN.parse(srn), + filename=file.filename or "unknown", + content=content, + size=len(content), + ) + ) + + +@router.get("/{srn:path}/files/{filename}") +async def download_file( + srn: str, + filename: str, + handler: 
FromDishka[DownloadFileHandler], +) -> StreamingResponse: + result = await handler.run(DownloadFile(srn=DepositionSRN.parse(srn), filename=filename)) + return StreamingResponse( + result.stream, + media_type=result.content_type or "application/octet-stream", + headers={"Content-Disposition": f'attachment; filename="{result.filename}"'}, + ) + + +@router.delete("/{srn:path}/files/{filename}", response_model=FileDeleted) +async def delete_file( + srn: str, + filename: str, + handler: FromDishka[DeleteFileHandler], +) -> FileDeleted: + return await handler.run(DeleteFile(srn=DepositionSRN.parse(srn), filename=filename)) + + +@router.patch("/{srn:path}/metadata", response_model=MetadataUpdated) +async def update_metadata( + srn: str, + body: dict[str, Any], + handler: FromDishka[UpdateMetadataHandler], +) -> MetadataUpdated: + return await handler.run(UpdateMetadata(srn=DepositionSRN.parse(srn), metadata=body)) + + +@router.post("/{srn:path}/submit", response_model=DepositionSubmitted) +async def submit_deposition( + srn: str, + handler: FromDishka[SubmitDepositionHandler], +) -> DepositionSubmitted: + return await handler.run(SubmitDeposition(srn=DepositionSRN.parse(srn))) + + +@router.get("/{srn:path}", response_model=DepositionDetail) +async def get_deposition( + srn: str, + handler: FromDishka[GetDepositionHandler], +) -> DepositionDetail: + return await handler.run(GetDeposition(srn=DepositionSRN.parse(srn))) diff --git a/server/osa/application/api/v1/routes/ontologies.py b/server/osa/application/api/v1/routes/ontologies.py new file mode 100644 index 0000000..ed62272 --- /dev/null +++ b/server/osa/application/api/v1/routes/ontologies.py @@ -0,0 +1,59 @@ +"""Ontology REST routes.""" + +from dishka.integrations.fastapi import DishkaRoute, FromDishka +from fastapi import APIRouter + +from osa.domain.semantics.command.create_ontology import ( + CreateOntology, + CreateOntologyHandler, + OntologyCreated, +) +from osa.domain.semantics.command.import_ontology import ( + ImportOntology, + ImportOntologyHandler, + ImportOntologyResult, +) +from osa.domain.semantics.query.get_ontology import ( + GetOntology, + GetOntologyHandler, + OntologyDetail, +) +from osa.domain.semantics.query.list_ontologies import ( + ListOntologies, + ListOntologiesHandler, + OntologyList, +) +from osa.domain.shared.model.srn import OntologySRN + +router = APIRouter(prefix="/ontologies", tags=["Ontologies"], route_class=DishkaRoute) + + +@router.post("", response_model=OntologyCreated, status_code=201) +async def create_ontology( + body: CreateOntology, + handler: FromDishka[CreateOntologyHandler], +) -> OntologyCreated: + return await handler.run(body) + + +@router.post("/import", response_model=ImportOntologyResult, status_code=201) +async def import_ontology( + body: ImportOntology, + handler: FromDishka[ImportOntologyHandler], +) -> ImportOntologyResult: + return await handler.run(body) + + +@router.get("/{srn:path}", response_model=OntologyDetail) +async def get_ontology( + srn: str, + handler: FromDishka[GetOntologyHandler], +) -> OntologyDetail: + return await handler.run(GetOntology(srn=OntologySRN.parse(srn))) + + +@router.get("", response_model=OntologyList) +async def list_ontologies( + handler: FromDishka[ListOntologiesHandler], +) -> OntologyList: + return await handler.run(ListOntologies()) diff --git a/server/osa/application/api/v1/routes/schemas.py b/server/osa/application/api/v1/routes/schemas.py new file mode 100644 index 0000000..421a943 --- /dev/null +++ b/server/osa/application/api/v1/routes/schemas.py 
@@ -0,0 +1,46 @@ +"""Schema REST routes.""" + +from dishka.integrations.fastapi import DishkaRoute, FromDishka +from fastapi import APIRouter + +from osa.domain.semantics.command.create_schema import ( + CreateSchema, + CreateSchemaHandler, + SchemaCreated, +) +from osa.domain.semantics.query.get_schema import ( + GetSchema, + GetSchemaHandler, + SchemaDetail, +) +from osa.domain.semantics.query.list_schemas import ( + ListSchemas, + ListSchemasHandler, + SchemaList, +) +from osa.domain.shared.model.srn import SchemaSRN + +router = APIRouter(prefix="/schemas", tags=["Schemas"], route_class=DishkaRoute) + + +@router.post("", response_model=SchemaCreated, status_code=201) +async def create_schema( + body: CreateSchema, + handler: FromDishka[CreateSchemaHandler], +) -> SchemaCreated: + return await handler.run(body) + + +@router.get("/{srn:path}", response_model=SchemaDetail) +async def get_schema( + srn: str, + handler: FromDishka[GetSchemaHandler], +) -> SchemaDetail: + return await handler.run(GetSchema(srn=SchemaSRN.parse(srn))) + + +@router.get("", response_model=SchemaList) +async def list_schemas( + handler: FromDishka[ListSchemasHandler], +) -> SchemaList: + return await handler.run(ListSchemas()) diff --git a/server/osa/application/di.py b/server/osa/application/di.py index a7b0c7c..1d735dc 100644 --- a/server/osa/application/di.py +++ b/server/osa/application/di.py @@ -4,9 +4,11 @@ from osa.config import Config from osa.domain.auth.util.di import AuthProvider from osa.domain.deposition.util.di import DepositionProvider +from osa.domain.semantics.util.di.provider import SemanticsProvider from osa.domain.validation.util.di import ValidationProvider from osa.infrastructure.auth import AuthInfraProvider from osa.infrastructure.event.di import EventProvider +from osa.infrastructure.http.di import HttpProvider from osa.infrastructure.index.di import IndexProvider from osa.infrastructure.source.di import SourceProvider from osa.infrastructure.oci import OciProvider @@ -27,7 +29,9 @@ def create_container() -> AsyncContainer: IndexProvider(), SourceProvider(), EventProvider(), + HttpProvider(), DepositionProvider(), + SemanticsProvider(), ValidationProvider(), AuthProvider(), AuthInfraProvider(), diff --git a/server/osa/domain/deposition/command/create.py b/server/osa/domain/deposition/command/create.py index c344a88..47aadc0 100644 --- a/server/osa/domain/deposition/command/create.py +++ b/server/osa/domain/deposition/command/create.py @@ -1,17 +1,13 @@ -from typing import Any - -import logfire - from osa.domain.auth.model.principal import Principal from osa.domain.auth.model.role import Role from osa.domain.deposition.service.deposition import DepositionService from osa.domain.shared.authorization.gate import at_least from osa.domain.shared.command import Command, CommandHandler, Result -from osa.domain.shared.model.srn import DepositionSRN +from osa.domain.shared.model.srn import ConventionSRN, DepositionSRN class CreateDeposition(Command): - metadata: dict[str, Any] = {} + convention_srn: ConventionSRN class DepositionCreated(Result): @@ -24,10 +20,8 @@ class CreateDepositionHandler(CommandHandler[CreateDeposition, DepositionCreated deposition_service: DepositionService async def run(self, cmd: CreateDeposition) -> DepositionCreated: - # TODO: Logfire span - logfire.info("Deposition started", cmd=cmd) - - # Mock SRN for now since we don't have ID generation wired up - mock_srn = DepositionSRN.parse("urn:osa:mock-node:dep:mock-id") - - return DepositionCreated(srn=mock_srn) + dep = await 
self.deposition_service.create( + convention_srn=cmd.convention_srn, + owner_id=self.principal.user_id, + ) + return DepositionCreated(srn=dep.srn) diff --git a/server/osa/domain/deposition/command/create_convention.py b/server/osa/domain/deposition/command/create_convention.py new file mode 100644 index 0000000..a032e41 --- /dev/null +++ b/server/osa/domain/deposition/command/create_convention.py @@ -0,0 +1,50 @@ +from datetime import datetime + +from osa.domain.auth.model.principal import Principal +from osa.domain.auth.model.role import Role +from osa.domain.deposition.model.value import FileRequirements +from osa.domain.deposition.service.convention import ConventionService +from osa.domain.shared.authorization.gate import at_least +from osa.domain.shared.command import Command, CommandHandler, Result +from osa.domain.shared.model.srn import ConventionSRN, SchemaSRN +from osa.domain.shared.model.validator import ValidatorRef + + +class CreateConvention(Command): + title: str + version: str + schema_srn: SchemaSRN + file_requirements: FileRequirements + description: str | None = None + validator_refs: list[ValidatorRef] = [] + + +class ConventionCreated(Result): + srn: ConventionSRN + title: str + description: str | None + schema_srn: SchemaSRN + created_at: datetime + + +class CreateConventionHandler(CommandHandler[CreateConvention, ConventionCreated]): + __auth__ = at_least(Role.ADMIN) + principal: Principal + convention_service: ConventionService + + async def run(self, cmd: CreateConvention) -> ConventionCreated: + convention = await self.convention_service.create_convention( + title=cmd.title, + version=cmd.version, + schema_srn=cmd.schema_srn, + file_requirements=cmd.file_requirements, + description=cmd.description, + validator_refs=cmd.validator_refs, + ) + return ConventionCreated( + srn=convention.srn, + title=convention.title, + description=convention.description, + schema_srn=convention.schema_srn, + created_at=convention.created_at, + ) diff --git a/server/osa/domain/deposition/command/delete_files.py b/server/osa/domain/deposition/command/delete_files.py index 1f4f0e4..714cf56 100644 --- a/server/osa/domain/deposition/command/delete_files.py +++ b/server/osa/domain/deposition/command/delete_files.py @@ -1,41 +1,25 @@ -import logfire - from osa.domain.auth.model.principal import Principal from osa.domain.auth.model.role import Role -from osa.domain.deposition.port import DepositionRepository, StoragePort +from osa.domain.deposition.service.deposition import DepositionService from osa.domain.shared.authorization.gate import at_least from osa.domain.shared.command import Command, CommandHandler, Result from osa.domain.shared.model.srn import DepositionSRN -class DeleteDepositionFiles(Command): +class DeleteFile(Command): srn: DepositionSRN + filename: str -class DepositionFilesDeleted(Result): +class FileDeleted(Result): pass -class DeleteDepositionFilesHandler(CommandHandler[DeleteDepositionFiles, DepositionFilesDeleted]): +class DeleteFileHandler(CommandHandler[DeleteFile, FileDeleted]): __auth__ = at_least(Role.DEPOSITOR) principal: Principal - repository: DepositionRepository - storage: StoragePort - - async def run(self, cmd: DeleteDepositionFiles) -> DepositionFilesDeleted: - with logfire.span("DeleteDepositionFiles"): - # 1. Load deposition - dep = await self.repository.get(cmd.srn) - if dep is None: - raise ValueError(f"Deposition not found: {cmd.srn}") - - # 2. Clear files from aggregate (domain logic) - dep.remove_all_files() - - # 3. 
Delete physically (infrastructure) - self.storage.delete_files_for_deposition(cmd.srn) - - # 4. Persist changes - await self.repository.save(dep) + deposition_service: DepositionService - return DepositionFilesDeleted() + async def run(self, cmd: DeleteFile) -> FileDeleted: + await self.deposition_service.delete_file(cmd.srn, cmd.filename) + return FileDeleted() diff --git a/server/osa/domain/deposition/command/submit.py b/server/osa/domain/deposition/command/submit.py index 88d32c4..40f0a64 100644 --- a/server/osa/domain/deposition/command/submit.py +++ b/server/osa/domain/deposition/command/submit.py @@ -1,15 +1,9 @@ -import logfire -from uuid import uuid4 - from osa.domain.auth.model.principal import Principal from osa.domain.auth.model.role import Role -from osa.domain.deposition.event.submitted import DepositionSubmittedEvent from osa.domain.deposition.service.deposition import DepositionService from osa.domain.shared.authorization.gate import at_least from osa.domain.shared.command import Command, CommandHandler, Result -from osa.domain.shared.event import EventId from osa.domain.shared.model.srn import DepositionSRN -from osa.domain.shared.outbox import Outbox class SubmitDeposition(Command): @@ -24,21 +18,7 @@ class SubmitDepositionHandler(CommandHandler[SubmitDeposition, DepositionSubmitt __auth__ = at_least(Role.DEPOSITOR) principal: Principal deposition_service: DepositionService - outbox: Outbox async def run(self, cmd: SubmitDeposition) -> DepositionSubmitted: - with logfire.span("SubmitDeposition"): - # TODO: Domain logic via service (state transition) - # self.deposition_service.submit(cmd.srn) - - # Append event to outbox for reliable delivery - event = DepositionSubmittedEvent( - id=EventId(uuid4()), - deposition_id=cmd.srn, - metadata={}, # Empty metadata for direct submission (not from source) - ) - await self.outbox.append(event) - - logfire.info("Deposition submitted event saved to outbox", deposition_id=str(cmd.srn)) - - return DepositionSubmitted() + await self.deposition_service.submit(cmd.srn) + return DepositionSubmitted() diff --git a/server/osa/domain/deposition/command/update.py b/server/osa/domain/deposition/command/update.py index d0ee9b8..71356a6 100644 --- a/server/osa/domain/deposition/command/update.py +++ b/server/osa/domain/deposition/command/update.py @@ -1,25 +1,27 @@ -import logfire +from typing import Any from osa.domain.auth.model.principal import Principal from osa.domain.auth.model.role import Role from osa.domain.deposition.service.deposition import DepositionService from osa.domain.shared.authorization.gate import at_least from osa.domain.shared.command import Command, CommandHandler, Result +from osa.domain.shared.model.srn import DepositionSRN -class UpdateDeposition(Command): ... +class UpdateMetadata(Command): + srn: DepositionSRN + metadata: dict[str, Any] -class DepositionUpdated(Result): ... 
+class MetadataUpdated(Result): + pass -class UpdateDepositionHandler(CommandHandler[UpdateDeposition, DepositionUpdated]): +class UpdateMetadataHandler(CommandHandler[UpdateMetadata, MetadataUpdated]): __auth__ = at_least(Role.DEPOSITOR) principal: Principal deposition_service: DepositionService - async def run(self, cmd: UpdateDeposition) -> DepositionUpdated: - # TODO: Logfire span - logfire.info("Deposition updated", cmd=cmd) - - return DepositionUpdated() + async def run(self, cmd: UpdateMetadata) -> MetadataUpdated: + await self.deposition_service.update_metadata(cmd.srn, cmd.metadata) + return MetadataUpdated() diff --git a/server/osa/domain/deposition/command/upload.py b/server/osa/domain/deposition/command/upload.py index 5e358f8..7959563 100644 --- a/server/osa/domain/deposition/command/upload.py +++ b/server/osa/domain/deposition/command/upload.py @@ -1,9 +1,7 @@ -from typing import Any - -import logfire - from osa.domain.auth.model.principal import Principal from osa.domain.auth.model.role import Role +from osa.domain.deposition.model.value import DepositionFile +from osa.domain.deposition.service.deposition import DepositionService from osa.domain.shared.authorization.gate import at_least from osa.domain.shared.command import Command, CommandHandler, Result from osa.domain.shared.model.srn import DepositionSRN @@ -12,19 +10,24 @@ class UploadFile(Command): srn: DepositionSRN filename: str - stream: Any # BinaryIO or similar + content: bytes + size: int class FileUploaded(Result): - pass + file: DepositionFile class UploadFileHandler(CommandHandler[UploadFile, FileUploaded]): __auth__ = at_least(Role.DEPOSITOR) principal: Principal + deposition_service: DepositionService async def run(self, cmd: UploadFile) -> FileUploaded: - with logfire.span("UploadFile"): - # TODO: Implement actual file storage logic - logfire.info("File uploaded", filename=cmd.filename) - return FileUploaded() + dep = await self.deposition_service.upload_file( + cmd.srn, + cmd.filename, + cmd.content, + cmd.size, + ) + return FileUploaded(file=dep.files[-1]) diff --git a/server/osa/domain/deposition/command/upload_spreadsheet.py b/server/osa/domain/deposition/command/upload_spreadsheet.py new file mode 100644 index 0000000..c092dd4 --- /dev/null +++ b/server/osa/domain/deposition/command/upload_spreadsheet.py @@ -0,0 +1,46 @@ +from osa.domain.auth.model.principal import Principal +from osa.domain.auth.model.role import Role +from osa.domain.deposition.port.convention_repository import ConventionRepository +from osa.domain.deposition.port.schema_reader import SchemaReader +from osa.domain.deposition.port.spreadsheet import SpreadsheetParseResult, SpreadsheetPort +from osa.domain.deposition.service.deposition import DepositionService +from osa.domain.shared.authorization.gate import at_least +from osa.domain.shared.command import Command, CommandHandler, Result +from osa.domain.shared.error import NotFoundError +from osa.domain.shared.model.srn import DepositionSRN + + +class UploadSpreadsheet(Command): + srn: DepositionSRN + content: bytes + + +class SpreadsheetUploaded(Result): + parse_result: SpreadsheetParseResult + + +class UploadSpreadsheetHandler(CommandHandler[UploadSpreadsheet, SpreadsheetUploaded]): + __auth__ = at_least(Role.DEPOSITOR) + principal: Principal + deposition_service: DepositionService + convention_repo: ConventionRepository + schema_reader: SchemaReader + spreadsheet: SpreadsheetPort + + async def run(self, cmd: UploadSpreadsheet) -> SpreadsheetUploaded: + dep = await 
self.deposition_service.get(cmd.srn) + + convention = await self.convention_repo.get(dep.convention_srn) + if convention is None: + raise NotFoundError(f"Convention not found: {dep.convention_srn}") + + schema = await self.schema_reader.get_schema(convention.schema_srn) + if schema is None: + raise NotFoundError(f"Schema not found: {convention.schema_srn}") + + parse_result = self.spreadsheet.parse_upload(schema, cmd.content) + + if not parse_result.errors: + await self.deposition_service.update_metadata(cmd.srn, parse_result.metadata) + + return SpreadsheetUploaded(parse_result=parse_result) diff --git a/server/osa/domain/deposition/event/created.py b/server/osa/domain/deposition/event/created.py new file mode 100644 index 0000000..91174bb --- /dev/null +++ b/server/osa/domain/deposition/event/created.py @@ -0,0 +1,12 @@ +from osa.domain.auth.model.value import UserId +from osa.domain.shared.event import Event, EventId +from osa.domain.shared.model.srn import ConventionSRN, DepositionSRN + + +class DepositionCreatedEvent(Event): + """Emitted when a new deposition is created.""" + + id: EventId + deposition_id: DepositionSRN + convention_srn: ConventionSRN + owner_id: UserId diff --git a/server/osa/domain/deposition/event/file_deleted.py b/server/osa/domain/deposition/event/file_deleted.py new file mode 100644 index 0000000..11e278f --- /dev/null +++ b/server/osa/domain/deposition/event/file_deleted.py @@ -0,0 +1,10 @@ +from osa.domain.shared.event import Event, EventId +from osa.domain.shared.model.srn import DepositionSRN + + +class FileDeletedEvent(Event): + """Emitted when a file is deleted from a deposition.""" + + id: EventId + deposition_id: DepositionSRN + filename: str diff --git a/server/osa/domain/deposition/event/file_uploaded.py b/server/osa/domain/deposition/event/file_uploaded.py new file mode 100644 index 0000000..f1f8904 --- /dev/null +++ b/server/osa/domain/deposition/event/file_uploaded.py @@ -0,0 +1,12 @@ +from osa.domain.shared.event import Event, EventId +from osa.domain.shared.model.srn import DepositionSRN + + +class FileUploadedEvent(Event): + """Emitted when a file is uploaded to a deposition.""" + + id: EventId + deposition_id: DepositionSRN + filename: str + size: int + checksum: str diff --git a/server/osa/domain/deposition/event/metadata_updated.py b/server/osa/domain/deposition/event/metadata_updated.py new file mode 100644 index 0000000..b3ddd64 --- /dev/null +++ b/server/osa/domain/deposition/event/metadata_updated.py @@ -0,0 +1,12 @@ +from typing import Any + +from osa.domain.shared.event import Event, EventId +from osa.domain.shared.model.srn import DepositionSRN + + +class MetadataUpdatedEvent(Event): + """Emitted when deposition metadata is updated.""" + + id: EventId + deposition_id: DepositionSRN + metadata: dict[str, Any] diff --git a/server/osa/domain/schema/__init__.py b/server/osa/domain/deposition/handler/__init__.py similarity index 100% rename from server/osa/domain/schema/__init__.py rename to server/osa/domain/deposition/handler/__init__.py diff --git a/server/osa/domain/deposition/handler/return_to_draft.py b/server/osa/domain/deposition/handler/return_to_draft.py new file mode 100644 index 0000000..704a22d --- /dev/null +++ b/server/osa/domain/deposition/handler/return_to_draft.py @@ -0,0 +1,27 @@ +"""ReturnToDraft - handles ValidationFailed events.""" + +import logging + +from osa.domain.deposition.port.repository import DepositionRepository +from osa.domain.shared.event import EventHandler +from 
osa.domain.validation.event.validation_failed import ValidationFailed + +logger = logging.getLogger(__name__) + + +class ReturnToDraft(EventHandler[ValidationFailed]): +    """Returns a deposition to DRAFT when validation fails.""" + +    deposition_repo: DepositionRepository + +    async def handle(self, event: ValidationFailed) -> None: +        dep = await self.deposition_repo.get(event.deposition_srn) +        if dep is None: +            logger.warning(f"Deposition not found for return_to_draft: {event.deposition_srn}") +            return + +        dep.return_to_draft() +        await self.deposition_repo.save(dep) +        logger.info( +            f"Deposition {event.deposition_srn} returned to draft. Reasons: {event.reasons}" +        ) diff --git a/server/osa/domain/deposition/model/aggregate.py b/server/osa/domain/deposition/model/aggregate.py index 32834fc..06e91ec 100644 --- a/server/osa/domain/deposition/model/aggregate.py +++ b/server/osa/domain/deposition/model/aggregate.py @@ -1,21 +1,61 @@ -from typing import Any, Generic, TypeVar +from datetime import UTC, datetime +from typing import Any from osa.domain.auth.model.value import UserId from osa.domain.deposition.model.value import DepositionFile, DepositionStatus +from osa.domain.shared.error import InvalidStateError, NotFoundError from osa.domain.shared.model.aggregate import Aggregate -from osa.domain.shared.model.srn import DepositionSRN, RecordSRN +from osa.domain.shared.model.srn import ConventionSRN, DepositionSRN, RecordSRN -T = TypeVar("T") - -class Deposition(Aggregate, Generic[T]): +class Deposition(Aggregate): srn: DepositionSRN -    status: DepositionStatus -    metadata: T +    convention_srn: ConventionSRN +    status: DepositionStatus = DepositionStatus.DRAFT +    metadata: dict[str, Any] = {} files: list[DepositionFile] = [] record_srn: RecordSRN | None = None -    provenance: dict[str, Any] = {}  # Source info, provenance tracking -    owner_id: UserId | None = None +    owner_id: UserId +    created_at: datetime +    updated_at: datetime + +    def _require_draft(self) -> None: +        if self.status != DepositionStatus.DRAFT: +            raise InvalidStateError(f"Operation not allowed in {self.status} state") + +    def update_metadata(self, metadata: dict[str, Any]) -> None: +        self._require_draft() +        self.metadata = metadata +        self.updated_at = datetime.now(UTC) + +    def add_file(self, file: DepositionFile) -> None: +        self._require_draft() +        self.files.append(file) +        self.updated_at = datetime.now(UTC) + +    def remove_file(self, filename: str) -> DepositionFile: +        self._require_draft() +        for i, f in enumerate(self.files): +            if f.name == filename: +                removed = self.files.pop(i) +                self.updated_at = datetime.now(UTC) +                return removed + +        raise NotFoundError(f"File '{filename}' not found in deposition") + +    def submit(self) -> None: +        self._require_draft() +        self.status = DepositionStatus.IN_VALIDATION +        self.updated_at = datetime.now(UTC) + +    def return_to_draft(self) -> None: +        if self.status != DepositionStatus.IN_VALIDATION: +            raise InvalidStateError( +                f"Can only return to draft from IN_VALIDATION, currently {self.status}" +            ) +        self.status = DepositionStatus.DRAFT +        self.updated_at = datetime.now(UTC) def remove_all_files(self) -> None: self.files = [] +        self.updated_at = datetime.now(UTC) diff --git a/server/osa/domain/deposition/model/convention.py b/server/osa/domain/deposition/model/convention.py new file mode 100644 index 0000000..fb6b6fe --- /dev/null +++ b/server/osa/domain/deposition/model/convention.py @@ -0,0 +1,18 @@ +from datetime import datetime + +from osa.domain.deposition.model.value
import FileRequirements +from osa.domain.shared.model.aggregate import Aggregate +from osa.domain.shared.model.srn import ConventionSRN, SchemaSRN +from osa.domain.shared.model.validator import ValidatorRef + + +class Convention(Aggregate): + """An immutable, user-facing submission template.""" + + srn: ConventionSRN + title: str + description: str | None = None + schema_srn: SchemaSRN + file_requirements: FileRequirements + validator_refs: list[ValidatorRef] = [] + created_at: datetime diff --git a/server/osa/domain/deposition/model/value.py b/server/osa/domain/deposition/model/value.py index f7de996..e704717 100644 --- a/server/osa/domain/deposition/model/value.py +++ b/server/osa/domain/deposition/model/value.py @@ -3,11 +3,13 @@ from pydantic import BaseModel, Field +from osa.domain.shared.model.value import ValueObject + class DepositionStatus(StrEnum): DRAFT = "draft" - SUBMITTED = "submitted" - IN_REVIEW = "in review" + IN_VALIDATION = "in_validation" + IN_REVIEW = "in_review" ACCEPTED = "accepted" REJECTED = "rejected" @@ -16,4 +18,14 @@ class DepositionFile(BaseModel): name: str size: int checksum: str + content_type: str | None = None uploaded_at: datetime = Field(default_factory=datetime.now) + + +class FileRequirements(ValueObject): + """File upload constraints for a convention.""" + + accepted_types: list[str] # e.g., [".csv", ".h5ad"] + min_count: int = 0 + max_count: int + max_file_size: int # bytes diff --git a/server/osa/domain/deposition/port/__init__.py b/server/osa/domain/deposition/port/__init__.py index 1c8fcaf..f13f932 100644 --- a/server/osa/domain/deposition/port/__init__.py +++ b/server/osa/domain/deposition/port/__init__.py @@ -1,4 +1,4 @@ from .repository import DepositionRepository -from .storage import StoragePort +from .storage import FileStoragePort -__all__ = ["DepositionRepository", "StoragePort"] +__all__ = ["DepositionRepository", "FileStoragePort"] diff --git a/server/osa/domain/deposition/port/convention_repository.py b/server/osa/domain/deposition/port/convention_repository.py new file mode 100644 index 0000000..965351a --- /dev/null +++ b/server/osa/domain/deposition/port/convention_repository.py @@ -0,0 +1,24 @@ +from abc import abstractmethod +from typing import TYPE_CHECKING, List, Protocol + +from osa.domain.shared.model.srn import ConventionSRN +from osa.domain.shared.port import Port + +if TYPE_CHECKING: + from osa.domain.deposition.model.convention import Convention + + +class ConventionRepository(Port, Protocol): + @abstractmethod + async def save(self, convention: "Convention") -> None: ... + + @abstractmethod + async def get(self, srn: ConventionSRN) -> "Convention | None": ... + + @abstractmethod + async def list( + self, *, limit: int | None = None, offset: int | None = None + ) -> "List[Convention]": ... + + @abstractmethod + async def exists(self, srn: ConventionSRN) -> bool: ... 
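Since these repository ports are typing.Protocol classes, adapters satisfy them structurally and no inheritance is required. As an illustrative sketch (not part of this patch; it assumes only the Convention model and ConventionSRN defined above), an in-memory test double for ConventionRepository could look like this:

from osa.domain.deposition.model.convention import Convention
from osa.domain.shared.model.srn import ConventionSRN


class InMemoryConventionRepository:
    """Dict-backed test double; satisfies ConventionRepository structurally."""

    def __init__(self) -> None:
        self._items: dict[str, Convention] = {}

    async def save(self, convention: Convention) -> None:
        self._items[str(convention.srn)] = convention

    async def get(self, srn: ConventionSRN) -> Convention | None:
        return self._items.get(str(srn))

    async def list(
        self, *, limit: int | None = None, offset: int | None = None
    ) -> list[Convention]:
        items = list(self._items.values())
        start = offset or 0
        end = start + limit if limit is not None else None
        return items[start:end]

    async def exists(self, srn: ConventionSRN) -> bool:
        return str(srn) in self._items

A double like this keeps the unit tests listed in the diffstat free of database setup while exercising the same port surface as the Postgres repository.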
diff --git a/server/osa/domain/deposition/port/ontology_reader.py b/server/osa/domain/deposition/port/ontology_reader.py new file mode 100644 index 0000000..48a7f12 --- /dev/null +++ b/server/osa/domain/deposition/port/ontology_reader.py @@ -0,0 +1,15 @@ +from abc import abstractmethod +from typing import TYPE_CHECKING, Protocol + +from osa.domain.shared.model.srn import OntologySRN +from osa.domain.shared.port import Port + +if TYPE_CHECKING: + from osa.domain.semantics.model.ontology import Ontology + + +class OntologyReader(Port, Protocol): + """Read-only cross-domain port for reading ontologies from the deposition domain.""" + + @abstractmethod + async def get_ontology(self, srn: OntologySRN) -> "Ontology | None": ... diff --git a/server/osa/domain/deposition/port/repository.py b/server/osa/domain/deposition/port/repository.py index 9a0a7ec..1b6c64f 100644 --- a/server/osa/domain/deposition/port/repository.py +++ b/server/osa/domain/deposition/port/repository.py @@ -1,6 +1,9 @@ +from __future__ import annotations + from abc import abstractmethod -from typing import Protocol +from typing import List, Protocol +from osa.domain.auth.model.value import UserId from osa.domain.deposition.model.aggregate import Deposition from osa.domain.shared.model.srn import DepositionSRN from osa.domain.shared.port import Port @@ -12,3 +15,23 @@ async def get(self, srn: DepositionSRN) -> Deposition | None: ... @abstractmethod async def save(self, deposition: Deposition) -> None: ... + + @abstractmethod + async def list( + self, *, limit: int | None = None, offset: int | None = None + ) -> List[Deposition]: ... + + @abstractmethod + async def list_by_owner( + self, + owner_id: UserId, + *, + limit: int | None = None, + offset: int | None = None, + ) -> List[Deposition]: ... + + @abstractmethod + async def count(self) -> int: ... + + @abstractmethod + async def count_by_owner(self, owner_id: UserId) -> int: ... diff --git a/server/osa/domain/deposition/port/schema_reader.py b/server/osa/domain/deposition/port/schema_reader.py new file mode 100644 index 0000000..36f1e00 --- /dev/null +++ b/server/osa/domain/deposition/port/schema_reader.py @@ -0,0 +1,18 @@ +from abc import abstractmethod +from typing import TYPE_CHECKING, Protocol + +from osa.domain.shared.model.srn import SchemaSRN +from osa.domain.shared.port import Port + +if TYPE_CHECKING: + from osa.domain.semantics.model.schema import Schema + + +class SchemaReader(Port, Protocol): + """Read-only cross-domain port for reading schemas from the deposition domain.""" + + @abstractmethod + async def get_schema(self, srn: SchemaSRN) -> "Schema | None": ... + + @abstractmethod + async def schema_exists(self, srn: SchemaSRN) -> bool: ... 
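The cross-domain reader adapters named in the commit message live in persistence/adapter/readers.py, which is not shown in this excerpt. The shape is a thin delegation from the deposition-domain port to the semantics-domain repository; a hedged sketch, assuming SchemaRepository exposes a get(srn) coroutine like the other repositories in this patch:

from osa.domain.semantics.model.schema import Schema
from osa.domain.semantics.port.schema_repository import SchemaRepository
from osa.domain.shared.model.srn import SchemaSRN


class SchemaReaderAdapter:
    """Satisfies the deposition domain's SchemaReader port by delegating to
    the semantics domain's repository, keeping the dependency one-way."""

    def __init__(self, repository: SchemaRepository) -> None:
        self._repository = repository

    async def get_schema(self, srn: SchemaSRN) -> Schema | None:
        return await self._repository.get(srn)

    async def schema_exists(self, srn: SchemaSRN) -> bool:
        return await self._repository.get(srn) is not None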
diff --git a/server/osa/domain/deposition/port/spreadsheet.py b/server/osa/domain/deposition/port/spreadsheet.py new file mode 100644 index 0000000..1d305dd --- /dev/null +++ b/server/osa/domain/deposition/port/spreadsheet.py @@ -0,0 +1,40 @@ +from abc import abstractmethod +from typing import TYPE_CHECKING, Any, Protocol + +from pydantic import BaseModel + +from osa.domain.shared.port import Port + +if TYPE_CHECKING: + from osa.domain.semantics.model.schema import Schema + + +class SpreadsheetError(BaseModel): + """A single field-level error from spreadsheet parsing.""" + + field: str + message: str + + +class SpreadsheetParseResult(BaseModel): + """Result of parsing a spreadsheet upload.""" + + metadata: dict[str, Any] + warnings: list[str] = [] + errors: list[SpreadsheetError] = [] + + +class SpreadsheetPort(Port, Protocol): + @abstractmethod + def generate_template( + self, + schema: "Schema", + ontology_terms_by_srn: dict[str, list[str]], + ) -> bytes: ... + + @abstractmethod + def parse_upload( + self, + schema: "Schema", + content: bytes, + ) -> SpreadsheetParseResult: ... diff --git a/server/osa/domain/deposition/port/storage.py b/server/osa/domain/deposition/port/storage.py index f0bcdc8..915bdc6 100644 --- a/server/osa/domain/deposition/port/storage.py +++ b/server/osa/domain/deposition/port/storage.py @@ -1,14 +1,38 @@ from abc import abstractmethod +from collections.abc import AsyncIterator from typing import Protocol -from osa.domain.shared.port import Port +from osa.domain.deposition.model.value import DepositionFile from osa.domain.shared.model.srn import DepositionSRN +from osa.domain.shared.port import Port + + +class FileStoragePort(Port, Protocol): + @abstractmethod + async def save_file( + self, + deposition_id: DepositionSRN, + filename: str, + content: bytes, + size: int, + ) -> DepositionFile: ... + + @abstractmethod + async def get_file( + self, + deposition_id: DepositionSRN, + filename: str, + ) -> AsyncIterator[bytes]: ... + @abstractmethod + async def delete_file( + self, + deposition_id: DepositionSRN, + filename: str, + ) -> None: ... -class StoragePort(Port, Protocol): @abstractmethod - def delete_files_for_deposition(self, deposition_id: DepositionSRN) -> None: - """ - Physically removes all files associated with the deposition from storage. - """ - ... + async def delete_files_for_deposition( + self, + deposition_id: DepositionSRN, + ) -> None: ... 
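Reviewer note: save_file now returns the DepositionFile value object, which lets the adapter own checksum computation. A minimal sketch of one possible implementation, assuming a filesystem layout of <root>/<deposition SRN>/<filename>; the adapter actually shipped in this patch may differ in detail.

import hashlib
from pathlib import Path

from osa.domain.deposition.model.value import DepositionFile
from osa.domain.shared.model.srn import DepositionSRN


async def save_file_sketch(
    root: Path,
    deposition_id: DepositionSRN,
    filename: str,
    content: bytes,
    size: int,
) -> DepositionFile:
    # Hash the payload so callers can verify integrity later.
    checksum = hashlib.sha256(content).hexdigest()
    target_dir = root / str(deposition_id)
    target_dir.mkdir(parents=True, exist_ok=True)
    # Write to a temporary name, then rename into place; on POSIX
    # the rename is atomic, so readers never see a partial file.
    tmp_path = target_dir / f".{filename}.tmp"
    tmp_path.write_bytes(content)
    tmp_path.replace(target_dir / filename)
    return DepositionFile(name=filename, size=size, checksum=checksum)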
diff --git a/server/osa/domain/deposition/query/download_file.py b/server/osa/domain/deposition/query/download_file.py new file mode 100644 index 0000000..2b0fc40 --- /dev/null +++ b/server/osa/domain/deposition/query/download_file.py @@ -0,0 +1,35 @@ +from collections.abc import AsyncIterator + +from osa.domain.auth.model.principal import Principal +from osa.domain.auth.model.role import Role +from osa.domain.deposition.service.deposition import DepositionService +from osa.domain.shared.authorization.gate import at_least +from osa.domain.shared.model.srn import DepositionSRN +from osa.domain.shared.query import Query, QueryHandler, Result + + +class DownloadFile(Query): + srn: DepositionSRN + filename: str + + +class FileStream(Result, arbitrary_types_allowed=True): + stream: AsyncIterator[bytes] + filename: str + size: int + content_type: str | None + + +class DownloadFileHandler(QueryHandler[DownloadFile, FileStream]): + __auth__ = at_least(Role.DEPOSITOR) + principal: Principal + deposition_service: DepositionService + + async def run(self, cmd: DownloadFile) -> FileStream: + stream, file_meta = await self.deposition_service.get_file_download(cmd.srn, cmd.filename) + return FileStream( + stream=stream, + filename=file_meta.name, + size=file_meta.size, + content_type=file_meta.content_type, + ) diff --git a/server/osa/domain/deposition/query/download_template.py b/server/osa/domain/deposition/query/download_template.py new file mode 100644 index 0000000..eab466a --- /dev/null +++ b/server/osa/domain/deposition/query/download_template.py @@ -0,0 +1,52 @@ +from osa.domain.auth.model.principal import Principal +from osa.domain.auth.model.role import Role +from osa.domain.deposition.port.convention_repository import ConventionRepository +from osa.domain.deposition.port.ontology_reader import OntologyReader +from osa.domain.deposition.port.schema_reader import SchemaReader +from osa.domain.deposition.port.spreadsheet import SpreadsheetPort +from osa.domain.semantics.model.value import TermConstraints +from osa.domain.shared.authorization.gate import at_least +from osa.domain.shared.error import NotFoundError +from osa.domain.shared.model.srn import ConventionSRN +from osa.domain.shared.query import Query, QueryHandler, Result + + +class DownloadTemplate(Query): + convention_srn: ConventionSRN + + +class TemplateResult(Result): + content: bytes + filename: str + + +class DownloadTemplateHandler(QueryHandler[DownloadTemplate, TemplateResult]): + __auth__ = at_least(Role.DEPOSITOR) + principal: Principal + convention_repo: ConventionRepository + schema_reader: SchemaReader + ontology_reader: OntologyReader + spreadsheet: SpreadsheetPort + + async def run(self, cmd: DownloadTemplate) -> TemplateResult: + convention = await self.convention_repo.get(cmd.convention_srn) + if convention is None: + raise NotFoundError(f"Convention not found: {cmd.convention_srn}") + + schema = await self.schema_reader.get_schema(convention.schema_srn) + if schema is None: + raise NotFoundError(f"Schema not found: {convention.schema_srn}") + + # Collect ontology terms for fields that reference ontologies + ontology_terms_by_srn: dict[str, list[str]] = {} + for field in schema.fields: + if isinstance(field.constraints, TermConstraints): + onto_srn = field.constraints.ontology_srn + if str(onto_srn) not in ontology_terms_by_srn: + onto = await self.ontology_reader.get_ontology(onto_srn) + if onto: + ontology_terms_by_srn[str(onto_srn)] = [t.term_id for t in onto.terms] + + content = 
self.spreadsheet.generate_template(schema, ontology_terms_by_srn) + filename = f"{convention.title.lower().replace(' ', '_')}_template.xlsx" + return TemplateResult(content=content, filename=filename) diff --git a/server/osa/domain/deposition/query/get_convention.py b/server/osa/domain/deposition/query/get_convention.py new file mode 100644 index 0000000..da30dbc --- /dev/null +++ b/server/osa/domain/deposition/query/get_convention.py @@ -0,0 +1,39 @@ +from datetime import datetime + +from osa.domain.deposition.model.value import FileRequirements +from osa.domain.deposition.service.convention import ConventionService +from osa.domain.shared.authorization.gate import public +from osa.domain.shared.model.srn import ConventionSRN, SchemaSRN +from osa.domain.shared.model.validator import ValidatorRef +from osa.domain.shared.query import Query, QueryHandler, Result + + +class GetConvention(Query): + srn: ConventionSRN + + +class ConventionDetail(Result): + srn: ConventionSRN + title: str + description: str | None + schema_srn: SchemaSRN + file_requirements: FileRequirements + validator_refs: list[ValidatorRef] + created_at: datetime + + +class GetConventionHandler(QueryHandler[GetConvention, ConventionDetail]): + __auth__ = public() + convention_service: ConventionService + + async def run(self, cmd: GetConvention) -> ConventionDetail: + conv = await self.convention_service.get_convention(cmd.srn) + return ConventionDetail( + srn=conv.srn, + title=conv.title, + description=conv.description, + schema_srn=conv.schema_srn, + file_requirements=conv.file_requirements, + validator_refs=conv.validator_refs, + created_at=conv.created_at, + ) diff --git a/server/osa/domain/deposition/query/get_deposition.py b/server/osa/domain/deposition/query/get_deposition.py new file mode 100644 index 0000000..0b835df --- /dev/null +++ b/server/osa/domain/deposition/query/get_deposition.py @@ -0,0 +1,44 @@ +from datetime import datetime +from typing import Any + +from osa.domain.auth.model.principal import Principal +from osa.domain.auth.model.role import Role +from osa.domain.deposition.model.value import DepositionFile, DepositionStatus +from osa.domain.deposition.service.deposition import DepositionService +from osa.domain.shared.authorization.gate import at_least +from osa.domain.shared.model.srn import ConventionSRN, DepositionSRN, RecordSRN +from osa.domain.shared.query import Query, QueryHandler, Result + + +class GetDeposition(Query): + srn: DepositionSRN + + +class DepositionDetail(Result): + srn: DepositionSRN + convention_srn: ConventionSRN + status: DepositionStatus + metadata: dict[str, Any] + files: list[DepositionFile] + record_srn: RecordSRN | None + created_at: datetime + updated_at: datetime + + +class GetDepositionHandler(QueryHandler[GetDeposition, DepositionDetail]): + __auth__ = at_least(Role.DEPOSITOR) + principal: Principal + deposition_service: DepositionService + + async def run(self, cmd: GetDeposition) -> DepositionDetail: + dep = await self.deposition_service.get(cmd.srn) + return DepositionDetail( + srn=dep.srn, + convention_srn=dep.convention_srn, + status=dep.status, + metadata=dep.metadata, + files=dep.files, + record_srn=dep.record_srn, + created_at=dep.created_at, + updated_at=dep.updated_at, + ) diff --git a/server/osa/domain/deposition/query/list_conventions.py b/server/osa/domain/deposition/query/list_conventions.py new file mode 100644 index 0000000..f38a07e --- /dev/null +++ b/server/osa/domain/deposition/query/list_conventions.py @@ -0,0 +1,44 @@ +from datetime import datetime 
+ +from pydantic import BaseModel + +from osa.domain.deposition.service.convention import ConventionService +from osa.domain.shared.authorization.gate import public +from osa.domain.shared.model.srn import ConventionSRN, SchemaSRN +from osa.domain.shared.query import Query, QueryHandler, Result + + +class ListConventions(Query): + pass + + +class ConventionSummary(BaseModel): + srn: ConventionSRN + title: str + description: str | None + schema_srn: SchemaSRN + created_at: datetime + + +class ConventionList(Result): + items: list[ConventionSummary] + + +class ListConventionsHandler(QueryHandler[ListConventions, ConventionList]): + __auth__ = public() + convention_service: ConventionService + + async def run(self, cmd: ListConventions) -> ConventionList: + conventions = await self.convention_service.list_conventions() + return ConventionList( + items=[ + ConventionSummary( + srn=c.srn, + title=c.title, + description=c.description, + schema_srn=c.schema_srn, + created_at=c.created_at, + ) + for c in conventions + ] + ) diff --git a/server/osa/domain/deposition/query/list_depositions.py b/server/osa/domain/deposition/query/list_depositions.py new file mode 100644 index 0000000..6920016 --- /dev/null +++ b/server/osa/domain/deposition/query/list_depositions.py @@ -0,0 +1,54 @@ +from datetime import datetime + +from pydantic import BaseModel + +from osa.domain.auth.model.principal import Principal +from osa.domain.auth.model.role import Role +from osa.domain.deposition.model.value import DepositionStatus +from osa.domain.deposition.service.deposition import DepositionService +from osa.domain.shared.authorization.gate import at_least +from osa.domain.shared.model.srn import ConventionSRN, DepositionSRN +from osa.domain.shared.query import Query, QueryHandler, Result + + +class ListDepositions(Query): + pass + + +class DepositionSummary(BaseModel): + srn: DepositionSRN + convention_srn: ConventionSRN + status: DepositionStatus + file_count: int + created_at: datetime + updated_at: datetime + + +class DepositionList(Result): + items: list[DepositionSummary] + total: int + + +class ListDepositionsHandler(QueryHandler[ListDepositions, DepositionList]): + __auth__ = at_least(Role.DEPOSITOR) + principal: Principal + deposition_service: DepositionService + + async def run(self, cmd: ListDepositions) -> DepositionList: + # Curators see all depositions; depositors see only their own + owner_id = None if self.principal.has_role(Role.CURATOR) else self.principal.user_id + depositions, total = await self.deposition_service.list_depositions(owner_id) + return DepositionList( + items=[ + DepositionSummary( + srn=d.srn, + convention_srn=d.convention_srn, + status=d.status, + file_count=len(d.files), + created_at=d.created_at, + updated_at=d.updated_at, + ) + for d in depositions + ], + total=total, + ) diff --git a/server/osa/domain/deposition/service/convention.py b/server/osa/domain/deposition/service/convention.py new file mode 100644 index 0000000..83b255d --- /dev/null +++ b/server/osa/domain/deposition/service/convention.py @@ -0,0 +1,57 @@ +from datetime import UTC, datetime +from uuid import uuid4 + +from osa.domain.deposition.model.convention import Convention +from osa.domain.deposition.model.value import FileRequirements +from osa.domain.deposition.port.convention_repository import ConventionRepository +from osa.domain.deposition.port.schema_reader import SchemaReader +from osa.domain.shared.error import NotFoundError, ValidationError +from osa.domain.shared.model.srn import ConventionSRN, Domain, 
LocalId, SchemaSRN, Semver +from osa.domain.shared.model.validator import ValidatorRef +from osa.domain.shared.service import Service + + +class ConventionService(Service): + convention_repo: ConventionRepository + schema_reader: SchemaReader + node_domain: Domain + + async def create_convention( + self, + title: str, + version: str, + schema_srn: SchemaSRN, + file_requirements: FileRequirements, + description: str | None = None, + validator_refs: list[ValidatorRef] | None = None, + ) -> Convention: + if not await self.schema_reader.schema_exists(schema_srn): + raise ValidationError(f"Schema '{schema_srn}' not found") + + srn = ConventionSRN( + domain=self.node_domain, + id=LocalId(str(uuid4())[:20]), + version=Semver.from_string(version), + ) + convention = Convention( + srn=srn, + title=title, + description=description, + schema_srn=schema_srn, + file_requirements=file_requirements, + validator_refs=validator_refs or [], + created_at=datetime.now(UTC), + ) + await self.convention_repo.save(convention) + return convention + + async def get_convention(self, srn: ConventionSRN) -> Convention: + convention = await self.convention_repo.get(srn) + if convention is None: + raise NotFoundError(f"Convention not found: {srn}") + return convention + + async def list_conventions( + self, *, limit: int | None = None, offset: int | None = None + ) -> list[Convention]: + return await self.convention_repo.list(limit=limit, offset=offset) diff --git a/server/osa/domain/deposition/service/deposition.py b/server/osa/domain/deposition/service/deposition.py index 0800766..d45ad80 100644 --- a/server/osa/domain/deposition/service/deposition.py +++ b/server/osa/domain/deposition/service/deposition.py @@ -1,4 +1,204 @@ +from collections.abc import AsyncIterator +from datetime import UTC, datetime +from uuid import uuid4 + +from osa.domain.auth.model.value import UserId +from osa.domain.deposition.event.created import DepositionCreatedEvent +from osa.domain.deposition.event.file_deleted import FileDeletedEvent +from osa.domain.deposition.event.file_uploaded import FileUploadedEvent +from osa.domain.deposition.event.metadata_updated import MetadataUpdatedEvent +from osa.domain.deposition.event.submitted import DepositionSubmittedEvent +from osa.domain.deposition.model.aggregate import Deposition +from osa.domain.deposition.model.value import DepositionFile +from osa.domain.deposition.port.convention_repository import ConventionRepository +from osa.domain.deposition.port.repository import DepositionRepository +from osa.domain.deposition.port.storage import FileStoragePort +from osa.domain.shared.error import NotFoundError, ValidationError +from osa.domain.shared.event import EventId +from osa.domain.shared.model.srn import ConventionSRN, DepositionSRN, Domain, LocalId +from osa.domain.shared.outbox import Outbox from osa.domain.shared.service import Service -class DepositionService(Service): ... 
+class DepositionService(Service): + deposition_repo: DepositionRepository + convention_repo: ConventionRepository + file_storage: FileStoragePort + outbox: Outbox + node_domain: Domain + + async def create( + self, + convention_srn: ConventionSRN, + owner_id: UserId, + ) -> Deposition: + convention = await self.convention_repo.get(convention_srn) + if convention is None: + raise NotFoundError(f"Convention not found: {convention_srn}") + + now = datetime.now(UTC) + srn = DepositionSRN( + domain=self.node_domain, + id=LocalId(str(uuid4())[:20]), + ) + deposition = Deposition( + srn=srn, + convention_srn=convention_srn, + owner_id=owner_id, + created_at=now, + updated_at=now, + ) + await self.deposition_repo.save(deposition) + + event = DepositionCreatedEvent( + id=EventId(uuid4()), + deposition_id=srn, + convention_srn=convention_srn, + owner_id=owner_id, + ) + await self.outbox.append(event) + return deposition + + async def get(self, srn: DepositionSRN) -> Deposition: + dep = await self.deposition_repo.get(srn) + if dep is None: + raise NotFoundError(f"Deposition not found: {srn}") + return dep + + async def update_metadata( + self, + srn: DepositionSRN, + metadata: dict, + ) -> Deposition: + dep = await self.get(srn) + dep.update_metadata(metadata) + await self.deposition_repo.save(dep) + + event = MetadataUpdatedEvent( + id=EventId(uuid4()), + deposition_id=srn, + metadata=metadata, + ) + await self.outbox.append(event) + return dep + + async def upload_file( + self, + srn: DepositionSRN, + filename: str, + content: bytes, + size: int, + ) -> Deposition: + dep = await self.get(srn) + convention = await self.convention_repo.get(dep.convention_srn) + if convention is None: + raise NotFoundError(f"Convention not found: {dep.convention_srn}") + + reqs = convention.file_requirements + + # Validate file type + ext = _get_extension(filename) + if reqs.accepted_types and ext not in reqs.accepted_types: + raise ValidationError(f"File type '{ext}' not accepted. 
Allowed: {reqs.accepted_types}") + + # Validate file size + if size > reqs.max_file_size: + raise ValidationError(f"File size {size} exceeds maximum {reqs.max_file_size}") + + # Validate max count + if len(dep.files) >= reqs.max_count: + raise ValidationError( + f"Maximum {reqs.max_count} files allowed, already have {len(dep.files)}" + ) + + # Store the file and get back the DepositionFile VO + saved_file = await self.file_storage.save_file(srn, filename, content, size) + dep.add_file(saved_file) + await self.deposition_repo.save(dep) + + event = FileUploadedEvent( + id=EventId(uuid4()), + deposition_id=srn, + filename=saved_file.name, + size=saved_file.size, + checksum=saved_file.checksum, + ) + await self.outbox.append(event) + return dep + + async def delete_file( + self, + srn: DepositionSRN, + filename: str, + ) -> Deposition: + dep = await self.get(srn) + dep.remove_file(filename) + await self.file_storage.delete_file(srn, filename) + await self.deposition_repo.save(dep) + + event = FileDeletedEvent( + id=EventId(uuid4()), + deposition_id=srn, + filename=filename, + ) + await self.outbox.append(event) + return dep + + async def list_depositions( + self, + owner_id: UserId | None = None, + *, + limit: int | None = None, + offset: int | None = None, + ) -> tuple[list[Deposition], int]: + if owner_id is not None: + items = await self.deposition_repo.list_by_owner(owner_id, limit=limit, offset=offset) + total = await self.deposition_repo.count_by_owner(owner_id) + else: + items = await self.deposition_repo.list(limit=limit, offset=offset) + total = await self.deposition_repo.count() + return items, total + + async def get_file_download( + self, + srn: DepositionSRN, + filename: str, + ) -> tuple[AsyncIterator[bytes], DepositionFile]: + """Fetch file stream and metadata in a single deposition lookup.""" + dep = await self.get(srn) + file_meta = next((f for f in dep.files if f.name == filename), None) + if file_meta is None: + raise NotFoundError(f"File '{filename}' not found in deposition") + stream = await self.file_storage.get_file(srn, filename) + return stream, file_meta + + async def submit(self, srn: DepositionSRN) -> Deposition: + dep = await self.get(srn) + convention = await self.convention_repo.get(dep.convention_srn) + if convention is None: + raise NotFoundError(f"Convention not found: {dep.convention_srn}") + + reqs = convention.file_requirements + if len(dep.files) < reqs.min_count: + raise ValidationError( + f"Minimum {reqs.min_count} file(s) required, have {len(dep.files)}" + ) + + dep.submit() + await self.deposition_repo.save(dep) + + event = DepositionSubmittedEvent( + id=EventId(uuid4()), + deposition_id=srn, + metadata=dep.metadata, + ) + await self.outbox.append(event) + return dep + + +def _get_extension(filename: str) -> str: + """Extract file extension including dot (e.g., '.csv').""" + dot_idx = filename.rfind(".") + if dot_idx == -1: + return "" + return filename[dot_idx:].lower() diff --git a/server/osa/domain/deposition/util/di/provider.py b/server/osa/domain/deposition/util/di/provider.py index d8ed5b1..41e3237 100644 --- a/server/osa/domain/deposition/util/di/provider.py +++ b/server/osa/domain/deposition/util/di/provider.py @@ -1,15 +1,81 @@ -from osa.util.di.scope import Scope from dishka import provide +from osa.config import Config from osa.domain.deposition.command.create import CreateDepositionHandler +from osa.domain.deposition.command.create_convention import CreateConventionHandler +from osa.domain.deposition.command.delete_files import 
DeleteFileHandler from osa.domain.deposition.command.submit import SubmitDepositionHandler +from osa.domain.deposition.command.update import UpdateMetadataHandler +from osa.domain.deposition.command.upload import UploadFileHandler +from osa.domain.deposition.command.upload_spreadsheet import UploadSpreadsheetHandler +from osa.domain.deposition.port.convention_repository import ConventionRepository +from osa.domain.deposition.port.repository import DepositionRepository +from osa.domain.deposition.port.schema_reader import SchemaReader +from osa.domain.deposition.port.spreadsheet import SpreadsheetPort +from osa.domain.deposition.port.storage import FileStoragePort +from osa.domain.deposition.query.download_file import DownloadFileHandler +from osa.domain.deposition.query.download_template import DownloadTemplateHandler +from osa.domain.deposition.query.get_convention import GetConventionHandler +from osa.domain.deposition.query.get_deposition import GetDepositionHandler +from osa.domain.deposition.query.list_conventions import ListConventionsHandler +from osa.domain.deposition.query.list_depositions import ListDepositionsHandler +from osa.domain.deposition.service.convention import ConventionService from osa.domain.deposition.service.deposition import DepositionService +from osa.domain.shared.model.srn import Domain +from osa.domain.shared.outbox import Outbox +from osa.infrastructure.persistence.adapter.spreadsheet import OpenpyxlSpreadsheetAdapter from osa.util.di.base import Provider +from osa.util.di.scope import Scope class DepositionProvider(Provider): - service = provide(DepositionService, scope=Scope.UOW) + @provide(scope=Scope.UOW) + def get_deposition_service( + self, + deposition_repo: DepositionRepository, + convention_repo: ConventionRepository, + file_storage: FileStoragePort, + outbox: Outbox, + config: Config, + ) -> DepositionService: + return DepositionService( + deposition_repo=deposition_repo, + convention_repo=convention_repo, + file_storage=file_storage, + outbox=outbox, + node_domain=Domain(config.server.domain), + ) + + @provide(scope=Scope.UOW) + def get_convention_service( + self, + convention_repo: ConventionRepository, + schema_reader: SchemaReader, + config: Config, + ) -> ConventionService: + return ConventionService( + convention_repo=convention_repo, + schema_reader=schema_reader, + node_domain=Domain(config.server.domain), + ) + + @provide(scope=Scope.APP) + def get_spreadsheet_port(self) -> SpreadsheetPort: + return OpenpyxlSpreadsheetAdapter() # Command Handlers create_handler = provide(CreateDepositionHandler, scope=Scope.UOW) submit_handler = provide(SubmitDepositionHandler, scope=Scope.UOW) + update_handler = provide(UpdateMetadataHandler, scope=Scope.UOW) + upload_handler = provide(UploadFileHandler, scope=Scope.UOW) + delete_file_handler = provide(DeleteFileHandler, scope=Scope.UOW) + upload_spreadsheet_handler = provide(UploadSpreadsheetHandler, scope=Scope.UOW) + create_convention_handler = provide(CreateConventionHandler, scope=Scope.UOW) + + # Query Handlers + get_deposition_handler = provide(GetDepositionHandler, scope=Scope.UOW) + download_template_handler = provide(DownloadTemplateHandler, scope=Scope.UOW) + get_convention_handler = provide(GetConventionHandler, scope=Scope.UOW) + list_conventions_handler = provide(ListConventionsHandler, scope=Scope.UOW) + list_depositions_handler = provide(ListDepositionsHandler, scope=Scope.UOW) + download_file_handler = provide(DownloadFileHandler, scope=Scope.UOW) diff --git 
a/server/osa/domain/schema/adapter/__init__.py b/server/osa/domain/semantics/__init__.py similarity index 100% rename from server/osa/domain/schema/adapter/__init__.py rename to server/osa/domain/semantics/__init__.py diff --git a/server/osa/domain/schema/command/__init__.py b/server/osa/domain/semantics/command/__init__.py similarity index 100% rename from server/osa/domain/schema/command/__init__.py rename to server/osa/domain/semantics/command/__init__.py diff --git a/server/osa/domain/semantics/command/create_ontology.py b/server/osa/domain/semantics/command/create_ontology.py new file mode 100644 index 0000000..59a05bc --- /dev/null +++ b/server/osa/domain/semantics/command/create_ontology.py @@ -0,0 +1,67 @@ +from datetime import datetime + +from pydantic import BaseModel + +from osa.domain.auth.model.principal import Principal +from osa.domain.auth.model.role import Role +from osa.domain.semantics.model.ontology import Term +from osa.domain.semantics.service.ontology import OntologyService +from osa.domain.shared.authorization.gate import at_least +from osa.domain.shared.command import Command, CommandHandler, Result +from osa.domain.shared.model.srn import OntologySRN + + +class TermInput(BaseModel): + term_id: str + label: str + synonyms: list[str] = [] + parent_ids: list[str] = [] + definition: str | None = None + deprecated: bool = False + + +class CreateOntology(Command): + title: str + version: str + terms: list[TermInput] + description: str | None = None + + +class OntologyCreated(Result): + srn: OntologySRN + title: str + description: str | None + term_count: int + created_at: datetime + + +class CreateOntologyHandler(CommandHandler[CreateOntology, OntologyCreated]): + __auth__ = at_least(Role.ADMIN) + principal: Principal + ontology_service: OntologyService + + async def run(self, cmd: CreateOntology) -> OntologyCreated: + terms = [ + Term( + term_id=t.term_id, + label=t.label, + synonyms=t.synonyms, + parent_ids=t.parent_ids, + definition=t.definition, + deprecated=t.deprecated, + ) + for t in cmd.terms + ] + ontology = await self.ontology_service.create_ontology( + title=cmd.title, + version=cmd.version, + terms=terms, + description=cmd.description, + ) + return OntologyCreated( + srn=ontology.srn, + title=ontology.title, + description=ontology.description, + term_count=len(ontology.terms), + created_at=ontology.created_at, + ) diff --git a/server/osa/domain/semantics/command/create_schema.py b/server/osa/domain/semantics/command/create_schema.py new file mode 100644 index 0000000..27c4202 --- /dev/null +++ b/server/osa/domain/semantics/command/create_schema.py @@ -0,0 +1,41 @@ +from datetime import datetime + +from osa.domain.auth.model.principal import Principal +from osa.domain.auth.model.role import Role +from osa.domain.semantics.model.value import FieldDefinition +from osa.domain.semantics.service.schema import SchemaService +from osa.domain.shared.authorization.gate import at_least +from osa.domain.shared.command import Command, CommandHandler, Result +from osa.domain.shared.model.srn import SchemaSRN + + +class CreateSchema(Command): + title: str + version: str + fields: list[FieldDefinition] + + +class SchemaCreated(Result): + srn: SchemaSRN + title: str + field_count: int + created_at: datetime + + +class CreateSchemaHandler(CommandHandler[CreateSchema, SchemaCreated]): + __auth__ = at_least(Role.ADMIN) + principal: Principal + schema_service: SchemaService + + async def run(self, cmd: CreateSchema) -> SchemaCreated: + schema = await 
self.schema_service.create_schema( + title=cmd.title, + version=cmd.version, + fields=cmd.fields, + ) + return SchemaCreated( + srn=schema.srn, + title=schema.title, + field_count=len(schema.fields), + created_at=schema.created_at, + ) diff --git a/server/osa/domain/semantics/command/import_ontology.py b/server/osa/domain/semantics/command/import_ontology.py new file mode 100644 index 0000000..8e714cc --- /dev/null +++ b/server/osa/domain/semantics/command/import_ontology.py @@ -0,0 +1,44 @@ +"""Import an ontology from an OBO Graphs JSON URL.""" + +from datetime import datetime + +from osa.domain.auth.model.principal import Principal +from osa.domain.auth.model.role import Role +from osa.domain.semantics.port.ontology_fetcher import OntologyFetcher +from osa.domain.semantics.service.ontology import OntologyService +from osa.domain.shared.authorization.gate import at_least +from osa.domain.shared.command import Command, CommandHandler, Result +from osa.domain.shared.model.srn import OntologySRN + + +class ImportOntology(Command): + url: str + version: str | None = None + + +class ImportOntologyResult(Result): + srn: OntologySRN + title: str + description: str | None + term_count: int + created_at: datetime + + +class ImportOntologyHandler(CommandHandler[ImportOntology, ImportOntologyResult]): + __auth__ = at_least(Role.ADMIN) + principal: Principal + ontology_service: OntologyService + fetcher: OntologyFetcher + + async def run(self, cmd: ImportOntology) -> ImportOntologyResult: + data = await self.fetcher.fetch_json(cmd.url) + ontology = await self.ontology_service.import_from_obographs( + data, version_override=cmd.version + ) + return ImportOntologyResult( + srn=ontology.srn, + title=ontology.title, + description=ontology.description, + term_count=len(ontology.terms), + created_at=ontology.created_at, + ) diff --git a/server/osa/domain/schema/event/__init__.py b/server/osa/domain/semantics/event/__init__.py similarity index 100% rename from server/osa/domain/schema/event/__init__.py rename to server/osa/domain/semantics/event/__init__.py diff --git a/server/osa/domain/schema/model/__init__.py b/server/osa/domain/semantics/handler/__init__.py similarity index 100% rename from server/osa/domain/schema/model/__init__.py rename to server/osa/domain/semantics/handler/__init__.py diff --git a/server/osa/domain/schema/port/__init__.py b/server/osa/domain/semantics/model/__init__.py similarity index 100% rename from server/osa/domain/schema/port/__init__.py rename to server/osa/domain/semantics/model/__init__.py diff --git a/server/osa/domain/semantics/model/ontology.py b/server/osa/domain/semantics/model/ontology.py new file mode 100644 index 0000000..e740022 --- /dev/null +++ b/server/osa/domain/semantics/model/ontology.py @@ -0,0 +1,36 @@ +from datetime import datetime + +from pydantic import BaseModel + +from osa.domain.shared.error import ValidationError +from osa.domain.shared.model.aggregate import Aggregate +from osa.domain.shared.model.srn import OntologySRN + + +class Term(BaseModel): + """An individual entry within an ontology.""" + + term_id: str + label: str + synonyms: list[str] = [] + parent_ids: list[str] = [] + definition: str | None = None + deprecated: bool = False + + +class Ontology(Aggregate): + """An immutable, versioned collection of terms.""" + + srn: OntologySRN + title: str + description: str | None = None + terms: list[Term] + created_at: datetime + + def model_post_init(self, __context: object) -> None: + if len(self.terms) < 1: + raise ValidationError("Ontology must 
have at least one term") + + term_ids = [t.term_id for t in self.terms] + if len(term_ids) != len(set(term_ids)): + raise ValidationError("Duplicate term IDs within ontology") diff --git a/server/osa/domain/semantics/model/schema.py b/server/osa/domain/semantics/model/schema.py new file mode 100644 index 0000000..1188dbf --- /dev/null +++ b/server/osa/domain/semantics/model/schema.py @@ -0,0 +1,23 @@ +from datetime import datetime + +from osa.domain.semantics.model.value import FieldDefinition +from osa.domain.shared.error import ValidationError +from osa.domain.shared.model.aggregate import Aggregate +from osa.domain.shared.model.srn import SchemaSRN + + +class Schema(Aggregate): + """An immutable, versioned definition of metadata structure.""" + + srn: SchemaSRN + title: str + fields: list[FieldDefinition] + created_at: datetime + + def model_post_init(self, __context: object) -> None: + if len(self.fields) < 1: + raise ValidationError("Schema must have at least one field") + + names = [f.name for f in self.fields] + if len(names) != len(set(names)): + raise ValidationError("Duplicate field names within schema") diff --git a/server/osa/domain/semantics/model/value.py b/server/osa/domain/semantics/model/value.py new file mode 100644 index 0000000..6299dc6 --- /dev/null +++ b/server/osa/domain/semantics/model/value.py @@ -0,0 +1,80 @@ +from enum import StrEnum +from typing import Annotated, Literal, Union + +from pydantic import Field + +from osa.domain.shared.model.srn import OntologySRN +from osa.domain.shared.model.value import ValueObject + + +class FieldType(StrEnum): + TEXT = "text" + NUMBER = "number" + DATE = "date" + BOOLEAN = "boolean" + TERM = "term" + URL = "url" + + +class Cardinality(StrEnum): + EXACTLY_ONE = "exactly_one" + ONE_OR_MORE = "one_or_more" + ZERO_OR_MORE = "zero_or_more" + + +class TextConstraints(ValueObject): + type: Literal["text"] = "text" + min_length: int | None = None + max_length: int | None = None + pattern: str | None = None + + +class NumberConstraints(ValueObject): + type: Literal["number"] = "number" + min_value: float | None = None + max_value: float | None = None + integer_only: bool = False + unit: str | None = None + + +class TermConstraints(ValueObject): + type: Literal["term"] = "term" + ontology_srn: OntologySRN + root_term: str | None = None + + +class UrlConstraints(ValueObject): + type: Literal["url"] = "url" + pattern: str | None = None + + +class DateConstraints(ValueObject): + type: Literal["date"] = "date" + + +class BooleanConstraints(ValueObject): + type: Literal["boolean"] = "boolean" + + +FieldConstraints = Annotated[ + Union[ + TextConstraints, + NumberConstraints, + TermConstraints, + UrlConstraints, + DateConstraints, + BooleanConstraints, + ], + Field(discriminator="type"), +] + + +class FieldDefinition(ValueObject): + """A single field definition within a schema.""" + + name: str + type: FieldType + required: bool + cardinality: Cardinality + description: str | None = None + constraints: FieldConstraints | None = None diff --git a/server/osa/domain/schema/query/__init__.py b/server/osa/domain/semantics/port/__init__.py similarity index 100% rename from server/osa/domain/schema/query/__init__.py rename to server/osa/domain/semantics/port/__init__.py diff --git a/server/osa/domain/semantics/port/ontology_fetcher.py b/server/osa/domain/semantics/port/ontology_fetcher.py new file mode 100644 index 0000000..e15cddd --- /dev/null +++ b/server/osa/domain/semantics/port/ontology_fetcher.py @@ -0,0 +1,13 @@ +"""Port for fetching 
ontology data from external URLs.""" + +from abc import abstractmethod +from typing import Protocol + +from osa.domain.shared.port import Port + + +class OntologyFetcher(Port, Protocol): + """Fetches ontology JSON data from a URL.""" + + @abstractmethod + async def fetch_json(self, url: str) -> dict: ... diff --git a/server/osa/domain/semantics/port/ontology_repository.py b/server/osa/domain/semantics/port/ontology_repository.py new file mode 100644 index 0000000..5d5e6b3 --- /dev/null +++ b/server/osa/domain/semantics/port/ontology_repository.py @@ -0,0 +1,24 @@ +from abc import abstractmethod +from typing import TYPE_CHECKING, List, Protocol + +from osa.domain.shared.model.srn import OntologySRN +from osa.domain.shared.port import Port + +if TYPE_CHECKING: + from osa.domain.semantics.model.ontology import Ontology + + +class OntologyRepository(Port, Protocol): + @abstractmethod + async def save(self, ontology: "Ontology") -> None: ... + + @abstractmethod + async def get(self, srn: OntologySRN) -> "Ontology | None": ... + + @abstractmethod + async def list( + self, *, limit: int | None = None, offset: int | None = None + ) -> "List[Ontology]": ... + + @abstractmethod + async def exists(self, srn: OntologySRN) -> bool: ... diff --git a/server/osa/domain/semantics/port/schema_repository.py b/server/osa/domain/semantics/port/schema_repository.py new file mode 100644 index 0000000..b6849a8 --- /dev/null +++ b/server/osa/domain/semantics/port/schema_repository.py @@ -0,0 +1,24 @@ +from abc import abstractmethod +from typing import TYPE_CHECKING, List, Protocol + +from osa.domain.shared.model.srn import SchemaSRN +from osa.domain.shared.port import Port + +if TYPE_CHECKING: + from osa.domain.semantics.model.schema import Schema + + +class SchemaRepository(Port, Protocol): + @abstractmethod + async def save(self, schema: "Schema") -> None: ... + + @abstractmethod + async def get(self, srn: SchemaSRN) -> "Schema | None": ... + + @abstractmethod + async def list( + self, *, limit: int | None = None, offset: int | None = None + ) -> "List[Schema]": ... + + @abstractmethod + async def exists(self, srn: SchemaSRN) -> bool: ... 
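Reviewer note: since OntologyFetcher is a single-method port, ImportOntologyHandler can be exercised without network access. A hypothetical canned stub, assuming the Port base adds no constructor requirements:

from osa.domain.semantics.port.ontology_fetcher import OntologyFetcher


class StaticOntologyFetcher(OntologyFetcher):
    """Hypothetical stub that returns a canned OBO Graphs document."""

    def __init__(self, payload: dict) -> None:
        self._payload = payload

    async def fetch_json(self, url: str) -> dict:
        # The URL is ignored; the canned payload is always returned.
        return self._payload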
diff --git a/server/osa/domain/schema/service/__init__.py b/server/osa/domain/semantics/query/__init__.py similarity index 100% rename from server/osa/domain/schema/service/__init__.py rename to server/osa/domain/semantics/query/__init__.py diff --git a/server/osa/domain/semantics/query/get_ontology.py b/server/osa/domain/semantics/query/get_ontology.py new file mode 100644 index 0000000..be9e1b9 --- /dev/null +++ b/server/osa/domain/semantics/query/get_ontology.py @@ -0,0 +1,34 @@ +from datetime import datetime + +from osa.domain.semantics.model.ontology import Term +from osa.domain.semantics.service.ontology import OntologyService +from osa.domain.shared.authorization.gate import public +from osa.domain.shared.model.srn import OntologySRN +from osa.domain.shared.query import Query, QueryHandler, Result + + +class GetOntology(Query): + srn: OntologySRN + + +class OntologyDetail(Result): + srn: OntologySRN + title: str + description: str | None + terms: list[Term] + created_at: datetime + + +class GetOntologyHandler(QueryHandler[GetOntology, OntologyDetail]): + __auth__ = public() + ontology_service: OntologyService + + async def run(self, cmd: GetOntology) -> OntologyDetail: + ontology = await self.ontology_service.get_ontology(cmd.srn) + return OntologyDetail( + srn=ontology.srn, + title=ontology.title, + description=ontology.description, + terms=ontology.terms, + created_at=ontology.created_at, + ) diff --git a/server/osa/domain/semantics/query/get_schema.py b/server/osa/domain/semantics/query/get_schema.py new file mode 100644 index 0000000..3c7afd8 --- /dev/null +++ b/server/osa/domain/semantics/query/get_schema.py @@ -0,0 +1,32 @@ +from datetime import datetime + +from osa.domain.semantics.model.value import FieldDefinition +from osa.domain.semantics.service.schema import SchemaService +from osa.domain.shared.authorization.gate import public +from osa.domain.shared.model.srn import SchemaSRN +from osa.domain.shared.query import Query, QueryHandler, Result + + +class GetSchema(Query): + srn: SchemaSRN + + +class SchemaDetail(Result): + srn: SchemaSRN + title: str + fields: list[FieldDefinition] + created_at: datetime + + +class GetSchemaHandler(QueryHandler[GetSchema, SchemaDetail]): + __auth__ = public() + schema_service: SchemaService + + async def run(self, cmd: GetSchema) -> SchemaDetail: + schema = await self.schema_service.get_schema(cmd.srn) + return SchemaDetail( + srn=schema.srn, + title=schema.title, + fields=schema.fields, + created_at=schema.created_at, + ) diff --git a/server/osa/domain/semantics/query/list_ontologies.py b/server/osa/domain/semantics/query/list_ontologies.py new file mode 100644 index 0000000..de491d5 --- /dev/null +++ b/server/osa/domain/semantics/query/list_ontologies.py @@ -0,0 +1,44 @@ +from datetime import datetime + +from pydantic import BaseModel + +from osa.domain.semantics.service.ontology import OntologyService +from osa.domain.shared.authorization.gate import public +from osa.domain.shared.model.srn import OntologySRN +from osa.domain.shared.query import Query, QueryHandler, Result + + +class ListOntologies(Query): + pass + + +class OntologySummary(BaseModel): + srn: OntologySRN + title: str + description: str | None + term_count: int + created_at: datetime + + +class OntologyList(Result): + items: list[OntologySummary] + + +class ListOntologiesHandler(QueryHandler[ListOntologies, OntologyList]): + __auth__ = public() + ontology_service: OntologyService + + async def run(self, cmd: ListOntologies) -> OntologyList: + ontologies = await 
self.ontology_service.list_ontologies() + return OntologyList( + items=[ + OntologySummary( + srn=o.srn, + title=o.title, + description=o.description, + term_count=len(o.terms), + created_at=o.created_at, + ) + for o in ontologies + ] + ) diff --git a/server/osa/domain/semantics/query/list_schemas.py b/server/osa/domain/semantics/query/list_schemas.py new file mode 100644 index 0000000..b13ba2e --- /dev/null +++ b/server/osa/domain/semantics/query/list_schemas.py @@ -0,0 +1,42 @@ +from datetime import datetime + +from pydantic import BaseModel + +from osa.domain.semantics.service.schema import SchemaService +from osa.domain.shared.authorization.gate import public +from osa.domain.shared.model.srn import SchemaSRN +from osa.domain.shared.query import Query, QueryHandler, Result + + +class ListSchemas(Query): + pass + + +class SchemaSummary(BaseModel): + srn: SchemaSRN + title: str + field_count: int + created_at: datetime + + +class SchemaList(Result): + items: list[SchemaSummary] + + +class ListSchemasHandler(QueryHandler[ListSchemas, SchemaList]): + __auth__ = public() + schema_service: SchemaService + + async def run(self, cmd: ListSchemas) -> SchemaList: + schemas = await self.schema_service.list_schemas() + return SchemaList( + items=[ + SchemaSummary( + srn=s.srn, + title=s.title, + field_count=len(s.fields), + created_at=s.created_at, + ) + for s in schemas + ] + ) diff --git a/server/osa/domain/semantics/service/__init__.py b/server/osa/domain/semantics/service/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/server/osa/domain/semantics/service/ontology.py b/server/osa/domain/semantics/service/ontology.py new file mode 100644 index 0000000..de5a6ff --- /dev/null +++ b/server/osa/domain/semantics/service/ontology.py @@ -0,0 +1,63 @@ +from datetime import UTC, datetime +from uuid import uuid4 + +from osa.domain.semantics.model.ontology import Ontology, Term +from osa.domain.semantics.port.ontology_repository import OntologyRepository +from osa.domain.semantics.util.obographs import parse_obographs +from osa.domain.shared.error import NotFoundError +from osa.domain.shared.model.srn import Domain, LocalId, OntologySRN, Semver +from osa.domain.shared.service import Service + + +class OntologyService(Service): + ontology_repo: OntologyRepository + node_domain: Domain + + async def import_from_obographs( + self, + data: dict, + *, + version_override: str | None = None, + ) -> Ontology: + """Parse OBO Graphs JSON and create an ontology from it.""" + parsed = parse_obographs(data) + version = version_override or parsed.version or "1.0.0" + return await self.create_ontology( + title=parsed.title, + version=version, + terms=parsed.terms, + description=parsed.description, + ) + + async def create_ontology( + self, + title: str, + version: str, + terms: list[Term], + description: str | None = None, + ) -> Ontology: + srn = OntologySRN( + domain=self.node_domain, + id=LocalId(str(uuid4())[:20]), + version=Semver.from_string(version), + ) + ontology = Ontology( + srn=srn, + title=title, + description=description, + terms=terms, + created_at=datetime.now(UTC), + ) + await self.ontology_repo.save(ontology) + return ontology + + async def get_ontology(self, srn: OntologySRN) -> Ontology: + ontology = await self.ontology_repo.get(srn) + if ontology is None: + raise NotFoundError(f"Ontology not found: {srn}") + return ontology + + async def list_ontologies( + self, *, limit: int | None = None, offset: int | None = None + ) -> list[Ontology]: + return await 
self.ontology_repo.list(limit=limit, offset=offset) diff --git a/server/osa/domain/semantics/service/schema.py b/server/osa/domain/semantics/service/schema.py new file mode 100644 index 0000000..3127488 --- /dev/null +++ b/server/osa/domain/semantics/service/schema.py @@ -0,0 +1,61 @@ +from datetime import UTC, datetime +from uuid import uuid4 + +from osa.domain.semantics.model.schema import Schema +from osa.domain.semantics.model.value import FieldDefinition, FieldType, TermConstraints +from osa.domain.semantics.port.ontology_repository import OntologyRepository +from osa.domain.semantics.port.schema_repository import SchemaRepository +from osa.domain.shared.error import NotFoundError, ValidationError +from osa.domain.shared.model.srn import Domain, LocalId, SchemaSRN, Semver +from osa.domain.shared.service import Service + + +class SchemaService(Service): + schema_repo: SchemaRepository + ontology_repo: OntologyRepository + node_domain: Domain + + async def create_schema( + self, + title: str, + version: str, + fields: list[FieldDefinition], + ) -> Schema: + # Validate ontology references + for field in fields: + if ( + field.type == FieldType.TERM + and field.constraints is not None + and isinstance(field.constraints, TermConstraints) + ): + exists = await self.ontology_repo.exists(field.constraints.ontology_srn) + if not exists: + raise ValidationError( + f"Ontology '{field.constraints.ontology_srn}' not found " + f"(referenced by field '{field.name}')" + ) + + srn = SchemaSRN( + domain=self.node_domain, + id=LocalId(str(uuid4())[:20]), + version=Semver.from_string(version), + ) + schema = Schema( + srn=srn, + title=title, + fields=fields, + created_at=datetime.now(UTC), + ) + await self.schema_repo.save(schema) + return schema + + async def get_schema(self, srn: SchemaSRN) -> Schema: + schema = await self.schema_repo.get(srn) + if schema is None: + raise NotFoundError(f"Schema not found: {srn}") + return schema + + async def list_schemas( + self, *, limit: int | None = None, offset: int | None = None + ) -> list[Schema]: + return await self.schema_repo.list(limit=limit, offset=offset) diff --git a/server/osa/domain/semantics/util/__init__.py b/server/osa/domain/semantics/util/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/server/osa/domain/semantics/util/di/__init__.py b/server/osa/domain/semantics/util/di/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/server/osa/domain/semantics/util/di/provider.py b/server/osa/domain/semantics/util/di/provider.py new file mode 100644 index 0000000..bc93573 --- /dev/null +++ b/server/osa/domain/semantics/util/di/provider.py @@ -0,0 +1,53 @@ +from dishka import provide + +from osa.config import Config +from osa.domain.semantics.command.create_ontology import CreateOntologyHandler +from osa.domain.semantics.command.create_schema import CreateSchemaHandler +from osa.domain.semantics.command.import_ontology import ImportOntologyHandler +from osa.domain.semantics.port.ontology_repository import OntologyRepository +from osa.domain.semantics.port.schema_repository import SchemaRepository +from osa.domain.semantics.query.get_ontology import GetOntologyHandler +from osa.domain.semantics.query.get_schema import GetSchemaHandler +from osa.domain.semantics.query.list_ontologies import ListOntologiesHandler +from osa.domain.semantics.query.list_schemas import ListSchemasHandler +from osa.domain.semantics.service.ontology import OntologyService +from osa.domain.semantics.service.schema import SchemaService +from 
osa.domain.shared.model.srn import Domain +from osa.util.di.base import Provider +from osa.util.di.scope import Scope + + +class SemanticsProvider(Provider): + # Services + @provide(scope=Scope.UOW) + def get_ontology_service( + self, ontology_repo: OntologyRepository, config: Config + ) -> OntologyService: + return OntologyService( + ontology_repo=ontology_repo, + node_domain=Domain(config.server.domain), + ) + + @provide(scope=Scope.UOW) + def get_schema_service( + self, + schema_repo: SchemaRepository, + ontology_repo: OntologyRepository, + config: Config, + ) -> SchemaService: + return SchemaService( + schema_repo=schema_repo, + ontology_repo=ontology_repo, + node_domain=Domain(config.server.domain), + ) + + # Command Handlers + create_ontology_handler = provide(CreateOntologyHandler, scope=Scope.UOW) + create_schema_handler = provide(CreateSchemaHandler, scope=Scope.UOW) + import_ontology_handler = provide(ImportOntologyHandler, scope=Scope.UOW) + + # Query Handlers + get_ontology_handler = provide(GetOntologyHandler, scope=Scope.UOW) + get_schema_handler = provide(GetSchemaHandler, scope=Scope.UOW) + list_ontologies_handler = provide(ListOntologiesHandler, scope=Scope.UOW) + list_schemas_handler = provide(ListSchemasHandler, scope=Scope.UOW) diff --git a/server/osa/domain/semantics/util/obographs.py b/server/osa/domain/semantics/util/obographs.py new file mode 100644 index 0000000..362ddce --- /dev/null +++ b/server/osa/domain/semantics/util/obographs.py @@ -0,0 +1,91 @@ +"""Pure parser for OBO Graphs JSON format. + +Converts OBO Graphs JSON (used by OBO Foundry ontologies like GO, PATO, etc.) +into OSA domain Term objects. + +Spec: https://github.com/geneontology/obographs +""" + +from collections import defaultdict +from dataclasses import dataclass, field + +from osa.domain.semantics.model.ontology import Term + + +@dataclass +class ParsedOntology: + """Result of parsing an OBO Graphs JSON document.""" + + title: str + description: str | None = None + version: str | None = None + terms: list[Term] = field(default_factory=list) + + +def parse_obographs(data: dict) -> ParsedOntology: + """Parse an OBO Graphs JSON dict into a ParsedOntology. + + Args: + data: Parsed JSON dict following the OBO Graphs JSON spec. + + Returns: + ParsedOntology with title, description, version, and Term objects. + + Raises: + ValueError: If the data is missing required keys or contains no CLASS nodes. 
+ """ + graphs = data.get("graphs") + if not graphs: + raise ValueError("OBO Graphs JSON must contain a non-empty 'graphs' key") + + graph = graphs[0] + + # Extract metadata + title = graph.get("lbl") or graph.get("id", "Unknown") + graph_meta = graph.get("meta", {}) + version = graph_meta.get("version") + description_def = graph_meta.get("definition") + description = description_def.get("val") if isinstance(description_def, dict) else None + + # Build parent_ids index from is_a edges + parent_index: dict[str, list[str]] = defaultdict(list) + for edge in graph.get("edges", []): + if edge.get("pred") == "is_a": + parent_index[edge["sub"]].append(edge["obj"]) + + # Convert nodes to Terms + terms: list[Term] = [] + for node in graph.get("nodes", []): + if node.get("type") != "CLASS": + continue + if not node.get("lbl"): + continue + + node_meta = node.get("meta", {}) + + definition_obj = node_meta.get("definition") + definition = definition_obj.get("val") if isinstance(definition_obj, dict) else None + + synonyms = [s["val"] for s in node_meta.get("synonyms", [])] + deprecated = node_meta.get("deprecated", False) + + terms.append( + Term( + term_id=node["id"], + label=node["lbl"], + definition=definition, + synonyms=synonyms, + parent_ids=parent_index.get(node["id"], []), + deprecated=deprecated, + ) + ) + + if not terms: + raise ValueError("OBO Graphs data contains no CLASS nodes with labels") + + return ParsedOntology( + title=title, + description=description, + version=version, + terms=terms, + ) diff --git a/server/osa/domain/shared/model/srn.py b/server/osa/domain/shared/model/srn.py index 2078209..8f6f8c7 100644 --- a/server/osa/domain/shared/model/srn.py +++ b/server/osa/domain/shared/model/srn.py @@ -103,7 +103,8 @@ class ResourceType(str, Enum): rec = "rec" dep = "dep" schema = "schema" - vocab = "vocab" + onto = "onto" + conv = "conv" snap = "snap" evt = "evt" val = "val" @@ -231,8 +232,13 @@ class SchemaSRN(SRN): version: Semver -class VocabSRN(SRN): - type: ResourceType = Field(default=ResourceType.vocab, frozen=True) +class OntologySRN(SRN): + type: ResourceType = Field(default=ResourceType.onto, frozen=True) + version: Semver + + +class ConventionSRN(SRN): + type: ResourceType = Field(default=ResourceType.conv, frozen=True) version: Semver diff --git a/server/osa/domain/shared/model/validator.py b/server/osa/domain/shared/model/validator.py new file mode 100644 index 0000000..50f24ad --- /dev/null +++ b/server/osa/domain/shared/model/validator.py @@ -0,0 +1,8 @@ +from osa.domain.shared.model.value import ValueObject + + +class ValidatorRef(ValueObject): + """Immutable reference to an OCI validator image.""" + + image: str # e.g., ghcr.io/osap/validators/si-units + digest: str # e.g., sha256:def456... 
diff --git a/server/osa/domain/validation/event/validation_failed.py b/server/osa/domain/validation/event/validation_failed.py new file mode 100644 index 0000000..6874d00 --- /dev/null +++ b/server/osa/domain/validation/event/validation_failed.py @@ -0,0 +1,10 @@ +from osa.domain.shared.event import Event, EventId +from osa.domain.shared.model.srn import DepositionSRN + + +class ValidationFailed(Event): + """Emitted when validation fails for a deposition.""" + + id: EventId + deposition_srn: DepositionSRN + reasons: list[str] diff --git a/server/osa/domain/validation/handler/validate_deposition.py b/server/osa/domain/validation/handler/validate_deposition.py index 5fe172c..2150cf7 100644 --- a/server/osa/domain/validation/handler/validate_deposition.py +++ b/server/osa/domain/validation/handler/validate_deposition.py @@ -9,6 +9,7 @@ from osa.domain.shared.model.srn import Domain, LocalId, ValidationRunSRN from osa.domain.shared.outbox import Outbox from osa.domain.validation.event.validation_completed import ValidationCompleted +from osa.domain.validation.event.validation_failed import ValidationFailed from osa.domain.validation.model import RunStatus logger = logging.getLogger(__name__) @@ -21,7 +22,7 @@ class ValidateDeposition(EventHandler[DepositionSubmittedEvent]): config: Config async def handle(self, event: DepositionSubmittedEvent) -> None: - """Run validators and emit ValidationCompleted.""" + """Run validators and emit ValidationCompleted or ValidationFailed.""" logger.debug(f"Validating deposition: {event.deposition_id}") domain = Domain(self.config.server.domain) @@ -33,27 +34,35 @@ async def handle(self, event: DepositionSubmittedEvent) -> None: ) # Run validators (none configured = instant pass) - # TODO: Load configured validators from config + # TODO: Load configured validators from convention validators: list = [] # Empty for v1 if not validators: logger.debug("No validators configured, instant pass") status = RunStatus.COMPLETED results = [] + failure_reasons: list[str] = [] else: # TODO: Actually run validators status = RunStatus.COMPLETED results = [] + failure_reasons = [] - # Emit ValidationCompleted - completed = ValidationCompleted( - id=EventId(uuid4()), - validation_run_srn=val_run_srn, - deposition_srn=event.deposition_id, - status=status, - results=results, - metadata=event.metadata, # Pass through - ) - - await self.outbox.append(completed) - - logger.debug(f"Validation completed for: {event.deposition_id}") + if status == RunStatus.FAILED: + failed = ValidationFailed( + id=EventId(uuid4()), + deposition_srn=event.deposition_id, + reasons=failure_reasons, + ) + await self.outbox.append(failed) + logger.info(f"Validation failed for: {event.deposition_id}") + else: + completed = ValidationCompleted( + id=EventId(uuid4()), + validation_run_srn=val_run_srn, + deposition_srn=event.deposition_id, + status=status, + results=results, + metadata=event.metadata, + ) + await self.outbox.append(completed) + logger.debug(f"Validation completed for: {event.deposition_id}") diff --git a/server/osa/infrastructure/event/di.py b/server/osa/infrastructure/event/di.py index 6cdc045..2e29b69 100644 --- a/server/osa/infrastructure/event/di.py +++ b/server/osa/infrastructure/event/di.py @@ -7,6 +7,7 @@ from osa.config import Config from osa.domain.curation.handler import AutoApproveCuration +from osa.domain.deposition.handler.return_to_draft import ReturnToDraft from osa.domain.index.handler import FanOutToIndexBackends, KeywordIndexHandler, VectorIndexHandler from 
osa.domain.record.handler import ConvertDepositionToRecord from osa.domain.shared.event import EventHandler @@ -38,6 +39,8 @@ PullFromSource, # Validation handlers ValidateDeposition, + # Deposition handlers + ReturnToDraft, # Curation handlers AutoApproveCuration, # Record handlers diff --git a/server/osa/infrastructure/http/__init__.py b/server/osa/infrastructure/http/__init__.py new file mode 100644 index 0000000..4ebfa6e --- /dev/null +++ b/server/osa/infrastructure/http/__init__.py @@ -0,0 +1 @@ +"""HTTP infrastructure adapters.""" diff --git a/server/osa/infrastructure/http/di.py b/server/osa/infrastructure/http/di.py new file mode 100644 index 0000000..ae3be15 --- /dev/null +++ b/server/osa/infrastructure/http/di.py @@ -0,0 +1,35 @@ +"""DI provider for HTTP infrastructure.""" + +from typing import NewType + +import httpx +from dishka import provide + +from osa.domain.semantics.port.ontology_fetcher import OntologyFetcher +from osa.infrastructure.http.ontology_fetcher import HttpOntologyFetcher +from osa.util.di.base import Provider +from osa.util.di.scope import Scope + +# Disambiguate from the auth httpx.AsyncClient +OntologyHttpClient = NewType("OntologyHttpClient", httpx.AsyncClient) + +# Longer read timeout for large ontology files (e.g. Gene Ontology) +_ONTOLOGY_TIMEOUT = httpx.Timeout( + connect=5.0, + read=30.0, + write=5.0, + pool=5.0, +) + + +class HttpProvider(Provider): + """DI provider for HTTP fetcher adapters.""" + + @provide(scope=Scope.APP) + def get_ontology_http_client(self) -> OntologyHttpClient: + """Dedicated HTTP client for fetching ontology files.""" + return OntologyHttpClient(httpx.AsyncClient(timeout=_ONTOLOGY_TIMEOUT)) + + @provide(scope=Scope.APP, provides=OntologyFetcher) + def get_ontology_fetcher(self, client: OntologyHttpClient) -> HttpOntologyFetcher: + return HttpOntologyFetcher(client=client) diff --git a/server/osa/infrastructure/http/ontology_fetcher.py b/server/osa/infrastructure/http/ontology_fetcher.py new file mode 100644 index 0000000..832fce1 --- /dev/null +++ b/server/osa/infrastructure/http/ontology_fetcher.py @@ -0,0 +1,17 @@ +"""HTTP adapter for OntologyFetcher port.""" + +import httpx + +from osa.domain.semantics.port.ontology_fetcher import OntologyFetcher + + +class HttpOntologyFetcher(OntologyFetcher): + """Fetches ontology JSON from a URL using httpx.""" + + def __init__(self, client: httpx.AsyncClient) -> None: + self._client = client + + async def fetch_json(self, url: str) -> dict: + response = await self._client.get(url) + response.raise_for_status() + return response.json() diff --git a/server/osa/infrastructure/persistence/adapter/readers.py b/server/osa/infrastructure/persistence/adapter/readers.py new file mode 100644 index 0000000..36d066f --- /dev/null +++ b/server/osa/infrastructure/persistence/adapter/readers.py @@ -0,0 +1,84 @@ +"""Cross-domain reader adapters. + +These implement the deposition domain's read-only ports by querying the +semantics tables directly — no semantics domain code is imported for business logic. 
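+Only the semantics model classes (Ontology, Term, Schema, FieldDefinition) are
+reused here; the adapters never call into semantics domain services.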
+""" + +from sqlalchemy import select +from sqlalchemy.ext.asyncio import AsyncSession + +from osa.domain.deposition.port.ontology_reader import OntologyReader +from osa.domain.deposition.port.schema_reader import SchemaReader +from osa.domain.semantics.model.ontology import Ontology, Term +from osa.domain.semantics.model.schema import Schema +from osa.domain.semantics.model.value import FieldDefinition +from osa.domain.shared.model.srn import OntologySRN, SchemaSRN +from osa.infrastructure.persistence.tables import ( + ontologies_table, + ontology_terms_table, + schemas_table, +) + + +class SchemaReaderAdapter(SchemaReader): + def __init__(self, session: AsyncSession) -> None: + self.session = session + + async def get_schema(self, srn: SchemaSRN) -> Schema | None: + stmt = select(schemas_table).where(schemas_table.c.srn == str(srn)) + result = await self.session.execute(stmt) + row = result.mappings().first() + if not row: + return None + row_dict = dict(row) + fields = [FieldDefinition.model_validate(f) for f in row_dict["fields"]] + return Schema( + srn=SchemaSRN.parse(row_dict["srn"]), + title=row_dict["title"], + fields=fields, + created_at=row_dict["created_at"], + ) + + async def schema_exists(self, srn: SchemaSRN) -> bool: + stmt = select(schemas_table.c.srn).where(schemas_table.c.srn == str(srn)) + result = await self.session.execute(stmt) + return result.first() is not None + + +class OntologyReaderAdapter(OntologyReader): + def __init__(self, session: AsyncSession) -> None: + self.session = session + + async def get_ontology(self, srn: OntologySRN) -> Ontology | None: + stmt = select(ontologies_table).where(ontologies_table.c.srn == str(srn)) + result = await self.session.execute(stmt) + header = result.mappings().first() + if not header: + return None + + header_dict = dict(header) + terms_stmt = select(ontology_terms_table).where( + ontology_terms_table.c.ontology_srn == str(srn) + ) + terms_result = await self.session.execute(terms_stmt) + term_rows = terms_result.mappings().all() + + terms = [ + Term( + term_id=r["term_id"], + label=r["label"], + synonyms=r.get("synonyms", []), + parent_ids=r.get("parent_ids", []), + definition=r.get("definition"), + deprecated=r.get("deprecated", False), + ) + for r in term_rows + ] + + return Ontology( + srn=OntologySRN.parse(header_dict["srn"]), + title=header_dict["title"], + description=header_dict.get("description"), + terms=terms, + created_at=header_dict["created_at"], + ) diff --git a/server/osa/infrastructure/persistence/adapter/spreadsheet.py b/server/osa/infrastructure/persistence/adapter/spreadsheet.py new file mode 100644 index 0000000..400c832 --- /dev/null +++ b/server/osa/infrastructure/persistence/adapter/spreadsheet.py @@ -0,0 +1,134 @@ +"""Openpyxl-based spreadsheet adapter for template generation and parsing.""" + +from io import BytesIO +from typing import Any + +from openpyxl import Workbook, load_workbook +from openpyxl.styles import Font, PatternFill +from openpyxl.worksheet.datavalidation import DataValidation + +from osa.domain.deposition.port.spreadsheet import ( + SpreadsheetError, + SpreadsheetParseResult, + SpreadsheetPort, +) +from osa.domain.semantics.model.schema import Schema +from osa.domain.semantics.model.value import FieldDefinition, FieldType + +# Ontologies with <=20 terms get dropdown validation; others get an instruction note. 
+_MAX_DROPDOWN_TERMS = 20 +_REQUIRED_FILL = PatternFill(start_color="FFFFEE", end_color="FFFFEE", fill_type="solid") +_REQUIRED_FONT = Font(bold=True) +_DESC_FONT = Font(italic=True, color="888888") + + +class OpenpyxlSpreadsheetAdapter(SpreadsheetPort): + def generate_template( + self, + schema: Schema, + ontology_terms_by_srn: dict[str, list[str]], + ) -> bytes: + wb = Workbook() + ws = wb.active + ws.title = "Data" + + for col_idx, field in enumerate(schema.fields, 1): + # Row 1: headers + cell = ws.cell(row=1, column=col_idx, value=field.name) + if field.required: + cell.font = _REQUIRED_FONT + cell.fill = _REQUIRED_FILL + + # Row 2: descriptions + desc_cell = ws.cell(row=2, column=col_idx, value=field.description or "") + desc_cell.font = _DESC_FONT + + # Add dropdown for term fields with small ontologies + if field.type == FieldType.TERM and field.constraints: + onto_srn_str = str(field.constraints.ontology_srn) + terms = ontology_terms_by_srn.get(onto_srn_str, []) + if terms and len(terms) <= _MAX_DROPDOWN_TERMS: + formula = '"' + ",".join(terms) + '"' + dv = DataValidation( + type="list", formula1=formula, allow_blank=not field.required + ) + dv.sqref = f"{ws.cell(row=3, column=col_idx).coordinate}:{ws.cell(row=1000, column=col_idx).coordinate}" + ws.add_data_validation(dv) + elif terms: + # Too many terms — add an instruction note in description + ws.cell( + row=2, + column=col_idx, + value=f"Select from ontology {onto_srn_str} ({len(terms)} terms)", + ) + + # Auto-size columns + for col_idx in range(1, len(schema.fields) + 1): + ws.column_dimensions[ws.cell(row=1, column=col_idx).column_letter].width = 20 + + buf = BytesIO() + wb.save(buf) + return buf.getvalue() + + def parse_upload( + self, + schema: Schema, + content: bytes, + ) -> SpreadsheetParseResult: + wb = load_workbook(BytesIO(content), data_only=True) + ws = wb.active + + errors: list[SpreadsheetError] = [] + warnings: list[str] = [] + metadata: dict[str, Any] = {} + + # Read headers from row 1 + headers: list[str | None] = [] + for col_idx in range(1, ws.max_column + 1): + val = ws.cell(row=1, column=col_idx).value + headers.append(str(val) if val is not None else None) + + # Build field lookup + field_by_name: dict[str, FieldDefinition] = {f.name: f for f in schema.fields} + + # Check for missing required columns + present_names = {h for h in headers if h is not None} + for field in schema.fields: + if field.required and field.name not in present_names: + errors.append( + SpreadsheetError( + field=field.name, + message=f"Required column '{field.name}' is missing", + ) + ) + + # Warn about unrecognized columns + for h in headers: + if h is not None and h not in field_by_name: + warnings.append(f"Unrecognized column '{h}' will be ignored") + + # Parse data row (row 3 — row 2 is descriptions) + data_row = 3 + for col_idx, header in enumerate(headers, 1): + if header is None or header not in field_by_name: + continue + field = field_by_name[header] + value = ws.cell(row=data_row, column=col_idx).value + + if value is None or (isinstance(value, str) and value.strip() == ""): + if field.required: + errors.append( + SpreadsheetError( + field=field.name, + message=f"Required field '{field.name}' is empty", + ) + ) + continue + + metadata[field.name] = value + + return SpreadsheetParseResult( + metadata=metadata, + warnings=warnings, + errors=errors, + ) diff --git a/server/osa/infrastructure/persistence/adapter/storage.py b/server/osa/infrastructure/persistence/adapter/storage.py index db043e5..003e8a6 100644 --- 
a/server/osa/infrastructure/persistence/adapter/storage.py +++ b/server/osa/infrastructure/persistence/adapter/storage.py @@ -1,44 +1,86 @@ +import hashlib import shutil +import tempfile +from collections.abc import AsyncIterator +from datetime import UTC, datetime from pathlib import Path -from typing import Any -from osa.domain.deposition.port.storage import StoragePort +from osa.domain.deposition.model.value import DepositionFile +from osa.domain.deposition.port.storage import FileStoragePort from osa.domain.shared.model.srn import DepositionSRN -class LocalStorageAdapter(StoragePort): - """Local filesystem implementation of StoragePort.""" +class LocalFileStorageAdapter(FileStoragePort): + """Local filesystem implementation of FileStoragePort.""" - def __init__(self, base_path: str = "/tmp/osa_storage"): + def __init__(self, base_path: str) -> None: self.base_path = Path(base_path) self.base_path.mkdir(parents=True, exist_ok=True) - def _get_dep_path(self, deposition_id: DepositionSRN) -> Path: - # Structure: /tmp/osa_storage/{domain}/{id}/ - # Using safe string representation + def _dep_dir(self, deposition_id: DepositionSRN) -> Path: safe_id = f"{deposition_id.domain.root}_{deposition_id.id.root}" - return self.base_path / safe_id - - def delete_files_for_deposition(self, deposition_id: DepositionSRN) -> None: - target_dir = self._get_dep_path(deposition_id) - if target_dir.exists(): - shutil.rmtree(target_dir) - - def save_file(self, deposition_id: DepositionSRN, filename: str, stream: Any) -> None: - # Helper for UploadFile command - target_dir = self._get_dep_path(deposition_id) - target_dir.mkdir(parents=True, exist_ok=True) - - target_file = target_dir / filename - - # Assuming stream is file-like open in binary mode - # If stream is from httpx/spooledtempfile - with open(target_file, "wb") as f: - # If stream supports read/write - if hasattr(stream, "read"): - if hasattr(stream, "seek"): - stream.seek(0) - shutil.copyfileobj(stream, f) - else: - # Fallback if just bytes - f.write(stream) + return self.base_path / "depositions" / safe_id + + async def save_file( + self, + deposition_id: DepositionSRN, + filename: str, + content: bytes, + size: int, + ) -> DepositionFile: + dep_dir = self._dep_dir(deposition_id) + dep_dir.mkdir(parents=True, exist_ok=True) + target = dep_dir / filename + + # Atomic write: write to temp file then rename + fd, tmp_path = tempfile.mkstemp(dir=dep_dir) + try: + with open(fd, "wb") as f: + f.write(content) + Path(tmp_path).rename(target) + except Exception: + Path(tmp_path).unlink(missing_ok=True) + raise + + checksum = hashlib.sha256(content).hexdigest() + return DepositionFile( + name=filename, + size=size, + checksum=f"sha256:{checksum}", + content_type=None, + uploaded_at=datetime.now(UTC), + ) + + async def get_file( + self, + deposition_id: DepositionSRN, + filename: str, + ) -> AsyncIterator[bytes]: + target = self._dep_dir(deposition_id) / filename + if not target.exists(): + from osa.domain.shared.error import NotFoundError + + raise NotFoundError(f"File not found: {filename}") + + async def _stream() -> AsyncIterator[bytes]: + with open(target, "rb") as f: + while chunk := f.read(8192): + yield chunk + + return _stream() + + async def delete_file( + self, + deposition_id: DepositionSRN, + filename: str, + ) -> None: + target = self._dep_dir(deposition_id) / filename + target.unlink(missing_ok=True) + + async def delete_files_for_deposition( + self, + deposition_id: DepositionSRN, + ) -> None: + dep_dir = self._dep_dir(deposition_id) + if 
dep_dir.exists(): + shutil.rmtree(dep_dir) diff --git a/server/osa/infrastructure/persistence/di.py b/server/osa/infrastructure/persistence/di.py index 710c074..2b5e5d4 100644 --- a/server/osa/infrastructure/persistence/di.py +++ b/server/osa/infrastructure/persistence/di.py @@ -3,27 +3,48 @@ from dishka import provide from sqlalchemy.ext.asyncio import AsyncEngine, AsyncSession, async_sessionmaker +from osa.cli.util.paths import OSAPaths from osa.config import Config +from osa.domain.deposition.port.convention_repository import ConventionRepository +from osa.domain.deposition.port.ontology_reader import OntologyReader from osa.domain.deposition.port.repository import DepositionRepository +from osa.domain.deposition.port.schema_reader import SchemaReader +from osa.domain.deposition.port.storage import FileStoragePort from osa.domain.record.port.repository import RecordRepository from osa.domain.record.service import RecordService +from osa.domain.semantics.port.ontology_repository import OntologyRepository +from osa.domain.semantics.port.schema_repository import SchemaRepository from osa.domain.shared.model.srn import Domain from osa.domain.shared.outbox import Outbox from osa.domain.shared.port.event_repository import EventRepository from osa.domain.validation.port.repository import ValidationRunRepository +from osa.infrastructure.persistence.adapter.readers import ( + OntologyReaderAdapter, + SchemaReaderAdapter, +) +from osa.infrastructure.persistence.adapter.storage import LocalFileStorageAdapter from osa.infrastructure.persistence.database import ( create_db_engine, create_session_factory, ) +from osa.infrastructure.persistence.repository.convention import ( + PostgresConventionRepository, +) from osa.infrastructure.persistence.repository.deposition import ( PostgresDepositionRepository, ) from osa.infrastructure.persistence.repository.event import ( SQLAlchemyEventRepository, ) +from osa.infrastructure.persistence.repository.ontology import ( + PostgresOntologyRepository, +) from osa.infrastructure.persistence.repository.record import ( PostgresRecordRepository, ) +from osa.infrastructure.persistence.repository.schema import ( + PostgresSemanticsSchemaRepository, +) from osa.infrastructure.persistence.repository.validation import ( PostgresValidationRunRepository, ) @@ -59,6 +80,28 @@ async def get_session( ) event_repo = provide(SQLAlchemyEventRepository, scope=Scope.UOW, provides=EventRepository) + # Semantics repositories + ontology_repo = provide( + PostgresOntologyRepository, scope=Scope.UOW, provides=OntologyRepository + ) + schema_repo = provide( + PostgresSemanticsSchemaRepository, scope=Scope.UOW, provides=SchemaRepository + ) + + # Deposition repositories and adapters + convention_repo = provide( + PostgresConventionRepository, scope=Scope.UOW, provides=ConventionRepository + ) + + # Cross-domain readers + schema_reader = provide(SchemaReaderAdapter, scope=Scope.UOW, provides=SchemaReader) + ontology_reader = provide(OntologyReaderAdapter, scope=Scope.UOW, provides=OntologyReader) + + # File storage + @provide(scope=Scope.APP) + def get_file_storage(self, paths: "OSAPaths") -> FileStoragePort: + return LocalFileStorageAdapter(base_path=str(paths.data_dir / "files")) + @provide(scope=Scope.UOW) def get_record_service( self, diff --git a/server/osa/infrastructure/persistence/mappers/deposition.py b/server/osa/infrastructure/persistence/mappers/deposition.py index 78eff36..f47d271 100644 --- a/server/osa/infrastructure/persistence/mappers/deposition.py +++ 
b/server/osa/infrastructure/persistence/mappers/deposition.py @@ -4,29 +4,26 @@ from osa.domain.auth.model.value import UserId from osa.domain.deposition.model.aggregate import Deposition from osa.domain.deposition.model.value import DepositionFile, DepositionStatus -from osa.domain.shared.model.srn import DepositionSRN, RecordSRN +from osa.domain.shared.model.srn import ConventionSRN, DepositionSRN, RecordSRN -def row_to_deposition(row: dict[str, Any]) -> Deposition[dict[str, Any]]: - """Convert database row to Deposition aggregate. - - Note: We assume the metadata is a dict. Since Deposition is generic, - at the persistence boundary we treat it as a dict. - """ +def row_to_deposition(row: dict[str, Any]) -> Deposition: + """Convert database row to Deposition aggregate.""" files_data = row.get("files", []) or [] files = [DepositionFile(**f) for f in files_data] record_id = row.get("record_id") - owner_id_raw = row.get("owner_id") return Deposition( srn=DepositionSRN.parse(row["srn"]), + convention_srn=ConventionSRN.parse(row["convention_srn"]), status=DepositionStatus(row["status"]), metadata=row.get("metadata", {}), files=files, - provenance=row.get("provenance", {}), record_srn=RecordSRN.parse(record_id) if record_id else None, - owner_id=UserId(UUID(owner_id_raw)) if owner_id_raw else None, + owner_id=UserId(UUID(row["owner_id"])), + created_at=row["created_at"], + updated_at=row["updated_at"], ) @@ -34,10 +31,12 @@ def deposition_to_dict(dep: Deposition) -> dict[str, Any]: """Convert Deposition aggregate to database dict.""" return { "srn": str(dep.srn), + "convention_srn": str(dep.convention_srn), "status": dep.status, - "metadata": dep.metadata if isinstance(dep.metadata, dict) else dep.metadata.model_dump(), + "metadata": dep.metadata, "files": [f.model_dump(mode="json") for f in dep.files], - "provenance": dep.provenance, "record_id": str(dep.record_srn) if dep.record_srn else None, - "owner_id": str(dep.owner_id) if dep.owner_id else None, + "owner_id": str(dep.owner_id), + "created_at": dep.created_at, + "updated_at": dep.updated_at, } diff --git a/server/osa/infrastructure/persistence/repository/convention.py b/server/osa/infrastructure/persistence/repository/convention.py new file mode 100644 index 0000000..b98e097 --- /dev/null +++ b/server/osa/infrastructure/persistence/repository/convention.py @@ -0,0 +1,68 @@ +from typing import Any, List + +from sqlalchemy import insert, select +from sqlalchemy.ext.asyncio import AsyncSession + +from osa.domain.deposition.model.convention import Convention +from osa.domain.deposition.model.value import FileRequirements +from osa.domain.deposition.port.convention_repository import ConventionRepository +from osa.domain.shared.model.srn import ConventionSRN, SchemaSRN +from osa.domain.shared.model.validator import ValidatorRef +from osa.infrastructure.persistence.tables import conventions_table + + +def _convention_to_row(convention: Convention) -> dict[str, Any]: + return { + "srn": str(convention.srn), + "title": convention.title, + "description": convention.description, + "schema_srn": str(convention.schema_srn), + "file_requirements": convention.file_requirements.model_dump(), + "validator_refs": [v.model_dump() for v in convention.validator_refs], + "created_at": convention.created_at, + } + + +def _row_to_convention(row: dict[str, Any]) -> Convention: + return Convention( + srn=ConventionSRN.parse(row["srn"]), + title=row["title"], + description=row.get("description"), + schema_srn=SchemaSRN.parse(row["schema_srn"]), + 
file_requirements=FileRequirements.model_validate(row["file_requirements"]), + validator_refs=[ValidatorRef.model_validate(v) for v in (row.get("validator_refs") or [])], + created_at=row["created_at"], + ) + + +class PostgresConventionRepository(ConventionRepository): + def __init__(self, session: AsyncSession) -> None: + self.session = session + + async def save(self, convention: Convention) -> None: + row = _convention_to_row(convention) + await self.session.execute(insert(conventions_table).values(**row)) + await self.session.flush() + + async def get(self, srn: ConventionSRN) -> Convention | None: + stmt = select(conventions_table).where(conventions_table.c.srn == str(srn)) + result = await self.session.execute(stmt) + row = result.mappings().first() + return _row_to_convention(dict(row)) if row else None + + async def list( + self, *, limit: int | None = None, offset: int | None = None + ) -> List[Convention]: + stmt = select(conventions_table).order_by(conventions_table.c.created_at.desc()) + if offset is not None: + stmt = stmt.offset(offset) + if limit is not None: + stmt = stmt.limit(limit) + + result = await self.session.execute(stmt) + return [_row_to_convention(dict(r)) for r in result.mappings().all()] + + async def exists(self, srn: ConventionSRN) -> bool: + stmt = select(conventions_table.c.srn).where(conventions_table.c.srn == str(srn)) + result = await self.session.execute(stmt) + return result.first() is not None diff --git a/server/osa/infrastructure/persistence/repository/deposition.py b/server/osa/infrastructure/persistence/repository/deposition.py index 5d585fa..41bc17a 100644 --- a/server/osa/infrastructure/persistence/repository/deposition.py +++ b/server/osa/infrastructure/persistence/repository/deposition.py @@ -1,16 +1,21 @@ -from sqlalchemy import insert, select, update +from __future__ import annotations + +from typing import List + +from sqlalchemy import func, insert, select, update from sqlalchemy.ext.asyncio import AsyncSession from osa.domain.auth.model.identity import Identity from osa.domain.auth.model.role import Role +from osa.domain.auth.model.value import UserId from osa.domain.deposition.model.aggregate import Deposition from osa.domain.deposition.port.repository import DepositionRepository from osa.domain.shared.authorization.decorators import reads, writes from osa.domain.shared.authorization.resource import has_role, owner from osa.domain.shared.model.srn import DepositionSRN from osa.infrastructure.persistence.mappers.deposition import ( - row_to_deposition, deposition_to_dict, + row_to_deposition, ) from osa.infrastructure.persistence.tables import depositions_table @@ -49,3 +54,49 @@ async def save(self, deposition: Deposition) -> None: await self.session.execute(stmt) await self.session.flush() + + async def list( + self, *, limit: int | None = None, offset: int | None = None + ) -> List[Deposition]: + stmt = select(depositions_table).order_by(depositions_table.c.created_at.desc()) + if offset is not None: + stmt = stmt.offset(offset) + if limit is not None: + stmt = stmt.limit(limit) + + result = await self.session.execute(stmt) + return [row_to_deposition(dict(r)) for r in result.mappings().all()] + + async def count(self) -> int: + stmt = select(func.count()).select_from(depositions_table) + result = await self.session.execute(stmt) + return result.scalar_one() + + async def count_by_owner(self, owner_id: UserId) -> int: + stmt = ( + select(func.count()) + .select_from(depositions_table) + .where(depositions_table.c.owner_id == 
str(owner_id)) + ) + result = await self.session.execute(stmt) + return result.scalar_one() + + async def list_by_owner( + self, + owner_id: UserId, + *, + limit: int | None = None, + offset: int | None = None, + ) -> List[Deposition]: + stmt = ( + select(depositions_table) + .where(depositions_table.c.owner_id == str(owner_id)) + .order_by(depositions_table.c.created_at.desc()) + ) + if offset is not None: + stmt = stmt.offset(offset) + if limit is not None: + stmt = stmt.limit(limit) + + result = await self.session.execute(stmt) + return [row_to_deposition(dict(r)) for r in result.mappings().all()] diff --git a/server/osa/infrastructure/persistence/repository/ontology.py b/server/osa/infrastructure/persistence/repository/ontology.py new file mode 100644 index 0000000..4f2ecfe --- /dev/null +++ b/server/osa/infrastructure/persistence/repository/ontology.py @@ -0,0 +1,109 @@ +from typing import Any, List +from uuid import uuid4 + +from sqlalchemy import insert, select +from sqlalchemy.ext.asyncio import AsyncSession + +from osa.domain.semantics.model.ontology import Ontology, Term +from osa.domain.semantics.port.ontology_repository import OntologyRepository +from osa.domain.shared.model.srn import OntologySRN +from osa.infrastructure.persistence.tables import ontologies_table, ontology_terms_table + + +def _ontology_to_rows(ontology: Ontology) -> tuple[dict[str, Any], list[dict[str, Any]]]: + """Convert Ontology aggregate to table rows.""" + header = { + "srn": str(ontology.srn), + "title": ontology.title, + "description": ontology.description, + "created_at": ontology.created_at, + } + terms = [ + { + "id": str(uuid4()), + "ontology_srn": str(ontology.srn), + "term_id": t.term_id, + "label": t.label, + "synonyms": t.synonyms, + "parent_ids": t.parent_ids, + "definition": t.definition, + "deprecated": t.deprecated, + } + for t in ontology.terms + ] + return header, terms + + +def _rows_to_ontology(header: dict[str, Any], term_rows: list[dict[str, Any]]) -> Ontology: + """Convert table rows back to Ontology aggregate.""" + terms = [ + Term( + term_id=r["term_id"], + label=r["label"], + synonyms=r.get("synonyms", []), + parent_ids=r.get("parent_ids", []), + definition=r.get("definition"), + deprecated=r.get("deprecated", False), + ) + for r in term_rows + ] + return Ontology( + srn=OntologySRN.parse(header["srn"]), + title=header["title"], + description=header.get("description"), + terms=terms, + created_at=header["created_at"], + ) + + +class PostgresOntologyRepository(OntologyRepository): + def __init__(self, session: AsyncSession) -> None: + self.session = session + + async def save(self, ontology: Ontology) -> None: + header, terms = _ontology_to_rows(ontology) + await self.session.execute(insert(ontologies_table).values(**header)) + if terms: + await self.session.execute(insert(ontology_terms_table).values(terms)) + await self.session.flush() + + async def get(self, srn: OntologySRN) -> Ontology | None: + stmt = select(ontologies_table).where(ontologies_table.c.srn == str(srn)) + result = await self.session.execute(stmt) + header = result.mappings().first() + if not header: + return None + + terms_stmt = select(ontology_terms_table).where( + ontology_terms_table.c.ontology_srn == str(srn) + ) + terms_result = await self.session.execute(terms_stmt) + term_rows = [dict(r) for r in terms_result.mappings().all()] + + return _rows_to_ontology(dict(header), term_rows) + + async def list(self, *, limit: int | None = None, offset: int | None = None) -> List[Ontology]: + stmt = 
select(ontologies_table).order_by(ontologies_table.c.created_at.desc()) + if offset is not None: + stmt = stmt.offset(offset) + if limit is not None: + stmt = stmt.limit(limit) + + result = await self.session.execute(stmt) + headers = [dict(r) for r in result.mappings().all()] + + ontologies = [] + for header in headers: + terms_stmt = select(ontology_terms_table).where( + ontology_terms_table.c.ontology_srn == header["srn"] + ) + terms_result = await self.session.execute(terms_stmt) + term_rows = [dict(r) for r in terms_result.mappings().all()] + ontologies.append(_rows_to_ontology(header, term_rows)) + + return ontologies + + async def exists(self, srn: OntologySRN) -> bool: + stmt = select(ontologies_table.c.srn).where(ontologies_table.c.srn == str(srn)) + result = await self.session.execute(stmt) + return result.first() is not None diff --git a/server/osa/infrastructure/persistence/repository/schema.py b/server/osa/infrastructure/persistence/repository/schema.py new file mode 100644 index 0000000..135fc10 --- /dev/null +++ b/server/osa/infrastructure/persistence/repository/schema.py @@ -0,0 +1,60 @@ +from typing import Any, List + +from sqlalchemy import insert, select +from sqlalchemy.ext.asyncio import AsyncSession + +from osa.domain.semantics.model.schema import Schema +from osa.domain.semantics.model.value import FieldDefinition +from osa.domain.semantics.port.schema_repository import SchemaRepository +from osa.domain.shared.model.srn import SchemaSRN +from osa.infrastructure.persistence.tables import schemas_table + + +def _schema_to_row(schema: Schema) -> dict[str, Any]: + return { + "srn": str(schema.srn), + "title": schema.title, + "fields": [f.model_dump(mode="json") for f in schema.fields], + "created_at": schema.created_at, + } + + +def _row_to_schema(row: dict[str, Any]) -> Schema: + fields = [FieldDefinition.model_validate(f) for f in row["fields"]] + return Schema( + srn=SchemaSRN.parse(row["srn"]), + title=row["title"], + fields=fields, + created_at=row["created_at"], + ) + + +class PostgresSemanticsSchemaRepository(SchemaRepository): + def __init__(self, session: AsyncSession) -> None: + self.session = session + + async def save(self, schema: Schema) -> None: + row = _schema_to_row(schema) + await self.session.execute(insert(schemas_table).values(**row)) + await self.session.flush() + + async def get(self, srn: SchemaSRN) -> Schema | None: + stmt = select(schemas_table).where(schemas_table.c.srn == str(srn)) + result = await self.session.execute(stmt) + row = result.mappings().first() + return _row_to_schema(dict(row)) if row else None + + async def list(self, *, limit: int | None = None, offset: int | None = None) -> List[Schema]: + stmt = select(schemas_table).order_by(schemas_table.c.created_at.desc()) + if offset is not None: + stmt = stmt.offset(offset) + if limit is not None: + stmt = stmt.limit(limit) + + result = await self.session.execute(stmt) + return [_row_to_schema(dict(r)) for r in result.mappings().all()] + + async def exists(self, srn: SchemaSRN) -> bool: + stmt = select(schemas_table.c.srn).where(schemas_table.c.srn == str(srn)) + result = await self.session.execute(stmt) + return result.first() is not None diff --git a/server/osa/infrastructure/persistence/tables.py b/server/osa/infrastructure/persistence/tables.py index 240ca7a..3aed090 100644 --- a/server/osa/infrastructure/persistence/tables.py +++ b/server/osa/infrastructure/persistence/tables.py @@ -1,6 +1,7 @@ """SQLAlchemy table definitions - dialect-agnostic (works with SQLite and 
PostgreSQL).""" from sqlalchemy import ( + Boolean, Column, DateTime, ForeignKey, @@ -25,12 +26,12 @@ "depositions", metadata, Column("srn", String, primary_key=True), + Column("convention_srn", String, nullable=False), # Convention submitted against Column("status", String(32), nullable=False), # DepositionStatus as string Column("metadata", JSON, nullable=False), - Column("provenance", JSON, nullable=False), Column("files", JSON, nullable=False), Column("record_id", String, nullable=True), - Column("owner_id", String, ForeignKey("users.id"), nullable=True), + Column("owner_id", String, ForeignKey("users.id"), nullable=False), Column("created_at", DateTime(timezone=True), nullable=False), Column("updated_at", DateTime(timezone=True), nullable=False), ) @@ -179,6 +180,73 @@ Index("ix_refresh_tokens_family_id", refresh_tokens_table.c.family_id) +# ============================================================================ +# ONTOLOGIES TABLE (Semantics) +# ============================================================================ +ontologies_table = Table( + "ontologies", + metadata, + Column("srn", String, primary_key=True), # Versioned SRN string + Column("title", String(255), nullable=False), + Column("description", Text, nullable=True), + Column("created_at", DateTime(timezone=True), nullable=False), +) + + +# ============================================================================ +# ONTOLOGY TERMS TABLE (Semantics) +# ============================================================================ +ontology_terms_table = Table( + "ontology_terms", + metadata, + Column("id", String, primary_key=True), # UUID as string + Column( + "ontology_srn", + String, + ForeignKey("ontologies.srn", ondelete="CASCADE"), + nullable=False, + ), + Column("term_id", String(255), nullable=False), + Column("label", String(255), nullable=False), + Column("synonyms", JSON, nullable=False, default=[]), + Column("parent_ids", JSON, nullable=False, default=[]), + Column("definition", Text, nullable=True), + Column("deprecated", Boolean, nullable=False, default=False), + UniqueConstraint("ontology_srn", "term_id", name="uq_ontology_term"), +) + +Index("idx_ontology_terms_ontology_srn", ontology_terms_table.c.ontology_srn) + + +# ============================================================================ +# SCHEMAS TABLE (Semantics) +# ============================================================================ +schemas_table = Table( + "schemas", + metadata, + Column("srn", String, primary_key=True), # Versioned SRN string + Column("title", String(255), nullable=False), + Column("fields", JSON, nullable=False), # List of FieldDefinition dicts + Column("created_at", DateTime(timezone=True), nullable=False), +) + + +# ============================================================================ +# CONVENTIONS TABLE (Deposition) +# ============================================================================ +conventions_table = Table( + "conventions", + metadata, + Column("srn", String, primary_key=True), # Versioned SRN string + Column("title", String(255), nullable=False), + Column("description", Text, nullable=True), + Column("schema_srn", String, nullable=False), # Reference to schemas.srn + Column("file_requirements", JSON, nullable=False), # FileRequirements as dict + Column("validator_refs", JSON, nullable=False, default=[]), # List of ValidatorRef dicts + Column("created_at", DateTime(timezone=True), nullable=False), +) + + # ============================================================================ # 
ROLE ASSIGNMENTS TABLE (Authorization) # ============================================================================ diff --git a/server/pyproject.toml b/server/pyproject.toml index 09609ca..a027c9c 100644 --- a/server/pyproject.toml +++ b/server/pyproject.toml @@ -29,6 +29,8 @@ dependencies = [ "asyncpg>=0.31.0", "psycopg2-binary>=2.9.11", "pyjwt>=2.11.0", + "openpyxl>=3.1.5", + "python-multipart>=0.0.22", ] [project.scripts] diff --git a/server/tests/unit/domain/auth/test_handler_configs.py b/server/tests/unit/domain/auth/test_handler_configs.py index 57ce28d..5be7715 100644 --- a/server/tests/unit/domain/auth/test_handler_configs.py +++ b/server/tests/unit/domain/auth/test_handler_configs.py @@ -41,22 +41,31 @@ def _make_principal( class TestCreateDepositionHandlerAuth: @pytest.mark.asyncio async def test_create_deposition_allows_depositor(self) -> None: + from osa.domain.shared.model.srn import ConventionSRN, DepositionSRN + depositor = _make_principal(frozenset({Role.DEPOSITOR})) service = AsyncMock() + mock_dep = MagicMock() + mock_dep.srn = DepositionSRN.parse("urn:osa:localhost:dep:test-dep") + service.create.return_value = mock_dep handler = CreateDepositionHandler( principal=depositor, deposition_service=service, ) - result = await handler.run(CreateDeposition()) + conv_srn = ConventionSRN.parse("urn:osa:localhost:conv:test@1.0.0") + result = await handler.run(CreateDeposition(convention_srn=conv_srn)) assert result.srn is not None @pytest.mark.asyncio async def test_create_deposition_rejects_unauthenticated(self) -> None: + from osa.domain.shared.model.srn import ConventionSRN + handler = CreateDepositionHandler.__new__(CreateDepositionHandler) with pytest.raises(AuthorizationError) as exc_info: - await handler.run(CreateDeposition()) + conv_srn = ConventionSRN.parse("urn:osa:localhost:conv:test@1.0.0") + await handler.run(CreateDeposition(convention_srn=conv_srn)) assert exc_info.value.code == "missing_token" @@ -101,6 +110,114 @@ async def test_assign_role_rejects_admin(self) -> None: assert exc_info.value.code == "access_denied" +class TestListDepositionsHandlerAuth: + @pytest.mark.asyncio + async def test_list_depositions_allows_depositor(self) -> None: + from osa.domain.deposition.query.list_depositions import ( + DepositionList, + ListDepositions, + ListDepositionsHandler, + ) + + depositor = _make_principal(frozenset({Role.DEPOSITOR})) + service = AsyncMock() + service.list_depositions.return_value = ([], 0) + handler = ListDepositionsHandler( + principal=depositor, + deposition_service=service, + ) + + result = await handler.run(ListDepositions()) + assert isinstance(result, DepositionList) + assert result.items == [] + assert result.total == 0 + # Depositor sees own depositions only + service.list_depositions.assert_called_once_with(depositor.user_id) + + @pytest.mark.asyncio + async def test_list_depositions_curator_sees_all(self) -> None: + from osa.domain.deposition.query.list_depositions import ( + DepositionList, + ListDepositions, + ListDepositionsHandler, + ) + + curator = _make_principal(frozenset({Role.CURATOR})) + service = AsyncMock() + service.list_depositions.return_value = ([], 0) + handler = ListDepositionsHandler( + principal=curator, + deposition_service=service, + ) + + result = await handler.run(ListDepositions()) + assert isinstance(result, DepositionList) + # Curator sees all depositions (owner_id=None) + service.list_depositions.assert_called_once_with(None) + + @pytest.mark.asyncio + async def test_list_depositions_rejects_unauthenticated(self) 
-> None: + from osa.domain.deposition.query.list_depositions import ( + ListDepositions, + ListDepositionsHandler, + ) + + handler = ListDepositionsHandler.__new__(ListDepositionsHandler) + + with pytest.raises(AuthorizationError) as exc_info: + await handler.run(ListDepositions()) + assert exc_info.value.code == "missing_token" + + +class TestDownloadFileHandlerAuth: + @pytest.mark.asyncio + async def test_download_file_allows_depositor(self) -> None: + from datetime import UTC, datetime + + from osa.domain.deposition.model.value import DepositionFile + from osa.domain.deposition.query.download_file import ( + DownloadFile, + DownloadFileHandler, + ) + from osa.domain.shared.model.srn import DepositionSRN + + depositor = _make_principal(frozenset({Role.DEPOSITOR})) + service = AsyncMock() + file_meta = DepositionFile( + name="data.csv", size=100, checksum="abc", uploaded_at=datetime.now(UTC) + ) + + async def _fake_stream(): + yield b"data" + + service.get_file_download.return_value = (_fake_stream(), file_meta) + + handler = DownloadFileHandler( + principal=depositor, + deposition_service=service, + ) + + srn = DepositionSRN.parse("urn:osa:localhost:dep:test-dep") + result = await handler.run(DownloadFile(srn=srn, filename="data.csv")) + assert result.filename == "data.csv" + assert result.size == 100 + + @pytest.mark.asyncio + async def test_download_file_rejects_unauthenticated(self) -> None: + from osa.domain.deposition.query.download_file import ( + DownloadFile, + DownloadFileHandler, + ) + from osa.domain.shared.model.srn import DepositionSRN + + handler = DownloadFileHandler.__new__(DownloadFileHandler) + + with pytest.raises(AuthorizationError) as exc_info: + srn = DepositionSRN.parse("urn:osa:localhost:dep:test-dep") + await handler.run(DownloadFile(srn=srn, filename="data.csv")) + assert exc_info.value.code == "missing_token" + + class TestInitiateLoginHandlerAuth: @pytest.mark.asyncio async def test_public_login_handler_works_without_principal(self) -> None: diff --git a/server/tests/unit/domain/deposition/test_convention.py b/server/tests/unit/domain/deposition/test_convention.py new file mode 100644 index 0000000..8141ed7 --- /dev/null +++ b/server/tests/unit/domain/deposition/test_convention.py @@ -0,0 +1,72 @@ +"""Unit tests for Convention aggregate.""" + +from datetime import UTC, datetime + +from osa.domain.deposition.model.convention import Convention +from osa.domain.deposition.model.value import FileRequirements +from osa.domain.shared.model.srn import ConventionSRN, SchemaSRN + + +def _make_conv_srn(id: str = "test-conv", version: str = "1.0.0") -> ConventionSRN: + return ConventionSRN.parse(f"urn:osa:localhost:conv:{id}@{version}") + + +def _make_schema_srn(id: str = "test-schema", version: str = "1.0.0") -> SchemaSRN: + return SchemaSRN.parse(f"urn:osa:localhost:schema:{id}@{version}") + + +def _make_file_reqs() -> FileRequirements: + return FileRequirements( + accepted_types=[".csv", ".h5ad"], + min_count=1, + max_count=5, + max_file_size=5_368_709_120, + ) + + +class TestConventionCreation: + def test_create_with_required_fields(self): + conv = Convention( + srn=_make_conv_srn(), + title="scRNA-seq Submission", + schema_srn=_make_schema_srn(), + file_requirements=_make_file_reqs(), + created_at=datetime.now(UTC), + ) + assert conv.title == "scRNA-seq Submission" + assert conv.schema_srn == _make_schema_srn() + assert conv.file_requirements.max_count == 5 + + def test_create_with_description(self): + conv = Convention( + srn=_make_conv_srn(), + title="Test", + 
description="A test convention", + schema_srn=_make_schema_srn(), + file_requirements=_make_file_reqs(), + created_at=datetime.now(UTC), + ) + assert conv.description == "A test convention" + + def test_create_with_empty_validators(self): + conv = Convention( + srn=_make_conv_srn(), + title="Test", + schema_srn=_make_schema_srn(), + file_requirements=_make_file_reqs(), + validator_refs=[], + created_at=datetime.now(UTC), + ) + assert conv.validator_refs == [] + + +class TestConventionImmutability: + def test_srn_is_versioned(self): + conv = Convention( + srn=_make_conv_srn("my-conv", "2.0.0"), + title="Test", + schema_srn=_make_schema_srn(), + file_requirements=_make_file_reqs(), + created_at=datetime.now(UTC), + ) + assert str(conv.srn) == "urn:osa:localhost:conv:my-conv@2.0.0" diff --git a/server/tests/unit/domain/deposition/test_convention_service.py b/server/tests/unit/domain/deposition/test_convention_service.py new file mode 100644 index 0000000..a0d2345 --- /dev/null +++ b/server/tests/unit/domain/deposition/test_convention_service.py @@ -0,0 +1,149 @@ +"""Unit tests for ConventionService.""" + +from datetime import UTC, datetime +from unittest.mock import AsyncMock + +import pytest + +from osa.domain.deposition.model.convention import Convention +from osa.domain.deposition.model.value import FileRequirements +from osa.domain.deposition.service.convention import ConventionService +from osa.domain.shared.error import NotFoundError, ValidationError +from osa.domain.shared.model.srn import ConventionSRN, Domain, SchemaSRN + + +def _make_conv_srn(id: str = "test-conv", version: str = "1.0.0") -> ConventionSRN: + return ConventionSRN.parse(f"urn:osa:localhost:conv:{id}@{version}") + + +def _make_schema_srn(id: str = "test-schema", version: str = "1.0.0") -> SchemaSRN: + return SchemaSRN.parse(f"urn:osa:localhost:schema:{id}@{version}") + + +def _make_file_reqs() -> FileRequirements: + return FileRequirements( + accepted_types=[".csv"], + min_count=1, + max_count=3, + max_file_size=1_000_000, + ) + + +class TestConventionServiceCreate: + @pytest.mark.asyncio + async def test_create_convention_with_valid_schema(self): + conv_repo = AsyncMock() + schema_reader = AsyncMock() + schema_reader.schema_exists.return_value = True + + service = ConventionService( + convention_repo=conv_repo, + schema_reader=schema_reader, + node_domain=Domain("localhost"), + ) + result = await service.create_convention( + title="Test Convention", + version="1.0.0", + schema_srn=_make_schema_srn(), + file_requirements=_make_file_reqs(), + ) + assert result.title == "Test Convention" + conv_repo.save.assert_called_once() + + @pytest.mark.asyncio + async def test_create_convention_rejects_invalid_schema(self): + conv_repo = AsyncMock() + schema_reader = AsyncMock() + schema_reader.schema_exists.return_value = False + + service = ConventionService( + convention_repo=conv_repo, + schema_reader=schema_reader, + node_domain=Domain("localhost"), + ) + with pytest.raises(ValidationError, match="Schema.*not found"): + await service.create_convention( + title="Bad", + version="1.0.0", + schema_srn=_make_schema_srn(), + file_requirements=_make_file_reqs(), + ) + + @pytest.mark.asyncio + async def test_create_convention_generates_srn(self): + conv_repo = AsyncMock() + schema_reader = AsyncMock() + schema_reader.schema_exists.return_value = True + + service = ConventionService( + convention_repo=conv_repo, + schema_reader=schema_reader, + node_domain=Domain("localhost"), + ) + result = await service.create_convention( + 
title="Test", + version="1.0.0", + schema_srn=_make_schema_srn(), + file_requirements=_make_file_reqs(), + ) + assert str(result.srn).startswith("urn:osa:localhost:conv:") + + +class TestConventionServiceGet: + @pytest.mark.asyncio + async def test_get_existing(self): + conv = Convention( + srn=_make_conv_srn(), + title="Test", + schema_srn=_make_schema_srn(), + file_requirements=_make_file_reqs(), + created_at=datetime.now(UTC), + ) + conv_repo = AsyncMock() + conv_repo.get.return_value = conv + schema_reader = AsyncMock() + + service = ConventionService( + convention_repo=conv_repo, + schema_reader=schema_reader, + node_domain=Domain("localhost"), + ) + result = await service.get_convention(conv.srn) + assert result == conv + + @pytest.mark.asyncio + async def test_get_nonexistent_raises(self): + conv_repo = AsyncMock() + conv_repo.get.return_value = None + schema_reader = AsyncMock() + + service = ConventionService( + convention_repo=conv_repo, + schema_reader=schema_reader, + node_domain=Domain("localhost"), + ) + with pytest.raises(NotFoundError): + await service.get_convention(_make_conv_srn()) + + +class TestConventionServiceList: + @pytest.mark.asyncio + async def test_list_conventions(self): + conv = Convention( + srn=_make_conv_srn(), + title="Test", + schema_srn=_make_schema_srn(), + file_requirements=_make_file_reqs(), + created_at=datetime.now(UTC), + ) + conv_repo = AsyncMock() + conv_repo.list.return_value = [conv] + schema_reader = AsyncMock() + + service = ConventionService( + convention_repo=conv_repo, + schema_reader=schema_reader, + node_domain=Domain("localhost"), + ) + result = await service.list_conventions() + assert len(result) == 1 diff --git a/server/tests/unit/domain/deposition/test_deposition.py b/server/tests/unit/domain/deposition/test_deposition.py new file mode 100644 index 0000000..a089f40 --- /dev/null +++ b/server/tests/unit/domain/deposition/test_deposition.py @@ -0,0 +1,132 @@ +"""Unit tests for Deposition aggregate.""" + +from datetime import UTC, datetime +from uuid import uuid4 + +import pytest + +from osa.domain.auth.model.value import UserId +from osa.domain.deposition.model.aggregate import Deposition +from osa.domain.deposition.model.value import DepositionFile, DepositionStatus +from osa.domain.shared.error import InvalidStateError +from osa.domain.shared.model.srn import ConventionSRN, DepositionSRN + + +def _make_dep_srn(id: str = "test-dep") -> DepositionSRN: + return DepositionSRN.parse(f"urn:osa:localhost:dep:{id}") + + +def _make_conv_srn(id: str = "test-conv", version: str = "1.0.0") -> ConventionSRN: + return ConventionSRN.parse(f"urn:osa:localhost:conv:{id}@{version}") + + +def _make_deposition(**overrides) -> Deposition: + defaults = dict( + srn=_make_dep_srn(), + convention_srn=_make_conv_srn(), + owner_id=UserId(uuid4()), + created_at=datetime.now(UTC), + updated_at=datetime.now(UTC), + ) + defaults.update(overrides) + return Deposition(**defaults) + + +def _make_file(name: str = "data.csv", size: int = 1000) -> DepositionFile: + return DepositionFile( + name=name, + size=size, + checksum="abc123", + uploaded_at=datetime.now(UTC), + ) + + +class TestDepositionCreation: + def test_create_with_convention_srn(self): + dep = _make_deposition() + assert dep.convention_srn == _make_conv_srn() + assert dep.status == DepositionStatus.DRAFT + + def test_create_with_empty_metadata(self): + dep = _make_deposition() + assert dep.metadata == {} + + def test_create_with_empty_files(self): + dep = _make_deposition() + assert dep.files == [] + + 
def test_create_has_owner(self): + owner = UserId(uuid4()) + dep = _make_deposition(owner_id=owner) + assert dep.owner_id == owner + + +class TestDepositionMetadata: + def test_update_metadata_in_draft(self): + dep = _make_deposition() + dep.update_metadata({"title": "Test"}) + assert dep.metadata == {"title": "Test"} + + def test_update_metadata_rejects_non_draft(self): + dep = _make_deposition(status=DepositionStatus.IN_VALIDATION) + with pytest.raises(InvalidStateError): + dep.update_metadata({"title": "Test"}) + + +class TestDepositionFiles: + def test_add_file_in_draft(self): + dep = _make_deposition() + f = _make_file() + dep.add_file(f) + assert len(dep.files) == 1 + assert dep.files[0].name == "data.csv" + + def test_remove_file_in_draft(self): + dep = _make_deposition() + dep.add_file(_make_file("a.csv")) + dep.add_file(_make_file("b.csv")) + removed = dep.remove_file("a.csv") + assert removed.name == "a.csv" + assert len(dep.files) == 1 + + def test_remove_nonexistent_file_raises(self): + dep = _make_deposition() + from osa.domain.shared.error import NotFoundError + + with pytest.raises(NotFoundError): + dep.remove_file("no-such-file.csv") + + def test_add_file_rejects_non_draft(self): + dep = _make_deposition(status=DepositionStatus.IN_VALIDATION) + with pytest.raises(InvalidStateError): + dep.add_file(_make_file()) + + def test_remove_file_rejects_non_draft(self): + dep = _make_deposition() + dep.add_file(_make_file()) + dep.submit() + with pytest.raises(InvalidStateError): + dep.remove_file("data.csv") + + +class TestDepositionSubmit: + def test_submit_transitions_to_in_validation(self): + dep = _make_deposition() + dep.submit() + assert dep.status == DepositionStatus.IN_VALIDATION + + def test_submit_rejects_non_draft(self): + dep = _make_deposition(status=DepositionStatus.IN_VALIDATION) + with pytest.raises(InvalidStateError): + dep.submit() + + def test_return_to_draft_from_in_validation(self): + dep = _make_deposition() + dep.submit() + dep.return_to_draft() + assert dep.status == DepositionStatus.DRAFT + + def test_return_to_draft_rejects_non_in_validation(self): + dep = _make_deposition() + with pytest.raises(InvalidStateError): + dep.return_to_draft() diff --git a/server/tests/unit/domain/deposition/test_deposition_service.py b/server/tests/unit/domain/deposition/test_deposition_service.py new file mode 100644 index 0000000..22b0213 --- /dev/null +++ b/server/tests/unit/domain/deposition/test_deposition_service.py @@ -0,0 +1,477 @@ +"""Unit tests for DepositionService.""" + +from datetime import UTC, datetime +from unittest.mock import AsyncMock +from uuid import uuid4 + +import pytest + +from osa.domain.auth.model.value import UserId +from osa.domain.deposition.model.aggregate import Deposition +from osa.domain.deposition.model.convention import Convention +from osa.domain.deposition.model.value import ( + DepositionFile, + DepositionStatus, + FileRequirements, +) +from osa.domain.deposition.event.created import DepositionCreatedEvent +from osa.domain.deposition.event.file_deleted import FileDeletedEvent +from osa.domain.deposition.event.file_uploaded import FileUploadedEvent +from osa.domain.deposition.event.metadata_updated import MetadataUpdatedEvent +from osa.domain.deposition.event.submitted import DepositionSubmittedEvent +from osa.domain.deposition.service.deposition import DepositionService +from osa.domain.shared.error import NotFoundError, ValidationError +from osa.domain.shared.model.srn import ConventionSRN, DepositionSRN, Domain, SchemaSRN + + +def 
_make_dep_srn(id: str = "test-dep") -> DepositionSRN:
+    return DepositionSRN.parse(f"urn:osa:localhost:dep:{id}")
+
+
+def _make_conv_srn(id: str = "test-conv", version: str = "1.0.0") -> ConventionSRN:
+    return ConventionSRN.parse(f"urn:osa:localhost:conv:{id}@{version}")
+
+
+def _make_schema_srn(id: str = "test-schema", version: str = "1.0.0") -> SchemaSRN:
+    return SchemaSRN.parse(f"urn:osa:localhost:schema:{id}@{version}")
+
+
+def _make_file_reqs(**overrides) -> FileRequirements:
+    defaults = dict(
+        accepted_types=[".csv"],
+        min_count=1,
+        max_count=3,
+        max_file_size=1_000_000,
+    )
+    defaults.update(overrides)
+    return FileRequirements(**defaults)
+
+
+def _make_convention(**overrides) -> Convention:
+    defaults = dict(
+        srn=_make_conv_srn(),
+        title="Test Convention",
+        schema_srn=_make_schema_srn(),
+        file_requirements=_make_file_reqs(),
+        created_at=datetime.now(UTC),
+    )
+    defaults.update(overrides)
+    return Convention(**defaults)
+
+
+def _make_deposition(**overrides) -> Deposition:
+    defaults = dict(
+        srn=_make_dep_srn(),
+        convention_srn=_make_conv_srn(),
+        owner_id=UserId(uuid4()),
+        created_at=datetime.now(UTC),
+        updated_at=datetime.now(UTC),
+    )
+    defaults.update(overrides)
+    return Deposition(**defaults)
+
+
+def _make_service(
+    dep_repo=None,
+    conv_repo=None,
+    file_storage=None,
+    outbox=None,
+) -> DepositionService:
+    return DepositionService(
+        deposition_repo=dep_repo or AsyncMock(),
+        convention_repo=conv_repo or AsyncMock(),
+        file_storage=file_storage or AsyncMock(),
+        outbox=outbox or AsyncMock(),
+        node_domain=Domain("localhost"),
+    )
+
+
+class TestDepositionServiceCreate:
+    @pytest.mark.asyncio
+    async def test_create_with_valid_convention(self):
+        conv_repo = AsyncMock()
+        conv_repo.get.return_value = _make_convention()
+        owner = UserId(uuid4())
+
+        service = _make_service(conv_repo=conv_repo)
+        result = await service.create(
+            convention_srn=_make_conv_srn(),
+            owner_id=owner,
+        )
+        assert result.convention_srn == _make_conv_srn()
+        assert result.owner_id == owner
+        assert result.status == DepositionStatus.DRAFT
+
+    @pytest.mark.asyncio
+    async def test_create_rejects_nonexistent_convention(self):
+        conv_repo = AsyncMock()
+        conv_repo.get.return_value = None
+
+        service = _make_service(conv_repo=conv_repo)
+        with pytest.raises(NotFoundError, match="Convention not found"):
+            await service.create(
+                convention_srn=_make_conv_srn(),
+                owner_id=UserId(uuid4()),
+            )
+
+    @pytest.mark.asyncio
+    async def test_create_generates_srn(self):
+        conv_repo = AsyncMock()
+        conv_repo.get.return_value = _make_convention()
+
+        service = _make_service(conv_repo=conv_repo)
+        result = await service.create(
+            convention_srn=_make_conv_srn(),
+            owner_id=UserId(uuid4()),
+        )
+        assert str(result.srn).startswith("urn:osa:localhost:dep:")
+
+    @pytest.mark.asyncio
+    async def test_create_saves_to_repo(self):
+        conv_repo = AsyncMock()
+        conv_repo.get.return_value = _make_convention()
+        dep_repo = AsyncMock()
+
+        service = _make_service(dep_repo=dep_repo, conv_repo=conv_repo)
+        await service.create(
+            convention_srn=_make_conv_srn(),
+            owner_id=UserId(uuid4()),
+        )
+        dep_repo.save.assert_called_once()
+
+    @pytest.mark.asyncio
+    async def test_create_emits_deposition_created_event(self):
+        conv_srn = _make_conv_srn()
+        conv_repo = AsyncMock()
+        conv_repo.get.return_value = _make_convention(srn=conv_srn)
+        outbox = AsyncMock()
+        owner = UserId(uuid4())
+
+        service = _make_service(conv_repo=conv_repo, outbox=outbox)
+        result = await service.create(convention_srn=conv_srn, owner_id=owner)
+
+        outbox.append.assert_called_once()
+        event = outbox.append.call_args[0][0]
+        assert isinstance(event, DepositionCreatedEvent)
+        assert event.deposition_id == result.srn
+        assert event.convention_srn == conv_srn
+        assert event.owner_id == owner
+
+
+class TestDepositionServiceUpdateMetadata:
+    @pytest.mark.asyncio
+    async def test_update_metadata(self):
+        dep = _make_deposition()
+        dep_repo = AsyncMock()
+        dep_repo.get.return_value = dep
+
+        service = _make_service(dep_repo=dep_repo)
+        result = await service.update_metadata(dep.srn, {"title": "Updated"})
+        assert result.metadata == {"title": "Updated"}
+        dep_repo.save.assert_called_once()
+
+    @pytest.mark.asyncio
+    async def test_update_metadata_not_found(self):
+        dep_repo = AsyncMock()
+        dep_repo.get.return_value = None
+
+        service = _make_service(dep_repo=dep_repo)
+        with pytest.raises(NotFoundError):
+            await service.update_metadata(_make_dep_srn(), {"title": "Test"})
+
+    @pytest.mark.asyncio
+    async def test_update_metadata_emits_event(self):
+        dep = _make_deposition()
+        dep_repo = AsyncMock()
+        dep_repo.get.return_value = dep
+        outbox = AsyncMock()
+
+        service = _make_service(dep_repo=dep_repo, outbox=outbox)
+        metadata = {"title": "Updated"}
+        await service.update_metadata(dep.srn, metadata)
+
+        outbox.append.assert_called_once()
+        event = outbox.append.call_args[0][0]
+        assert isinstance(event, MetadataUpdatedEvent)
+        assert event.deposition_id == dep.srn
+        assert event.metadata == metadata
+
+
+class TestDepositionServiceUploadFile:
+    @pytest.mark.asyncio
+    async def test_upload_file_success(self):
+        dep = _make_deposition()
+        conv = _make_convention(file_requirements=_make_file_reqs(accepted_types=[".csv"]))
+        dep_repo = AsyncMock()
+        dep_repo.get.return_value = dep
+        conv_repo = AsyncMock()
+        conv_repo.get.return_value = conv
+        file_storage = AsyncMock()
+        saved_file = DepositionFile(
+            name="data.csv", size=500, checksum="abc", uploaded_at=datetime.now(UTC)
+        )
+        file_storage.save_file.return_value = saved_file
+
+        service = _make_service(dep_repo=dep_repo, conv_repo=conv_repo, file_storage=file_storage)
+        result = await service.upload_file(dep.srn, "data.csv", b"content", 500)
+        assert len(result.files) == 1
+        file_storage.save_file.assert_called_once()
+
+    @pytest.mark.asyncio
+    async def test_upload_file_rejects_wrong_type(self):
+        dep = _make_deposition()
+        conv = _make_convention(file_requirements=_make_file_reqs(accepted_types=[".csv"]))
+        dep_repo = AsyncMock()
+        dep_repo.get.return_value = dep
+        conv_repo = AsyncMock()
+        conv_repo.get.return_value = conv
+
+        service = _make_service(dep_repo=dep_repo, conv_repo=conv_repo)
+        with pytest.raises(ValidationError, match="File type"):
+            await service.upload_file(dep.srn, "data.xlsx", b"content", 500)
+
+    @pytest.mark.asyncio
+    async def test_upload_file_rejects_exceeds_max_size(self):
+        dep = _make_deposition()
+        conv = _make_convention(file_requirements=_make_file_reqs(max_file_size=100))
+        dep_repo = AsyncMock()
+        dep_repo.get.return_value = dep
+        conv_repo = AsyncMock()
+        conv_repo.get.return_value = conv
+
+        service = _make_service(dep_repo=dep_repo, conv_repo=conv_repo)
+        with pytest.raises(ValidationError, match="exceeds maximum"):
+            await service.upload_file(dep.srn, "data.csv", b"x" * 200, 200)
+
+    @pytest.mark.asyncio
+    async def test_upload_file_rejects_exceeds_max_count(self):
+        dep = _make_deposition(
+            files=[
+                DepositionFile(
+                    name=f"f{i}.csv", size=10, checksum="x", uploaded_at=datetime.now(UTC)
+                )
+                for i in range(3)
+            ]
+        )
+        conv = _make_convention(file_requirements=_make_file_reqs(max_count=3))
+        dep_repo = AsyncMock()
+        dep_repo.get.return_value = dep
+        conv_repo = AsyncMock()
+        conv_repo.get.return_value = conv
+
+        service = _make_service(dep_repo=dep_repo, conv_repo=conv_repo)
+        with pytest.raises(ValidationError, match="Maximum.*files"):
+            await service.upload_file(dep.srn, "extra.csv", b"content", 500)
+
+    @pytest.mark.asyncio
+    async def test_upload_file_emits_event(self):
+        dep = _make_deposition()
+        conv = _make_convention(file_requirements=_make_file_reqs(accepted_types=[".csv"]))
+        dep_repo = AsyncMock()
+        dep_repo.get.return_value = dep
+        conv_repo = AsyncMock()
+        conv_repo.get.return_value = conv
+        file_storage = AsyncMock()
+        saved_file = DepositionFile(
+            name="data.csv", size=500, checksum="abc123", uploaded_at=datetime.now(UTC)
+        )
+        file_storage.save_file.return_value = saved_file
+        outbox = AsyncMock()
+
+        service = _make_service(
+            dep_repo=dep_repo, conv_repo=conv_repo, file_storage=file_storage, outbox=outbox
+        )
+        await service.upload_file(dep.srn, "data.csv", b"content", 500)
+
+        outbox.append.assert_called_once()
+        event = outbox.append.call_args[0][0]
+        assert isinstance(event, FileUploadedEvent)
+        assert event.deposition_id == dep.srn
+        assert event.filename == "data.csv"
+        assert event.size == 500
+        assert event.checksum == "abc123"
+
+
+class TestDepositionServiceDeleteFile:
+    @pytest.mark.asyncio
+    async def test_delete_file_success(self):
+        dep = _make_deposition(
+            files=[
+                DepositionFile(name="a.csv", size=10, checksum="x", uploaded_at=datetime.now(UTC))
+            ]
+        )
+        dep_repo = AsyncMock()
+        dep_repo.get.return_value = dep
+        file_storage = AsyncMock()
+
+        service = _make_service(dep_repo=dep_repo, file_storage=file_storage)
+        result = await service.delete_file(dep.srn, "a.csv")
+        assert len(result.files) == 0
+        file_storage.delete_file.assert_called_once()
+
+    @pytest.mark.asyncio
+    async def test_delete_file_not_found(self):
+        dep_repo = AsyncMock()
+        dep_repo.get.return_value = None
+
+        service = _make_service(dep_repo=dep_repo)
+        with pytest.raises(NotFoundError):
+            await service.delete_file(_make_dep_srn(), "a.csv")
+
+    @pytest.mark.asyncio
+    async def test_delete_file_emits_event(self):
+        dep = _make_deposition(
+            files=[
+                DepositionFile(name="a.csv", size=10, checksum="x", uploaded_at=datetime.now(UTC))
+            ]
+        )
+        dep_repo = AsyncMock()
+        dep_repo.get.return_value = dep
+        file_storage = AsyncMock()
+        outbox = AsyncMock()
+
+        service = _make_service(dep_repo=dep_repo, file_storage=file_storage, outbox=outbox)
+        await service.delete_file(dep.srn, "a.csv")
+
+        outbox.append.assert_called_once()
+        event = outbox.append.call_args[0][0]
+        assert isinstance(event, FileDeletedEvent)
+        assert event.deposition_id == dep.srn
+        assert event.filename == "a.csv"
+
+
+class TestDepositionServiceSubmit:
+    @pytest.mark.asyncio
+    async def test_submit_with_enough_files(self):
+        dep = _make_deposition(
+            files=[
+                DepositionFile(name="a.csv", size=10, checksum="x", uploaded_at=datetime.now(UTC))
+            ]
+        )
+        conv = _make_convention(file_requirements=_make_file_reqs(min_count=1))
+        dep_repo = AsyncMock()
+        dep_repo.get.return_value = dep
+        conv_repo = AsyncMock()
+        conv_repo.get.return_value = conv
+        outbox = AsyncMock()
+
+        service = _make_service(dep_repo=dep_repo, conv_repo=conv_repo, outbox=outbox)
+        result = await service.submit(dep.srn)
+        assert result.status == DepositionStatus.IN_VALIDATION
+        outbox.append.assert_called_once()
+        event = outbox.append.call_args[0][0]
+        assert isinstance(event, DepositionSubmittedEvent)
+
+    @pytest.mark.asyncio
+    async def test_submit_rejects_too_few_files(self):
+        dep = _make_deposition(files=[])
+        conv = _make_convention(file_requirements=_make_file_reqs(min_count=1))
+        dep_repo = AsyncMock()
+        dep_repo.get.return_value = dep
+        conv_repo = AsyncMock()
+        conv_repo.get.return_value = conv
+
+        service = _make_service(dep_repo=dep_repo, conv_repo=conv_repo)
+        with pytest.raises(ValidationError, match="Minimum.*file"):
+            await service.submit(dep.srn)
+
+    @pytest.mark.asyncio
+    async def test_submit_not_found(self):
+        dep_repo = AsyncMock()
+        dep_repo.get.return_value = None
+
+        service = _make_service(dep_repo=dep_repo)
+        with pytest.raises(NotFoundError):
+            await service.submit(_make_dep_srn())
+
+
+class TestDepositionServiceListDepositions:
+    @pytest.mark.asyncio
+    async def test_list_by_owner_calls_repo(self):
+        owner = UserId(uuid4())
+        dep_repo = AsyncMock()
+        dep_repo.list_by_owner.return_value = []
+        dep_repo.count_by_owner.return_value = 0
+
+        service = _make_service(dep_repo=dep_repo)
+        items, total = await service.list_depositions(owner)
+
+        dep_repo.list_by_owner.assert_called_once_with(owner, limit=None, offset=None)
+        dep_repo.count_by_owner.assert_called_once_with(owner)
+        assert items == []
+        assert total == 0
+
+    @pytest.mark.asyncio
+    async def test_list_all_when_owner_is_none(self):
+        dep_repo = AsyncMock()
+        dep_repo.list.return_value = [_make_deposition(), _make_deposition()]
+        dep_repo.count.return_value = 2
+
+        service = _make_service(dep_repo=dep_repo)
+        items, total = await service.list_depositions(None)
+
+        dep_repo.list.assert_called_once_with(limit=None, offset=None)
+        dep_repo.count.assert_called_once()
+        assert len(items) == 2
+        assert total == 2
+
+    @pytest.mark.asyncio
+    async def test_list_depositions_returns_total(self):
+        owner = UserId(uuid4())
+        deps = [_make_deposition(owner_id=owner), _make_deposition(owner_id=owner)]
+        dep_repo = AsyncMock()
+        dep_repo.list_by_owner.return_value = deps
+        dep_repo.count_by_owner.return_value = 5  # more than page
+
+        service = _make_service(dep_repo=dep_repo)
+        items, total = await service.list_depositions(owner)
+
+        assert len(items) == 2
+        assert total == 5
+
+    @pytest.mark.asyncio
+    async def test_list_depositions_passes_limit_offset(self):
+        owner = UserId(uuid4())
+        dep_repo = AsyncMock()
+        dep_repo.list_by_owner.return_value = []
+        dep_repo.count_by_owner.return_value = 0
+
+        service = _make_service(dep_repo=dep_repo)
+        await service.list_depositions(owner, limit=10, offset=20)
+
+        dep_repo.list_by_owner.assert_called_once_with(owner, limit=10, offset=20)
+
+
+class TestDepositionServiceGetFileDownload:
+    @pytest.mark.asyncio
+    async def test_raises_if_file_not_in_deposition(self):
+        dep = _make_deposition(files=[])
+        dep_repo = AsyncMock()
+        dep_repo.get.return_value = dep
+
+        service = _make_service(dep_repo=dep_repo)
+        with pytest.raises(NotFoundError, match="File.*not found"):
+            await service.get_file_download(dep.srn, "missing.csv")
+
+    @pytest.mark.asyncio
+    async def test_returns_stream_and_metadata(self):
+        file_meta = DepositionFile(
+            name="data.csv", size=100, checksum="abc", uploaded_at=datetime.now(UTC)
+        )
+        dep = _make_deposition(files=[file_meta])
+        dep_repo = AsyncMock()
+        dep_repo.get.return_value = dep
+        file_storage = AsyncMock()
+
+        async def _fake_stream():
+            yield b"chunk"
+
+        file_storage.get_file.return_value = _fake_stream()
+
+        service = _make_service(dep_repo=dep_repo, file_storage=file_storage)
+        stream, returned_meta = await service.get_file_download(dep.srn, "data.csv")
+
+        file_storage.get_file.assert_called_once_with(dep.srn, "data.csv")
+        assert stream is not None
+        assert returned_meta.name == "data.csv"
+        assert returned_meta.size == 100
diff --git a/server/tests/unit/domain/deposition/test_event_chain.py b/server/tests/unit/domain/deposition/test_event_chain.py
new file mode 100644
index 0000000..1f386bc
--- /dev/null
+++ b/server/tests/unit/domain/deposition/test_event_chain.py
@@ -0,0 +1,103 @@
+"""Unit tests verifying the event chain: DepositionSubmitted → Validate → Approve → Record."""
+
+from unittest.mock import AsyncMock, MagicMock
+from uuid import uuid4
+
+import pytest
+
+from osa.domain.curation.event.deposition_approved import DepositionApproved
+from osa.domain.curation.handler.auto_approve_curation import AutoApproveCuration
+from osa.domain.deposition.event.submitted import DepositionSubmittedEvent
+from osa.domain.record.handler.convert_deposition_to_record import ConvertDepositionToRecord
+from osa.domain.shared.event import EventId
+from osa.domain.shared.model.srn import DepositionSRN, ValidationRunSRN
+from osa.domain.validation.event.validation_completed import ValidationCompleted
+from osa.domain.validation.handler.validate_deposition import ValidateDeposition
+from osa.domain.validation.model import RunStatus
+
+
+def _make_dep_srn() -> DepositionSRN:
+    return DepositionSRN.parse("urn:osa:localhost:dep:test-dep")
+
+
+def _make_config():
+    """Create a minimal Config mock for testing."""
+    config = MagicMock()
+    config.server.domain = "localhost"
+    return config
+
+
+class TestValidateDepositionEmitsCompleted:
+    @pytest.mark.asyncio
+    async def test_emits_validation_completed(self):
+        outbox = AsyncMock()
+        config = _make_config()
+        handler = ValidateDeposition(outbox=outbox, config=config)
+
+        event = DepositionSubmittedEvent(
+            id=EventId(uuid4()),
+            deposition_id=_make_dep_srn(),
+            metadata={"title": "Test"},
+        )
+        await handler.handle(event)
+
+        outbox.append.assert_called_once()
+        emitted = outbox.append.call_args[0][0]
+        assert isinstance(emitted, ValidationCompleted)
+        assert emitted.deposition_srn == _make_dep_srn()
+        assert emitted.status == RunStatus.COMPLETED
+
+
+class TestAutoApproveCurationEmitsApproved:
+    @pytest.mark.asyncio
+    async def test_auto_approve_on_completed(self):
+        outbox = AsyncMock()
+        handler = AutoApproveCuration(outbox=outbox)
+
+        event = ValidationCompleted(
+            id=EventId(uuid4()),
+            validation_run_srn=ValidationRunSRN.parse("urn:osa:localhost:val:run1"),
+            deposition_srn=_make_dep_srn(),
+            status=RunStatus.COMPLETED,
+            results=[],
+            metadata={"title": "Test"},
+        )
+        await handler.handle(event)
+
+        outbox.append.assert_called_once()
+
+    @pytest.mark.asyncio
+    async def test_skips_approve_on_failed(self):
+        outbox = AsyncMock()
+        handler = AutoApproveCuration(outbox=outbox)
+
+        event = ValidationCompleted(
+            id=EventId(uuid4()),
+            validation_run_srn=ValidationRunSRN.parse("urn:osa:localhost:val:run1"),
+            deposition_srn=_make_dep_srn(),
+            status=RunStatus.FAILED,
+            results=[],
+            metadata={"title": "Test"},
+        )
+        await handler.handle(event)
+
+        outbox.append.assert_not_called()
+
+
+class TestConvertDepositionToRecord:
+    @pytest.mark.asyncio
+    async def test_publishes_record(self):
+        service = AsyncMock()
+        handler = ConvertDepositionToRecord(service=service)
+
+        event = DepositionApproved(
+            id=EventId(uuid4()),
+            deposition_srn=_make_dep_srn(),
+            metadata={"title": "Test"},
+        )
+        await handler.handle(event)
+
+        service.publish_record.assert_called_once_with(
+            deposition_srn=_make_dep_srn(),
+            metadata={"title": "Test"},
+        )
diff --git a/server/tests/unit/domain/deposition/test_return_to_draft.py b/server/tests/unit/domain/deposition/test_return_to_draft.py
new file mode 100644
index 0000000..4664456
--- /dev/null
+++ b/server/tests/unit/domain/deposition/test_return_to_draft.py
@@ -0,0 +1,64 @@
+"""Unit tests for ReturnToDraft event handler."""
+
+from datetime import UTC, datetime
+from unittest.mock import AsyncMock
+from uuid import uuid4
+
+import pytest
+
+from osa.domain.auth.model.value import UserId
+from osa.domain.deposition.handler.return_to_draft import ReturnToDraft
+from osa.domain.deposition.model.aggregate import Deposition
+from osa.domain.deposition.model.value import DepositionStatus
+from osa.domain.shared.event import EventId
+from osa.domain.shared.model.srn import ConventionSRN, DepositionSRN
+from osa.domain.validation.event.validation_failed import ValidationFailed
+
+
+def _make_dep_srn(id: str = "test-dep") -> DepositionSRN:
+    return DepositionSRN.parse(f"urn:osa:localhost:dep:{id}")
+
+
+def _make_conv_srn() -> ConventionSRN:
+    return ConventionSRN.parse("urn:osa:localhost:conv:test@1.0.0")
+
+
+class TestReturnToDraft:
+    @pytest.mark.asyncio
+    async def test_returns_deposition_to_draft(self):
+        dep = Deposition(
+            srn=_make_dep_srn(),
+            convention_srn=_make_conv_srn(),
+            status=DepositionStatus.IN_VALIDATION,
+            owner_id=UserId(uuid4()),
+            created_at=datetime.now(UTC),
+            updated_at=datetime.now(UTC),
+        )
+        dep_repo = AsyncMock()
+        dep_repo.get.return_value = dep
+
+        handler = ReturnToDraft(deposition_repo=dep_repo)
+        event = ValidationFailed(
+            id=EventId(uuid4()),
+            deposition_srn=dep.srn,
+            reasons=["Missing required field"],
+        )
+        await handler.handle(event)
+
+        assert dep.status == DepositionStatus.DRAFT
+        dep_repo.save.assert_called_once_with(dep)
+
+    @pytest.mark.asyncio
+    async def test_handles_missing_deposition(self):
+        dep_repo = AsyncMock()
+        dep_repo.get.return_value = None
+
+        handler = ReturnToDraft(deposition_repo=dep_repo)
+        event = ValidationFailed(
+            id=EventId(uuid4()),
+            deposition_srn=_make_dep_srn(),
+            reasons=["error"],
+        )
+        # Should not raise — workers must be resilient
+        await handler.handle(event)
+        dep_repo.save.assert_not_called()
diff --git a/server/tests/unit/domain/deposition/test_spreadsheet.py b/server/tests/unit/domain/deposition/test_spreadsheet.py
new file mode 100644
index 0000000..5a077de
--- /dev/null
+++ b/server/tests/unit/domain/deposition/test_spreadsheet.py
@@ -0,0 +1,176 @@
+"""Unit tests for the Openpyxl spreadsheet adapter."""
+
+from datetime import UTC, datetime
+from io import BytesIO
+
+from openpyxl import Workbook, load_workbook
+
+from osa.domain.semantics.model.schema import Schema
+from osa.domain.semantics.model.value import (
+    Cardinality,
+    FieldDefinition,
+    FieldType,
+    TermConstraints,
+)
+from osa.domain.shared.model.srn import OntologySRN, SchemaSRN
+from osa.infrastructure.persistence.adapter.spreadsheet import OpenpyxlSpreadsheetAdapter
+
+
+def _make_schema_srn() -> SchemaSRN:
+    return SchemaSRN.parse("urn:osa:localhost:schema:test@1.0.0")
+
+
+def _make_ontology_srn() -> OntologySRN:
+    return OntologySRN.parse("urn:osa:localhost:onto:sex@1.0.0")
+
+
+def _make_text_field(name: str = "title", required: bool = True) -> FieldDefinition:
+    return FieldDefinition(
+        name=name,
+        type=FieldType.TEXT,
+        required=required,
+        cardinality=Cardinality.EXACTLY_ONE,
+        description="The title of the sample",
+    )
+
+
+def _make_term_field(
+    name: str = "sex",
+    onto_srn: OntologySRN | None = None,
+) -> FieldDefinition:
+    return FieldDefinition(
+        name=name,
+        type=FieldType.TERM,
+        required=True,
+        cardinality=Cardinality.EXACTLY_ONE,
+        constraints=TermConstraints(ontology_srn=onto_srn or _make_ontology_srn()),
+    )
+
+
+def _make_schema(fields: list[FieldDefinition] | None = None) -> Schema:
+    return Schema(
+        srn=_make_schema_srn(),
+        title="Test Schema",
+        fields=fields or [_make_text_field()],
+        created_at=datetime.now(UTC),
+    )
+
+
+class TestTemplateGeneration:
+    def test_generates_valid_xlsx(self):
+        adapter = OpenpyxlSpreadsheetAdapter()
+        schema = _make_schema()
+        content = adapter.generate_template(schema, {})
+        wb = load_workbook(BytesIO(content))
+        assert wb.active is not None
+
+    def test_headers_match_field_names(self):
+        adapter = OpenpyxlSpreadsheetAdapter()
+        schema = _make_schema(
+            fields=[_make_text_field("title"), _make_text_field("description", required=False)]
+        )
+        content = adapter.generate_template(schema, {})
+        wb = load_workbook(BytesIO(content))
+        ws = wb.active
+        headers = [ws.cell(row=1, column=c).value for c in range(1, 3)]
+        assert headers == ["title", "description"]
+
+    def test_description_row_populated(self):
+        adapter = OpenpyxlSpreadsheetAdapter()
+        schema = _make_schema(fields=[_make_text_field("title")])
+        content = adapter.generate_template(schema, {})
+        wb = load_workbook(BytesIO(content))
+        ws = wb.active
+        desc = ws.cell(row=2, column=1).value
+        assert desc == "The title of the sample"
+
+    def test_required_field_header_bold(self):
+        adapter = OpenpyxlSpreadsheetAdapter()
+        schema = _make_schema(
+            fields=[_make_text_field("title"), _make_text_field("optional_field", required=False)]
+        )
+        content = adapter.generate_template(schema, {})
+        wb = load_workbook(BytesIO(content))
+        ws = wb.active
+        assert ws.cell(row=1, column=1).font.bold is True
+        assert ws.cell(row=1, column=2).font.bold is not True
+
+    def test_term_field_with_few_terms_gets_dropdown(self):
+        adapter = OpenpyxlSpreadsheetAdapter()
+        schema = _make_schema(fields=[_make_term_field()])
+        onto_terms = {str(_make_ontology_srn()): ["male", "female", "other"]}
+        content = adapter.generate_template(schema, onto_terms)
+        wb = load_workbook(BytesIO(content))
+        ws = wb.active
+        # Check data validations exist
+        assert len(ws.data_validations.dataValidation) > 0
+
+
+class TestSpreadsheetParsing:
+    def _create_upload(self, headers: list[str], rows: list[list]) -> bytes:
+        """Create a valid spreadsheet upload with given headers and rows."""
+        wb = Workbook()
+        ws = wb.active
+        for col, header in enumerate(headers, 1):
+            ws.cell(row=1, column=col, value=header)
+        # Row 2 = descriptions (skip in parsing)
+        for col in range(1, len(headers) + 1):
+            ws.cell(row=2, column=col, value="description")
+        # Row 3+ = data
+        for row_idx, row_data in enumerate(rows, 3):
+            for col, val in enumerate(row_data, 1):
+                ws.cell(row=row_idx, column=col, value=val)
+
+        buf = BytesIO()
+        wb.save(buf)
+        return buf.getvalue()
+
+    def test_parse_valid_data(self):
+        adapter = OpenpyxlSpreadsheetAdapter()
+        schema = _make_schema(fields=[_make_text_field("title")])
+        content = self._create_upload(["title"], [["Sample A"]])
+        result = adapter.parse_upload(schema, content)
+        assert result.metadata["title"] == "Sample A"
+        assert result.errors == []
+
+    def test_parse_missing_required_column(self):
+        adapter = OpenpyxlSpreadsheetAdapter()
+        schema = _make_schema(fields=[_make_text_field("title"), _make_text_field("organism")])
+        content = self._create_upload(["title"], [["Sample A"]])
+        result = adapter.parse_upload(schema, content)
+        assert len(result.errors) > 0
+        assert any("organism" in e.field for e in result.errors)
+
+    def test_parse_ignores_unrecognized_columns(self):
+        adapter = OpenpyxlSpreadsheetAdapter()
+        schema = _make_schema(fields=[_make_text_field("title")])
+        content = self._create_upload(["title", "extra_col"], [["Sample A", "ignored"]])
+        result = adapter.parse_upload(schema, content)
+        assert "title" in result.metadata
+        assert "extra_col" not in result.metadata
+
+    def test_parse_unrecognized_columns_generate_warnings(self):
+        adapter = OpenpyxlSpreadsheetAdapter()
+        schema = _make_schema(fields=[_make_text_field("title")])
+        content = self._create_upload(["title", "extra_col"], [["Sample A", "ignored"]])
+        result = adapter.parse_upload(schema, content)
+        assert any("extra_col" in w for w in result.warnings)
+
+    def test_parse_missing_required_value(self):
+        adapter = OpenpyxlSpreadsheetAdapter()
+        schema = _make_schema(fields=[_make_text_field("title")])
+        content = self._create_upload(["title"], [[None]])
+        result = adapter.parse_upload(schema, content)
+        assert len(result.errors) > 0
+
+    def test_parse_optional_field_allows_empty(self):
+        adapter = OpenpyxlSpreadsheetAdapter()
+        schema = _make_schema(
+            fields=[
+                _make_text_field("title"),
+                _make_text_field("notes", required=False),
+            ]
+        )
+        content = self._create_upload(["title", "notes"], [["Sample A", None]])
+        result = adapter.parse_upload(schema, content)
+        assert result.errors == []
diff --git a/server/tests/unit/domain/semantics/__init__.py b/server/tests/unit/domain/semantics/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/server/tests/unit/domain/semantics/test_import_ontology.py b/server/tests/unit/domain/semantics/test_import_ontology.py
new file mode 100644
index 0000000..5ba9cb1
--- /dev/null
+++ b/server/tests/unit/domain/semantics/test_import_ontology.py
@@ -0,0 +1,137 @@
+"""Unit tests for ImportOntology command handler."""
+
+from datetime import UTC, datetime
+from unittest.mock import AsyncMock
+
+import pytest
+
+from osa.domain.auth.model.principal import Principal
+from osa.domain.auth.model.role import Role
+from osa.domain.auth.model.value import ProviderIdentity, UserId
+from osa.domain.semantics.command.import_ontology import (
+    ImportOntology,
+    ImportOntologyHandler,
+    ImportOntologyResult,
+)
+from osa.domain.semantics.model.ontology import Ontology, Term
+from osa.domain.shared.error import AuthorizationError
+from osa.domain.shared.model.srn import Domain, LocalId, OntologySRN, Semver
+
+
+def _make_principal(roles: frozenset[Role]) -> Principal:
+    return Principal(
+        user_id=UserId.generate(),
+        provider_identity=ProviderIdentity(provider="test", external_id="ext"),
+        roles=roles,
+    )
+
+
+def _make_ontology() -> Ontology:
+    return Ontology(
+        srn=OntologySRN(
+            domain=Domain("localhost"),
+            id=LocalId("test-id"),
+            version=Semver.from_string("1.0.0"),
+        ),
+        title="Test Ontology",
+        description="A test.",
+        terms=[Term(term_id="T:001", label="Root")],
+        created_at=datetime.now(UTC),
+    )
+
+
+def _obographs_data() -> dict:
+    return {
+        "graphs": [
+            {
+                "id": "http://example.org/test.owl",
+                "lbl": "Test Ontology",
+                "nodes": [{"id": "T:001", "lbl": "Root", "type": "CLASS"}],
+            }
+        ]
+    }
+
+
+class TestImportOntologyHandler:
+    @pytest.mark.asyncio
+    async def test_fetches_and_imports(self):
+        admin = _make_principal(frozenset({Role.ADMIN}))
+        ontology = _make_ontology()
+
+        fetcher = AsyncMock()
+        fetcher.fetch_json.return_value = _obographs_data()
+
+        service = AsyncMock()
+        service.import_from_obographs.return_value = ontology
+
+        handler = ImportOntologyHandler(
+            principal=admin,
+            ontology_service=service,
+            fetcher=fetcher,
+        )
+
+        result = await handler.run(ImportOntology(url="https://example.com/onto.json"))
+
+        assert isinstance(result, ImportOntologyResult)
+        assert result.srn == ontology.srn
+        assert result.title == "Test Ontology"
+        fetcher.fetch_json.assert_called_once_with("https://example.com/onto.json")
+        service.import_from_obographs.assert_called_once()
+
+    @pytest.mark.asyncio
+    async def test_passes_version_override(self):
+        admin = _make_principal(frozenset({Role.ADMIN}))
+        ontology = _make_ontology()
+
+        fetcher = AsyncMock()
+        fetcher.fetch_json.return_value = _obographs_data()
+
+        service = AsyncMock()
+        service.import_from_obographs.return_value = ontology
+
+        handler = ImportOntologyHandler(
+            principal=admin,
+            ontology_service=service,
+            fetcher=fetcher,
+        )
+
+        await handler.run(ImportOntology(url="https://example.com/onto.json", version="2.0.0"))
+
+        call_kwargs = service.import_from_obographs.call_args
+        assert call_kwargs[1]["version_override"] == "2.0.0"
+
+    @pytest.mark.asyncio
+    async def test_requires_admin_role(self):
+        depositor = _make_principal(frozenset({Role.DEPOSITOR}))
+        handler = ImportOntologyHandler(
+            principal=depositor,
+            ontology_service=AsyncMock(),
+            fetcher=AsyncMock(),
+        )
+
+        with pytest.raises(AuthorizationError) as exc_info:
+            await handler.run(ImportOntology(url="https://example.com/onto.json"))
+        assert exc_info.value.code == "access_denied"
+
+    @pytest.mark.asyncio
+    async def test_rejects_unauthenticated(self):
+        handler = ImportOntologyHandler.__new__(ImportOntologyHandler)
+
+        with pytest.raises(AuthorizationError) as exc_info:
+            await handler.run(ImportOntology(url="https://example.com/onto.json"))
+        assert exc_info.value.code == "missing_token"
+
+    @pytest.mark.asyncio
+    async def test_propagates_fetch_errors(self):
+        admin = _make_principal(frozenset({Role.ADMIN}))
+        fetcher = AsyncMock()
+        fetcher.fetch_json.side_effect = RuntimeError("Connection failed")
+
+        handler = ImportOntologyHandler(
+            principal=admin,
+            ontology_service=AsyncMock(),
+            fetcher=fetcher,
+        )
+
+        with pytest.raises(RuntimeError, match="Connection failed"):
+            await handler.run(ImportOntology(url="https://example.com/onto.json"))
diff --git a/server/tests/unit/domain/semantics/test_obographs_parser.py b/server/tests/unit/domain/semantics/test_obographs_parser.py
new file mode 100644
index 0000000..1e02680
--- /dev/null
+++ b/server/tests/unit/domain/semantics/test_obographs_parser.py
@@ -0,0 +1,295 @@
+"""Unit tests for OBO Graphs JSON parser."""
+
+import json
+from pathlib import Path
+
+import pytest
+
+from osa.domain.semantics.util.obographs import ParsedOntology, parse_obographs
+
+
+def _find_server_root() -> Path:
+    path = Path(__file__).resolve()
+    while path != path.parent:
+        if (path / "pyproject.toml").exists():
+            return path
+        path = path.parent
+    raise RuntimeError("Could not find pyproject.toml above test file")
+
+
+ONTOLOGIES_DIR = _find_server_root() / "ontologies"
+
+
+def _minimal_graph(
+    *,
+    graph_id: str = "http://purl.obolibrary.org/obo/test.owl",
+    lbl: str | None = "Test Ontology",
+    nodes: list[dict] | None = None,
+    edges: list[dict] | None = None,
+    meta: dict | None = None,
+) -> dict:
+    """Build a minimal OBO Graphs JSON structure."""
+    graph: dict = {"id": graph_id}
+    if lbl is not None:
+        graph["lbl"] = lbl
+    if nodes is not None:
+        graph["nodes"] = nodes
+    if edges is not None:
+        graph["edges"] = edges
+    if meta is not None:
+        graph["meta"] = meta
+    return {"graphs": [graph]}
+
+
+def _class_node(
+    node_id: str,
+    label: str,
+    *,
+    definition: str | None = None,
+    synonyms: list[str] | None = None,
+    deprecated: bool = False,
+) -> dict:
+    """Build a CLASS node for OBO Graphs."""
+    node: dict = {"id": node_id, "lbl": label, "type": "CLASS"}
+    meta: dict = {}
+    if definition is not None:
+        meta["definition"] = {"val": definition}
+    if synonyms is not None:
+        meta["synonyms"] = [{"val": s} for s in synonyms]
+    if deprecated:
+        meta["deprecated"] = True
+    if meta:
+        node["meta"] = meta
+    return node
+
+
+class TestParseMinimalGraph:
+    def test_parses_single_node(self):
+        data = _minimal_graph(nodes=[_class_node("T:001", "Root")])
+        result = parse_obographs(data)
+
+        assert isinstance(result, ParsedOntology)
+        assert len(result.terms) == 1
+        assert result.terms[0].term_id == "T:001"
+        assert result.terms[0].label == "Root"
+
+    def test_extracts_title_from_graph_lbl(self):
+        data = _minimal_graph(lbl="Biological Sex", nodes=[_class_node("T:001", "Root")])
+        result = parse_obographs(data)
+        assert result.title == "Biological Sex"
+
+    def test_falls_back_to_graph_id_when_no_lbl(self):
+        data = _minimal_graph(
+            graph_id="http://purl.obolibrary.org/obo/pato.owl",
+            lbl=None,
+            nodes=[_class_node("T:001", "Root")],
+        )
+        result = parse_obographs(data)
+        assert result.title == "http://purl.obolibrary.org/obo/pato.owl"
+
+
+class TestVersionExtraction:
+    def test_extracts_version_from_graph_meta(self):
+        data = _minimal_graph(
+            nodes=[_class_node("T:001", "Root")],
+            meta={"version": "2024-01-15"},
+        )
+        result = parse_obographs(data)
+        assert result.version == "2024-01-15"
+
+    def test_version_is_none_when_no_meta(self):
+        data = _minimal_graph(nodes=[_class_node("T:001", "Root")])
+        result = parse_obographs(data)
+        assert result.version is None
+
+
+class TestDescriptionExtraction:
+    def test_extracts_description_from_meta_definition(self):
+        data = _minimal_graph(
+            nodes=[_class_node("T:001", "Root")],
+            meta={"definition": {"val": "An ontology for testing."}},
+        )
+        result = parse_obographs(data)
+        assert result.description == "An ontology for testing."
+
+    def test_description_is_none_when_no_definition(self):
+        data = _minimal_graph(nodes=[_class_node("T:001", "Root")])
+        result = parse_obographs(data)
+        assert result.description is None
+
+
+class TestTermMapping:
+    def test_maps_definition(self):
+        data = _minimal_graph(nodes=[_class_node("T:001", "Root", definition="The root term.")])
+        result = parse_obographs(data)
+        assert result.terms[0].definition == "The root term."
+
+    def test_maps_synonyms(self):
+        data = _minimal_graph(nodes=[_class_node("T:001", "Root", synonyms=["Base", "Top"])])
+        result = parse_obographs(data)
+        assert result.terms[0].synonyms == ["Base", "Top"]
+
+    def test_maps_deprecated_flag(self):
+        data = _minimal_graph(nodes=[_class_node("T:001", "Obsolete", deprecated=True)])
+        result = parse_obographs(data)
+        assert result.terms[0].deprecated is True
+
+    def test_defaults_to_not_deprecated(self):
+        data = _minimal_graph(nodes=[_class_node("T:001", "Root")])
+        result = parse_obographs(data)
+        assert result.terms[0].deprecated is False
+
+    def test_empty_synonyms_when_none(self):
+        data = _minimal_graph(nodes=[_class_node("T:001", "Root")])
+        result = parse_obographs(data)
+        assert result.terms[0].synonyms == []
+
+    def test_empty_parent_ids_when_no_edges(self):
+        data = _minimal_graph(nodes=[_class_node("T:001", "Root")])
+        result = parse_obographs(data)
+        assert result.terms[0].parent_ids == []
+
+
+class TestEdgeMapping:
+    def test_builds_parent_ids_from_is_a_edges(self):
+        data = _minimal_graph(
+            nodes=[
+                _class_node("T:001", "Root"),
+                _class_node("T:002", "Child"),
+            ],
+            edges=[{"sub": "T:002", "pred": "is_a", "obj": "T:001"}],
+        )
+        result = parse_obographs(data)
+        child = next(t for t in result.terms if t.term_id == "T:002")
+        assert child.parent_ids == ["T:001"]
+
+    def test_ignores_non_is_a_edges(self):
+        data = _minimal_graph(
+            nodes=[
+                _class_node("T:001", "Root"),
+                _class_node("T:002", "Child"),
+            ],
+            edges=[
+                {"sub": "T:002", "pred": "is_a", "obj": "T:001"},
+                {"sub": "T:002", "pred": "part_of", "obj": "T:001"},
+            ],
+        )
+        result = parse_obographs(data)
+        child = next(t for t in result.terms if t.term_id == "T:002")
+        assert child.parent_ids == ["T:001"]
+
+    def test_multiple_parents(self):
+        data = _minimal_graph(
+            nodes=[
+                _class_node("T:001", "Root A"),
+                _class_node("T:002", "Root B"),
+                _class_node("T:003", "Child"),
+            ],
+            edges=[
+                {"sub": "T:003", "pred": "is_a", "obj": "T:001"},
+                {"sub": "T:003", "pred": "is_a", "obj": "T:002"},
+            ],
+        )
+        result = parse_obographs(data)
+        child = next(t for t in result.terms if t.term_id == "T:003")
+        assert sorted(child.parent_ids) == ["T:001", "T:002"]
+
+
+class TestFiltering:
+    def test_skips_property_nodes(self):
+        data = _minimal_graph(
+            nodes=[
+                _class_node("T:001", "Root"),
+                {"id": "P:001", "lbl": "has_part", "type": "PROPERTY"},
+            ]
+        )
+        result = parse_obographs(data)
+        assert len(result.terms) == 1
+        assert result.terms[0].term_id == "T:001"
+
+    def test_skips_individual_nodes(self):
+        data = _minimal_graph(
+            nodes=[
+                _class_node("T:001", "Root"),
+                {"id": "I:001", "lbl": "Instance1", "type": "INDIVIDUAL"},
+            ]
+        )
+        result = parse_obographs(data)
+        assert len(result.terms) == 1
+
+    def test_skips_nodes_without_label(self):
+        data = _minimal_graph(
+            nodes=[
+                _class_node("T:001", "Root"),
+                {"id": "T:002", "type": "CLASS"},  # no lbl
+            ]
+        )
+        result = parse_obographs(data)
+        assert len(result.terms) == 1
+        assert result.terms[0].term_id == "T:001"
+
+    def test_skips_nodes_without_type(self):
+        """Nodes without explicit type should be skipped."""
+        data = _minimal_graph(
+            nodes=[
+                _class_node("T:001", "Root"),
+                {"id": "T:002", "lbl": "Mystery"},  # no type
+            ]
+        )
+        result = parse_obographs(data)
+        assert len(result.terms) == 1
+
+
+class TestValidation:
+    def test_rejects_data_with_no_graphs_key(self):
+        with pytest.raises(ValueError, match="graphs"):
+            parse_obographs({"nodes": []})
+
+    def test_rejects_empty_graphs_list(self):
+        with pytest.raises(ValueError, match="graphs"):
+            parse_obographs({"graphs": []})
+
+    def test_rejects_graph_with_no_class_nodes(self):
+        data = _minimal_graph(nodes=[{"id": "P:001", "lbl": "has_part", "type": "PROPERTY"}])
+        with pytest.raises(ValueError, match="no CLASS"):
+            parse_obographs(data)
+
+    def test_rejects_graph_with_no_nodes(self):
+        data = _minimal_graph(nodes=[])
+        with pytest.raises(ValueError, match="no CLASS"):
+            parse_obographs(data)
+
+
+class TestSampleOntologies:
+    """Verify bundled sample ontology files parse correctly."""
+
+    def test_biological_sex_ontology(self):
+        data = json.loads((ONTOLOGIES_DIR / "biological-sex.obographs.json").read_text())
+        result = parse_obographs(data)
+
+        assert result.title == "Biological Sex"
+        assert result.version == "1.0.0"
+        assert result.description is not None
+        assert len(result.terms) == 5
+
+        root = next(t for t in result.terms if t.term_id == "OSAO:0000001")
+        assert root.label == "biological sex"
+        assert root.parent_ids == []
+
+        female = next(t for t in result.terms if t.term_id == "OSAO:0000002")
+        assert female.label == "female"
+        assert female.parent_ids == ["OSAO:0000001"]
+        assert "F" in female.synonyms
+
+    def test_license_ontology(self):
+        data = json.loads((ONTOLOGIES_DIR / "license.obographs.json").read_text())
+        result = parse_obographs(data)
+
+        assert result.title == "License"
+        assert result.version == "1.0.0"
+        assert len(result.terms) == 7
+
+        cc0 = next(t for t in result.terms if t.term_id == "OSAO:1000002")
+        assert cc0.label == "CC0 1.0"
+        assert cc0.parent_ids == ["OSAO:1000001"]
+        assert "CC0" in cc0.synonyms
diff --git a/server/tests/unit/domain/semantics/test_ontology.py b/server/tests/unit/domain/semantics/test_ontology.py
new file mode 100644
index 0000000..f8ec0e7
--- /dev/null
+++ b/server/tests/unit/domain/semantics/test_ontology.py
@@ -0,0 +1,101 @@
+"""Unit tests for Ontology aggregate."""
+
+from datetime import UTC, datetime
+
+import pytest
+
+from osa.domain.semantics.model.ontology import Ontology, Term
+from osa.domain.shared.error import ValidationError
+from osa.domain.shared.model.srn import OntologySRN
+
+
+def _make_srn(id: str = "test-onto", version: str = "1.0.0") -> OntologySRN:
+    return OntologySRN.parse(f"urn:osa:localhost:onto:{id}@{version}")
+
+
+def _make_term(term_id: str = "male", label: str = "Male") -> Term:
+    return Term(term_id=term_id, label=label)
+
+
+class TestOntologyCreation:
+    def test_create_with_single_term(self):
+        onto = Ontology(
+            srn=_make_srn(),
+            title="Sex",
+            terms=[_make_term()],
+            created_at=datetime.now(UTC),
+        )
+        assert onto.title == "Sex"
+        assert len(onto.terms) == 1
+
+    def test_create_with_multiple_terms(self):
+        onto = Ontology(
+            srn=_make_srn(),
+            title="Sex",
+            terms=[
+                _make_term("male", "Male"),
+                _make_term("female", "Female"),
+                _make_term("mixed", "Mixed"),
+            ],
+            created_at=datetime.now(UTC),
+        )
+        assert len(onto.terms) == 3
+
+    def test_create_with_description(self):
+        onto = Ontology(
+            srn=_make_srn(),
+            title="Sex",
+            description="Biological sex categories",
+            terms=[_make_term()],
+            created_at=datetime.now(UTC),
+        )
+        assert onto.description == "Biological sex categories"
+
+    def test_create_with_optional_term_fields(self):
+        term = Term(
+            term_id="neocortex",
+            label="Neocortex",
+            synonyms=["isocortex", "neopallium"],
+            parent_ids=["UBERON:0000955"],
+            definition="Part of cerebral cortex",
+            deprecated=False,
+        )
+        onto = Ontology(
+            srn=_make_srn(),
+            title="Brain Regions",
+            terms=[term],
+            created_at=datetime.now(UTC),
+        )
+        assert onto.terms[0].synonyms == ["isocortex", "neopallium"]
+        assert onto.terms[0].parent_ids == ["UBERON:0000955"]
+
+
+class TestOntologyInvariants:
+    def test_rejects_empty_terms(self):
+        with pytest.raises(ValidationError, match="at least one term"):
+            Ontology(
+                srn=_make_srn(),
+                title="Empty",
+                terms=[],
+                created_at=datetime.now(UTC),
+            )
+
+    def test_rejects_duplicate_term_ids(self):
+        with pytest.raises(ValidationError, match="Duplicate term IDs"):
+            Ontology(
+                srn=_make_srn(),
+                title="Bad",
+                terms=[_make_term("male", "Male"), _make_term("male", "Male Duplicate")],
+                created_at=datetime.now(UTC),
+            )
+
+
+class TestOntologyImmutability:
+    def test_srn_is_set(self):
+        onto = Ontology(
+            srn=_make_srn("sex-onto", "1.0.0"),
+            title="Sex",
+            terms=[_make_term()],
+            created_at=datetime.now(UTC),
+        )
+        assert str(onto.srn) == "urn:osa:localhost:onto:sex-onto@1.0.0"
diff --git a/server/tests/unit/domain/semantics/test_ontology_service.py b/server/tests/unit/domain/semantics/test_ontology_service.py
new file mode 100644
index 0000000..b59d690
--- /dev/null
+++ b/server/tests/unit/domain/semantics/test_ontology_service.py
@@ -0,0 +1,171 @@
+"""Unit tests for OntologyService."""
+
+from datetime import UTC, datetime
+from unittest.mock import AsyncMock
+
+import pytest
+
+from osa.domain.semantics.model.ontology import Ontology, Term
+from osa.domain.semantics.service.ontology import OntologyService
+from osa.domain.shared.error import NotFoundError
+from osa.domain.shared.model.srn import Domain, OntologySRN
+
+
+def _make_srn(id: str = "test-onto", version: str = "1.0.0") -> OntologySRN:
+    return OntologySRN.parse(f"urn:osa:localhost:onto:{id}@{version}")
+
+
+def _make_ontology(srn: OntologySRN | None = None) -> Ontology:
+    return Ontology(
+        srn=srn or _make_srn(),
+        title="Sex",
+        terms=[Term(term_id="male", label="Male"), Term(term_id="female", label="Female")],
+        created_at=datetime.now(UTC),
+    )
+
+
+class TestOntologyServiceCreate:
+    @pytest.mark.asyncio
+    async def test_create_ontology(self):
+        repo = AsyncMock()
+        service = OntologyService(
+            ontology_repo=repo,
+            node_domain=Domain("localhost"),
+        )
+        result = await service.create_ontology(
+            title="Sex",
+            version="1.0.0",
+            terms=[Term(term_id="male", label="Male")],
+            description="Biological sex",
+        )
+        assert result.title == "Sex"
+        repo.save.assert_called_once()
+
+    @pytest.mark.asyncio
+    async def test_create_ontology_generates_srn(self):
+        repo = AsyncMock()
+        service = OntologyService(
+            ontology_repo=repo,
+            node_domain=Domain("localhost"),
+        )
+        result = await service.create_ontology(
+            title="Test",
+            version="1.0.0",
+            terms=[Term(term_id="a", label="A")],
+        )
+        assert str(result.srn).startswith("urn:osa:localhost:onto:")
+        assert str(result.srn).endswith("@1.0.0")
+
+
+class TestOntologyServiceGet:
+    @pytest.mark.asyncio
+    async def test_get_existing(self):
+        onto = _make_ontology()
+        repo = AsyncMock()
+        repo.get.return_value = onto
+        service = OntologyService(ontology_repo=repo, node_domain=Domain("localhost"))
+
+        result = await service.get_ontology(onto.srn)
+        assert result == onto
+
+    @pytest.mark.asyncio
+    async def test_get_nonexistent_raises(self):
+        repo = AsyncMock()
+        repo.get.return_value = None
+        service = OntologyService(ontology_repo=repo, node_domain=Domain("localhost"))
+
+        with pytest.raises(NotFoundError):
+            await service.get_ontology(_make_srn())
+
+
+class TestOntologyServiceList:
+    @pytest.mark.asyncio
+    async def test_list_ontologies(self):
+        onto = _make_ontology()
+        repo = AsyncMock()
+        repo.list.return_value = [onto]
+        service = OntologyService(ontology_repo=repo, node_domain=Domain("localhost"))
+
+        result = await service.list_ontologies()
+        assert len(result) == 1
+        assert result[0] == onto
+
+
+def _obographs_data(
+    *,
+    lbl: str = "Test Ontology",
+    version: str | None = "2.1.0",
+    description: str | None = None,
+) -> dict:
+    """Build minimal OBO Graphs data for service tests."""
+    graph: dict = {
+        "id": "http://example.org/test.owl",
+        "lbl": lbl,
+        "nodes": [
+            {"id": "T:001", "lbl": "Root", "type": "CLASS"},
+            {"id": "T:002", "lbl": "Child", "type": "CLASS"},
+        ],
+        "edges": [{"sub": "T:002", "pred": "is_a", "obj": "T:001"}],
+    }
+    meta: dict = {}
+    if version is not None:
+        meta["version"] = version
+    if description is not None:
+        meta["definition"] = {"val": description}
+    if meta:
+        graph["meta"] = meta
+    return {"graphs": [graph]}
+
+
+class TestOntologyServiceImport:
+    @pytest.mark.asyncio
+    async def test_import_from_obographs_creates_ontology(self):
+        repo = AsyncMock()
+        service = OntologyService(ontology_repo=repo, node_domain=Domain("localhost"))
+
+        result = await service.import_from_obographs(_obographs_data())
+
+        assert result.title == "Test Ontology"
+        assert len(result.terms) == 2
+        repo.save.assert_called_once()
+
+    @pytest.mark.asyncio
+    async def test_import_uses_version_from_obo_data(self):
+        repo = AsyncMock()
+        service = OntologyService(ontology_repo=repo, node_domain=Domain("localhost"))
+
+        result = await service.import_from_obographs(_obographs_data(version="2.1.0"))
+
+        assert str(result.srn).endswith("@2.1.0")
+
+    @pytest.mark.asyncio
+    async def test_import_uses_version_override(self):
+        repo = AsyncMock()
+        service = OntologyService(ontology_repo=repo, node_domain=Domain("localhost"))
+
+        result = await service.import_from_obographs(
+            _obographs_data(version="2024-01-15"),
+            version_override="3.0.0",
+        )
+
+        assert str(result.srn).endswith("@3.0.0")
+
+    @pytest.mark.asyncio
+    async def test_import_falls_back_to_1_0_0(self):
+        repo = AsyncMock()
+        service = OntologyService(ontology_repo=repo, node_domain=Domain("localhost"))
+
+        result = await service.import_from_obographs(_obographs_data(version=None))
+
+        assert str(result.srn).endswith("@1.0.0")
+
+    @pytest.mark.asyncio
+    async def test_import_passes_description(self):
+        repo = AsyncMock()
+        service = OntologyService(ontology_repo=repo, node_domain=Domain("localhost"))
+
+        result = await service.import_from_obographs(
+            _obographs_data(description="A test ontology.")
+        )
+
+        assert result.description == "A test ontology."
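The import tests above pin down how import_from_obographs chooses the version that ends up in the ontology SRN: an explicit version_override wins, otherwise the version declared in the OBO Graphs meta, otherwise "1.0.0". A minimal sketch of that rule, assuming the parsed OBO version reaches the service as an optional string; the helper below is illustrative only and is not code from this patch:

def _resolve_version(obo_version: str | None, override: str | None) -> str:
    """Pick the SRN version: override > OBO Graphs meta version > "1.0.0"."""
    # An explicit version_override always wins (test_import_uses_version_override).
    if override is not None:
        return override
    # Otherwise use the version the OBO Graphs document declares
    # (test_import_uses_version_from_obo_data).
    if obo_version is not None:
        return obo_version
    # Sources with no version at all are imported as 1.0.0
    # (test_import_falls_back_to_1_0_0).
    return "1.0.0"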
diff --git a/server/tests/unit/domain/semantics/test_schema.py b/server/tests/unit/domain/semantics/test_schema.py
new file mode 100644
index 0000000..0a245ab
--- /dev/null
+++ b/server/tests/unit/domain/semantics/test_schema.py
@@ -0,0 +1,118 @@
+"""Unit tests for Schema aggregate."""
+
+from datetime import UTC, datetime
+
+import pytest
+
+from osa.domain.semantics.model.schema import Schema
+from osa.domain.semantics.model.value import (
+    Cardinality,
+    FieldDefinition,
+    FieldType,
+    NumberConstraints,
+    TermConstraints,
+    TextConstraints,
+)
+from osa.domain.shared.error import ValidationError
+from osa.domain.shared.model.srn import OntologySRN, SchemaSRN
+
+
+def _make_srn(id: str = "test-schema", version: str = "1.0.0") -> SchemaSRN:
+    return SchemaSRN.parse(f"urn:osa:localhost:schema:{id}@{version}")
+
+
+def _make_text_field(name: str = "title", required: bool = True) -> FieldDefinition:
+    return FieldDefinition(
+        name=name,
+        type=FieldType.TEXT,
+        required=required,
+        cardinality=Cardinality.EXACTLY_ONE,
+    )
+
+
+class TestSchemaCreation:
+    def test_create_with_single_field(self):
+        schema = Schema(
+            srn=_make_srn(),
+            title="Test Schema",
+            fields=[_make_text_field()],
+            created_at=datetime.now(UTC),
+        )
+        assert schema.title == "Test Schema"
+        assert len(schema.fields) == 1
+
+    def test_create_with_multiple_fields(self):
+        schema = Schema(
+            srn=_make_srn(),
+            title="scRNA-seq",
+            fields=[
+                _make_text_field("title"),
+                FieldDefinition(
+                    name="sample_count",
+                    type=FieldType.NUMBER,
+                    required=True,
+                    cardinality=Cardinality.EXACTLY_ONE,
+                    constraints=NumberConstraints(integer_only=True, min_value=1),
+                ),
+            ],
+            created_at=datetime.now(UTC),
+        )
+        assert len(schema.fields) == 2
+
+    def test_create_with_ontology_reference(self):
+        onto_srn = OntologySRN.parse("urn:osa:localhost:onto:sex@1.0.0")
+        schema = Schema(
+            srn=_make_srn(),
+            title="With Ontology",
+            fields=[
+                FieldDefinition(
+                    name="sex",
+                    type=FieldType.TERM,
+                    required=True,
+                    cardinality=Cardinality.EXACTLY_ONE,
+                    constraints=TermConstraints(ontology_srn=onto_srn),
+                ),
+            ],
+            created_at=datetime.now(UTC),
+        )
+        assert schema.fields[0].constraints.ontology_srn == onto_srn
+
+    def test_create_with_text_constraints(self):
+        schema = Schema(
+            srn=_make_srn(),
+            title="Constrained",
+            fields=[
+                FieldDefinition(
+                    name="title",
+                    type=FieldType.TEXT,
+                    required=True,
+                    cardinality=Cardinality.EXACTLY_ONE,
+                    constraints=TextConstraints(min_length=1, max_length=500),
+                ),
+            ],
+            created_at=datetime.now(UTC),
+        )
+        assert schema.fields[0].constraints.max_length == 500
+
+
+class TestSchemaInvariants:
+    def test_rejects_empty_fields(self):
+        with pytest.raises(ValidationError, match="at least one field"):
+            Schema(
+                srn=_make_srn(),
+                title="Empty",
+                fields=[],
+                created_at=datetime.now(UTC),
+            )
+
+    def test_rejects_duplicate_field_names(self):
+        with pytest.raises(ValidationError, match="Duplicate field names"):
+            Schema(
+                srn=_make_srn(),
+                title="Duplicate",
+                fields=[
+                    _make_text_field("title"),
+                    _make_text_field("title"),
+                ],
+                created_at=datetime.now(UTC),
+            )
diff --git a/server/tests/unit/domain/semantics/test_schema_service.py b/server/tests/unit/domain/semantics/test_schema_service.py
new file mode 100644
index 0000000..71fbaf4
--- /dev/null
+++ b/server/tests/unit/domain/semantics/test_schema_service.py
@@ -0,0 +1,177 @@
+"""Unit tests for SchemaService."""
+
+from datetime import UTC, datetime
+from unittest.mock import AsyncMock
+
+import pytest
+
+from osa.domain.semantics.model.schema import Schema
+from osa.domain.semantics.model.value import (
+    Cardinality,
+    FieldDefinition,
+    FieldType,
+    TermConstraints,
+)
+from osa.domain.semantics.service.schema import SchemaService
+from osa.domain.shared.error import NotFoundError, ValidationError
+from osa.domain.shared.model.srn import Domain, OntologySRN, SchemaSRN
+
+
+def _make_schema_srn(id: str = "test-schema", version: str = "1.0.0") -> SchemaSRN:
+    return SchemaSRN.parse(f"urn:osa:localhost:schema:{id}@{version}")
+
+
+def _make_ontology_srn(id: str = "sex", version: str = "1.0.0") -> OntologySRN:
+    return OntologySRN.parse(f"urn:osa:localhost:onto:{id}@{version}")
+
+
+def _make_text_field(name: str = "title") -> FieldDefinition:
+    return FieldDefinition(
+        name=name,
+        type=FieldType.TEXT,
+        required=True,
+        cardinality=Cardinality.EXACTLY_ONE,
+    )
+
+
+def _make_term_field(name: str = "sex", onto_srn: OntologySRN | None = None) -> FieldDefinition:
+    return FieldDefinition(
+        name=name,
+        type=FieldType.TERM,
+        required=True,
+        cardinality=Cardinality.EXACTLY_ONE,
+        constraints=TermConstraints(ontology_srn=onto_srn or _make_ontology_srn()),
+    )
+
+
+class TestSchemaServiceCreate:
+    @pytest.mark.asyncio
+    async def test_create_schema_without_ontology_refs(self):
+        schema_repo = AsyncMock()
+        ontology_repo = AsyncMock()
+
+        service = SchemaService(
+            schema_repo=schema_repo,
+            ontology_repo=ontology_repo,
+            node_domain=Domain("localhost"),
+        )
+        result = await service.create_schema(
+            title="Simple Schema",
+            version="1.0.0",
+            fields=[_make_text_field()],
+        )
+        assert result.title == "Simple Schema"
+        schema_repo.save.assert_called_once()
+
+    @pytest.mark.asyncio
+    async def test_create_schema_with_valid_ontology_ref(self):
+        schema_repo = AsyncMock()
+        ontology_repo = AsyncMock()
+        ontology_repo.exists.return_value = True
+
+        service = SchemaService(
+            schema_repo=schema_repo,
+            ontology_repo=ontology_repo,
+            node_domain=Domain("localhost"),
+        )
+        result = await service.create_schema(
+            title="With Ontology",
+            version="1.0.0",
+            fields=[_make_text_field(), _make_term_field()],
+        )
+        assert len(result.fields) == 2
+        ontology_repo.exists.assert_called_once()
+
+    @pytest.mark.asyncio
+    async def test_create_schema_rejects_invalid_ontology_ref(self):
+        schema_repo = AsyncMock()
+        ontology_repo = AsyncMock()
+        ontology_repo.exists.return_value = False
+
+        service = SchemaService(
+            schema_repo=schema_repo,
+            ontology_repo=ontology_repo,
+            node_domain=Domain("localhost"),
+        )
+        with pytest.raises(ValidationError, match="Ontology.*not found"):
+            await service.create_schema(
+                title="Bad Ref",
+                version="1.0.0",
+                fields=[_make_term_field()],
+            )
+
+    @pytest.mark.asyncio
+    async def test_create_schema_generates_srn(self):
+        schema_repo = AsyncMock()
+        ontology_repo = AsyncMock()
+
+        service = SchemaService(
+            schema_repo=schema_repo,
+            ontology_repo=ontology_repo,
+            node_domain=Domain("localhost"),
+        )
+        result = await service.create_schema(
+            title="Test",
+            version="1.0.0",
+            fields=[_make_text_field()],
+        )
+        assert str(result.srn).startswith("urn:osa:localhost:schema:")
+        assert str(result.srn).endswith("@1.0.0")
+
+
+class TestSchemaServiceGet:
+    @pytest.mark.asyncio
+    async def test_get_existing(self):
+        schema = Schema(
+            srn=_make_schema_srn(),
+            title="Test",
+            fields=[_make_text_field()],
+            created_at=datetime.now(UTC),
+        )
+        schema_repo = AsyncMock()
+        schema_repo.get.return_value = schema
+        ontology_repo = AsyncMock()
+
+        service = SchemaService(
+            schema_repo=schema_repo,
+            ontology_repo=ontology_repo,
+            node_domain=Domain("localhost"),
+        )
+        result = await service.get_schema(schema.srn)
+        assert result == schema
+
+    @pytest.mark.asyncio
+    async def test_get_nonexistent_raises(self):
+        schema_repo = AsyncMock()
+        schema_repo.get.return_value = None
+        ontology_repo = AsyncMock()
+
+        service = SchemaService(
+            schema_repo=schema_repo,
+            ontology_repo=ontology_repo,
+            node_domain=Domain("localhost"),
+        )
+        with pytest.raises(NotFoundError):
+            await service.get_schema(_make_schema_srn())
+
+
+class TestSchemaServiceList:
+    @pytest.mark.asyncio
+    async def test_list_schemas(self):
+        schema = Schema(
+            srn=_make_schema_srn(),
+            title="Test",
+            fields=[_make_text_field()],
+            created_at=datetime.now(UTC),
+        )
+        schema_repo = AsyncMock()
+        schema_repo.list.return_value = [schema]
+        ontology_repo = AsyncMock()
+
+        service = SchemaService(
+            schema_repo=schema_repo,
+            ontology_repo=ontology_repo,
+            node_domain=Domain("localhost"),
+        )
+        result = await service.list_schemas()
+        assert len(result) == 1
diff --git a/server/tests/unit/infrastructure/http/test_ontology_fetcher.py b/server/tests/unit/infrastructure/http/test_ontology_fetcher.py
new file mode 100644
index 0000000..c8fac4c
--- /dev/null
+++ b/server/tests/unit/infrastructure/http/test_ontology_fetcher.py
@@ -0,0 +1,54 @@
+"""Unit tests for HttpOntologyFetcher adapter."""
+
+from unittest.mock import AsyncMock, MagicMock
+
+import httpx
+import pytest
+
+from osa.infrastructure.http.ontology_fetcher import HttpOntologyFetcher
+
+
+class TestHttpOntologyFetcher:
+    @pytest.mark.asyncio
+    async def test_fetches_and_parses_json(self):
+        expected = {"graphs": [{"id": "test", "nodes": []}]}
+        response = MagicMock(spec=httpx.Response)
+        response.json.return_value = expected
+        response.raise_for_status = MagicMock()
+
+        client = AsyncMock(spec=httpx.AsyncClient)
+        client.get.return_value = response
+
+        fetcher = HttpOntologyFetcher(client=client)
+        result = await fetcher.fetch_json("https://example.com/ontology.json")
+
+        assert result == expected
+        client.get.assert_called_once_with("https://example.com/ontology.json")
+        response.raise_for_status.assert_called_once()
+
+    @pytest.mark.asyncio
+    async def test_raises_on_http_error(self):
+        client = AsyncMock(spec=httpx.AsyncClient)
+        response = MagicMock(spec=httpx.Response)
+        response.raise_for_status.side_effect = httpx.HTTPStatusError(
+            "Not Found",
+            request=MagicMock(),
+            response=MagicMock(status_code=404),
+        )
+        client.get.return_value = response
+
+        fetcher = HttpOntologyFetcher(client=client)
+        with pytest.raises(httpx.HTTPStatusError):
+            await fetcher.fetch_json("https://example.com/missing.json")
+
+    @pytest.mark.asyncio
+    async def test_raises_on_invalid_json(self):
+        client = AsyncMock(spec=httpx.AsyncClient)
+        response = MagicMock(spec=httpx.Response)
+        response.raise_for_status = MagicMock()
+        response.json.side_effect = ValueError("Invalid JSON")
+        client.get.return_value = response
+
+        fetcher = HttpOntologyFetcher(client=client)
+        with pytest.raises(ValueError, match="Invalid JSON"):
+            await fetcher.fetch_json("https://example.com/bad.json")
diff --git a/server/uv.lock b/server/uv.lock
index cdf84bd..1fbc922 100644
--- a/server/uv.lock
+++ b/server/uv.lock
@@ -553,6 +553,15 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/de/15/545e2b6cf2e3be84bc1ed85613edd75b8aea69807a71c26f4ca6a9258e82/email_validator-2.3.0-py3-none-any.whl", hash = "sha256:80f13f623413e6b197ae73bb10bf4eb0908faf509ad8362c5edeb0be7fd450b4", size = 35604, upload-time = "2025-08-26T13:09:05.858Z" },
 ]
 
+[[package]]
+name = "et-xmlfile"
+version = "2.0.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/d3/38/af70d7ab1ae9d4da450eeec1fa3918940a5fafb9055e934af8d6eb0c2313/et_xmlfile-2.0.0.tar.gz", hash = "sha256:dab3f4764309081ce75662649be815c4c9081e88f0837825f90fd28317d4da54", size = 17234, upload-time = "2024-10-25T17:25:40.039Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/c1/8b/5fe2cc11fee489817272089c4203e679c63b570a5aaeb18d852ae3cbba6a/et_xmlfile-2.0.0-py3-none-any.whl", hash = "sha256:7a91720bc756843502c3b7504c77b8fe44217c85c537d85037f0f536151b2caa", size = 18059, upload-time = "2024-10-25T17:25:39.051Z" },
+]
+
 [[package]]
 name = "executing"
 version = "2.2.1"
@@ -1368,6 +1377,18 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/b6/ca/862b1e7a639460f0ca25fd5b6135fb42cf9deea86d398a92e44dfda2279d/onnxruntime-1.23.2-cp313-cp313t-manylinux_2_27_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e2b9233c4947907fd1818d0e581c049c41ccc39b2856cc942ff6d26317cee145", size = 17394184, upload-time = "2025-10-22T03:47:08.127Z" },
 ]
 
+[[package]]
+name = "openpyxl"
+version = "3.1.5"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "et-xmlfile", marker = "(platform_machine == 'arm64' and sys_platform == 'darwin') or (platform_machine == 'aarch64' and sys_platform == 'linux') or (platform_machine == 'x86_64' and sys_platform == 'linux')" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/3d/f9/88d94a75de065ea32619465d2f77b29a0469500e99012523b91cc4141cd1/openpyxl-3.1.5.tar.gz", hash = "sha256:cf0e3cf56142039133628b5acffe8ef0c12bc902d2aadd3e0fe5878dc08d1050", size = 186464, upload-time = "2024-06-28T14:03:44.161Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/c0/da/977ded879c29cbd04de313843e76868e6e13408a94ed6b987245dc7c8506/openpyxl-3.1.5-py2.py3-none-any.whl", hash = "sha256:5282c12b107bffeef825f4617dc029afaf41d0ea60823bbb665ef3079dc79de2", size = 250910, upload-time = "2024-06-28T14:03:41.161Z" },
+]
+
 [[package]]
 name = "opentelemetry-api"
 version = "1.39.1"
@@ -1585,10 +1606,12 @@ dependencies = [
     { name = "greenlet", marker = "(platform_machine == 'arm64' and sys_platform == 'darwin') or (platform_machine == 'aarch64' and sys_platform == 'linux') or (platform_machine == 'x86_64' and sys_platform == 'linux')" },
     { name = "httpx", marker = "(platform_machine == 'arm64' and sys_platform == 'darwin') or (platform_machine == 'aarch64' and sys_platform == 'linux') or (platform_machine == 'x86_64' and sys_platform == 'linux')" },
     { name = "logfire", extra = ["fastapi", "httpx"], marker = "(platform_machine == 'arm64' and sys_platform == 'darwin') or (platform_machine == 'aarch64' and sys_platform == 'linux') or (platform_machine == 'x86_64' and sys_platform == 'linux')" },
+    { name = "openpyxl", marker = "(platform_machine == 'arm64' and sys_platform == 'darwin') or (platform_machine == 'aarch64' and sys_platform == 'linux') or (platform_machine == 'x86_64' and sys_platform == 'linux')" },
     { name = "psycopg2-binary", marker = "(platform_machine == 'arm64' and sys_platform == 'darwin') or (platform_machine == 'aarch64' and sys_platform == 'linux') or (platform_machine == 'x86_64' and sys_platform == 'linux')" },
     { name = "pydantic", extra = ["email"], marker = "(platform_machine == 'arm64' and sys_platform == 'darwin') or (platform_machine == 'aarch64' and sys_platform == 'linux') or (platform_machine == 'x86_64' and sys_platform == 'linux')" },
     { name = "pydantic-settings", marker = "(platform_machine == 'arm64' and sys_platform == 'darwin') or (platform_machine == 'aarch64' and sys_platform == 'linux') or (platform_machine == 'x86_64' and sys_platform == 'linux')" },
     { name = "pyjwt", marker = "(platform_machine == 'arm64' and sys_platform == 'darwin') or (platform_machine == 'aarch64' and sys_platform == 'linux') or (platform_machine == 'x86_64' and sys_platform == 'linux')" },
+    { name = "python-multipart", marker = "(platform_machine == 'arm64' and sys_platform == 'darwin') or (platform_machine == 'aarch64' and sys_platform == 'linux') or (platform_machine == 'x86_64' and sys_platform == 'linux')" },
     { name = "rich", marker = "(platform_machine == 'arm64' and sys_platform == 'darwin') or (platform_machine == 'aarch64' and sys_platform == 'linux') or (platform_machine == 'x86_64' and sys_platform == 'linux')" },
     { name = "sentence-transformers", marker = "(platform_machine == 'arm64' and sys_platform == 'darwin') or (platform_machine == 'aarch64' and sys_platform == 'linux') or (platform_machine == 'x86_64' and sys_platform == 'linux')" },
     { name = "sqlalchemy", marker = "(platform_machine == 'arm64' and sys_platform == 'darwin') or (platform_machine == 'aarch64' and sys_platform == 'linux') or (platform_machine == 'x86_64' and sys_platform == 'linux')" },
@@ -1621,10 +1644,12 @@ requires-dist = [
     { name = "greenlet", specifier = ">=3.2.4" },
     { name = "httpx", specifier = ">=0.28.1" },
    { name = "logfire", extras = ["fastapi", "httpx"], specifier = ">=4.15.1" },
+    { name = "openpyxl", specifier = ">=3.1.5" },
     { name = "psycopg2-binary", specifier = ">=2.9.11" },
     { name = "pydantic", extras = ["email"], specifier = ">=2.12.4" },
     { name = "pydantic-settings", specifier = ">=2.12.0" },
     { name = "pyjwt", specifier = ">=2.11.0" },
+    { name = "python-multipart", specifier = ">=0.0.22" },
     { name = "rich", specifier = ">=14.2.0" },
     { name = "sentence-transformers", specifier = ">=5.2.0" },
     { name = "sqlalchemy", specifier = ">=2.0.44" },
@@ -2039,6 +2064,15 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/14/1b/a298b06749107c305e1fe0f814c6c74aea7b2f1e10989cb30f544a1b3253/python_dotenv-1.2.1-py3-none-any.whl", hash = "sha256:b81ee9561e9ca4004139c6cbba3a238c32b03e4894671e181b671e8cb8425d61", size = 21230, upload-time = "2025-10-26T15:12:09.109Z" },
 ]
 
+[[package]]
+name = "python-multipart"
+version = "0.0.22"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/94/01/979e98d542a70714b0cb2b6728ed0b7c46792b695e3eaec3e20711271ca3/python_multipart-0.0.22.tar.gz", hash = "sha256:7340bef99a7e0032613f56dc36027b959fd3b30a787ed62d310e951f7c3a3a58", size = 37612, upload-time = "2026-01-25T10:15:56.219Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/1b/d0/397f9626e711ff749a95d96b7af99b9c566a9bb5129b8e4c10fc4d100304/python_multipart-0.0.22-py3-none-any.whl", hash = "sha256:2b2cd894c83d21bf49d702499531c7bafd057d730c201782048f7945d82de155", size = 24579, upload-time = "2026-01-25T10:15:54.811Z" },
+]
+
 [[package]]
 name = "pyyaml"
 version = "6.0.3"
@@ -2500,6 +2534,7 @@ dependencies = [
     { name = "typing-extensions", marker = "(platform_machine == 'arm64' and sys_platform == 'darwin') or (platform_machine == 'aarch64' and sys_platform == 'linux') or (platform_machine == 'x86_64' and sys_platform == 'linux')" },
 ]
 wheels = [
+    { url = "https://files.pythonhosted.org/packages/a4/1b/af5fccb50c341bd69dc016769503cb0857c1423fbe9343410dfeb65240f2/torch-2.10.0-1-cp313-none-macosx_11_0_arm64.whl", hash = "sha256:7350f6652dfd761f11f9ecb590bfe95b573e2961f7a242eccb3c8e78348d26fe", size = 79498248, upload-time = "2026-02-06T17:37:31.982Z" },
     { url = "https://files.pythonhosted.org/packages/c9/6f/f2e91e34e3fcba2e3fc8d8f74e7d6c22e74e480bbd1db7bc8900fdf3e95c/torch-2.10.0-cp313-cp313-manylinux_2_28_aarch64.whl", hash = "sha256:5c4d217b14741e40776dd7074d9006fd28b8a97ef5654db959d8635b2fe5f29b", size = 146004247, upload-time = "2026-01-21T16:24:29.335Z" },
     { url = "https://files.pythonhosted.org/packages/98/fb/5160261aeb5e1ee12ee95fe599d0541f7c976c3701d607d8fc29e623229f/torch-2.10.0-cp313-cp313-manylinux_2_28_x86_64.whl", hash = "sha256:6b71486353fce0f9714ca0c9ef1c850a2ae766b409808acd58e9678a3edb7738", size = 915716445, upload-time = "2026-01-21T16:22:45.353Z" },
     { url = "https://files.pythonhosted.org/packages/1a/0b/39929b148f4824bc3ad6f9f72a29d4ad865bcf7ebfc2fa67584773e083d2/torch-2.10.0-cp313-cp313t-macosx_14_0_arm64.whl", hash = "sha256:3202429f58309b9fa96a614885eace4b7995729f44beb54d3e4a47773649d382", size = 79851305, upload-time = "2026-01-21T16:24:09.209Z" },

From d22a10242358527bb702f63af8a438cee50daaa8 Mon Sep 17 00:00:00 2001
From: Rory Byrne
Date: Sun, 15 Feb 2026 15:49:48 +0000
Subject: [PATCH 2/3] feat: add deposition frontend, auth fixes, and worker cleanup

- Deposit wizard UI with 4-step flow (convention, template, metadata, files)
- Deposition detail page at /deposition/[srn] (client component for auth)
- SDK layer rewrite: separate http, auth, search, deposition modules
- Mock deposition SDK with tests for demo/development
- Auth service: fix session commit, configurable base_role
- Worker: remove explicit commits (UOW scope handles it)
- Vector index: skip records with no indexable content gracefully
- SRN deserialization fix for versioned resources
- Seed script for sample convention and schema
- Header navigation with Deposit link
---
 Justfile                                      |   6 +
 deploy/docker-compose.dev.yml                 |   2 +-
 .../application/api/v1/routes/conventions.py  |  18 +
 server/osa/config.py                          |   1 +
 server/osa/domain/auth/service/auth.py        |  30 +-
 server/osa/domain/auth/util/di/provider.py    |  15 +-
 server/osa/domain/shared/model/srn.py         |  22 +-
 server/osa/domain/shared/query.py             |  12 +
 server/osa/infrastructure/event/worker.py     |  14 -
 .../infrastructure/index/vector/backend.py    |  27 +-
 server/osa/infrastructure/persistence/di.py   |   1 +
 server/scripts/seed.py                        | 147 ++++
 .../unit/domain/auth/test_auth_provider.py    |   6 +-
 .../unit/domain/auth/test_auth_service.py     | 108 +++
 web/package.json                              |   7 +-
 web/pnpm-lock.yaml                            | 808 ++++++++++++++++++
 .../__tests__/lib/sdk/mock/deposition.test.ts | 121 +++
 web/src/app/auth/callback/page.tsx            |   7 +-
 web/src/app/deposit/page.tsx                  |  14 +
 web/src/app/deposition/[srn]/page.module.css  |  63 ++
 web/src/app/deposition/[srn]/page.tsx         |  48 ++
 web/src/app/layout.tsx                        |   5 +-
 web/src/app/record/[srn]/page.tsx             |   4 +-
 web/src/app/search/page.tsx                   |   4 +-
 web/src/components/auth/AuthProvider.tsx      |  27 +-
 web/src/components/deposit/ConventionStep.tsx |  47 +
 web/src/components/deposit/DataFilesStep.tsx  | 101 +++
 .../deposit/DepositWizard.module.css          | 783 +++++++++++++++++
 web/src/components/deposit/DepositWizard.tsx  | 255 ++++++
 .../deposit/DepositionDetail.module.css       | 237 +++++
 .../components/deposit/DepositionDetail.tsx   | 128 +++
 web/src/components/deposit/MetadataStep.tsx   |  85 ++
 web/src/components/deposit/TemplateStep.tsx   |  51 ++
web/src/components/deposit/utils.ts | 17 + web/src/components/layout/Header.module.css | 56 ++ web/src/components/layout/Header.tsx | 45 +- web/src/components/search/SearchHero.tsx | 6 +- web/src/components/search/SearchResults.tsx | 4 +- web/src/lib/api/index.ts | 25 - web/src/lib/api/interface.ts | 46 - web/src/lib/api/osa.ts | 96 --- web/src/lib/sdk/auth.ts | 152 +++- web/src/lib/sdk/client.ts | 116 --- web/src/lib/sdk/deposition.ts | 137 +++ web/src/lib/sdk/http.ts | 53 ++ web/src/lib/sdk/index.ts | 119 ++- web/src/lib/sdk/mock/deposition-data.ts | 59 ++ web/src/lib/sdk/mock/deposition.ts | 152 ++++ .../lib/{api/mock.ts => sdk/mock/search.ts} | 34 +- web/src/lib/sdk/search.ts | 97 +++ web/src/lib/utils/constants.ts | 11 + web/src/types/convention.ts | 32 + web/src/types/deposition.ts | 52 ++ web/src/types/index.ts | 21 +- web/vitest.config.ts | 13 + 55 files changed, 4098 insertions(+), 449 deletions(-) create mode 100644 server/scripts/seed.py create mode 100644 web/src/__tests__/lib/sdk/mock/deposition.test.ts create mode 100644 web/src/app/deposit/page.tsx create mode 100644 web/src/app/deposition/[srn]/page.module.css create mode 100644 web/src/app/deposition/[srn]/page.tsx create mode 100644 web/src/components/deposit/ConventionStep.tsx create mode 100644 web/src/components/deposit/DataFilesStep.tsx create mode 100644 web/src/components/deposit/DepositWizard.module.css create mode 100644 web/src/components/deposit/DepositWizard.tsx create mode 100644 web/src/components/deposit/DepositionDetail.module.css create mode 100644 web/src/components/deposit/DepositionDetail.tsx create mode 100644 web/src/components/deposit/MetadataStep.tsx create mode 100644 web/src/components/deposit/TemplateStep.tsx create mode 100644 web/src/components/deposit/utils.ts delete mode 100644 web/src/lib/api/index.ts delete mode 100644 web/src/lib/api/interface.ts delete mode 100644 web/src/lib/api/osa.ts delete mode 100644 web/src/lib/sdk/client.ts create mode 100644 web/src/lib/sdk/deposition.ts create mode 100644 web/src/lib/sdk/http.ts create mode 100644 web/src/lib/sdk/mock/deposition-data.ts create mode 100644 web/src/lib/sdk/mock/deposition.ts rename web/src/lib/{api/mock.ts => sdk/mock/search.ts} (92%) create mode 100644 web/src/lib/sdk/search.ts create mode 100644 web/src/types/convention.ts create mode 100644 web/src/types/deposition.ts create mode 100644 web/vitest.config.ts diff --git a/Justfile b/Justfile index 4365a96..f926959 100644 --- a/Justfile +++ b/Justfile @@ -83,6 +83,12 @@ web-build: web-lint: cd web && pnpm lint +# === Seed === + +# Seed the database with sample data (run while dev is up) +seed: + docker compose -f deploy/docker-compose.yml -f deploy/docker-compose.dev.yml exec server /app/.venv/bin/python /app/scripts/seed.py + # === Database === # Start only the database diff --git a/deploy/docker-compose.dev.yml b/deploy/docker-compose.dev.yml index 94a9c2a..85abae4 100644 --- a/deploy/docker-compose.dev.yml +++ b/deploy/docker-compose.dev.yml @@ -19,7 +19,7 @@ services: OSA_LOGGING__LEVEL: ${LOG_LEVEL:-DEBUG} WATCHFILES_FORCE_POLLING: "true" entrypoint: [] - command: ["sh", "-c", "/app/.venv/bin/alembic upgrade head && /app/.venv/bin/uvicorn osa.application.api.rest.app:app --host 0.0.0.0 --port 8000 --reload"] + command: ["sh", "-c", "/app/.venv/bin/alembic upgrade head && /app/.venv/bin/python /app/scripts/seed.py && /app/.venv/bin/uvicorn osa.application.api.rest.app:app --host 0.0.0.0 --port 8000 --reload"] healthcheck: test: ["CMD", "curl", "--fail", 
"http://localhost:8000/api/v1/health"] interval: 10s diff --git a/server/osa/application/api/v1/routes/conventions.py b/server/osa/application/api/v1/routes/conventions.py index cdea2bd..6b65866 100644 --- a/server/osa/application/api/v1/routes/conventions.py +++ b/server/osa/application/api/v1/routes/conventions.py @@ -2,12 +2,17 @@ from dishka.integrations.fastapi import DishkaRoute, FromDishka from fastapi import APIRouter +from fastapi.responses import StreamingResponse from osa.domain.deposition.command.create_convention import ( CreateConvention, CreateConventionHandler, ConventionCreated, ) +from osa.domain.deposition.query.download_template import ( + DownloadTemplate, + DownloadTemplateHandler, +) from osa.domain.deposition.query.get_convention import ( GetConvention, GetConventionHandler, @@ -31,6 +36,19 @@ async def create_convention( return await handler.run(body) +@router.get("/{srn:path}/template") +async def download_convention_template( + srn: str, + handler: FromDishka[DownloadTemplateHandler], +) -> StreamingResponse: + result = await handler.run(DownloadTemplate(convention_srn=ConventionSRN.parse(srn))) + return StreamingResponse( + iter([result.content]), + media_type="application/vnd.openxmlformats-officedocument.spreadsheetml.sheet", + headers={"Content-Disposition": f'attachment; filename="{result.filename}"'}, + ) + + @router.get("/{srn:path}", response_model=ConventionDetail) async def get_convention( srn: str, diff --git a/server/osa/config.py b/server/osa/config.py index feab7ea..471b068 100644 --- a/server/osa/config.py +++ b/server/osa/config.py @@ -193,6 +193,7 @@ class AuthConfig(BaseModel): orcid: OrcidConfig = OrcidConfig() jwt: JwtConfig # Required - no default, must be configured via env vars callback_url: str = "" # Full callback URL (e.g., https://myarchive.org/api/v1/auth/callback) + base_role: str | None = None # Implicit role for all authenticated users (e.g., "DEPOSITOR") class Config(BaseSettings): diff --git a/server/osa/domain/auth/service/auth.py b/server/osa/domain/auth/service/auth.py index 75e6d9d..5be17ad 100644 --- a/server/osa/domain/auth/service/auth.py +++ b/server/osa/domain/auth/service/auth.py @@ -3,6 +3,8 @@ import logging from osa.domain.auth.model.linked_account import LinkedAccount +from osa.domain.auth.model.role import Role +from osa.domain.auth.model.role_assignment import RoleAssignment from osa.domain.auth.model.token import RefreshToken from osa.domain.auth.model.user import User from osa.domain.auth.model.value import ProviderIdentity, TokenFamilyId, UserId @@ -12,6 +14,7 @@ RefreshTokenRepository, UserRepository, ) +from osa.domain.auth.port.role_repository import RoleAssignmentRepository from osa.domain.auth.service.token import TokenService from osa.domain.shared.outbox import Outbox from osa.domain.shared.service import Service @@ -31,8 +34,10 @@ class AuthService(Service): _user_repo: UserRepository _linked_account_repo: LinkedAccountRepository _refresh_token_repo: RefreshTokenRepository + _role_repo: RoleAssignmentRepository _token_service: TokenService _outbox: Outbox + _base_role: Role | None async def initiate_login( self, @@ -243,10 +248,33 @@ async def _find_or_create_user(self, identity_info: IdentityInfo) -> tuple[User, ) await self._linked_account_repo.save(linked_account) + # Assign configured base role to new users logger.info( - "New user created: user_id=%s, provider=%s", + "Base role check: _base_role=%r, is_not_none=%s, type=%s", + self._base_role, + self._base_role is not None, + 
type(self._base_role).__name__, + ) + if self._base_role is not None: + assignment = RoleAssignment.create( + user_id=user.id, + role=self._base_role, + assigned_by=user.id, + ) + logger.info( + "Saving base role assignment: user_id=%s, role=%s, assignment_id=%s", + user.id, + self._base_role.name, + assignment.id, + ) + await self._role_repo.save(assignment) + logger.info("Base role assignment saved successfully") + + logger.info( + "New user created: user_id=%s, provider=%s, base_role=%s", user.id, identity_info.provider, + self._base_role.name if self._base_role else None, ) return user, linked_account diff --git a/server/osa/domain/auth/util/di/provider.py b/server/osa/domain/auth/util/di/provider.py index efa7bf1..88348bb 100644 --- a/server/osa/domain/auth/util/di/provider.py +++ b/server/osa/domain/auth/util/di/provider.py @@ -18,6 +18,7 @@ from osa.domain.auth.command.token import LogoutHandler, RefreshTokensHandler from osa.domain.auth.model.identity import Anonymous, Identity from osa.domain.auth.model.principal import Principal +from osa.domain.auth.model.role import Role from osa.domain.auth.model.value import CurrentUser, ProviderIdentity, UserId from osa.domain.auth.port.repository import ( LinkedAccountRepository, @@ -63,19 +64,25 @@ def get_token_service(self, config: Config) -> TokenService: @provide(scope=Scope.UOW) def get_auth_service( self, + config: Config, user_repo: UserRepository, linked_account_repo: LinkedAccountRepository, refresh_token_repo: RefreshTokenRepository, + role_repo: RoleAssignmentRepository, token_service: TokenService, outbox: Outbox, ) -> AuthService: """Provide AuthService.""" + base_role = Role[config.auth.base_role] if config.auth.base_role else None + logger.info("AuthService base_role config: %s -> %s", config.auth.base_role, base_role) return AuthService( _user_repo=user_repo, _linked_account_repo=linked_account_repo, _refresh_token_repo=refresh_token_repo, + _role_repo=role_repo, _token_service=token_service, _outbox=outbox, + _base_role=base_role, ) @provide(scope=Scope.UOW) @@ -145,9 +152,15 @@ async def get_identity( user_id = UserId(UUID(payload["sub"])) - # Lookup roles from DB + # Lookup roles from DB (includes base_role if assigned at user creation) assignments = await role_repo.get_by_user_id(user_id) roles = frozenset(a.role for a in assignments) + logger.debug( + "Identity resolved: user_id=%s, roles=%s, assignments=%d", + user_id, + roles, + len(assignments), + ) return Principal( user_id=user_id, diff --git a/server/osa/domain/shared/model/srn.py b/server/osa/domain/shared/model/srn.py index 8f6f8c7..5da69aa 100644 --- a/server/osa/domain/shared/model/srn.py +++ b/server/osa/domain/shared/model/srn.py @@ -3,12 +3,14 @@ import re from enum import Enum from string import Template -from typing import ClassVar, Generic, Self, Type, TypeVar, Union +from typing import Any, ClassVar, Generic, Self, Type, TypeVar, Union from pydantic import ( Field, RootModel, field_validator, + model_serializer, + model_validator, ) from osa.domain.shared.model.value import ValueObject @@ -134,6 +136,20 @@ class SRN(ValueObject): _tpl: ClassVar[Template] = Template("urn:osa:${domain}:${type}:${id}${version}") + @model_validator(mode="before") + @classmethod + def _from_string(cls, data: Any) -> Any: + """Accept a plain SRN string and parse it into field dict.""" + if isinstance(data, str): + domain, typ, id_str, version = SRN._extract_parts(data) + return { + "domain": domain, + "type": typ, + "id": id_str, + "version": version, + } + return data + 
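+    # Illustrative round-trip: the string validator above and the
+    # serializer below let SRNs pass through Pydantic as plain strings,
+    # e.g. s = SRN.model_validate("urn:osa:localhost:schema:seed-sample-survey@1.0.0")
+    # then s.model_dump() == str(s), the canonical URN form.
+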
@field_validator("scheme") @classmethod def _scheme_ok(cls, v: str) -> str: @@ -148,6 +164,10 @@ def _nid_ok(cls, v: str) -> str: raise ValueError("nid must be 'osa'") return v + @model_serializer + def _serialize(self) -> str: + return self.render() + def __str__(self) -> str: return self.render() diff --git a/server/osa/domain/shared/query.py b/server/osa/domain/shared/query.py index fbf8c90..e896edb 100644 --- a/server/osa/domain/shared/query.py +++ b/server/osa/domain/shared/query.py @@ -44,8 +44,12 @@ async def auth_wrapped_run(self: Any, cmd: Any) -> Any: return await original_run(self, cmd) if isinstance(auth_gate, AtLeast): + import logging as _logging + from osa.domain.auth.model.principal import Principal + _auth_logger = _logging.getLogger("osa.authz") + principal = getattr(self, "principal", None) if not isinstance(principal, Principal): raise AuthorizationError( @@ -53,6 +57,14 @@ async def auth_wrapped_run(self: Any, cmd: Any) -> Any: code="missing_token", ) + _auth_logger.debug( + "Auth check: handler=%s, required=%s, principal_roles=%s, user_id=%s", + type(self).__name__, + auth_gate.role, + principal.roles, + principal.user_id, + ) + if not principal.has_role(auth_gate.role): raise AuthorizationError( f"Access denied: insufficient role for {type(self).__name__}", diff --git a/server/osa/infrastructure/event/worker.py b/server/osa/infrastructure/event/worker.py index 858f06f..b8da146 100644 --- a/server/osa/infrastructure/event/worker.py +++ b/server/osa/infrastructure/event/worker.py @@ -10,8 +10,6 @@ from apscheduler import AsyncScheduler from apscheduler.triggers.cron import CronTrigger from dishka import AsyncContainer -from sqlalchemy.ext.asyncio import AsyncSession - from osa.application.event import ServerStarted from osa.domain.auth.model.identity import Identity, System from osa.domain.shared.error import SkippedEvents @@ -187,7 +185,6 @@ async def _poll_once(self) -> bool: # Claim and process within a UOW scope (System identity for workers) async with self._container(scope=Scope.UOW, context={Identity: System()}) as scope: outbox = await scope.get(Outbox) - session = await scope.get(AsyncSession) # Claim events result = await outbox.claim( @@ -197,8 +194,6 @@ async def _poll_once(self) -> bool: ) if not result.events: - # No events available - commit and return - await session.commit() self._state.status = WorkerStatus.IDLE return False @@ -221,7 +216,6 @@ async def _poll_once(self) -> bool: for event in result.events: await outbox.mark_delivered(event.id) - await session.commit() self._state.processed_count += len(result.events) except SkippedEvents as e: @@ -236,7 +230,6 @@ async def _poll_once(self) -> bool: for event in result.events: if event.id not in skipped_set: await outbox.mark_delivered(event.id) - await session.commit() self._state.processed_count += len(result.events) - len(e.event_ids) except Exception as e: @@ -250,7 +243,6 @@ async def _poll_once(self) -> bool: str(e), max_retries=self._max_retries, ) - await session.commit() finally: self._state.current_batch = [] @@ -397,8 +389,6 @@ async def _emit_server_started(self) -> None: async with self._container(scope=Scope.UOW, context={Identity: System()}) as scope: outbox = await scope.get(Outbox) await outbox.append(ServerStarted(id=EventId(uuid4()))) - session = await scope.get(AsyncSession) - await session.commit() logger.info("ServerStarted event emitted") async def stop(self, timeout: float = 30.0) -> None: @@ -444,8 +434,6 @@ async def _run_schedule(self, config: "ScheduleConfig") -> None: async 
with self._container(scope=Scope.UOW, context={Identity: System()}) as scope: schedule = await scope.get(config.schedule_type) await schedule.run(**config.params) - session = await scope.get(AsyncSession) - await session.commit() # Reset failure counter on success self._schedule_failures.pop(config.id, None) @@ -489,9 +477,7 @@ async def _run_stale_claim_cleanup(self) -> None: scope=Scope.UOW, context={Identity: System()} ) as scope: outbox = await scope.get(Outbox) - session = await scope.get(AsyncSession) count = await outbox.reset_stale_claims(max_timeout) - await session.commit() if count > 0: logger.info(f"Reset {count} stale claims") diff --git a/server/osa/infrastructure/index/vector/backend.py b/server/osa/infrastructure/index/vector/backend.py index ad864e6..8b4c645 100644 --- a/server/osa/infrastructure/index/vector/backend.py +++ b/server/osa/infrastructure/index/vector/backend.py @@ -70,17 +70,28 @@ async def ingest_batch(self, records: list[tuple[str, dict[str, Any]]]) -> None: if not records: return - # Prepare batch data + # Prepare batch data, skipping records that have no indexable content ids = [] texts = [] metadatas = [] + skipped = 0 for srn, record in records: - ids.append(srn) - texts.append(self._to_text(record)) - # Filter metadata to ChromaDB-compatible types + text = self._to_text(record) safe_meta = {k: v for k, v in record.items() if isinstance(v, (str, int, float, bool))} - metadatas.append(safe_meta) + + if not text.strip() and not safe_meta: + logger.warning(f"Skipping record {srn}: no indexable content for vector backend") + skipped += 1 + continue + + ids.append(srn) + texts.append(text) + metadatas.append(safe_meta if safe_meta else {"srn": srn}) + + if not ids: + logger.warning(f"All {skipped} records skipped: no indexable content") + return # Generate embeddings in batch (much more efficient than one-by-one) logger.debug(f"Generating embeddings for {len(texts)} records") @@ -95,7 +106,11 @@ async def ingest_batch(self, records: list[tuple[str, dict[str, Any]]]) -> None: documents=texts, ) - logger.info(f"Indexed {len(records)} records (embedded + upserted)") + indexed = len(ids) + msg = f"Indexed {indexed} records" + if skipped: + msg += f" ({skipped} skipped — metadata fields don't match index config)" + logger.info(msg) async def flush(self) -> None: """No-op: batching is now handled at the event level. diff --git a/server/osa/infrastructure/persistence/di.py b/server/osa/infrastructure/persistence/di.py index 2b5e5d4..183303c 100644 --- a/server/osa/infrastructure/persistence/di.py +++ b/server/osa/infrastructure/persistence/di.py @@ -69,6 +69,7 @@ async def get_session( ) -> AsyncIterable[AsyncSession]: async with session_factory() as session: yield session + await session.commit() # UOW-scoped repositories dep_repo = provide(PostgresDepositionRepository, scope=Scope.UOW, provides=DepositionRepository) diff --git a/server/scripts/seed.py b/server/scripts/seed.py new file mode 100644 index 0000000..fb94c19 --- /dev/null +++ b/server/scripts/seed.py @@ -0,0 +1,147 @@ +"""Seed the database with sample data for local development. + +Standalone script — no osa package imports required. +Uses SQLAlchemy directly with $OSA_DATABASE__URL from the environment. +Idempotent: safe to run on every startup. 
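+
+Typical manual run (the 'just seed' recipe does the same inside the
+server container):
+
+    /app/.venv/bin/python /app/scripts/seed.py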
+""" + +import asyncio +import json +import os +import sys +from datetime import datetime, timezone + +from sqlalchemy import text +from sqlalchemy.ext.asyncio import create_async_engine + +# --------------------------------------------------------------------------- +# Seed data +# --------------------------------------------------------------------------- + +SCHEMA_SRN = "urn:osa:localhost:schema:seed-sample-survey@1.0.0" +CONVENTION_SRN = "urn:osa:localhost:conv:seed-sample-survey@1.0.0" + +SCHEMA_FIELDS = [ + { + "name": "title", + "type": "text", + "required": True, + "cardinality": "exactly_one", + "description": "Title of the submission", + "constraints": {"type": "text", "min_length": 1, "max_length": 200}, + }, + { + "name": "author", + "type": "text", + "required": True, + "cardinality": "exactly_one", + "description": "Name of the primary author", + "constraints": {"type": "text", "min_length": 1, "max_length": 100}, + }, + { + "name": "description", + "type": "text", + "required": True, + "cardinality": "exactly_one", + "description": "A brief description of the data", + "constraints": {"type": "text", "max_length": 2000}, + }, + { + "name": "date", + "type": "date", + "required": True, + "cardinality": "exactly_one", + "description": "Date of data collection (YYYY-MM-DD)", + "constraints": None, + }, + { + "name": "sample_count", + "type": "number", + "required": True, + "cardinality": "exactly_one", + "description": "Number of samples in the dataset", + "constraints": {"type": "number", "min_value": 1, "integer_only": True}, + }, + { + "name": "notes", + "type": "text", + "required": False, + "cardinality": "exactly_one", + "description": "Any additional notes or comments", + "constraints": None, + }, +] + +FILE_REQUIREMENTS = { + "accepted_types": [".csv", ".tsv", ".txt", ".json", ".xlsx"], + "min_count": 1, + "max_count": 20, + "max_file_size": 104_857_600, +} + + +async def seed() -> None: + db_url = os.environ.get("OSA_DATABASE__URL") + if not db_url: + print("seed: OSA_DATABASE__URL not set, skipping", file=sys.stderr) + sys.exit(1) + + engine = create_async_engine(db_url) + now = datetime.now(timezone.utc) + + async with engine.begin() as conn: + # Schema + row = await conn.execute( + text("SELECT srn FROM schemas WHERE srn = :srn"), + {"srn": SCHEMA_SRN}, + ) + if row.scalar_one_or_none() is None: + await conn.execute( + text( + "INSERT INTO schemas (srn, title, fields, created_at) " + "VALUES (:srn, :title, :fields, :created_at)" + ), + { + "srn": SCHEMA_SRN, + "title": "Sample Survey", + "fields": json.dumps(SCHEMA_FIELDS), + "created_at": now, + }, + ) + print(f"seed: created schema {SCHEMA_SRN}") + else: + print(f"seed: schema already exists {SCHEMA_SRN}") + + # Convention + row = await conn.execute( + text("SELECT srn FROM conventions WHERE srn = :srn"), + {"srn": CONVENTION_SRN}, + ) + if row.scalar_one_or_none() is None: + await conn.execute( + text( + "INSERT INTO conventions (srn, title, description, schema_srn, file_requirements, validator_refs, created_at) " + "VALUES (:srn, :title, :description, :schema_srn, :file_requirements, :validator_refs, :created_at)" + ), + { + "srn": CONVENTION_SRN, + "title": "Sample Survey", + "description": ( + "A simple convention for testing the deposition workflow. " + "Upload any CSV/TSV/JSON file with basic metadata." 
+ ), + "schema_srn": SCHEMA_SRN, + "file_requirements": json.dumps(FILE_REQUIREMENTS), + "validator_refs": json.dumps([]), + "created_at": now, + }, + ) + print(f"seed: created convention {CONVENTION_SRN}") + else: + print(f"seed: convention already exists {CONVENTION_SRN}") + + await engine.dispose() + + +if __name__ == "__main__": + asyncio.run(seed()) diff --git a/server/tests/unit/domain/auth/test_auth_provider.py b/server/tests/unit/domain/auth/test_auth_provider.py index 53c1068..b893c3d 100644 --- a/server/tests/unit/domain/auth/test_auth_provider.py +++ b/server/tests/unit/domain/auth/test_auth_provider.py @@ -78,8 +78,8 @@ async def test_valid_jwt_returns_principal_with_roles(self) -> None: @pytest.mark.asyncio async def test_expired_jwt_returns_anonymous(self) -> None: - config = _make_jwt_config() - token_service = _make_token_service(config) + jwt_config = _make_jwt_config() + token_service = _make_token_service(jwt_config) user_id = UserId.generate() # Create an expired token manually @@ -89,7 +89,7 @@ async def test_expired_jwt_returns_anonymous(self) -> None: "external_id": "0000-0001-2345-6789", "exp": datetime(2020, 1, 1, tzinfo=UTC), } - token = pyjwt.encode(payload, config.secret, algorithm=config.algorithm) + token = pyjwt.encode(payload, jwt_config.secret, algorithm=jwt_config.algorithm) request = _make_request(f"Bearer {token}") role_repo = _make_role_repo() diff --git a/server/tests/unit/domain/auth/test_auth_service.py b/server/tests/unit/domain/auth/test_auth_service.py index 28e6fd4..e21d7e0 100644 --- a/server/tests/unit/domain/auth/test_auth_service.py +++ b/server/tests/unit/domain/auth/test_auth_service.py @@ -8,6 +8,7 @@ from osa.config import JwtConfig from osa.domain.auth.model.linked_account import LinkedAccount +from osa.domain.auth.model.role import Role from osa.domain.auth.model.token import RefreshToken from osa.domain.auth.model.user import User from osa.domain.auth.model.value import IdentityId, RefreshTokenId, TokenFamilyId, UserId @@ -21,8 +22,10 @@ def make_auth_service( user_repo: AsyncMock | None = None, linked_account_repo: AsyncMock | None = None, refresh_token_repo: AsyncMock | None = None, + role_repo: AsyncMock | None = None, token_service: TokenService | None = None, outbox: AsyncMock | None = None, + base_role: Role | None = None, ) -> AuthService: """Create an AuthService with mocked dependencies.""" if user_repo is None: @@ -31,6 +34,8 @@ def make_auth_service( linked_account_repo = AsyncMock() if refresh_token_repo is None: refresh_token_repo = AsyncMock() + if role_repo is None: + role_repo = AsyncMock() if token_service is None: config = JwtConfig( secret="test-secret-key-256-bits-long-xx", @@ -46,8 +51,10 @@ def make_auth_service( _user_repo=user_repo, _linked_account_repo=linked_account_repo, _refresh_token_repo=refresh_token_repo, + _role_repo=role_repo, _token_service=token_service, _outbox=outbox, + _base_role=base_role, ) @@ -393,3 +400,104 @@ async def test_logout_succeeds_for_unknown_token(self): assert result is True refresh_token_repo.revoke_family.assert_not_called() + + +class TestAuthServiceBaseRole: + """Tests for base_role assignment on user creation.""" + + @pytest.mark.asyncio + async def test_new_user_gets_base_role_when_configured(self): + """complete_oauth should assign base_role to new users.""" + user_repo = AsyncMock() + linked_account_repo = AsyncMock() + linked_account_repo.get_by_provider_and_external_id.return_value = None + refresh_token_repo = AsyncMock() + role_repo = AsyncMock() + + service = 
make_auth_service( + user_repo=user_repo, + linked_account_repo=linked_account_repo, + refresh_token_repo=refresh_token_repo, + role_repo=role_repo, + base_role=Role.DEPOSITOR, + ) + provider = make_identity_provider() + + await service.complete_oauth( + provider=provider, + code="auth-code", + redirect_uri="http://localhost/callback", + ) + + role_repo.save.assert_called_once() + saved_assignment = role_repo.save.call_args[0][0] + assert saved_assignment.role == Role.DEPOSITOR + + @pytest.mark.asyncio + async def test_new_user_no_role_when_base_role_not_configured(self): + """complete_oauth should not assign roles when base_role is None.""" + user_repo = AsyncMock() + linked_account_repo = AsyncMock() + linked_account_repo.get_by_provider_and_external_id.return_value = None + refresh_token_repo = AsyncMock() + role_repo = AsyncMock() + + service = make_auth_service( + user_repo=user_repo, + linked_account_repo=linked_account_repo, + refresh_token_repo=refresh_token_repo, + role_repo=role_repo, + base_role=None, + ) + provider = make_identity_provider() + + await service.complete_oauth( + provider=provider, + code="auth-code", + redirect_uri="http://localhost/callback", + ) + + role_repo.save.assert_not_called() + + @pytest.mark.asyncio + async def test_existing_user_not_reassigned_base_role(self): + """complete_oauth should not assign base_role to returning users.""" + existing_user = User( + id=UserId(uuid4()), + display_name="Existing User", + created_at=datetime.now(UTC), + updated_at=None, + ) + existing_linked_account = LinkedAccount( + id=IdentityId(uuid4()), + user_id=existing_user.id, + provider="orcid", + external_id="0000-0001-2345-6789", + metadata=None, + created_at=datetime.now(UTC), + ) + + user_repo = AsyncMock() + user_repo.get.return_value = existing_user + linked_account_repo = AsyncMock() + linked_account_repo.get_by_provider_and_external_id.return_value = existing_linked_account + refresh_token_repo = AsyncMock() + role_repo = AsyncMock() + + service = make_auth_service( + user_repo=user_repo, + linked_account_repo=linked_account_repo, + refresh_token_repo=refresh_token_repo, + role_repo=role_repo, + base_role=Role.DEPOSITOR, + ) + provider = make_identity_provider() + + await service.complete_oauth( + provider=provider, + code="auth-code", + redirect_uri="http://localhost/callback", + ) + + # Existing user — no new role assignment + role_repo.save.assert_not_called() diff --git a/web/package.json b/web/package.json index a9b7ff9..c92a7b0 100644 --- a/web/package.json +++ b/web/package.json @@ -6,7 +6,9 @@ "dev": "next dev", "build": "next build", "start": "next start", - "lint": "eslint" + "lint": "eslint", + "test": "vitest run", + "test:watch": "vitest" }, "dependencies": { "@opensciencearchive/ui": "github:opensciencearchive/ui", @@ -22,6 +24,7 @@ "eslint": "^9", "eslint-config-next": "16.1.1", "tailwindcss": "^4", - "typescript": "^5" + "typescript": "^5", + "vitest": "^4.0.18" } } diff --git a/web/pnpm-lock.yaml b/web/pnpm-lock.yaml index 690375d..7b581dc 100644 --- a/web/pnpm-lock.yaml +++ b/web/pnpm-lock.yaml @@ -45,6 +45,9 @@ importers: typescript: specifier: ^5 version: 5.9.3 + vitest: + specifier: ^4.0.18 + version: 4.0.18(@types/node@20.19.27)(jiti@2.6.1)(lightningcss@1.30.2) packages: @@ -128,6 +131,162 @@ packages: '@emnapi/wasi-threads@1.1.0': resolution: {integrity: sha512-WI0DdZ8xFSbgMjR1sFsKABJ/C5OnRrjT06JXbZKexJGrDuPTzZdDYfFlsgcCXCyf+suG5QU2e/y1Wo2V/OapLQ==} + '@esbuild/aix-ppc64@0.27.3': + resolution: {integrity: 
sha512-9fJMTNFTWZMh5qwrBItuziu834eOCUcEqymSH7pY+zoMVEZg3gcPuBNxH1EvfVYe9h0x/Ptw8KBzv7qxb7l8dg==} + engines: {node: '>=18'} + cpu: [ppc64] + os: [aix] + + '@esbuild/android-arm64@0.27.3': + resolution: {integrity: sha512-YdghPYUmj/FX2SYKJ0OZxf+iaKgMsKHVPF1MAq/P8WirnSpCStzKJFjOjzsW0QQ7oIAiccHdcqjbHmJxRb/dmg==} + engines: {node: '>=18'} + cpu: [arm64] + os: [android] + + '@esbuild/android-arm@0.27.3': + resolution: {integrity: sha512-i5D1hPY7GIQmXlXhs2w8AWHhenb00+GxjxRncS2ZM7YNVGNfaMxgzSGuO8o8SJzRc/oZwU2bcScvVERk03QhzA==} + engines: {node: '>=18'} + cpu: [arm] + os: [android] + + '@esbuild/android-x64@0.27.3': + resolution: {integrity: sha512-IN/0BNTkHtk8lkOM8JWAYFg4ORxBkZQf9zXiEOfERX/CzxW3Vg1ewAhU7QSWQpVIzTW+b8Xy+lGzdYXV6UZObQ==} + engines: {node: '>=18'} + cpu: [x64] + os: [android] + + '@esbuild/darwin-arm64@0.27.3': + resolution: {integrity: sha512-Re491k7ByTVRy0t3EKWajdLIr0gz2kKKfzafkth4Q8A5n1xTHrkqZgLLjFEHVD+AXdUGgQMq+Godfq45mGpCKg==} + engines: {node: '>=18'} + cpu: [arm64] + os: [darwin] + + '@esbuild/darwin-x64@0.27.3': + resolution: {integrity: sha512-vHk/hA7/1AckjGzRqi6wbo+jaShzRowYip6rt6q7VYEDX4LEy1pZfDpdxCBnGtl+A5zq8iXDcyuxwtv3hNtHFg==} + engines: {node: '>=18'} + cpu: [x64] + os: [darwin] + + '@esbuild/freebsd-arm64@0.27.3': + resolution: {integrity: sha512-ipTYM2fjt3kQAYOvo6vcxJx3nBYAzPjgTCk7QEgZG8AUO3ydUhvelmhrbOheMnGOlaSFUoHXB6un+A7q4ygY9w==} + engines: {node: '>=18'} + cpu: [arm64] + os: [freebsd] + + '@esbuild/freebsd-x64@0.27.3': + resolution: {integrity: sha512-dDk0X87T7mI6U3K9VjWtHOXqwAMJBNN2r7bejDsc+j03SEjtD9HrOl8gVFByeM0aJksoUuUVU9TBaZa2rgj0oA==} + engines: {node: '>=18'} + cpu: [x64] + os: [freebsd] + + '@esbuild/linux-arm64@0.27.3': + resolution: {integrity: sha512-sZOuFz/xWnZ4KH3YfFrKCf1WyPZHakVzTiqji3WDc0BCl2kBwiJLCXpzLzUBLgmp4veFZdvN5ChW4Eq/8Fc2Fg==} + engines: {node: '>=18'} + cpu: [arm64] + os: [linux] + + '@esbuild/linux-arm@0.27.3': + resolution: {integrity: sha512-s6nPv2QkSupJwLYyfS+gwdirm0ukyTFNl3KTgZEAiJDd+iHZcbTPPcWCcRYH+WlNbwChgH2QkE9NSlNrMT8Gfw==} + engines: {node: '>=18'} + cpu: [arm] + os: [linux] + + '@esbuild/linux-ia32@0.27.3': + resolution: {integrity: sha512-yGlQYjdxtLdh0a3jHjuwOrxQjOZYD/C9PfdbgJJF3TIZWnm/tMd/RcNiLngiu4iwcBAOezdnSLAwQDPqTmtTYg==} + engines: {node: '>=18'} + cpu: [ia32] + os: [linux] + + '@esbuild/linux-loong64@0.27.3': + resolution: {integrity: sha512-WO60Sn8ly3gtzhyjATDgieJNet/KqsDlX5nRC5Y3oTFcS1l0KWba+SEa9Ja1GfDqSF1z6hif/SkpQJbL63cgOA==} + engines: {node: '>=18'} + cpu: [loong64] + os: [linux] + + '@esbuild/linux-mips64el@0.27.3': + resolution: {integrity: sha512-APsymYA6sGcZ4pD6k+UxbDjOFSvPWyZhjaiPyl/f79xKxwTnrn5QUnXR5prvetuaSMsb4jgeHewIDCIWljrSxw==} + engines: {node: '>=18'} + cpu: [mips64el] + os: [linux] + + '@esbuild/linux-ppc64@0.27.3': + resolution: {integrity: sha512-eizBnTeBefojtDb9nSh4vvVQ3V9Qf9Df01PfawPcRzJH4gFSgrObw+LveUyDoKU3kxi5+9RJTCWlj4FjYXVPEA==} + engines: {node: '>=18'} + cpu: [ppc64] + os: [linux] + + '@esbuild/linux-riscv64@0.27.3': + resolution: {integrity: sha512-3Emwh0r5wmfm3ssTWRQSyVhbOHvqegUDRd0WhmXKX2mkHJe1SFCMJhagUleMq+Uci34wLSipf8Lagt4LlpRFWQ==} + engines: {node: '>=18'} + cpu: [riscv64] + os: [linux] + + '@esbuild/linux-s390x@0.27.3': + resolution: {integrity: sha512-pBHUx9LzXWBc7MFIEEL0yD/ZVtNgLytvx60gES28GcWMqil8ElCYR4kvbV2BDqsHOvVDRrOxGySBM9Fcv744hw==} + engines: {node: '>=18'} + cpu: [s390x] + os: [linux] + + '@esbuild/linux-x64@0.27.3': + resolution: {integrity: sha512-Czi8yzXUWIQYAtL/2y6vogER8pvcsOsk5cpwL4Gk5nJqH5UZiVByIY8Eorm5R13gq+DQKYg0+JyQoytLQas4dA==} + engines: {node: '>=18'} + cpu: 
[x64] + os: [linux] + + '@esbuild/netbsd-arm64@0.27.3': + resolution: {integrity: sha512-sDpk0RgmTCR/5HguIZa9n9u+HVKf40fbEUt+iTzSnCaGvY9kFP0YKBWZtJaraonFnqef5SlJ8/TiPAxzyS+UoA==} + engines: {node: '>=18'} + cpu: [arm64] + os: [netbsd] + + '@esbuild/netbsd-x64@0.27.3': + resolution: {integrity: sha512-P14lFKJl/DdaE00LItAukUdZO5iqNH7+PjoBm+fLQjtxfcfFE20Xf5CrLsmZdq5LFFZzb5JMZ9grUwvtVYzjiA==} + engines: {node: '>=18'} + cpu: [x64] + os: [netbsd] + + '@esbuild/openbsd-arm64@0.27.3': + resolution: {integrity: sha512-AIcMP77AvirGbRl/UZFTq5hjXK+2wC7qFRGoHSDrZ5v5b8DK/GYpXW3CPRL53NkvDqb9D+alBiC/dV0Fb7eJcw==} + engines: {node: '>=18'} + cpu: [arm64] + os: [openbsd] + + '@esbuild/openbsd-x64@0.27.3': + resolution: {integrity: sha512-DnW2sRrBzA+YnE70LKqnM3P+z8vehfJWHXECbwBmH/CU51z6FiqTQTHFenPlHmo3a8UgpLyH3PT+87OViOh1AQ==} + engines: {node: '>=18'} + cpu: [x64] + os: [openbsd] + + '@esbuild/openharmony-arm64@0.27.3': + resolution: {integrity: sha512-NinAEgr/etERPTsZJ7aEZQvvg/A6IsZG/LgZy+81wON2huV7SrK3e63dU0XhyZP4RKGyTm7aOgmQk0bGp0fy2g==} + engines: {node: '>=18'} + cpu: [arm64] + os: [openharmony] + + '@esbuild/sunos-x64@0.27.3': + resolution: {integrity: sha512-PanZ+nEz+eWoBJ8/f8HKxTTD172SKwdXebZ0ndd953gt1HRBbhMsaNqjTyYLGLPdoWHy4zLU7bDVJztF5f3BHA==} + engines: {node: '>=18'} + cpu: [x64] + os: [sunos] + + '@esbuild/win32-arm64@0.27.3': + resolution: {integrity: sha512-B2t59lWWYrbRDw/tjiWOuzSsFh1Y/E95ofKz7rIVYSQkUYBjfSgf6oeYPNWHToFRr2zx52JKApIcAS/D5TUBnA==} + engines: {node: '>=18'} + cpu: [arm64] + os: [win32] + + '@esbuild/win32-ia32@0.27.3': + resolution: {integrity: sha512-QLKSFeXNS8+tHW7tZpMtjlNb7HKau0QDpwm49u0vUp9y1WOF+PEzkU84y9GqYaAVW8aH8f3GcBck26jh54cX4Q==} + engines: {node: '>=18'} + cpu: [ia32] + os: [win32] + + '@esbuild/win32-x64@0.27.3': + resolution: {integrity: sha512-4uJGhsxuptu3OcpVAzli+/gWusVGwZZHTlS63hh++ehExkVT8SgiEf7/uC/PclrPPkLhZqGgCTjd0VWLo6xMqA==} + engines: {node: '>=18'} + cpu: [x64] + os: [win32] + '@eslint-community/eslint-utils@4.9.1': resolution: {integrity: sha512-phrYmNiYppR7znFEdqgfWHXR6NCkZEK7hwWDHZUjit/2/U0r6XvkDl0SYnoM51Hq7FhCGdLDT6zxCCOY1hexsQ==} engines: {node: ^12.22.0 || ^14.17.0 || >=16.0.0} @@ -412,9 +571,137 @@ packages: resolution: {tarball: https://codeload.github.com/opensciencearchive/ui/tar.gz/b6aedd941e23a9e080bba98b2403c93d43bcc5b8} version: 0.1.0 + '@rollup/rollup-android-arm-eabi@4.57.1': + resolution: {integrity: sha512-A6ehUVSiSaaliTxai040ZpZ2zTevHYbvu/lDoeAteHI8QnaosIzm4qwtezfRg1jOYaUmnzLX1AOD6Z+UJjtifg==} + cpu: [arm] + os: [android] + + '@rollup/rollup-android-arm64@4.57.1': + resolution: {integrity: sha512-dQaAddCY9YgkFHZcFNS/606Exo8vcLHwArFZ7vxXq4rigo2bb494/xKMMwRRQW6ug7Js6yXmBZhSBRuBvCCQ3w==} + cpu: [arm64] + os: [android] + + '@rollup/rollup-darwin-arm64@4.57.1': + resolution: {integrity: sha512-crNPrwJOrRxagUYeMn/DZwqN88SDmwaJ8Cvi/TN1HnWBU7GwknckyosC2gd0IqYRsHDEnXf328o9/HC6OkPgOg==} + cpu: [arm64] + os: [darwin] + + '@rollup/rollup-darwin-x64@4.57.1': + resolution: {integrity: sha512-Ji8g8ChVbKrhFtig5QBV7iMaJrGtpHelkB3lsaKzadFBe58gmjfGXAOfI5FV0lYMH8wiqsxKQ1C9B0YTRXVy4w==} + cpu: [x64] + os: [darwin] + + '@rollup/rollup-freebsd-arm64@4.57.1': + resolution: {integrity: sha512-R+/WwhsjmwodAcz65guCGFRkMb4gKWTcIeLy60JJQbXrJ97BOXHxnkPFrP+YwFlaS0m+uWJTstrUA9o+UchFug==} + cpu: [arm64] + os: [freebsd] + + '@rollup/rollup-freebsd-x64@4.57.1': + resolution: {integrity: sha512-IEQTCHeiTOnAUC3IDQdzRAGj3jOAYNr9kBguI7MQAAZK3caezRrg0GxAb6Hchg4lxdZEI5Oq3iov/w/hnFWY9Q==} + cpu: [x64] + os: [freebsd] + + '@rollup/rollup-linux-arm-gnueabihf@4.57.1': + 
resolution: {integrity: sha512-F8sWbhZ7tyuEfsmOxwc2giKDQzN3+kuBLPwwZGyVkLlKGdV1nvnNwYD0fKQ8+XS6hp9nY7B+ZeK01EBUE7aHaw==} + cpu: [arm] + os: [linux] + + '@rollup/rollup-linux-arm-musleabihf@4.57.1': + resolution: {integrity: sha512-rGfNUfn0GIeXtBP1wL5MnzSj98+PZe/AXaGBCRmT0ts80lU5CATYGxXukeTX39XBKsxzFpEeK+Mrp9faXOlmrw==} + cpu: [arm] + os: [linux] + + '@rollup/rollup-linux-arm64-gnu@4.57.1': + resolution: {integrity: sha512-MMtej3YHWeg/0klK2Qodf3yrNzz6CGjo2UntLvk2RSPlhzgLvYEB3frRvbEF2wRKh1Z2fDIg9KRPe1fawv7C+g==} + cpu: [arm64] + os: [linux] + + '@rollup/rollup-linux-arm64-musl@4.57.1': + resolution: {integrity: sha512-1a/qhaaOXhqXGpMFMET9VqwZakkljWHLmZOX48R0I/YLbhdxr1m4gtG1Hq7++VhVUmf+L3sTAf9op4JlhQ5u1Q==} + cpu: [arm64] + os: [linux] + + '@rollup/rollup-linux-loong64-gnu@4.57.1': + resolution: {integrity: sha512-QWO6RQTZ/cqYtJMtxhkRkidoNGXc7ERPbZN7dVW5SdURuLeVU7lwKMpo18XdcmpWYd0qsP1bwKPf7DNSUinhvA==} + cpu: [loong64] + os: [linux] + + '@rollup/rollup-linux-loong64-musl@4.57.1': + resolution: {integrity: sha512-xpObYIf+8gprgWaPP32xiN5RVTi/s5FCR+XMXSKmhfoJjrpRAjCuuqQXyxUa/eJTdAE6eJ+KDKaoEqjZQxh3Gw==} + cpu: [loong64] + os: [linux] + + '@rollup/rollup-linux-ppc64-gnu@4.57.1': + resolution: {integrity: sha512-4BrCgrpZo4hvzMDKRqEaW1zeecScDCR+2nZ86ATLhAoJ5FQ+lbHVD3ttKe74/c7tNT9c6F2viwB3ufwp01Oh2w==} + cpu: [ppc64] + os: [linux] + + '@rollup/rollup-linux-ppc64-musl@4.57.1': + resolution: {integrity: sha512-NOlUuzesGauESAyEYFSe3QTUguL+lvrN1HtwEEsU2rOwdUDeTMJdO5dUYl/2hKf9jWydJrO9OL/XSSf65R5+Xw==} + cpu: [ppc64] + os: [linux] + + '@rollup/rollup-linux-riscv64-gnu@4.57.1': + resolution: {integrity: sha512-ptA88htVp0AwUUqhVghwDIKlvJMD/fmL/wrQj99PRHFRAG6Z5nbWoWG4o81Nt9FT+IuqUQi+L31ZKAFeJ5Is+A==} + cpu: [riscv64] + os: [linux] + + '@rollup/rollup-linux-riscv64-musl@4.57.1': + resolution: {integrity: sha512-S51t7aMMTNdmAMPpBg7OOsTdn4tySRQvklmL3RpDRyknk87+Sp3xaumlatU+ppQ+5raY7sSTcC2beGgvhENfuw==} + cpu: [riscv64] + os: [linux] + + '@rollup/rollup-linux-s390x-gnu@4.57.1': + resolution: {integrity: sha512-Bl00OFnVFkL82FHbEqy3k5CUCKH6OEJL54KCyx2oqsmZnFTR8IoNqBF+mjQVcRCT5sB6yOvK8A37LNm/kPJiZg==} + cpu: [s390x] + os: [linux] + + '@rollup/rollup-linux-x64-gnu@4.57.1': + resolution: {integrity: sha512-ABca4ceT4N+Tv/GtotnWAeXZUZuM/9AQyCyKYyKnpk4yoA7QIAuBt6Hkgpw8kActYlew2mvckXkvx0FfoInnLg==} + cpu: [x64] + os: [linux] + + '@rollup/rollup-linux-x64-musl@4.57.1': + resolution: {integrity: sha512-HFps0JeGtuOR2convgRRkHCekD7j+gdAuXM+/i6kGzQtFhlCtQkpwtNzkNj6QhCDp7DRJ7+qC/1Vg2jt5iSOFw==} + cpu: [x64] + os: [linux] + + '@rollup/rollup-openbsd-x64@4.57.1': + resolution: {integrity: sha512-H+hXEv9gdVQuDTgnqD+SQffoWoc0Of59AStSzTEj/feWTBAnSfSD3+Dql1ZruJQxmykT/JVY0dE8Ka7z0DH1hw==} + cpu: [x64] + os: [openbsd] + + '@rollup/rollup-openharmony-arm64@4.57.1': + resolution: {integrity: sha512-4wYoDpNg6o/oPximyc/NG+mYUejZrCU2q+2w6YZqrAs2UcNUChIZXjtafAiiZSUc7On8v5NyNj34Kzj/Ltk6dQ==} + cpu: [arm64] + os: [openharmony] + + '@rollup/rollup-win32-arm64-msvc@4.57.1': + resolution: {integrity: sha512-O54mtsV/6LW3P8qdTcamQmuC990HDfR71lo44oZMZlXU4tzLrbvTii87Ni9opq60ds0YzuAlEr/GNwuNluZyMQ==} + cpu: [arm64] + os: [win32] + + '@rollup/rollup-win32-ia32-msvc@4.57.1': + resolution: {integrity: sha512-P3dLS+IerxCT/7D2q2FYcRdWRl22dNbrbBEtxdWhXrfIMPP9lQhb5h4Du04mdl5Woq05jVCDPCMF7Ub0NAjIew==} + cpu: [ia32] + os: [win32] + + '@rollup/rollup-win32-x64-gnu@4.57.1': + resolution: {integrity: sha512-VMBH2eOOaKGtIJYleXsi2B8CPVADrh+TyNxJ4mWPnKfLB/DBUmzW+5m1xUrcwWoMfSLagIRpjUFeW5CO5hyciQ==} + cpu: [x64] + os: [win32] + + 
'@rollup/rollup-win32-x64-msvc@4.57.1': + resolution: {integrity: sha512-mxRFDdHIWRxg3UfIIAwCm6NzvxG0jDX/wBN6KsQFTvKFqqg9vTrWUE68qEjHt19A5wwx5X5aUi2zuZT7YR0jrA==} + cpu: [x64] + os: [win32] + '@rtsao/scc@1.1.0': resolution: {integrity: sha512-zt6OdqaDoOnJ1ZYsCYGt9YmWzDXl4vQdKTyJev62gFhRGKdx7mcT54V9KIjg+d2wi9EXsPvAPKe7i7WjfVWB8g==} + '@standard-schema/spec@1.1.0': + resolution: {integrity: sha512-l2aFy5jALhniG5HgqrD6jXLi/rUWrKvqN/qJx6yoJsgKhblVd+iqqU4RCXavm/jPityDo5TCvKMnpjKnOriy0w==} + '@swc/helpers@0.5.15': resolution: {integrity: sha512-JQ5TuMi45Owi4/BIMAJBoSQoOJu12oOk/gADqlcUL9JEdHB8vyjUSsxqeNXnmXHjYKMi2WcYtezGEEhqUI/E2g==} @@ -509,6 +796,12 @@ packages: '@tybys/wasm-util@0.10.1': resolution: {integrity: sha512-9tTaPJLSiejZKx+Bmog4uSubteqTvFrVrURwkmHixBo0G4seD0zUxp98E1DzUBJxLQ3NPwXrGKDiVjwx/DpPsg==} + '@types/chai@5.2.3': + resolution: {integrity: sha512-Mw558oeA9fFbv65/y4mHtXDs9bPnFMZAL/jxdPFUpOHHIXX91mcgEHbS5Lahr+pwZFR8A7GQleRWeI6cGFC2UA==} + + '@types/deep-eql@4.0.2': + resolution: {integrity: sha512-c9h9dVVMigMPc4bwTvC5dxqtqJZwQPePsWjPlpSOnojbor6pGqdk541lfA7AqFQr5pB1BRdq0juY9db81BwyFw==} + '@types/estree@1.0.8': resolution: {integrity: sha512-dWHzHa2WqEXI/O1E9OjrocMTKJl2mSrEolh1Iomrv6U+JuNwaHXsXx9bLu5gG7BUWFIN0skIQJQ/L1rIex4X6w==} @@ -683,6 +976,35 @@ packages: cpu: [x64] os: [win32] + '@vitest/expect@4.0.18': + resolution: {integrity: sha512-8sCWUyckXXYvx4opfzVY03EOiYVxyNrHS5QxX3DAIi5dpJAAkyJezHCP77VMX4HKA2LDT/Jpfo8i2r5BE3GnQQ==} + + '@vitest/mocker@4.0.18': + resolution: {integrity: sha512-HhVd0MDnzzsgevnOWCBj5Otnzobjy5wLBe4EdeeFGv8luMsGcYqDuFRMcttKWZA5vVO8RFjexVovXvAM4JoJDQ==} + peerDependencies: + msw: ^2.4.9 + vite: ^6.0.0 || ^7.0.0-0 + peerDependenciesMeta: + msw: + optional: true + vite: + optional: true + + '@vitest/pretty-format@4.0.18': + resolution: {integrity: sha512-P24GK3GulZWC5tz87ux0m8OADrQIUVDPIjjj65vBXYG17ZeU3qD7r+MNZ1RNv4l8CGU2vtTRqixrOi9fYk/yKw==} + + '@vitest/runner@4.0.18': + resolution: {integrity: sha512-rpk9y12PGa22Jg6g5M3UVVnTS7+zycIGk9ZNGN+m6tZHKQb7jrP7/77WfZy13Y/EUDd52NDsLRQhYKtv7XfPQw==} + + '@vitest/snapshot@4.0.18': + resolution: {integrity: sha512-PCiV0rcl7jKQjbgYqjtakly6T1uwv/5BQ9SwBLekVg/EaYeQFPiXcgrC2Y7vDMA8dM1SUEAEV82kgSQIlXNMvA==} + + '@vitest/spy@4.0.18': + resolution: {integrity: sha512-cbQt3PTSD7P2OARdVW3qWER5EGq7PHlvE+QfzSC0lbwO+xnt7+XH06ZzFjFRgzUX//JmpxrCu92VdwvEPlWSNw==} + + '@vitest/utils@4.0.18': + resolution: {integrity: sha512-msMRKLMVLWygpK3u2Hybgi4MNjcYJvwTb0Ru09+fOyCXIgT5raYP041DRRdiJiI3k/2U6SEbAETB3YtBrUkCFA==} + acorn-jsx@5.3.2: resolution: {integrity: sha512-rq9s+JNhf0IChjtDXxllJ7g41oZk5SlXtp0LHwyA5cejwn7vKmKp4pPri6YEePv2PU65sAsegbXtIinmDFDXgQ==} peerDependencies: @@ -739,6 +1061,10 @@ packages: resolution: {integrity: sha512-BNoCY6SXXPQ7gF2opIP4GBE+Xw7U+pHMYKuzjgCN3GwiaIR09UUeKfheyIry77QtrCBlC0KK0q5/TER/tYh3PQ==} engines: {node: '>= 0.4'} + assertion-error@2.0.1: + resolution: {integrity: sha512-Izi8RQcffqCeNVgFigKli1ssklIbpHnCYc6AknXGYoB6grJqyeby7jv12JUQgmTAnIDnbck1uxksT4dzN3PWBA==} + engines: {node: '>=12'} + ast-types-flow@0.0.8: resolution: {integrity: sha512-OH/2E5Fg20h2aPrbe+QL8JZQFko0YZaF+j4mnQ7BGhfavO7OpSLa8a0y9sBwomHdSbkhTS8TQNayBfnW5DwbvQ==} @@ -799,6 +1125,10 @@ packages: caniuse-lite@1.0.30001762: resolution: {integrity: sha512-PxZwGNvH7Ak8WX5iXzoK1KPZttBXNPuaOvI2ZYU7NrlM+d9Ov+TUvlLOBNGzVXAntMSMMlJPd+jY6ovrVjSmUw==} + chai@6.2.2: + resolution: {integrity: sha512-NUPRluOfOiTKBKvWPtSD4PhFvWCqOi0BGStNWs57X9js7XGTprSmFoz5F0tWhR4WPjNeR9jXqdC7/UpSJTnlRg==} + engines: {node: '>=18'} + chalk@4.1.2: resolution: {integrity: 
sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==} engines: {node: '>=10'} @@ -907,6 +1237,9 @@ packages: resolution: {integrity: sha512-BrUQ0cPTB/IwXj23HtwHjS9n7O4h9FX94b4xc5zlTHxeLgTAdzYUDyy6KdExAl9lbN5rtfe44xpjpmj9grxs5w==} engines: {node: '>= 0.4'} + es-module-lexer@1.7.0: + resolution: {integrity: sha512-jEQoCwk8hyb2AZziIOLhDqpm5+2ww5uIE6lkO/6jcOCusfk6LhMHpXXfBLXTZ7Ydyt0j4VoUQv6uGNYbdW+kBA==} + es-object-atoms@1.1.1: resolution: {integrity: sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==} engines: {node: '>= 0.4'} @@ -923,6 +1256,11 @@ packages: resolution: {integrity: sha512-w+5mJ3GuFL+NjVtJlvydShqE1eN3h3PbI7/5LAsYJP/2qtuMXjfL2LpHSRqo4b4eSF5K/DH1JXKUAHSB2UW50g==} engines: {node: '>= 0.4'} + esbuild@0.27.3: + resolution: {integrity: sha512-8VwMnyGCONIs6cWue2IdpHxHnAjzxnw2Zr7MkVxB2vjmQ2ivqGFb4LEG3SMnv0Gb2F/G/2yA8zUaiL1gywDCCg==} + engines: {node: '>=18'} + hasBin: true + escalade@3.2.0: resolution: {integrity: sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==} engines: {node: '>=6'} @@ -1043,10 +1381,17 @@ packages: resolution: {integrity: sha512-MMdARuVEQziNTeJD8DgMqmhwR11BRQ/cBP+pLtYdSTnf3MIO8fFeiINEbX36ZdNlfU/7A9f3gUw49B3oQsvwBA==} engines: {node: '>=4.0'} + estree-walker@3.0.3: + resolution: {integrity: sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g==} + esutils@2.0.3: resolution: {integrity: sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==} engines: {node: '>=0.10.0'} + expect-type@1.3.0: + resolution: {integrity: sha512-knvyeauYhqjOYvQ66MznSMs83wmHrCycNEN6Ao+2AeYEfxUIkuiVxdEa1qlGEPK+We3n0THiDciYSsCcgW/DoA==} + engines: {node: '>=12.0.0'} + fast-deep-equal@3.1.3: resolution: {integrity: sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q==} @@ -1095,6 +1440,11 @@ packages: resolution: {integrity: sha512-dKx12eRCVIzqCxFGplyFKJMPvLEWgmNtUrpTiJIR5u97zEhRG8ySrtboPHZXx7daLxQVrl643cTzbab2tkQjxg==} engines: {node: '>= 0.4'} + fsevents@2.3.3: + resolution: {integrity: sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==} + engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0} + os: [darwin] + function-bind@1.1.2: resolution: {integrity: sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==} @@ -1554,6 +1904,9 @@ packages: resolution: {integrity: sha512-gXah6aZrcUxjWg2zR2MwouP2eHlCBzdV4pygudehaKXSGW4v2AsRQUK+lwwXhii6KFZcunEnmSUoYp5CXibxtA==} engines: {node: '>= 0.4'} + obug@2.1.1: + resolution: {integrity: sha512-uTqF9MuPraAQ+IsnPf366RG4cP9RtUi7MLO1N3KEc+wb0a6yKpeL0lmk2IB1jY5KHPAlTc6T/JRdC/YqxHNwkQ==} + optionator@0.9.4: resolution: {integrity: sha512-6IpQ7mKUxRcZNLIObR0hz7lxsapSSIYNZJwXPGeF0mTVqGKFIXj1DQcMoT22S3ROcLyY/rz0PWaWZ9ayWmad9g==} engines: {node: '>= 0.8.0'} @@ -1585,6 +1938,9 @@ packages: path-parse@1.0.7: resolution: {integrity: sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==} + pathe@2.0.3: + resolution: {integrity: sha512-WUjGcAqP1gQacoQe+OBJsFA7Ld4DyXuUIjZ5cc75cLHvJ7dtNsTugphxIADwspS+AraAUePCKrSVtPLFj/F88w==} + picocolors@1.1.1: resolution: {integrity: sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==} @@ -1662,6 +2018,11 @@ packages: resolution: {integrity: 
sha512-g6QUff04oZpHs0eG5p83rFLhHeV00ug/Yf9nZM6fLeUrPguBTkTQOdpAWWspMh55TZfVQDPaN3NQJfbVRAxdIw==} engines: {iojs: '>=1.0.0', node: '>=0.10.0'} + rollup@4.57.1: + resolution: {integrity: sha512-oQL6lgK3e2QZeQ7gcgIkS2YZPg5slw37hYufJ3edKlfQSGGm8ICoxswK15ntSzF/a8+h7ekRy7k7oWc3BQ7y8A==} + engines: {node: '>=18.0.0', npm: '>=8.0.0'} + hasBin: true + run-parallel@1.2.0: resolution: {integrity: sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==} @@ -1729,6 +2090,9 @@ packages: resolution: {integrity: sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==} engines: {node: '>= 0.4'} + siginfo@2.0.0: + resolution: {integrity: sha512-ybx0WO1/8bSBLEWXZvEd7gMW3Sn3JFlW3TvX1nREbDLRNQNaeNN8WK0meBwPdAaOI7TtRRRJn/Es1zhrrCHu7g==} + source-map-js@1.2.1: resolution: {integrity: sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==} engines: {node: '>=0.10.0'} @@ -1736,6 +2100,12 @@ packages: stable-hash@0.0.5: resolution: {integrity: sha512-+L3ccpzibovGXFK+Ap/f8LOS0ahMrHTf3xu7mMLSpEGU0EO9ucaysSylKo9eRDFNhWve/y275iPmIZ4z39a9iA==} + stackback@0.0.2: + resolution: {integrity: sha512-1XMJE5fQo1jGH6Y/7ebnwPOBEkIEnT4QF32d5R1+VXdXveM0IBMJt8zfaxX1P3QhVwrYe+576+jkANtSS2mBbw==} + + std-env@3.10.0: + resolution: {integrity: sha512-5GS12FdOZNliM5mAOxFRg7Ir0pWz8MdpYm6AY6VPkGpbA7ZzmbzNcBJQ0GPvvyWgcY7QAhCgf9Uy89I03faLkg==} + stop-iteration-iterator@1.1.0: resolution: {integrity: sha512-eLoXW/DHyl62zxY4SCaIgnRhuMr6ri4juEYARS8E6sCEqzKpOiE521Ucofdx+KnDZl5xmvGYaaKCk5FEOxJCoQ==} engines: {node: '>= 0.4'} @@ -1799,10 +2169,21 @@ packages: resolution: {integrity: sha512-g9ljZiwki/LfxmQADO3dEY1CbpmXT5Hm2fJ+QaGKwSXUylMybePR7/67YW7jOrrvjEgL1Fmz5kzyAjWVWLlucg==} engines: {node: '>=6'} + tinybench@2.9.0: + resolution: {integrity: sha512-0+DUvqWMValLmha6lr4kD8iAMK1HzV0/aKnCtWb9v9641TnP/MFb7Pc2bxoxQjTXAErryXVgUOfv2YqNllqGeg==} + + tinyexec@1.0.2: + resolution: {integrity: sha512-W/KYk+NFhkmsYpuHq5JykngiOCnxeVL8v8dFnqxSD8qEEdRfXk1SDM6JzNqcERbcGYj9tMrDQBYV9cjgnunFIg==} + engines: {node: '>=18'} + tinyglobby@0.2.15: resolution: {integrity: sha512-j2Zq4NyQYG5XMST4cbs02Ak8iJUdxRM0XI5QyxXuZOzKOINmWurp3smXu3y5wDcJrptwpSjgXHzIQxR0omXljQ==} engines: {node: '>=12.0.0'} + tinyrainbow@3.0.3: + resolution: {integrity: sha512-PSkbLUoxOFRzJYjjxHJt9xro7D+iilgMX/C9lawzVuYiIdcihh9DXmVibBe8lmcFrRi/VzlPjBxbN7rH24q8/Q==} + engines: {node: '>=14.0.0'} + to-regex-range@5.0.1: resolution: {integrity: sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==} engines: {node: '>=8.0'} @@ -1870,6 +2251,80 @@ packages: uri-js@4.4.1: resolution: {integrity: sha512-7rKUyy33Q1yc98pQ1DAmLtwX109F7TIfWlW1Ydo8Wl1ii1SeHieeh0HHfPeL2fMXK6z0s8ecKs9frCuLJvndBg==} + vite@7.3.1: + resolution: {integrity: sha512-w+N7Hifpc3gRjZ63vYBXA56dvvRlNWRczTdmCBBa+CotUzAPf5b7YMdMR/8CQoeYE5LX3W4wj6RYTgonm1b9DA==} + engines: {node: ^20.19.0 || >=22.12.0} + hasBin: true + peerDependencies: + '@types/node': ^20.19.0 || >=22.12.0 + jiti: '>=1.21.0' + less: ^4.0.0 + lightningcss: ^1.21.0 + sass: ^1.70.0 + sass-embedded: ^1.70.0 + stylus: '>=0.54.8' + sugarss: ^5.0.0 + terser: ^5.16.0 + tsx: ^4.8.1 + yaml: ^2.4.2 + peerDependenciesMeta: + '@types/node': + optional: true + jiti: + optional: true + less: + optional: true + lightningcss: + optional: true + sass: + optional: true + sass-embedded: + optional: true + stylus: + optional: true + sugarss: + optional: true + terser: + optional: true + tsx: + optional: true + yaml: + optional: true 
+ + vitest@4.0.18: + resolution: {integrity: sha512-hOQuK7h0FGKgBAas7v0mSAsnvrIgAvWmRFjmzpJ7SwFHH3g1k2u37JtYwOwmEKhK6ZO3v9ggDBBm0La1LCK4uQ==} + engines: {node: ^20.0.0 || ^22.0.0 || >=24.0.0} + hasBin: true + peerDependencies: + '@edge-runtime/vm': '*' + '@opentelemetry/api': ^1.9.0 + '@types/node': ^20.0.0 || ^22.0.0 || >=24.0.0 + '@vitest/browser-playwright': 4.0.18 + '@vitest/browser-preview': 4.0.18 + '@vitest/browser-webdriverio': 4.0.18 + '@vitest/ui': 4.0.18 + happy-dom: '*' + jsdom: '*' + peerDependenciesMeta: + '@edge-runtime/vm': + optional: true + '@opentelemetry/api': + optional: true + '@types/node': + optional: true + '@vitest/browser-playwright': + optional: true + '@vitest/browser-preview': + optional: true + '@vitest/browser-webdriverio': + optional: true + '@vitest/ui': + optional: true + happy-dom: + optional: true + jsdom: + optional: true + which-boxed-primitive@1.1.1: resolution: {integrity: sha512-TbX3mj8n0odCBFVlY8AxkqcHASw3L60jIuF8jFP78az3C2YhmGvqbHBpAjTRH2/xqYunrJ9g1jSyjCjpoWzIAA==} engines: {node: '>= 0.4'} @@ -1891,6 +2346,11 @@ packages: engines: {node: '>= 8'} hasBin: true + why-is-node-running@2.3.0: + resolution: {integrity: sha512-hUrmaWBdVDcxvYqnyh09zunKzROWjbZTiNy8dBEjkS7ehEDQibXJ7XvlmtbwuTclUiIyN+CyXQD4Vmko8fNm8w==} + engines: {node: '>=8'} + hasBin: true + word-wrap@1.2.5: resolution: {integrity: sha512-BN22B5eaMMI9UMtjrGd5g5eCYPpCPDUy0FJXbYsaT5zYxjFOckS53SQDE3pWkVoWpHXVb3BrYcEN4Twa55B5cA==} engines: {node: '>=0.10.0'} @@ -2031,6 +2491,84 @@ snapshots: tslib: 2.8.1 optional: true + '@esbuild/aix-ppc64@0.27.3': + optional: true + + '@esbuild/android-arm64@0.27.3': + optional: true + + '@esbuild/android-arm@0.27.3': + optional: true + + '@esbuild/android-x64@0.27.3': + optional: true + + '@esbuild/darwin-arm64@0.27.3': + optional: true + + '@esbuild/darwin-x64@0.27.3': + optional: true + + '@esbuild/freebsd-arm64@0.27.3': + optional: true + + '@esbuild/freebsd-x64@0.27.3': + optional: true + + '@esbuild/linux-arm64@0.27.3': + optional: true + + '@esbuild/linux-arm@0.27.3': + optional: true + + '@esbuild/linux-ia32@0.27.3': + optional: true + + '@esbuild/linux-loong64@0.27.3': + optional: true + + '@esbuild/linux-mips64el@0.27.3': + optional: true + + '@esbuild/linux-ppc64@0.27.3': + optional: true + + '@esbuild/linux-riscv64@0.27.3': + optional: true + + '@esbuild/linux-s390x@0.27.3': + optional: true + + '@esbuild/linux-x64@0.27.3': + optional: true + + '@esbuild/netbsd-arm64@0.27.3': + optional: true + + '@esbuild/netbsd-x64@0.27.3': + optional: true + + '@esbuild/openbsd-arm64@0.27.3': + optional: true + + '@esbuild/openbsd-x64@0.27.3': + optional: true + + '@esbuild/openharmony-arm64@0.27.3': + optional: true + + '@esbuild/sunos-x64@0.27.3': + optional: true + + '@esbuild/win32-arm64@0.27.3': + optional: true + + '@esbuild/win32-ia32@0.27.3': + optional: true + + '@esbuild/win32-x64@0.27.3': + optional: true + '@eslint-community/eslint-utils@4.9.1(eslint@9.39.2(jiti@2.6.1))': dependencies: eslint: 9.39.2(jiti@2.6.1) @@ -2257,8 +2795,85 @@ snapshots: '@opensciencearchive/ui@https://codeload.github.com/opensciencearchive/ui/tar.gz/b6aedd941e23a9e080bba98b2403c93d43bcc5b8': {} + '@rollup/rollup-android-arm-eabi@4.57.1': + optional: true + + '@rollup/rollup-android-arm64@4.57.1': + optional: true + + '@rollup/rollup-darwin-arm64@4.57.1': + optional: true + + '@rollup/rollup-darwin-x64@4.57.1': + optional: true + + '@rollup/rollup-freebsd-arm64@4.57.1': + optional: true + + '@rollup/rollup-freebsd-x64@4.57.1': + optional: true + + 
'@rollup/rollup-linux-arm-gnueabihf@4.57.1': + optional: true + + '@rollup/rollup-linux-arm-musleabihf@4.57.1': + optional: true + + '@rollup/rollup-linux-arm64-gnu@4.57.1': + optional: true + + '@rollup/rollup-linux-arm64-musl@4.57.1': + optional: true + + '@rollup/rollup-linux-loong64-gnu@4.57.1': + optional: true + + '@rollup/rollup-linux-loong64-musl@4.57.1': + optional: true + + '@rollup/rollup-linux-ppc64-gnu@4.57.1': + optional: true + + '@rollup/rollup-linux-ppc64-musl@4.57.1': + optional: true + + '@rollup/rollup-linux-riscv64-gnu@4.57.1': + optional: true + + '@rollup/rollup-linux-riscv64-musl@4.57.1': + optional: true + + '@rollup/rollup-linux-s390x-gnu@4.57.1': + optional: true + + '@rollup/rollup-linux-x64-gnu@4.57.1': + optional: true + + '@rollup/rollup-linux-x64-musl@4.57.1': + optional: true + + '@rollup/rollup-openbsd-x64@4.57.1': + optional: true + + '@rollup/rollup-openharmony-arm64@4.57.1': + optional: true + + '@rollup/rollup-win32-arm64-msvc@4.57.1': + optional: true + + '@rollup/rollup-win32-ia32-msvc@4.57.1': + optional: true + + '@rollup/rollup-win32-x64-gnu@4.57.1': + optional: true + + '@rollup/rollup-win32-x64-msvc@4.57.1': + optional: true + '@rtsao/scc@1.1.0': {} + '@standard-schema/spec@1.1.0': {} + '@swc/helpers@0.5.15': dependencies: tslib: 2.8.1 @@ -2337,6 +2952,13 @@ snapshots: tslib: 2.8.1 optional: true + '@types/chai@5.2.3': + dependencies: + '@types/deep-eql': 4.0.2 + assertion-error: 2.0.1 + + '@types/deep-eql@4.0.2': {} + '@types/estree@1.0.8': {} '@types/json-schema@7.0.15': {} @@ -2505,6 +3127,45 @@ snapshots: '@unrs/resolver-binding-win32-x64-msvc@1.11.1': optional: true + '@vitest/expect@4.0.18': + dependencies: + '@standard-schema/spec': 1.1.0 + '@types/chai': 5.2.3 + '@vitest/spy': 4.0.18 + '@vitest/utils': 4.0.18 + chai: 6.2.2 + tinyrainbow: 3.0.3 + + '@vitest/mocker@4.0.18(vite@7.3.1(@types/node@20.19.27)(jiti@2.6.1)(lightningcss@1.30.2))': + dependencies: + '@vitest/spy': 4.0.18 + estree-walker: 3.0.3 + magic-string: 0.30.21 + optionalDependencies: + vite: 7.3.1(@types/node@20.19.27)(jiti@2.6.1)(lightningcss@1.30.2) + + '@vitest/pretty-format@4.0.18': + dependencies: + tinyrainbow: 3.0.3 + + '@vitest/runner@4.0.18': + dependencies: + '@vitest/utils': 4.0.18 + pathe: 2.0.3 + + '@vitest/snapshot@4.0.18': + dependencies: + '@vitest/pretty-format': 4.0.18 + magic-string: 0.30.21 + pathe: 2.0.3 + + '@vitest/spy@4.0.18': {} + + '@vitest/utils@4.0.18': + dependencies: + '@vitest/pretty-format': 4.0.18 + tinyrainbow: 3.0.3 + acorn-jsx@5.3.2(acorn@8.15.0): dependencies: acorn: 8.15.0 @@ -2593,6 +3254,8 @@ snapshots: get-intrinsic: 1.3.0 is-array-buffer: 3.0.5 + assertion-error@2.0.1: {} + ast-types-flow@0.0.8: {} async-function@1.0.0: {} @@ -2651,6 +3314,8 @@ snapshots: caniuse-lite@1.0.30001762: {} + chai@6.2.2: {} + chalk@4.1.2: dependencies: ansi-styles: 4.3.0 @@ -2819,6 +3484,8 @@ snapshots: iterator.prototype: 1.1.5 safe-array-concat: 1.1.3 + es-module-lexer@1.7.0: {} + es-object-atoms@1.1.1: dependencies: es-errors: 1.3.0 @@ -2840,6 +3507,35 @@ snapshots: is-date-object: 1.1.0 is-symbol: 1.1.1 + esbuild@0.27.3: + optionalDependencies: + '@esbuild/aix-ppc64': 0.27.3 + '@esbuild/android-arm': 0.27.3 + '@esbuild/android-arm64': 0.27.3 + '@esbuild/android-x64': 0.27.3 + '@esbuild/darwin-arm64': 0.27.3 + '@esbuild/darwin-x64': 0.27.3 + '@esbuild/freebsd-arm64': 0.27.3 + '@esbuild/freebsd-x64': 0.27.3 + '@esbuild/linux-arm': 0.27.3 + '@esbuild/linux-arm64': 0.27.3 + '@esbuild/linux-ia32': 0.27.3 + '@esbuild/linux-loong64': 0.27.3 + 
'@esbuild/linux-mips64el': 0.27.3 + '@esbuild/linux-ppc64': 0.27.3 + '@esbuild/linux-riscv64': 0.27.3 + '@esbuild/linux-s390x': 0.27.3 + '@esbuild/linux-x64': 0.27.3 + '@esbuild/netbsd-arm64': 0.27.3 + '@esbuild/netbsd-x64': 0.27.3 + '@esbuild/openbsd-arm64': 0.27.3 + '@esbuild/openbsd-x64': 0.27.3 + '@esbuild/openharmony-arm64': 0.27.3 + '@esbuild/sunos-x64': 0.27.3 + '@esbuild/win32-arm64': 0.27.3 + '@esbuild/win32-ia32': 0.27.3 + '@esbuild/win32-x64': 0.27.3 + escalade@3.2.0: {} escape-string-regexp@4.0.0: {} @@ -3045,8 +3741,14 @@ snapshots: estraverse@5.3.0: {} + estree-walker@3.0.3: + dependencies: + '@types/estree': 1.0.8 + esutils@2.0.3: {} + expect-type@1.3.0: {} + fast-deep-equal@3.1.3: {} fast-glob@3.3.1: @@ -3093,6 +3795,9 @@ snapshots: dependencies: is-callable: 1.2.7 + fsevents@2.3.3: + optional: true + function-bind@1.1.2: {} function.prototype.name@1.1.8: @@ -3535,6 +4240,8 @@ snapshots: define-properties: 1.2.1 es-object-atoms: 1.1.1 + obug@2.1.1: {} + optionator@0.9.4: dependencies: deep-is: 0.1.4 @@ -3568,6 +4275,8 @@ snapshots: path-parse@1.0.7: {} + pathe@2.0.3: {} + picocolors@1.1.1: {} picomatch@2.3.1: {} @@ -3647,6 +4356,37 @@ snapshots: reusify@1.1.0: {} + rollup@4.57.1: + dependencies: + '@types/estree': 1.0.8 + optionalDependencies: + '@rollup/rollup-android-arm-eabi': 4.57.1 + '@rollup/rollup-android-arm64': 4.57.1 + '@rollup/rollup-darwin-arm64': 4.57.1 + '@rollup/rollup-darwin-x64': 4.57.1 + '@rollup/rollup-freebsd-arm64': 4.57.1 + '@rollup/rollup-freebsd-x64': 4.57.1 + '@rollup/rollup-linux-arm-gnueabihf': 4.57.1 + '@rollup/rollup-linux-arm-musleabihf': 4.57.1 + '@rollup/rollup-linux-arm64-gnu': 4.57.1 + '@rollup/rollup-linux-arm64-musl': 4.57.1 + '@rollup/rollup-linux-loong64-gnu': 4.57.1 + '@rollup/rollup-linux-loong64-musl': 4.57.1 + '@rollup/rollup-linux-ppc64-gnu': 4.57.1 + '@rollup/rollup-linux-ppc64-musl': 4.57.1 + '@rollup/rollup-linux-riscv64-gnu': 4.57.1 + '@rollup/rollup-linux-riscv64-musl': 4.57.1 + '@rollup/rollup-linux-s390x-gnu': 4.57.1 + '@rollup/rollup-linux-x64-gnu': 4.57.1 + '@rollup/rollup-linux-x64-musl': 4.57.1 + '@rollup/rollup-openbsd-x64': 4.57.1 + '@rollup/rollup-openharmony-arm64': 4.57.1 + '@rollup/rollup-win32-arm64-msvc': 4.57.1 + '@rollup/rollup-win32-ia32-msvc': 4.57.1 + '@rollup/rollup-win32-x64-gnu': 4.57.1 + '@rollup/rollup-win32-x64-msvc': 4.57.1 + fsevents: 2.3.3 + run-parallel@1.2.0: dependencies: queue-microtask: 1.2.3 @@ -3764,10 +4504,16 @@ snapshots: side-channel-map: 1.0.1 side-channel-weakmap: 1.0.2 + siginfo@2.0.0: {} + source-map-js@1.2.1: {} stable-hash@0.0.5: {} + stackback@0.0.2: {} + + std-env@3.10.0: {} + stop-iteration-iterator@1.1.0: dependencies: es-errors: 1.3.0 @@ -3844,11 +4590,17 @@ snapshots: tapable@2.3.0: {} + tinybench@2.9.0: {} + + tinyexec@1.0.2: {} + tinyglobby@0.2.15: dependencies: fdir: 6.5.0(picomatch@4.0.3) picomatch: 4.0.3 + tinyrainbow@3.0.3: {} + to-regex-range@5.0.1: dependencies: is-number: 7.0.0 @@ -3959,6 +4711,57 @@ snapshots: dependencies: punycode: 2.3.1 + vite@7.3.1(@types/node@20.19.27)(jiti@2.6.1)(lightningcss@1.30.2): + dependencies: + esbuild: 0.27.3 + fdir: 6.5.0(picomatch@4.0.3) + picomatch: 4.0.3 + postcss: 8.5.6 + rollup: 4.57.1 + tinyglobby: 0.2.15 + optionalDependencies: + '@types/node': 20.19.27 + fsevents: 2.3.3 + jiti: 2.6.1 + lightningcss: 1.30.2 + + vitest@4.0.18(@types/node@20.19.27)(jiti@2.6.1)(lightningcss@1.30.2): + dependencies: + '@vitest/expect': 4.0.18 + '@vitest/mocker': 4.0.18(vite@7.3.1(@types/node@20.19.27)(jiti@2.6.1)(lightningcss@1.30.2)) + 
'@vitest/pretty-format': 4.0.18 + '@vitest/runner': 4.0.18 + '@vitest/snapshot': 4.0.18 + '@vitest/spy': 4.0.18 + '@vitest/utils': 4.0.18 + es-module-lexer: 1.7.0 + expect-type: 1.3.0 + magic-string: 0.30.21 + obug: 2.1.1 + pathe: 2.0.3 + picomatch: 4.0.3 + std-env: 3.10.0 + tinybench: 2.9.0 + tinyexec: 1.0.2 + tinyglobby: 0.2.15 + tinyrainbow: 3.0.3 + vite: 7.3.1(@types/node@20.19.27)(jiti@2.6.1)(lightningcss@1.30.2) + why-is-node-running: 2.3.0 + optionalDependencies: + '@types/node': 20.19.27 + transitivePeerDependencies: + - jiti + - less + - lightningcss + - msw + - sass + - sass-embedded + - stylus + - sugarss + - terser + - tsx + - yaml + which-boxed-primitive@1.1.1: dependencies: is-bigint: 1.1.0 @@ -4004,6 +4807,11 @@ snapshots: dependencies: isexe: 2.0.0 + why-is-node-running@2.3.0: + dependencies: + siginfo: 2.0.0 + stackback: 0.0.2 + word-wrap@1.2.5: {} yallist@3.1.1: {} diff --git a/web/src/__tests__/lib/sdk/mock/deposition.test.ts b/web/src/__tests__/lib/sdk/mock/deposition.test.ts new file mode 100644 index 0000000..a7d13d5 --- /dev/null +++ b/web/src/__tests__/lib/sdk/mock/deposition.test.ts @@ -0,0 +1,121 @@ +import { describe, it, expect, beforeEach } from 'vitest'; +import { MockDepositionNamespace } from '@/lib/sdk/mock/deposition'; +import type { DepositionInterface } from '@/lib/sdk/deposition'; + +describe('MockDepositionNamespace', () => { + let api: DepositionInterface; + + beforeEach(() => { + api = new MockDepositionNamespace(); + }); + + describe('listConventions', () => { + it('returns conventions with expected shape', async () => { + const res = await api.listConventions(); + expect(res.items.length).toBeGreaterThan(0); + for (const conv of res.items) { + expect(conv).toHaveProperty('srn'); + expect(conv).toHaveProperty('title'); + expect(conv).toHaveProperty('description'); + expect(conv).toHaveProperty('schema_srn'); + expect(conv).toHaveProperty('created_at'); + expect(conv.srn).toMatch(/^urn:osa:/); + } + }); + }); + + describe('getConvention', () => { + it('returns detail with file requirements for a known SRN', async () => { + const list = await api.listConventions(); + const srn = list.items[0].srn; + const res = await api.getConvention(srn); + expect(res.srn).toBe(srn); + expect(res.file_requirements).toHaveProperty('accepted_types'); + expect(res.file_requirements).toHaveProperty('min_count'); + expect(res.file_requirements).toHaveProperty('max_count'); + expect(res.file_requirements).toHaveProperty('max_file_size'); + expect(res.validator_refs.length).toBeGreaterThan(0); + }); + + it('throws for unknown SRN', async () => { + await expect(api.getConvention('urn:osa:localhost:conv:nonexistent')).rejects.toThrow(); + }); + }); + + describe('create', () => { + it('returns a response with a valid SRN', async () => { + const list = await api.listConventions(); + const convSrn = list.items[0].srn; + const res = await api.create(convSrn); + expect(res.srn).toMatch(/^urn:osa:localhost:dep:/); + }); + }); + + describe('get', () => { + it('returns the created deposition', async () => { + const list = await api.listConventions(); + const convSrn = list.items[0].srn; + const { srn } = await api.create(convSrn); + const dep = await api.get(srn); + expect(dep.srn).toBe(srn); + expect(dep.convention_srn).toBe(convSrn); + expect(dep.status).toBe('draft'); + expect(dep.files).toEqual([]); + expect(dep.metadata).toEqual({}); + expect(dep.record_srn).toBeNull(); + }); + }); + + describe('uploadSpreadsheet', () => { + it('returns parse result with metadata and warnings', 
async () => { + const list = await api.listConventions(); + const { srn } = await api.create(list.items[0].srn); + const file = new File(['data'], 'metadata.xlsx', { type: 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet' }); + const res = await api.uploadSpreadsheet(srn, file); + expect(res.parse_result).toHaveProperty('metadata'); + expect(res.parse_result).toHaveProperty('warnings'); + expect(res.parse_result).toHaveProperty('errors'); + expect(typeof res.parse_result.metadata).toBe('object'); + expect(Array.isArray(res.parse_result.errors)).toBe(true); + }); + }); + + describe('uploadFile', () => { + it('returns file metadata with name and size', async () => { + const list = await api.listConventions(); + const { srn } = await api.create(list.items[0].srn); + const file = new File(['content'], 'sample.fastq', { type: 'application/octet-stream' }); + const res = await api.uploadFile(srn, file); + expect(res.file.name).toBe('sample.fastq'); + expect(res.file.size).toBeGreaterThan(0); + expect(res.file.checksum).toBeTruthy(); + expect(res.file).toHaveProperty('content_type'); + expect(res.file.uploaded_at).toBeTruthy(); + }); + }); + + describe('deleteFile', () => { + it('succeeds without error', async () => { + const list = await api.listConventions(); + const { srn } = await api.create(list.items[0].srn); + await expect(api.deleteFile(srn, 'sample.fastq')).resolves.toBeUndefined(); + }); + }); + + describe('submit', () => { + it('succeeds without error', async () => { + const list = await api.listConventions(); + const { srn } = await api.create(list.items[0].srn); + await expect(api.submit(srn)).resolves.toBeUndefined(); + }); + }); + + describe('downloadTemplate', () => { + it('returns a Blob', async () => { + const list = await api.listConventions(); + const blob = await api.downloadTemplate(list.items[0].srn); + expect(blob).toBeInstanceOf(Blob); + expect(blob.size).toBeGreaterThan(0); + }); + }); +}); diff --git a/web/src/app/auth/callback/page.tsx b/web/src/app/auth/callback/page.tsx index 700ed82..50502e6 100644 --- a/web/src/app/auth/callback/page.tsx +++ b/web/src/app/auth/callback/page.tsx @@ -3,7 +3,7 @@ import { Suspense, useEffect, useRef } from 'react'; import { useRouter, useSearchParams } from 'next/navigation'; import Link from 'next/link'; -import { OSAClient, parseAuthCallback } from '@/lib/sdk'; +import { osa, parseAuthCallback } from '@/lib/sdk'; function AuthCallbackContent() { const router = useRouter(); @@ -33,9 +33,8 @@ function AuthCallbackContent() { processedRef.current = true; - // Store in client - const client = new OSAClient({ baseUrl: '/api/v1' }); - client.handleAuthCallback(hash); + // Store via SDK singleton + osa.auth.handleCallback(hash); // Redirect to home (or wherever user came from) router.push('/'); diff --git a/web/src/app/deposit/page.tsx b/web/src/app/deposit/page.tsx new file mode 100644 index 0000000..dd990a3 --- /dev/null +++ b/web/src/app/deposit/page.tsx @@ -0,0 +1,14 @@ +import { DepositWizard } from '@/components/deposit/DepositWizard'; + +export const metadata = { + title: 'Deposit Data — Open Science Archive', + description: 'Submit your research data to the Open Science Archive.', +}; + +export default function DepositPage() { + return ( +
+    <main>
+      <DepositWizard />
+    </main>
+  );
+}
diff --git a/web/src/app/deposition/[srn]/page.module.css b/web/src/app/deposition/[srn]/page.module.css
new file mode 100644
index 0000000..b6a7fcc
--- /dev/null
+++ b/web/src/app/deposition/[srn]/page.module.css
@@ -0,0 +1,63 @@
+.main {
+  min-height: calc(100vh - 200px);
+  padding: var(--space-6) 0 var(--space-12);
+  background: var(--color-bg);
+}
+
+.container {
+  max-width: var(--content-width-wide);
+  margin: 0 auto;
+  padding: 0 var(--space-5);
+}
+
+.breadcrumb {
+  display: flex;
+  align-items: center;
+  gap: var(--space-2);
+  font-size: var(--text-xs);
+  margin-bottom: var(--space-5);
+}
+
+.breadcrumbLink {
+  color: var(--color-text-muted);
+  text-decoration: none;
+  transition: color 120ms ease;
+}
+
+.breadcrumbLink:hover {
+  color: var(--color-accent);
+}
+
+.separator {
+  color: var(--color-text-subtle);
+}
+
+.current {
+  color: var(--color-text);
+}
+
+.backLink {
+  margin-top: var(--space-6);
+  padding-top: var(--space-5);
+  border-top: 1px solid var(--color-border-subtle);
+}
+
+.loading {
+  color: var(--color-text-muted);
+  font-size: var(--text-sm);
+}
+
+.error {
+  color: hsl(0, 70%, 50%);
+  font-size: var(--text-sm);
+}
+
+@media (max-width: 640px) {
+  .main {
+    padding: var(--space-5) 0 var(--space-10);
+  }
+
+  .container {
+    padding: 0 var(--space-4);
+  }
+}
diff --git a/web/src/app/deposition/[srn]/page.tsx b/web/src/app/deposition/[srn]/page.tsx
new file mode 100644
index 0000000..f72964a
--- /dev/null
+++ b/web/src/app/deposition/[srn]/page.tsx
@@ -0,0 +1,48 @@
+'use client';
+
+import { useState, useEffect } from 'react';
+import { useParams } from 'next/navigation';
+import Link from 'next/link';
+import { osa } from '@/lib/sdk';
+import { DepositionDetail } from '@/components/deposit/DepositionDetail';
+import { BackButton } from '@/components/ui/BackButton';
+import type { Deposition } from '@/types';
+import styles from './page.module.css';
+
+export default function DepositionPage() {
+  const params = useParams<{ srn: string }>();
+  const decodedSrn = decodeURIComponent(params.srn);
+
+  const [deposition, setDeposition] = useState<Deposition | null>(null);
+  const [error, setError] = useState<string | null>(null);
+
+  useEffect(() => {
+    osa.deposition.get(decodedSrn).then(setDeposition).catch((err) => {
+      setError(err?.message ?? 'Failed to load deposition');
+    });
+  }, [decodedSrn]);
+
+  return (
+    <main className={styles.main}>
+      <div className={styles.container}>
+        <nav className={styles.breadcrumb}>
+          <Link href="/" className={styles.breadcrumbLink}>Home</Link>
+          <span className={styles.separator}>/</span>
+          <span className={styles.current}>{decodedSrn}</span>
+        </nav>
+
+        {error &&
+          <p className={styles.error}>{error}</p>
+        }
+        {!deposition && !error &&
+          <p className={styles.loading}>Loading...</p>
+        }
+        {deposition && <DepositionDetail deposition={deposition} />}
+
+        <div className={styles.backLink}>
+          <BackButton href="/deposit">← New deposition</BackButton>
+        </div>
+      </div>
+    </main>
+ ); +} diff --git a/web/src/app/layout.tsx b/web/src/app/layout.tsx index 15f9a7d..2bf2f1a 100644 --- a/web/src/app/layout.tsx +++ b/web/src/app/layout.tsx @@ -23,9 +23,6 @@ export const metadata: Metadata = { keywords: ['biology', 'genomics', 'GEO', 'research', 'scientific data', 'semantic search'], }; -// API URL: use env var for dev (different port), relative path for prod (reverse proxy) -const apiBaseUrl = process.env.NEXT_PUBLIC_API_URL || '/api/v1'; - export default function RootLayout({ children, }: Readonly<{ @@ -34,7 +31,7 @@ export default function RootLayout({ return ( - +
{children}