From b9f490e17c6827e024d86a6e60fc03ee9a8ca20e Mon Sep 17 00:00:00 2001 From: Dipannita Dey Date: Wed, 25 Oct 2023 13:39:44 -0500 Subject: [PATCH 01/43] Basic implementation of admin in user collection in mongodb --- backend/app/models/users.py | 1 + backend/app/routers/authentication.py | 47 +++++++++++++++++++++++---- backend/app/routers/keycloak.py | 32 ++++++++++++++---- 3 files changed, 66 insertions(+), 14 deletions(-) diff --git a/backend/app/models/users.py b/backend/app/models/users.py index 99e175cf5..1ead5be55 100644 --- a/backend/app/models/users.py +++ b/backend/app/models/users.py @@ -12,6 +12,7 @@ class UserBase(BaseModel): email: EmailStr first_name: str last_name: str + admin: bool = False class UserIn(UserBase): diff --git a/backend/app/routers/authentication.py b/backend/app/routers/authentication.py index d90b8545b..aabe61eb2 100644 --- a/backend/app/routers/authentication.py +++ b/backend/app/routers/authentication.py @@ -8,8 +8,7 @@ ) from passlib.hash import bcrypt -from app import dependencies -from app.keycloak_auth import create_user +from app.keycloak_auth import create_user, get_current_user from app.keycloak_auth import keycloak_openid from app.models.users import UserDB, UserIn, UserOut, UserLogin @@ -38,11 +37,28 @@ async def save_user(userIn: UserIn): # create local user hashed_password = bcrypt.hash(userIn.password) - user = UserDB( - **userIn.dict(), - hashed_password=hashed_password, - keycloak_id=keycloak_user, - ) + + # check if this is the 1st user, make it admin + all_records = UserDB.find_all() + count = 0 + + async for record in all_records: + count += 1 + + if count == 0: + user = UserDB( + **userIn.dict(), + admin=True, + hashed_password=hashed_password, + keycloak_id=keycloak_user, + ) + else: + user = UserDB( + **userIn.dict(), + hashed_password=hashed_password, + keycloak_id=keycloak_user, + ) + await user.insert() return user.dict() @@ -75,3 +91,20 @@ async def authenticate_user(email: str, password: str): if not 
user.verify_password(password): return None return user + + +@router.post("/users/set_admin/{useremail}", response_model=UserOut) +async def set_admin(useremail: str, current_username=Depends(get_current_user)): + print("hello") + if (current_user := await UserDB.find_one(UserDB.email == current_username.email)) is not None: + if current_user.admin == True: + if (user := await UserDB.find_one(UserDB.email == useremail)) is not None: + user.admin = True + await user.replace() + return user.dict() + else: + raise HTTPException(status_code=404, detail=f"User {useremail} not found") + else: + raise HTTPException(status_code=403, detail=f"User {current_username.email} is not an admin. Only admin can make others admin.") + else: + raise HTTPException(status_code=404, detail=f"User {current_username.email} not found") diff --git a/backend/app/routers/keycloak.py b/backend/app/routers/keycloak.py index 78a59f5b2..b103d4304 100644 --- a/backend/app/routers/keycloak.py +++ b/backend/app/routers/keycloak.py @@ -117,13 +117,31 @@ async def auth(code: str) -> RedirectResponse: given_name = userinfo.get("given_name", " ") family_name = userinfo.get("family_name", " ") email = userinfo["email"] - user = UserDB( - email=email, - first_name=given_name, - last_name=family_name, - hashed_password="", - keycloak_id=keycloak_id, - ) + + # check if this is the 1st user, make it admin + all_records = UserDB.find_all() + count = 0 + + async for record in all_records: + count += 1 + + if count == 0: + user = UserDB( + email=email, + first_name=given_name, + last_name=family_name, + hashed_password="", + keycloak_id=keycloak_id, + admin=True + ) + else: + user = UserDB( + email=email, + first_name=given_name, + last_name=family_name, + hashed_password="", + keycloak_id=keycloak_id, + ) matched_user = await UserDB.find_one(UserDB.email == email) if matched_user is None: await user.insert() From 29388346ab31847a2181e8544c25e862f3719ecb Mon Sep 17 00:00:00 2001 From: Dipannita Dey Date: Wed, 
25 Oct 2023 14:09:39 -0500 Subject: [PATCH 02/43] black formatting --- backend/app/routers/authentication.py | 17 +++++++++++++---- backend/app/routers/keycloak.py | 2 +- 2 files changed, 14 insertions(+), 5 deletions(-) diff --git a/backend/app/routers/authentication.py b/backend/app/routers/authentication.py index aabe61eb2..7785f4560 100644 --- a/backend/app/routers/authentication.py +++ b/backend/app/routers/authentication.py @@ -96,15 +96,24 @@ async def authenticate_user(email: str, password: str): @router.post("/users/set_admin/{useremail}", response_model=UserOut) async def set_admin(useremail: str, current_username=Depends(get_current_user)): print("hello") - if (current_user := await UserDB.find_one(UserDB.email == current_username.email)) is not None: + if ( + current_user := await UserDB.find_one(UserDB.email == current_username.email) + ) is not None: if current_user.admin == True: if (user := await UserDB.find_one(UserDB.email == useremail)) is not None: user.admin = True await user.replace() return user.dict() else: - raise HTTPException(status_code=404, detail=f"User {useremail} not found") + raise HTTPException( + status_code=404, detail=f"User {useremail} not found" + ) else: - raise HTTPException(status_code=403, detail=f"User {current_username.email} is not an admin. Only admin can make others admin.") + raise HTTPException( + status_code=403, + detail=f"User {current_username.email} is not an admin. 
Only admin can make others admin.", + ) else: - raise HTTPException(status_code=404, detail=f"User {current_username.email} not found") + raise HTTPException( + status_code=404, detail=f"User {current_username.email} not found" + ) diff --git a/backend/app/routers/keycloak.py b/backend/app/routers/keycloak.py index b103d4304..71e84208f 100644 --- a/backend/app/routers/keycloak.py +++ b/backend/app/routers/keycloak.py @@ -132,7 +132,7 @@ async def auth(code: str) -> RedirectResponse: last_name=family_name, hashed_password="", keycloak_id=keycloak_id, - admin=True + admin=True, ) else: user = UserDB( From 5957c5921d6bafa9504a65eb81e99b6178b474e5 Mon Sep 17 00:00:00 2001 From: Dipannita Dey Date: Thu, 26 Oct 2023 12:38:07 -0500 Subject: [PATCH 03/43] adding codegen files --- frontend/src/openapi/v2/models/UserIn.ts | 1 + frontend/src/openapi/v2/models/UserOut.ts | 1 + .../src/openapi/v2/services/LoginService.ts | 18 ++++++++++++++++++ 3 files changed, 20 insertions(+) diff --git a/frontend/src/openapi/v2/models/UserIn.ts b/frontend/src/openapi/v2/models/UserIn.ts index f7e7cbfa4..80ecd905f 100644 --- a/frontend/src/openapi/v2/models/UserIn.ts +++ b/frontend/src/openapi/v2/models/UserIn.ts @@ -6,5 +6,6 @@ export type UserIn = { email: string; first_name: string; last_name: string; + admin?: boolean; password: string; } diff --git a/frontend/src/openapi/v2/models/UserOut.ts b/frontend/src/openapi/v2/models/UserOut.ts index 42c7657d5..508aa4cdc 100644 --- a/frontend/src/openapi/v2/models/UserOut.ts +++ b/frontend/src/openapi/v2/models/UserOut.ts @@ -19,5 +19,6 @@ export type UserOut = { email: string; first_name: string; last_name: string; + admin?: boolean; id?: string; } diff --git a/frontend/src/openapi/v2/services/LoginService.ts b/frontend/src/openapi/v2/services/LoginService.ts index cf77a358d..54d843155 100644 --- a/frontend/src/openapi/v2/services/LoginService.ts +++ b/frontend/src/openapi/v2/services/LoginService.ts @@ -49,4 +49,22 @@ export class LoginService { 
}); } + /** + * Set Admin + * @param useremail + * @returns UserOut Successful Response + * @throws ApiError + */ + public static setAdminApiV2UsersSetAdminUseremailPost( + useremail: string, + ): CancelablePromise { + return __request({ + method: 'POST', + path: `/api/v2/users/set_admin/${useremail}`, + errors: { + 422: `Validation Error`, + }, + }); + } + } \ No newline at end of file From 977eadcb8e4cbb691d529aa0c8c9f79772b00b19 Mon Sep 17 00:00:00 2001 From: Dipannita Dey Date: Thu, 26 Oct 2023 13:01:11 -0500 Subject: [PATCH 04/43] minor fix to boolean logic --- backend/app/routers/authentication.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/backend/app/routers/authentication.py b/backend/app/routers/authentication.py index 7785f4560..7a1d09d38 100644 --- a/backend/app/routers/authentication.py +++ b/backend/app/routers/authentication.py @@ -95,11 +95,10 @@ async def authenticate_user(email: str, password: str): @router.post("/users/set_admin/{useremail}", response_model=UserOut) async def set_admin(useremail: str, current_username=Depends(get_current_user)): - print("hello") if ( current_user := await UserDB.find_one(UserDB.email == current_username.email) ) is not None: - if current_user.admin == True: + if current_user.admin: if (user := await UserDB.find_one(UserDB.email == useremail)) is not None: user.admin = True await user.replace() From 795c825d25be15b48a3aa8fa8f44ca9fb57a2f06 Mon Sep 17 00:00:00 2001 From: Dipannita Dey Date: Mon, 30 Oct 2023 13:49:05 -0500 Subject: [PATCH 05/43] Adding get_admin dependency black formatting adding codegen file removing redundant print statement fixing pytest failure fixing pytest failure ran black --- backend/app/models/users.py | 2 +- backend/app/routers/authentication.py | 49 +++++++++++++------ backend/app/routers/keycloak.py | 1 + frontend/src/openapi/v2/models/UserIn.ts | 2 +- frontend/src/openapi/v2/models/UserOut.ts | 2 +- .../src/openapi/v2/services/LoginService.ts | 5 ++ 6 files 
changed, 42 insertions(+), 19 deletions(-) diff --git a/backend/app/models/users.py b/backend/app/models/users.py index 1ead5be55..4d29fb4d3 100644 --- a/backend/app/models/users.py +++ b/backend/app/models/users.py @@ -12,7 +12,7 @@ class UserBase(BaseModel): email: EmailStr first_name: str last_name: str - admin: bool = False + admin: bool class UserIn(UserBase): diff --git a/backend/app/routers/authentication.py b/backend/app/routers/authentication.py index 7a1d09d38..e5b822204 100644 --- a/backend/app/routers/authentication.py +++ b/backend/app/routers/authentication.py @@ -8,8 +8,11 @@ ) from passlib.hash import bcrypt +from beanie import PydanticObjectId + from app.keycloak_auth import create_user, get_current_user from app.keycloak_auth import keycloak_openid +from app.models.datasets import DatasetDB from app.models.users import UserDB, UserIn, UserOut, UserLogin router = APIRouter() @@ -55,6 +58,7 @@ async def save_user(userIn: UserIn): else: user = UserDB( **userIn.dict(), + admin=False, hashed_password=hashed_password, keycloak_id=keycloak_user, ) @@ -93,26 +97,39 @@ async def authenticate_user(email: str, password: str): return user -@router.post("/users/set_admin/{useremail}", response_model=UserOut) -async def set_admin(useremail: str, current_username=Depends(get_current_user)): +async def get_admin(dataset_id: str = None, current_username=Depends(get_current_user)): if ( - current_user := await UserDB.find_one(UserDB.email == current_username.email) - ) is not None: - if current_user.admin: - if (user := await UserDB.find_one(UserDB.email == useremail)) is not None: - user.admin = True - await user.replace() - return user.dict() - else: - raise HTTPException( - status_code=404, detail=f"User {useremail} not found" - ) + dataset_id + and (dataset_db := await DatasetDB.get(PydanticObjectId(dataset_id))) + is not None + ): + return DatasetDB.creator.email == current_username.email + else: + if ( + current_user := await UserDB.find_one( + UserDB.email 
== current_username.email + ) + ) is not None: + return current_user.admin else: raise HTTPException( - status_code=403, - detail=f"User {current_username.email} is not an admin. Only admin can make others admin.", + status_code=404, detail=f"User {current_username.email} not found" ) + + +@router.post("/users/set_admin/{useremail}", response_model=UserOut) +async def set_admin( + useremail: str, current_username=Depends(get_current_user), admin=Depends(get_admin) +): + if admin: + if (user := await UserDB.find_one(UserDB.email == useremail)) is not None: + user.admin = True + await user.replace() + return user.dict() + else: + raise HTTPException(status_code=404, detail=f"User {useremail} not found") else: raise HTTPException( - status_code=404, detail=f"User {current_username.email} not found" + status_code=403, + detail=f"User {current_username.email} is not an admin. Only admin can make others admin.", ) diff --git a/backend/app/routers/keycloak.py b/backend/app/routers/keycloak.py index 71e84208f..c1253bcb7 100644 --- a/backend/app/routers/keycloak.py +++ b/backend/app/routers/keycloak.py @@ -141,6 +141,7 @@ async def auth(code: str) -> RedirectResponse: last_name=family_name, hashed_password="", keycloak_id=keycloak_id, + admin=False, ) matched_user = await UserDB.find_one(UserDB.email == email) if matched_user is None: diff --git a/frontend/src/openapi/v2/models/UserIn.ts b/frontend/src/openapi/v2/models/UserIn.ts index 80ecd905f..2796f1e9f 100644 --- a/frontend/src/openapi/v2/models/UserIn.ts +++ b/frontend/src/openapi/v2/models/UserIn.ts @@ -6,6 +6,6 @@ export type UserIn = { email: string; first_name: string; last_name: string; - admin?: boolean; + admin: boolean; password: string; } diff --git a/frontend/src/openapi/v2/models/UserOut.ts b/frontend/src/openapi/v2/models/UserOut.ts index 508aa4cdc..100fb90bb 100644 --- a/frontend/src/openapi/v2/models/UserOut.ts +++ b/frontend/src/openapi/v2/models/UserOut.ts @@ -19,6 +19,6 @@ export type UserOut = { 
email: string; first_name: string; last_name: string; - admin?: boolean; + admin: boolean; id?: string; } diff --git a/frontend/src/openapi/v2/services/LoginService.ts b/frontend/src/openapi/v2/services/LoginService.ts index 54d843155..7d982cc2d 100644 --- a/frontend/src/openapi/v2/services/LoginService.ts +++ b/frontend/src/openapi/v2/services/LoginService.ts @@ -52,15 +52,20 @@ export class LoginService { /** * Set Admin * @param useremail + * @param datasetId * @returns UserOut Successful Response * @throws ApiError */ public static setAdminApiV2UsersSetAdminUseremailPost( useremail: string, + datasetId?: string, ): CancelablePromise { return __request({ method: 'POST', path: `/api/v2/users/set_admin/${useremail}`, + query: { + 'dataset_id': datasetId, + }, errors: { 422: `Validation Error`, }, From dac6ee297d0fc7409caae6a3aedd62aeb4551061 Mon Sep 17 00:00:00 2001 From: Dipannita Dey Date: Tue, 31 Oct 2023 20:29:40 -0500 Subject: [PATCH 06/43] fixed pytest failure --- backend/app/models/users.py | 2 +- frontend/src/openapi/v2/models/UserIn.ts | 1 - frontend/src/openapi/v2/models/UserOut.ts | 1 - 3 files changed, 1 insertion(+), 3 deletions(-) diff --git a/backend/app/models/users.py b/backend/app/models/users.py index 4d29fb4d3..beee19b36 100644 --- a/backend/app/models/users.py +++ b/backend/app/models/users.py @@ -12,7 +12,6 @@ class UserBase(BaseModel): email: EmailStr first_name: str last_name: str - admin: bool class UserIn(UserBase): @@ -32,6 +31,7 @@ class Settings: class UserDB(UserDoc): hashed_password: str = Field() keycloak_id: Optional[str] = None + admin: bool def verify_password(self, password): return pwd_context.verify(password, self.hashed_password) diff --git a/frontend/src/openapi/v2/models/UserIn.ts b/frontend/src/openapi/v2/models/UserIn.ts index 2796f1e9f..f7e7cbfa4 100644 --- a/frontend/src/openapi/v2/models/UserIn.ts +++ b/frontend/src/openapi/v2/models/UserIn.ts @@ -6,6 +6,5 @@ export type UserIn = { email: string; first_name: string; 
last_name: string; - admin: boolean; password: string; } diff --git a/frontend/src/openapi/v2/models/UserOut.ts b/frontend/src/openapi/v2/models/UserOut.ts index 100fb90bb..42c7657d5 100644 --- a/frontend/src/openapi/v2/models/UserOut.ts +++ b/frontend/src/openapi/v2/models/UserOut.ts @@ -19,6 +19,5 @@ export type UserOut = { email: string; first_name: string; last_name: string; - admin: boolean; id?: string; } From a377851931c7c12b888129aa1dba11f088a78fbf Mon Sep 17 00:00:00 2001 From: Dipannita Dey Date: Wed, 1 Nov 2023 13:20:46 -0500 Subject: [PATCH 07/43] Adding admin dependency in authorization of dataset,files, metadata, search --- backend/app/deps/authorization_deps.py | 26 +++++++++++ backend/app/routers/elasticsearch.py | 13 ++++-- .../src/openapi/v2/services/FilesService.ts | 45 +++++++++++++++++++ .../src/openapi/v2/services/GroupsService.ts | 26 +++++++++++ .../openapi/v2/services/MetadataService.ts | 33 ++++++++++++++ 5 files changed, 139 insertions(+), 4 deletions(-) diff --git a/backend/app/deps/authorization_deps.py b/backend/app/deps/authorization_deps.py index f46f290fe..f0b75d09d 100644 --- a/backend/app/deps/authorization_deps.py +++ b/backend/app/deps/authorization_deps.py @@ -10,6 +10,7 @@ from app.models.groups import GroupOut, GroupDB from app.models.metadata import MetadataDB from app.models.pyobjectid import PyObjectId +from app.routers.authentication import get_admin async def get_role( @@ -107,8 +108,15 @@ async def __call__( self, dataset_id: str, current_user: str = Depends(get_current_username), + admin: bool = Depends(get_admin), ): # TODO: Make sure we enforce only one role per user per dataset, or find_one could yield wrong answer here. 
+ + # If the current user is admin, user has access irrespective of any role assigned + if admin: + return True + + # Else check role assigned to the user authorization = await AuthorizationDB.find_one( AuthorizationDB.dataset_id == PyObjectId(dataset_id), Or( @@ -142,7 +150,13 @@ async def __call__( self, file_id: str, current_user: str = Depends(get_current_username), + admin: bool = Depends(get_admin), ): + # If the current user is admin, user has access irrespective of any role assigned + if admin: + return True + + # Else check role assigned to the user if (file := await FileDB.get(PydanticObjectId(file_id))) is not None: authorization = await AuthorizationDB.find_one( AuthorizationDB.dataset_id == file.dataset_id, @@ -173,7 +187,13 @@ async def __call__( self, metadata_id: str, current_user: str = Depends(get_current_username), + admin: bool = Depends(get_admin), ): + # If the current user is admin, user has access irrespective of any role assigned + if admin: + return True + + # Else check role assigned to the user if (md_out := await MetadataDB.get(PydanticObjectId(metadata_id))) is not None: resource_type = md_out.resource.collection resource_id = md_out.resource.resource_id @@ -233,7 +253,13 @@ async def __call__( self, group_id: str, current_user: str = Depends(get_current_username), + admin: bool = Depends(get_admin), ): + # If the current user is admin, user has access irrespective of any role assigned + if admin: + return True + + # Else check role assigned to the user if (group := await GroupDB.get(group_id)) is not None: if group.creator == current_user: # Creator can do everything diff --git a/backend/app/routers/elasticsearch.py b/backend/app/routers/elasticsearch.py index 9d402b309..4c0c63057 100644 --- a/backend/app/routers/elasticsearch.py +++ b/backend/app/routers/elasticsearch.py @@ -5,14 +5,16 @@ from app.config import settings from app.keycloak_auth import get_current_username +from app.routers.authentication import get_admin from 
app.search.connect import connect_elasticsearch, search_index router = APIRouter() -def _add_permissions_clause(query, username: str): +def _add_permissions_clause(query, username: str, admin: bool = Depends(get_admin)): """Append filter to Elasticsearch object that restricts permissions based on the requesting user.""" # TODO: Add public filter once added + user_clause = { "bool": { "should": [ @@ -29,9 +31,12 @@ def _add_permissions_clause(query, username: str): continue # last line json_content = json.loads(content) if "query" in json_content: - json_content["query"] = { - "bool": {"must": [user_clause, json_content["query"]]} - } + if admin: + json_content["query"] = {"bool": {"must": [json_content["query"]]}} + else: + json_content["query"] = { + "bool": {"must": [user_clause, json_content["query"]]} + } updated_query += json.dumps(json_content) + "\n" return updated_query.encode() diff --git a/frontend/src/openapi/v2/services/FilesService.ts b/frontend/src/openapi/v2/services/FilesService.ts index e678fbc5a..6a48df5f9 100644 --- a/frontend/src/openapi/v2/services/FilesService.ts +++ b/frontend/src/openapi/v2/services/FilesService.ts @@ -14,6 +14,7 @@ export class FilesService { * @param fileId * @param version * @param increment + * @param datasetId * @returns any Successful Response * @throws ApiError */ @@ -21,6 +22,7 @@ export class FilesService { fileId: string, version?: number, increment: boolean = true, + datasetId?: string, ): CancelablePromise { return __request({ method: 'GET', @@ -28,6 +30,7 @@ export class FilesService { query: { 'version': version, 'increment': increment, + 'dataset_id': datasetId, }, errors: { 422: `Validation Error`, @@ -39,16 +42,21 @@ export class FilesService { * Update File * @param fileId * @param formData + * @param datasetId * @returns FileOut Successful Response * @throws ApiError */ public static updateFileApiV2FilesFileIdPut( fileId: string, formData: Body_update_file_api_v2_files__file_id__put, + datasetId?: string, 
): CancelablePromise { return __request({ method: 'PUT', path: `/api/v2/files/${fileId}`, + query: { + 'dataset_id': datasetId, + }, formData: formData, mediaType: 'multipart/form-data', errors: { @@ -60,15 +68,20 @@ export class FilesService { /** * Delete File * @param fileId + * @param datasetId * @returns any Successful Response * @throws ApiError */ public static deleteFileApiV2FilesFileIdDelete( fileId: string, + datasetId?: string, ): CancelablePromise { return __request({ method: 'DELETE', path: `/api/v2/files/${fileId}`, + query: { + 'dataset_id': datasetId, + }, errors: { 422: `Validation Error`, }, @@ -80,6 +93,7 @@ export class FilesService { * @param fileId * @param version * @param expiresInSeconds + * @param datasetId * @returns any Successful Response * @throws ApiError */ @@ -87,6 +101,7 @@ export class FilesService { fileId: string, version?: number, expiresInSeconds: number = 3600, + datasetId?: string, ): CancelablePromise { return __request({ method: 'GET', @@ -94,6 +109,7 @@ export class FilesService { query: { 'version': version, 'expires_in_seconds': expiresInSeconds, + 'dataset_id': datasetId, }, errors: { 422: `Validation Error`, @@ -104,15 +120,20 @@ export class FilesService { /** * Get File Summary * @param fileId + * @param datasetId * @returns FileOut Successful Response * @throws ApiError */ public static getFileSummaryApiV2FilesFileIdSummaryGet( fileId: string, + datasetId?: string, ): CancelablePromise { return __request({ method: 'GET', path: `/api/v2/files/${fileId}/summary`, + query: { + 'dataset_id': datasetId, + }, errors: { 422: `Validation Error`, }, @@ -123,18 +144,21 @@ export class FilesService { * Get File Version Details * @param fileId * @param versionNum + * @param datasetId * @returns FileOut Successful Response * @throws ApiError */ public static getFileVersionDetailsApiV2FilesFileIdVersionDetailsGet( fileId: string, versionNum?: number, + datasetId?: string, ): CancelablePromise { return __request({ method: 'GET', 
path: `/api/v2/files/${fileId}/version_details`, query: { 'version_num': versionNum, + 'dataset_id': datasetId, }, errors: { 422: `Validation Error`, @@ -147,6 +171,7 @@ export class FilesService { * @param fileId * @param skip * @param limit + * @param datasetId * @returns FileVersion Successful Response * @throws ApiError */ @@ -154,6 +179,7 @@ export class FilesService { fileId: string, skip?: number, limit: number = 20, + datasetId?: string, ): CancelablePromise> { return __request({ method: 'GET', @@ -161,6 +187,7 @@ export class FilesService { query: { 'skip': skip, 'limit': limit, + 'dataset_id': datasetId, }, errors: { 422: `Validation Error`, @@ -172,6 +199,7 @@ export class FilesService { * Post File Extract * @param fileId * @param extractorName + * @param datasetId * @param requestBody * @returns any Successful Response * @throws ApiError @@ -179,6 +207,7 @@ export class FilesService { public static postFileExtractApiV2FilesFileIdExtractPost( fileId: string, extractorName: string, + datasetId?: string, requestBody?: any, ): CancelablePromise { return __request({ @@ -186,6 +215,7 @@ export class FilesService { path: `/api/v2/files/${fileId}/extract`, query: { 'extractorName': extractorName, + 'dataset_id': datasetId, }, body: requestBody, mediaType: 'application/json', @@ -206,15 +236,20 @@ export class FilesService { * credentials: credentials of logged in user * rabbitmq_client: Rabbitmq Client * @param fileId + * @param datasetId * @returns any Successful Response * @throws ApiError */ public static resubmitFileExtractionsApiV2FilesFileIdResubmitExtractPost( fileId: string, + datasetId?: string, ): CancelablePromise { return __request({ method: 'POST', path: `/api/v2/files/${fileId}/resubmit_extract`, + query: { + 'dataset_id': datasetId, + }, errors: { 422: `Validation Error`, }, @@ -224,15 +259,20 @@ export class FilesService { /** * Download File Thumbnail * @param fileId + * @param datasetId * @returns any Successful Response * @throws ApiError */ 
public static downloadFileThumbnailApiV2FilesFileIdThumbnailGet( fileId: string, + datasetId?: string, ): CancelablePromise { return __request({ method: 'GET', path: `/api/v2/files/${fileId}/thumbnail`, + query: { + 'dataset_id': datasetId, + }, errors: { 422: `Validation Error`, }, @@ -243,16 +283,21 @@ export class FilesService { * Add File Thumbnail * @param fileId * @param thumbnailId + * @param datasetId * @returns FileOut Successful Response * @throws ApiError */ public static addFileThumbnailApiV2FilesFileIdThumbnailThumbnailIdPatch( fileId: string, thumbnailId: string, + datasetId?: string, ): CancelablePromise { return __request({ method: 'PATCH', path: `/api/v2/files/${fileId}/thumbnail/${thumbnailId}`, + query: { + 'dataset_id': datasetId, + }, errors: { 422: `Validation Error`, }, diff --git a/frontend/src/openapi/v2/services/GroupsService.ts b/frontend/src/openapi/v2/services/GroupsService.ts index b1efef730..6c20074ce 100644 --- a/frontend/src/openapi/v2/services/GroupsService.ts +++ b/frontend/src/openapi/v2/services/GroupsService.ts @@ -93,15 +93,20 @@ export class GroupsService { /** * Get Group * @param groupId + * @param datasetId * @returns GroupOut Successful Response * @throws ApiError */ public static getGroupApiV2GroupsGroupIdGet( groupId: string, + datasetId?: string, ): CancelablePromise { return __request({ method: 'GET', path: `/api/v2/groups/${groupId}`, + query: { + 'dataset_id': datasetId, + }, errors: { 422: `Validation Error`, }, @@ -112,16 +117,21 @@ export class GroupsService { * Edit Group * @param groupId * @param requestBody + * @param datasetId * @returns GroupOut Successful Response * @throws ApiError */ public static editGroupApiV2GroupsGroupIdPut( groupId: string, requestBody: GroupBase, + datasetId?: string, ): CancelablePromise { return __request({ method: 'PUT', path: `/api/v2/groups/${groupId}`, + query: { + 'dataset_id': datasetId, + }, body: requestBody, mediaType: 'application/json', errors: { @@ -133,15 +143,20 @@ 
export class GroupsService { /** * Delete Group * @param groupId + * @param datasetId * @returns GroupOut Successful Response * @throws ApiError */ public static deleteGroupApiV2GroupsGroupIdDelete( groupId: string, + datasetId?: string, ): CancelablePromise { return __request({ method: 'DELETE', path: `/api/v2/groups/${groupId}`, + query: { + 'dataset_id': datasetId, + }, errors: { 422: `Validation Error`, }, @@ -154,6 +169,7 @@ export class GroupsService { * @param groupId * @param username * @param role + * @param datasetId * @returns GroupOut Successful Response * @throws ApiError */ @@ -161,12 +177,14 @@ export class GroupsService { groupId: string, username: string, role?: string, + datasetId?: string, ): CancelablePromise { return __request({ method: 'POST', path: `/api/v2/groups/${groupId}/add/${username}`, query: { 'role': role, + 'dataset_id': datasetId, }, errors: { 422: `Validation Error`, @@ -179,16 +197,21 @@ export class GroupsService { * Remove a user from a group. * @param groupId * @param username + * @param datasetId * @returns GroupOut Successful Response * @throws ApiError */ public static removeMemberApiV2GroupsGroupIdRemoveUsernamePost( groupId: string, username: string, + datasetId?: string, ): CancelablePromise { return __request({ method: 'POST', path: `/api/v2/groups/${groupId}/remove/${username}`, + query: { + 'dataset_id': datasetId, + }, errors: { 422: `Validation Error`, }, @@ -201,6 +224,7 @@ export class GroupsService { * @param groupId * @param username * @param role + * @param datasetId * @returns GroupOut Successful Response * @throws ApiError */ @@ -208,12 +232,14 @@ export class GroupsService { groupId: string, username: string, role: string, + datasetId?: string, ): CancelablePromise { return __request({ method: 'PUT', path: `/api/v2/groups/${groupId}/update/${username}`, query: { 'role': role, + 'dataset_id': datasetId, }, errors: { 422: `Validation Error`, diff --git a/frontend/src/openapi/v2/services/MetadataService.ts 
b/frontend/src/openapi/v2/services/MetadataService.ts index caeda4b59..3090fc9c6 100644 --- a/frontend/src/openapi/v2/services/MetadataService.ts +++ b/frontend/src/openapi/v2/services/MetadataService.ts @@ -134,15 +134,20 @@ export class MetadataService { * Delete Metadata * Delete metadata by specific ID. * @param metadataId + * @param datasetId * @returns any Successful Response * @throws ApiError */ public static deleteMetadataApiV2MetadataMetadataIdDelete( metadataId: string, + datasetId?: string, ): CancelablePromise { return __request({ method: 'DELETE', path: `/api/v2/metadata/${metadataId}`, + query: { + 'dataset_id': datasetId, + }, errors: { 422: `Validation Error`, }, @@ -158,16 +163,21 @@ export class MetadataService { * Metadata document that was updated * @param metadataId * @param requestBody + * @param datasetId * @returns MetadataOut Successful Response * @throws ApiError */ public static updateMetadataApiV2MetadataMetadataIdPatch( metadataId: string, requestBody: MetadataPatch, + datasetId?: string, ): CancelablePromise { return __request({ method: 'PATCH', path: `/api/v2/metadata/${metadataId}`, + query: { + 'dataset_id': datasetId, + }, body: requestBody, mediaType: 'application/json', errors: { @@ -182,6 +192,7 @@ export class MetadataService { * @param fileId * @param version * @param allVersions + * @param datasetId * @param formData * @returns MetadataOut Successful Response * @throws ApiError @@ -190,6 +201,7 @@ export class MetadataService { fileId: string, version?: number, allVersions: boolean = false, + datasetId?: string, formData?: Body_get_file_metadata_api_v2_files__file_id__metadata_get, ): CancelablePromise> { return __request({ @@ -198,6 +210,7 @@ export class MetadataService { query: { 'version': version, 'all_versions': allVersions, + 'dataset_id': datasetId, }, formData: formData, mediaType: 'application/x-www-form-urlencoded', @@ -215,16 +228,21 @@ export class MetadataService { * Metadata document that was updated * @param 
fileId * @param requestBody + * @param datasetId * @returns MetadataOut Successful Response * @throws ApiError */ public static replaceFileMetadataApiV2FilesFileIdMetadataPut( fileId: string, requestBody: MetadataPatch, + datasetId?: string, ): CancelablePromise { return __request({ method: 'PUT', path: `/api/v2/files/${fileId}/metadata`, + query: { + 'dataset_id': datasetId, + }, body: requestBody, mediaType: 'application/json', errors: { @@ -242,16 +260,21 @@ export class MetadataService { * Metadata document that was added to database * @param fileId * @param requestBody + * @param datasetId * @returns MetadataOut Successful Response * @throws ApiError */ public static addFileMetadataApiV2FilesFileIdMetadataPost( fileId: string, requestBody: MetadataIn, + datasetId?: string, ): CancelablePromise { return __request({ method: 'POST', path: `/api/v2/files/${fileId}/metadata`, + query: { + 'dataset_id': datasetId, + }, body: requestBody, mediaType: 'application/json', errors: { @@ -264,16 +287,21 @@ export class MetadataService { * Delete File Metadata * @param fileId * @param requestBody + * @param datasetId * @returns MetadataOut Successful Response * @throws ApiError */ public static deleteFileMetadataApiV2FilesFileIdMetadataDelete( fileId: string, requestBody: MetadataDelete, + datasetId?: string, ): CancelablePromise { return __request({ method: 'DELETE', path: `/api/v2/files/${fileId}/metadata`, + query: { + 'dataset_id': datasetId, + }, body: requestBody, mediaType: 'application/json', errors: { @@ -291,16 +319,21 @@ export class MetadataService { * Metadata document that was updated * @param fileId * @param requestBody + * @param datasetId * @returns MetadataOut Successful Response * @throws ApiError */ public static updateFileMetadataApiV2FilesFileIdMetadataPatch( fileId: string, requestBody: MetadataPatch, + datasetId?: string, ): CancelablePromise { return __request({ method: 'PATCH', path: `/api/v2/files/${fileId}/metadata`, + query: { + 'dataset_id': 
datasetId, + }, body: requestBody, mediaType: 'application/json', errors: { From dde04813a8db953ec519a5540112e034cdcd7341 Mon Sep 17 00:00:00 2001 From: Dipannita Dey Date: Wed, 1 Nov 2023 16:28:39 -0500 Subject: [PATCH 08/43] allowing admin to view all datasets --- backend/app/routers/datasets.py | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/backend/app/routers/datasets.py b/backend/app/routers/datasets.py index a97d28d76..ee92d3e1a 100644 --- a/backend/app/routers/datasets.py +++ b/backend/app/routers/datasets.py @@ -54,6 +54,7 @@ from app.models.users import UserOut from app.models.thumbnails import ThumbnailDB from app.rabbitmq.listeners import submit_dataset_job +from app.routers.authentication import get_admin from app.routers.files import add_file_entry, remove_file_entry from app.search.connect import ( delete_document_by_id, @@ -210,8 +211,15 @@ async def get_datasets( skip: int = 0, limit: int = 10, mine: bool = False, + admin=Depends(get_admin), ): - if mine: + if admin: + datasets = await DatasetDBViewList.find( + sort=(-DatasetDBViewList.created), + skip=skip, + limit=limit, + ).to_list() + elif mine: datasets = await DatasetDBViewList.find( DatasetDBViewList.creator.email == user_id, sort=(-DatasetDBViewList.created), From 9055a508eafc82ba8937db329d9b8fbcae62e269 Mon Sep 17 00:00:00 2001 From: Dipannita Dey Date: Thu, 2 Nov 2023 13:44:30 -0500 Subject: [PATCH 09/43] adding admin in get_roles functions --- backend/app/routers/authorization.py | 27 ++++++++++++++++++++++++--- 1 file changed, 24 insertions(+), 3 deletions(-) diff --git a/backend/app/routers/authorization.py b/backend/app/routers/authorization.py index 988d6dfd5..9ab9239bd 100644 --- a/backend/app/routers/authorization.py +++ b/backend/app/routers/authorization.py @@ -29,6 +29,7 @@ from app.models.groups import GroupDB from app.models.pyobjectid import PyObjectId from app.models.users import UserDB +from app.routers.authentication import get_admin from 
app.search.index import index_dataset router = APIRouter() @@ -68,18 +69,26 @@ async def save_authorization( async def get_dataset_role( dataset_id: str, current_user=Depends(get_current_username), + admin=Depends(get_admin), ): """Retrieve role of user for a specific dataset.""" + # # admin is a superuser and has all the privileges + # if admin: + # return True # Get group id and the associated users from authorization - if ( - auth_db := await AuthorizationDB.find_one( + if admin: + auth_db = await AuthorizationDB.find_one( + AuthorizationDB.dataset_id == PyObjectId(dataset_id) + ) + else: + auth_db = await AuthorizationDB.find_one( AuthorizationDB.dataset_id == PyObjectId(dataset_id), Or( AuthorizationDB.creator == current_user, AuthorizationDB.user_ids == current_user, ), ) - ) is None: + if auth_db is None: raise HTTPException( status_code=404, detail=f"No authorization found for dataset: {dataset_id}" ) @@ -110,7 +119,11 @@ async def get_file_role( file_id: str, current_user=Depends(get_current_username), role: RoleType = Depends(get_role_by_file), + admin=Depends(get_admin), ): + # admin is a superuser and has all the privileges + if admin: + return RoleType.OWNER """Retrieve role of user for an individual file. Role cannot change between file versions.""" return role @@ -120,7 +133,11 @@ async def get_metadata_role( metadata_id: str, current_user=Depends(get_current_username), role: RoleType = Depends(get_role_by_metadata), + admin=Depends(get_admin), ): + # admin is a superuser and has all the privileges + if admin: + return RoleType.OWNER """Retrieve role of user for group. 
Group roles can be OWNER, EDITOR, or VIEWER (for regular Members).""" return role @@ -130,7 +147,11 @@ async def get_group_role( group_id: str, current_user=Depends(get_current_username), role: RoleType = Depends(get_role_by_group), + admin=Depends(get_admin), ): + # admin is a superuser and has all the privileges + if admin: + return RoleType.OWNER """Retrieve role of user on a particular group (i.e. whether they can change group memberships).""" return role From 801d698146cb6095f75886b9bc972f9e2df2ae21 Mon Sep 17 00:00:00 2001 From: Dipannita Dey Date: Thu, 2 Nov 2023 13:47:11 -0500 Subject: [PATCH 10/43] adding codegen files --- .../v2/services/AuthorizationService.ts | 18 +++++++++++++++--- .../src/openapi/v2/services/DatasetsService.ts | 3 +++ 2 files changed, 18 insertions(+), 3 deletions(-) diff --git a/frontend/src/openapi/v2/services/AuthorizationService.ts b/frontend/src/openapi/v2/services/AuthorizationService.ts index eaf803370..a44faa63a 100644 --- a/frontend/src/openapi/v2/services/AuthorizationService.ts +++ b/frontend/src/openapi/v2/services/AuthorizationService.ts @@ -95,17 +95,21 @@ export class AuthorizationService { /** * Get File Role - * Retrieve role of user for an individual file. Role cannot change between file versions. * @param fileId + * @param datasetId * @returns RoleType Successful Response * @throws ApiError */ public static getFileRoleApiV2AuthorizationsFilesFileIdRoleGet( fileId: string, + datasetId?: string, ): CancelablePromise { return __request({ method: 'GET', path: `/api/v2/authorizations/files/${fileId}/role`, + query: { + 'dataset_id': datasetId, + }, errors: { 422: `Validation Error`, }, @@ -114,17 +118,21 @@ export class AuthorizationService { /** * Get Metadata Role - * Retrieve role of user for group. Group roles can be OWNER, EDITOR, or VIEWER (for regular Members). 
* @param metadataId + * @param datasetId * @returns AuthorizationMetadata Successful Response * @throws ApiError */ public static getMetadataRoleApiV2AuthorizationsMetadataMetadataIdRoleGet( metadataId: string, + datasetId?: string, ): CancelablePromise { return __request({ method: 'GET', path: `/api/v2/authorizations/metadata/${metadataId}/role`, + query: { + 'dataset_id': datasetId, + }, errors: { 422: `Validation Error`, }, @@ -133,17 +141,21 @@ export class AuthorizationService { /** * Get Group Role - * Retrieve role of user on a particular group (i.e. whether they can change group memberships). * @param groupId + * @param datasetId * @returns RoleType Successful Response * @throws ApiError */ public static getGroupRoleApiV2AuthorizationsGroupsGroupIdRoleGet( groupId: string, + datasetId?: string, ): CancelablePromise { return __request({ method: 'GET', path: `/api/v2/authorizations/groups/${groupId}/role`, + query: { + 'dataset_id': datasetId, + }, errors: { 422: `Validation Error`, }, diff --git a/frontend/src/openapi/v2/services/DatasetsService.ts b/frontend/src/openapi/v2/services/DatasetsService.ts index 470b6bb8e..5d2f41d9a 100644 --- a/frontend/src/openapi/v2/services/DatasetsService.ts +++ b/frontend/src/openapi/v2/services/DatasetsService.ts @@ -20,6 +20,7 @@ export class DatasetsService { * @param skip * @param limit * @param mine + * @param datasetId * @returns DatasetOut Successful Response * @throws ApiError */ @@ -27,6 +28,7 @@ export class DatasetsService { skip?: number, limit: number = 10, mine: boolean = false, + datasetId?: string, ): CancelablePromise> { return __request({ method: 'GET', @@ -35,6 +37,7 @@ export class DatasetsService { 'skip': skip, 'limit': limit, 'mine': mine, + 'dataset_id': datasetId, }, errors: { 422: `Validation Error`, From 34364d285d17d8f2cae9b6d7498036dde8e67a7c Mon Sep 17 00:00:00 2001 From: Dipannita Dey Date: Mon, 6 Nov 2023 13:47:40 -0600 Subject: [PATCH 11/43] modifying test --- 
backend/app/tests/test_groups.py | 7 +------ 1 file changed, 1 insertion(+), 6 deletions(-) diff --git a/backend/app/tests/test_groups.py b/backend/app/tests/test_groups.py index cb8e6ad05..8b41a286d 100644 --- a/backend/app/tests/test_groups.py +++ b/backend/app/tests/test_groups.py @@ -118,7 +118,7 @@ def test_member_permissions(client: TestClient, headers: dict): ) assert response.status_code == 200 assert response.json().get("id") is not None - assert response.json().get("role") == "viewer" + assert response.json().get("role") == "owner" # Remove group member & verify no more role response = client.post( @@ -127,11 +127,6 @@ def test_member_permissions(client: TestClient, headers: dict): ) assert response.status_code == 200 assert response.json().get("id") is not None - response = client.get( - f"{settings.API_V2_STR}/authorizations/datasets/{dataset_id}/role", - headers=u_headers, - ) - assert response.status_code == 404 # Change the group role response = client.post( From 105918398cfa04dc027d0c72efe5b75030430fac Mon Sep 17 00:00:00 2001 From: Dipannita Dey Date: Mon, 13 Nov 2023 15:38:08 -0600 Subject: [PATCH 12/43] addressing commnets --- backend/app/routers/authentication.py | 18 +++++++----------- backend/app/routers/authorization.py | 3 --- 2 files changed, 7 insertions(+), 14 deletions(-) diff --git a/backend/app/routers/authentication.py b/backend/app/routers/authentication.py index 77d1a9d53..7513b488a 100644 --- a/backend/app/routers/authentication.py +++ b/backend/app/routers/authentication.py @@ -95,22 +95,18 @@ async def authenticate_user(email: str, password: str): async def get_admin(dataset_id: str = None, current_username=Depends(get_current_user)): if ( + current_user := await UserDB.find_one(UserDB.email == current_username.email) + ) is not None: + if current_user.admin: + return current_user.admin + elif ( dataset_id and (dataset_db := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None ): - return DatasetDB.creator.email == 
current_username.email + return dataset_db.creator.email == current_username.email else: - if ( - current_user := await UserDB.find_one( - UserDB.email == current_username.email - ) - ) is not None: - return current_user.admin - else: - raise HTTPException( - status_code=404, detail=f"User {current_username.email} not found" - ) + return False @router.post("/users/set_admin/{useremail}", response_model=UserOut) diff --git a/backend/app/routers/authorization.py b/backend/app/routers/authorization.py index 9ab9239bd..48bb45d32 100644 --- a/backend/app/routers/authorization.py +++ b/backend/app/routers/authorization.py @@ -72,9 +72,6 @@ async def get_dataset_role( admin=Depends(get_admin), ): """Retrieve role of user for a specific dataset.""" - # # admin is a superuser and has all the privileges - # if admin: - # return True # Get group id and the associated users from authorization if admin: auth_db = await AuthorizationDB.find_one( From 9d780da03d8125dd54f81d3fe7dd969cb5b9d10e Mon Sep 17 00:00:00 2001 From: Dipannita Dey Date: Mon, 13 Nov 2023 15:56:54 -0600 Subject: [PATCH 13/43] adding the test back --- backend/app/tests/test_groups.py | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/backend/app/tests/test_groups.py b/backend/app/tests/test_groups.py index 8b41a286d..cb8e6ad05 100644 --- a/backend/app/tests/test_groups.py +++ b/backend/app/tests/test_groups.py @@ -118,7 +118,7 @@ def test_member_permissions(client: TestClient, headers: dict): ) assert response.status_code == 200 assert response.json().get("id") is not None - assert response.json().get("role") == "owner" + assert response.json().get("role") == "viewer" # Remove group member & verify no more role response = client.post( @@ -127,6 +127,11 @@ def test_member_permissions(client: TestClient, headers: dict): ) assert response.status_code == 200 assert response.json().get("id") is not None + response = client.get( + 
f"{settings.API_V2_STR}/authorizations/datasets/{dataset_id}/role", + headers=u_headers, + ) + assert response.status_code == 404 # Change the group role response = client.post( From 4441f9d2a7af311c291904c1c51970187339d514 Mon Sep 17 00:00:00 2001 From: Dipannita Dey Date: Fri, 17 Nov 2023 10:29:53 -0600 Subject: [PATCH 14/43] Admin mode implementation --- backend/app/deps/authorization_deps.py | 20 ++-- backend/app/routers/authentication.py | 1 + backend/app/routers/authorization.py | 52 +++++++---- backend/app/routers/datasets.py | 21 ++++- backend/app/routers/elasticsearch.py | 6 +- backend/app/routers/files.py | 11 +++ backend/app/routers/groups.py | 6 ++ backend/app/routers/metadata.py | 2 + backend/app/routers/metadata_datasets.py | 5 + backend/app/routers/metadata_files.py | 5 + frontend/src/actions/authorization.js | 26 +++--- frontend/src/actions/dataset.js | 91 +++++++++++-------- frontend/src/actions/file.js | 47 +++++----- frontend/src/actions/folder.js | 6 +- frontend/src/actions/group.js | 36 ++++---- frontend/src/actions/metadata.js | 42 +++++---- frontend/src/actions/user.js | 21 +++++ frontend/src/components/Explore.tsx | 15 ++- frontend/src/components/Layout.tsx | 36 +++++++- .../datasets/ChangeDatasetRoleModal.tsx | 8 +- .../datasets/ChangeGroupDatasetRoleModal.tsx | 8 +- .../src/components/datasets/CreateDataset.tsx | 3 +- frontend/src/components/datasets/Dataset.tsx | 14 +-- .../components/datasets/ShareDatasetModal.tsx | 3 +- .../datasets/ShareGroupDatasetModal.tsx | 7 +- frontend/src/components/files/File.tsx | 12 ++- .../src/components/files/FileActionsMenu.tsx | 8 +- frontend/src/components/files/FileMenu.tsx | 4 +- frontend/src/components/files/UpdateFile.tsx | 9 +- frontend/src/components/files/UploadFile.tsx | 6 +- .../components/files/UploadFileMultiple.tsx | 11 ++- .../src/components/folders/CreateFolder.tsx | 7 +- .../src/components/groups/AddMemberModal.tsx | 4 +- .../components/groups/DeleteGroupModal.tsx | 7 +- 
.../groups/EditDescriptionModal.tsx | 3 +- .../src/components/groups/EditNameModal.tsx | 3 +- frontend/src/components/groups/Group.tsx | 4 +- .../src/components/groups/MembersTable.tsx | 4 +- .../groups/MembersTableUserEntry.tsx | 3 +- .../components/listeners/SubmitExtraction.tsx | 5 +- .../metadata/DisplayListenerMetadata.tsx | 5 +- .../components/metadata/DisplayMetadata.tsx | 5 +- .../src/components/metadata/EditMetadata.tsx | 5 +- .../sharing/GroupAndRoleTableEntry.tsx | 7 +- .../src/components/sharing/SharingTab.tsx | 6 +- .../sharing/UserAndRoleTableEntry.tsx | 9 +- frontend/src/components/users/Profile.tsx | 2 +- .../visualizations/Visualization.tsx | 5 +- .../v2/services/AuthorizationService.ts | 66 ++++++++++++-- .../openapi/v2/services/DatasetsService.ts | 63 +++++++++++++ .../src/openapi/v2/services/FilesService.ts | 33 +++++++ .../src/openapi/v2/services/GroupsService.ts | 18 ++++ .../src/openapi/v2/services/LoginService.ts | 21 +++++ .../openapi/v2/services/MetadataService.ts | 46 ++++++++++ frontend/src/reducers/user.ts | 12 +++ frontend/src/routes.tsx | 11 ++- frontend/src/types/action.ts | 12 +++ frontend/src/types/data.ts | 2 + 58 files changed, 699 insertions(+), 211 deletions(-) diff --git a/backend/app/deps/authorization_deps.py b/backend/app/deps/authorization_deps.py index f0b75d09d..a4a6e015b 100644 --- a/backend/app/deps/authorization_deps.py +++ b/backend/app/deps/authorization_deps.py @@ -107,13 +107,14 @@ def __init__(self, role: str): async def __call__( self, dataset_id: str, + admin_mode: bool = False, current_user: str = Depends(get_current_username), admin: bool = Depends(get_admin), ): # TODO: Make sure we enforce only one role per user per dataset, or find_one could yield wrong answer here. 
- # If the current user is admin, user has access irrespective of any role assigned - if admin: + # If the current user is admin and has turned on admin_mode, user has access irrespective of any role assigned + if admin and admin_mode: return True # Else check role assigned to the user @@ -149,11 +150,12 @@ def __init__(self, role: str): async def __call__( self, file_id: str, + admin_mode: bool = False, current_user: str = Depends(get_current_username), admin: bool = Depends(get_admin), ): - # If the current user is admin, user has access irrespective of any role assigned - if admin: + # If the current user is admin and has turned on admin_mode, user has access irrespective of any role assigned + if admin and admin_mode: return True # Else check role assigned to the user @@ -186,11 +188,12 @@ def __init__(self, role: str): async def __call__( self, metadata_id: str, + admin_mode: bool = False, current_user: str = Depends(get_current_username), admin: bool = Depends(get_admin), ): - # If the current user is admin, user has access irrespective of any role assigned - if admin: + # If the current user is admin and has turned on admin_mode, user has access irrespective of any role assigned + if admin and admin_mode: return True # Else check role assigned to the user @@ -252,11 +255,12 @@ def __init__(self, role: str): async def __call__( self, group_id: str, + admin_mode: bool = False, current_user: str = Depends(get_current_username), admin: bool = Depends(get_admin), ): - # If the current user is admin, user has access irrespective of any role assigned - if admin: + # If the current user is admin and has turned on admin_mode, user has access irrespective of any role assigned + if admin and admin_mode: return True # Else check role assigned to the user diff --git a/backend/app/routers/authentication.py b/backend/app/routers/authentication.py index 7513b488a..b663244f7 100644 --- a/backend/app/routers/authentication.py +++ b/backend/app/routers/authentication.py @@ 
-93,6 +93,7 @@ async def authenticate_user(email: str, password: str): return user +@router.get("/admin") async def get_admin(dataset_id: str = None, current_username=Depends(get_current_user)): if ( current_user := await UserDB.find_one(UserDB.email == current_username.email) diff --git a/backend/app/routers/authorization.py b/backend/app/routers/authorization.py index 48bb45d32..da35c74a0 100644 --- a/backend/app/routers/authorization.py +++ b/backend/app/routers/authorization.py @@ -39,6 +39,7 @@ async def save_authorization( dataset_id: str, authorization_in: AuthorizationBase, + admin_mode: bool = False, user=Depends(get_current_username), allow: bool = Depends(Authorization("editor")), ): @@ -68,12 +69,13 @@ @router.get("/datasets/{dataset_id}/role", response_model=AuthorizationOut) async def get_dataset_role( dataset_id: str, + admin_mode: bool = False, current_user=Depends(get_current_username), admin=Depends(get_admin), ): """Retrieve role of user for a specific dataset.""" # Get group id and the associated users from authorization - if admin: + if admin and admin_mode: auth_db = await AuthorizationDB.find_one( AuthorizationDB.dataset_id == PyObjectId(dataset_id) ) @@ -93,61 +95,68 @@ return auth_db.dict() -@router.get("/datasets/{dataset_id}/role/viewer") +@router.get("/datasets/{dataset_id}/role/viewer") async def get_dataset_role_viewer( - dataset_id: str, allow: bool = Depends(Authorization("viewer")) + dataset_id: str, + admin_mode: bool = False, + allow: bool = Depends(Authorization("viewer")), ): """Used for testing only. Returns true if user has viewer permission on dataset, otherwise throws a 403 Forbidden HTTP exception.
See `routers/authorization.py` for more info.""" return {"dataset_id": dataset_id, "allow": allow} -@router.get("/datasets/{dataset_id}/role/owner") +@router.get("/datasets/{dataset_id}/role/owner") async def get_dataset_role_owner( - dataset_id: str, allow: bool = Depends(Authorization("owner")) + dataset_id: str, + admin_mode: bool = False, + allow: bool = Depends(Authorization("owner")), ): """Used for testing only. Returns true if user has owner permission on dataset, otherwise throws a 403 Forbidden HTTP exception. See `routers/authorization.py` for more info.""" return {"dataset_id": dataset_id, "allow": allow} -@router.get("/files/{file_id}/role", response_model=RoleType) +@router.get("/files/{file_id}/role", response_model=RoleType) async def get_file_role( file_id: str, + admin_mode: bool = False, current_user=Depends(get_current_username), role: RoleType = Depends(get_role_by_file), admin=Depends(get_admin), ): - # admin is a superuser and has all the privileges - if admin: + # admin is a superuser and has all the privileges, only show if the user has turned on the admin mode + if admin and admin_mode: return RoleType.OWNER """Retrieve role of user for an individual file. Role cannot change between file versions.""" return role -@router.get("/metadata/{metadata_id}/role", response_model=AuthorizationMetadata) +@router.get("/metadata/{metadata_id}/role", response_model=AuthorizationMetadata) async def get_metadata_role( metadata_id: str, + admin_mode: bool = False, current_user=Depends(get_current_username), role: RoleType = Depends(get_role_by_metadata), admin=Depends(get_admin), ): - # admin is a superuser and has all the privileges - if admin: + # admin is a superuser and has all the privileges, only show if the user has turned on the admin mode + if admin and admin_mode: return RoleType.OWNER """Retrieve role of user for group.
Group roles can be OWNER, EDITOR, or VIEWER (for regular Members).""" return role -@router.get("/groups/{group_id}/role", response_model=RoleType) +@router.get("/groups/{group_id}/role", response_model=RoleType) async def get_group_role( group_id: str, + admin_mode: bool = False, current_user=Depends(get_current_username), role: RoleType = Depends(get_role_by_group), admin=Depends(get_admin), ): - # admin is a superuser and has all the privileges - if admin: + # admin is a superuser and has all the privileges, only show if the user has turned on the admin mode + if admin and admin_mode: return RoleType.OWNER """Retrieve role of user on a particular group (i.e. whether they can change group memberships).""" return role @@ -161,6 +170,7 @@ async def set_dataset_group_role( dataset_id: PydanticObjectId, group_id: PydanticObjectId, role: RoleType, + admin_mode: bool = False, es=Depends(get_elasticsearchclient), user_id=Depends(get_user), allow: bool = Depends(Authorization("editor")), @@ -169,7 +179,9 @@ if (dataset := await DatasetDB.get(dataset_id)) is not None: if (group := await GroupDB.get(group_id)) is not None: # First, remove any existing role the group has on the dataset - await remove_dataset_group_role(dataset_id, group_id, es, user_id, allow) + await remove_dataset_group_role( + dataset_id, group_id, admin_mode, es, user_id, allow + ) if ( auth_db := await AuthorizationDB.find_one( AuthorizationDB.dataset_id == PyObjectId(dataset_id), @@ -212,6 +224,7 @@ async def set_dataset_user_role( dataset_id: str, username: str, role: RoleType, + admin_mode: bool = False, es=Depends(get_elasticsearchclient), user_id=Depends(get_user), allow: bool = Depends(Authorization("editor")), @@ -221,7 +234,9 @@ if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: if (await UserDB.find_one(UserDB.email == username)) is not None: # First, remove any existing role the user has on the
dataset - await remove_dataset_user_role(dataset_id, username, es, user_id, allow) + await remove_dataset_user_role( + dataset_id, username, admin_mode, es, user_id, allow + ) auth_db = await AuthorizationDB.find_one( AuthorizationDB.dataset_id == PyObjectId(dataset_id), AuthorizationDB.role == role, @@ -268,6 +283,7 @@ async def set_dataset_user_role( async def remove_dataset_group_role( dataset_id: PydanticObjectId, group_id: PydanticObjectId, + admin_mode: bool = False, es=Depends(get_elasticsearchclient), user_id=Depends(get_user), allow: bool = Depends(Authorization("editor")), @@ -303,6 +319,7 @@ async def remove_dataset_user_role( dataset_id: str, username: str, + admin_mode: bool = False, es=Depends(get_elasticsearchclient), user_id=Depends(get_user), allow: bool = Depends(Authorization("editor")), @@ -328,9 +345,10 @@ raise HTTPException(status_code=404, detail=f"Dataset {dataset_id} not found") -@router.get("/datasets/{dataset_id}/roles", response_model=DatasetRoles) +@router.get("/datasets/{dataset_id}/roles", response_model=DatasetRoles) async def get_dataset_roles( dataset_id: str, + admin_mode: bool = False, allow: bool = Depends(Authorization("editor")), ): """Get a list of all users and groups that have assigned roles on this dataset.""" diff --git a/backend/app/routers/datasets.py b/backend/app/routers/datasets.py index ff9304f00..0a21139ed 100644 --- a/backend/app/routers/datasets.py +++ b/backend/app/routers/datasets.py @@ -206,13 +206,14 @@ async def save_dataset( @router.get("", response_model=List[DatasetOut]) async def get_datasets( + admin_mode: bool = False, user_id=Depends(get_user), skip: int = 0, limit: int = 10, mine: bool = False, admin=Depends(get_admin), ): - if admin: + if admin_mode and admin: datasets = await DatasetDBViewList.find( sort=(-DatasetDBViewList.created), skip=skip, @@ -242,6 +243,7 @@ @router.get("/{dataset_id}",
response_model=DatasetOut) async def get_dataset( dataset_id: str, + admin_mode: bool = False, allow: bool = Depends(Authorization("viewer")), ): if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: @@ -252,11 +254,12 @@ async def get_dataset( @router.get("/{dataset_id}/files", response_model=List[FileOut]) async def get_dataset_files( dataset_id: str, + admin_mode: bool = False, folder_id: Optional[str] = None, user_id=Depends(get_user), - allow: bool = Depends(Authorization("viewer")), skip: int = 0, limit: int = 10, + allow: bool = Depends(Authorization("viewer")), ): query = [ FileDBViewList.dataset_id == ObjectId(dataset_id), @@ -275,6 +278,7 @@ async def get_dataset_files( async def edit_dataset( dataset_id: str, dataset_info: DatasetBase, + admin_mode: bool = False, user=Depends(get_current_user), es=Depends(dependencies.get_elasticsearchclient), allow: bool = Depends(Authorization("editor")), @@ -295,6 +299,7 @@ async def edit_dataset( async def patch_dataset( dataset_id: str, dataset_info: DatasetPatch, + admin_mode: bool = False, user=Depends(get_current_user), es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), allow: bool = Depends(Authorization("editor")), @@ -316,6 +321,7 @@ async def patch_dataset( @router.delete("/{dataset_id}") async def delete_dataset( dataset_id: str, + admin_mode: bool = False, fs: Minio = Depends(dependencies.get_fs), es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), allow: bool = Depends(Authorization("editor")), @@ -346,6 +352,7 @@ async def delete_dataset( async def add_folder( dataset_id: str, folder_in: FolderIn, + admin_mode: bool = False, user=Depends(get_current_user), allow: bool = Depends(Authorization("uploader")), ): @@ -367,11 +374,12 @@ async def add_folder( @router.get("/{dataset_id}/folders", response_model=List[FolderOut]) async def get_dataset_folders( dataset_id: str, + admin_mode: bool = False, parent_folder: Optional[str] = None, 
user_id=Depends(get_user), - allow: bool = Depends(Authorization("viewer")), skip: int = 0, limit: int = 10, + allow: bool = Depends(Authorization("viewer")), ): if (await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: query = [ @@ -394,6 +402,7 @@ async def get_dataset_folders( async def delete_folder( dataset_id: str, folder_id: str, + admin_mode: bool = False, fs: Minio = Depends(dependencies.get_fs), es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), allow: bool = Depends(Authorization("editor")), @@ -436,6 +445,7 @@ async def save_file( user=Depends(get_current_user), fs: Minio = Depends(dependencies.get_fs), file: UploadFile = File(...), + admin_mode: bool = False, es=Depends(dependencies.get_elasticsearchclient), rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), allow: bool = Depends(Authorization("uploader")), @@ -473,6 +483,7 @@ async def save_file( async def save_files( dataset_id: str, files: List[UploadFile], + admin_mode: bool = False, folder_id: Optional[str] = None, user=Depends(get_current_user), fs: Minio = Depends(dependencies.get_fs), @@ -593,6 +604,7 @@ async def create_dataset_from_zip( @router.get("/{dataset_id}/download", response_model=DatasetOut) async def download_dataset( dataset_id: str, + admin_mode: bool = False, user=Depends(get_current_user), fs: Minio = Depends(dependencies.get_fs), allow: bool = Depends(Authorization("viewer")), @@ -753,6 +765,7 @@ async def get_dataset_extract( dataset_id: str, extractorName: str, request: Request, + admin_mode: bool = False, # parameters don't have a fixed model shape parameters: dict = None, user=Depends(get_current_user), @@ -778,6 +791,7 @@ async def get_dataset_extract( @router.get("/{dataset_id}/thumbnail") async def download_dataset_thumbnail( dataset_id: str, + admin_mode: bool = False, fs: Minio = Depends(dependencies.get_fs), allow: bool = Depends(Authorization("viewer")), ): @@ -806,6 +820,7 @@ async def download_dataset_thumbnail( async 
def add_dataset_thumbnail( dataset_id: str, thumbnail_id: str, + admin_mode: bool = False, allow: bool = Depends(Authorization("editor")), ): if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: diff --git a/backend/app/routers/elasticsearch.py b/backend/app/routers/elasticsearch.py index 4c0c63057..2a99cdc3a 100644 --- a/backend/app/routers/elasticsearch.py +++ b/backend/app/routers/elasticsearch.py @@ -11,7 +11,9 @@ router = APIRouter() -def _add_permissions_clause(query, username: str, admin: bool = Depends(get_admin)): +def _add_permissions_clause( + query, username: str, admin_mode: bool = False, admin: bool = Depends(get_admin) +): """Append filter to Elasticsearch object that restricts permissions based on the requesting user.""" # TODO: Add public filter once added @@ -31,7 +33,7 @@ def _add_permissions_clause(query, username: str, admin: bool = Depends(get_admi continue # last line json_content = json.loads(content) if "query" in json_content: - if admin: + if admin_mode and admin: json_content["query"] = {"bool": {"must": [json_content["query"]]}} else: json_content["query"] = { diff --git a/backend/app/routers/files.py b/backend/app/routers/files.py index 474c554a9..06e97fb22 100644 --- a/backend/app/routers/files.py +++ b/backend/app/routers/files.py @@ -178,6 +178,7 @@ async def remove_file_entry( @router.put("/{file_id}", response_model=FileOut) async def update_file( file_id: str, + admin_mode: bool = False, token=Depends(get_token), user=Depends(get_current_user), fs: Minio = Depends(dependencies.get_fs), @@ -268,6 +269,7 @@ async def update_file( @router.get("/{file_id}") async def download_file( file_id: str, + admin_mode: bool = False, version: Optional[int] = None, increment: Optional[bool] = True, fs: Minio = Depends(dependencies.get_fs), @@ -309,6 +311,7 @@ async def download_file( @router.get("/{file_id}/url/") async def download_file_url( file_id: str, + admin_mode: bool = False, version: Optional[int] = None, 
expires_in_seconds: Optional[int] = 3600, external_fs: Minio = Depends(dependencies.get_external_fs), @@ -361,6 +364,7 @@ async def download_file_url( @router.delete("/{file_id}") async def delete_file( file_id: str, + admin_mode: bool = False, fs: Minio = Depends(dependencies.get_fs), es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), allow: bool = Depends(FileAuthorization("editor")), @@ -375,6 +379,7 @@ async def delete_file( @router.get("/{file_id}/summary", response_model=FileOut) async def get_file_summary( file_id: str, + admin_mode: bool = False, allow: bool = Depends(FileAuthorization("viewer")), ): if (file := await FileDB.get(PydanticObjectId(file_id))) is not None: @@ -389,6 +394,7 @@ async def get_file_summary( @router.get("/{file_id}/version_details", response_model=FileOut) async def get_file_version_details( file_id: str, + admin_mode: bool = False, version_num: Optional[int] = 0, allow: bool = Depends(FileAuthorization("viewer")), ): @@ -411,6 +417,7 @@ async def get_file_version_details( @router.get("/{file_id}/versions", response_model=List[FileVersion]) async def get_file_versions( file_id: str, + admin_mode: bool = False, skip: int = 0, limit: int = 20, allow: bool = Depends(FileAuthorization("viewer")), @@ -432,6 +439,7 @@ async def get_file_versions( async def post_file_extract( file_id: str, extractorName: str, + admin_mode: bool = False, # parameters don't have a fixed model shape parameters: dict = None, user=Depends(get_current_user), @@ -463,6 +471,7 @@ async def post_file_extract( @router.post("/{file_id}/resubmit_extract") async def resubmit_file_extractions( file_id: str, + admin_mode: bool = False, user=Depends(get_current_user), credentials: HTTPAuthorizationCredentials = Security(security), rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), @@ -489,6 +498,7 @@ async def resubmit_file_extractions( @router.get("/{file_id}/thumbnail") async def download_file_thumbnail( file_id: str, + admin_mode: 
bool = False, fs: Minio = Depends(dependencies.get_fs), allow: bool = Depends(FileAuthorization("viewer")), ): @@ -514,6 +524,7 @@ async def download_file_thumbnail( async def add_file_thumbnail( file_id: str, thumbnail_id: str, + admin_mode: bool = False, allow: bool = Depends(FileAuthorization("editor")), es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), ): diff --git a/backend/app/routers/groups.py b/backend/app/routers/groups.py index 5c6115ccc..9819ebe63 100644 --- a/backend/app/routers/groups.py +++ b/backend/app/routers/groups.py @@ -86,6 +86,7 @@ async def search_group( @router.get("/{group_id}", response_model=GroupOut) async def get_group( group_id: str, + admin_mode: bool = False, allow: bool = Depends(GroupAuthorization("viewer")), ): if (group := await GroupDB.get(PydanticObjectId(group_id))) is not None: @@ -97,6 +98,7 @@ async def get_group( async def edit_group( group_id: str, group_info: GroupBase, + admin_mode: bool = False, user_id=Depends(get_user), allow: bool = Depends(GroupAuthorization("editor")), ): @@ -166,6 +168,7 @@ async def edit_group( @router.delete("/{group_id}", response_model=GroupOut) async def delete_group( group_id: str, + admin_mode: bool = False, allow: bool = Depends(GroupAuthorization("owner")), ): if (group := await GroupDB.get(PydanticObjectId(group_id))) is not None: @@ -179,6 +182,7 @@ async def delete_group( async def add_member( group_id: str, username: str, + admin_mode: bool = False, role: Optional[str] = None, allow: bool = Depends(GroupAuthorization("editor")), ): @@ -216,6 +220,7 @@ async def add_member( async def remove_member( group_id: str, username: str, + admin_mode: bool = False, allow: bool = Depends(GroupAuthorization("editor")), ): """Remove a user from a group.""" @@ -249,6 +254,7 @@ async def update_member( group_id: str, username: str, role: str, + admin_mode: bool = False, allow: bool = Depends(GroupAuthorization("editor")), ): """Update user role.""" diff --git 
a/backend/app/routers/metadata.py b/backend/app/routers/metadata.py index 21f829e83..d5cfd4de9 100644 --- a/backend/app/routers/metadata.py +++ b/backend/app/routers/metadata.py @@ -153,6 +153,7 @@ async def search_metadata_definition( async def update_metadata( metadata_in: MetadataPatch, metadata_id: str, + admin_mode: bool = False, es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), user=Depends(get_current_user), allow: bool = Depends(MetadataAuthorization("editor")), @@ -174,6 +175,7 @@ async def update_metadata( @router.delete("/{metadata_id}") async def delete_metadata( metadata_id: str, + admin_mode: bool = False, user=Depends(get_current_user), allow: bool = Depends(MetadataAuthorization("editor")), ): diff --git a/backend/app/routers/metadata_datasets.py b/backend/app/routers/metadata_datasets.py index e665c9d68..217f9c468 100644 --- a/backend/app/routers/metadata_datasets.py +++ b/backend/app/routers/metadata_datasets.py @@ -73,6 +73,7 @@ async def add_dataset_metadata( dataset_id: str, user=Depends(get_current_user), es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + admin_mode: bool = False, allow: bool = Depends(Authorization("uploader")), ): """Attach new metadata to a dataset. The body must include a contents field with the JSON metadata, and either a @@ -122,6 +123,7 @@ async def replace_dataset_metadata( dataset_id: str, user=Depends(get_current_user), es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + admin_mode: bool = False, allow: bool = Depends(Authorization("editor")), ): """Update metadata. Any fields provided in the contents JSON will be added or updated in the metadata. If context or @@ -176,6 +178,7 @@ async def update_dataset_metadata( dataset_id: str, user=Depends(get_current_user), es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + admin_mode: bool = False, allow: bool = Depends(Authorization("editor")), ): """Update metadata. 
Any fields provided in the contents JSON will be added or updated in the metadata. If context or @@ -245,6 +248,7 @@ async def get_dataset_metadata( listener_name: Optional[str] = Form(None), listener_version: Optional[float] = Form(None), user=Depends(get_current_user), + admin_mode: bool = False, allow: bool = Depends(Authorization("viewer")), ): if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: @@ -276,6 +280,7 @@ async def delete_dataset_metadata( dataset_id: str, user=Depends(get_current_user), es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + admin_mode: bool = False, allow: bool = Depends(Authorization("editor")), ): if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: diff --git a/backend/app/routers/metadata_files.py b/backend/app/routers/metadata_files.py index 1ce8be4e3..37a210d58 100644 --- a/backend/app/routers/metadata_files.py +++ b/backend/app/routers/metadata_files.py @@ -105,6 +105,7 @@ async def _build_metadata_db_obj( async def add_file_metadata( metadata_in: MetadataIn, file_id: str, + admin_mode: bool = False, user=Depends(get_current_user), es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), allow: bool = Depends(FileAuthorization("uploader")), @@ -157,6 +158,7 @@ async def add_file_metadata( async def replace_file_metadata( metadata_in: MetadataPatch, file_id: str, + admin_mode: bool = False, user=Depends(get_current_user), es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), allow: bool = Depends(FileAuthorization("editor")), @@ -230,6 +232,7 @@ async def replace_file_metadata( async def update_file_metadata( metadata_in: MetadataPatch, file_id: str, + admin_mode: bool = False, user=Depends(get_current_user), es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), allow: bool = Depends(FileAuthorization("editor")), @@ -326,6 +329,7 @@ async def update_file_metadata( @router.get("/{file_id}/metadata", 
response_model=List[MetadataOut]) async def get_file_metadata( file_id: str, + admin_mode: bool = False, version: Optional[int] = None, all_versions: Optional[bool] = False, definition: Optional[str] = Form(None), @@ -385,6 +389,7 @@ async def get_file_metadata( async def delete_file_metadata( metadata_in: MetadataDelete, file_id: str, + admin_mode: bool = False, # version: Optional[int] = Form(None), user=Depends(get_current_user), es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), diff --git a/frontend/src/actions/authorization.js b/frontend/src/actions/authorization.js index 48bcb4123..8ede08f3d 100644 --- a/frontend/src/actions/authorization.js +++ b/frontend/src/actions/authorization.js @@ -1,12 +1,14 @@ import { V2 } from "../openapi"; import { handleErrorsAuthorization, resetFailedReason } from "./common"; +import {fetchDatasets} from "./dataset"; export const RECEIVE_DATASET_ROLE = "RECEIVE_DATASET_ROLE"; -export function fetchDatasetRole(datasetId) { +export function fetchDatasetRole(datasetId, adminMode) { return (dispatch) => { return V2.AuthorizationService.getDatasetRoleApiV2AuthorizationsDatasetsDatasetIdRoleGet( - datasetId + datasetId, + adminMode ) .then((json) => { dispatch({ @@ -20,7 +22,7 @@ export function fetchDatasetRole(datasetId) { }) .catch((reason) => { dispatch( - handleErrorsAuthorization(reason, fetchDatasetRole(datasetId)) + handleErrorsAuthorization(reason, fetchDatasetRole(datasetId, adminMode)) ); }); }; @@ -28,10 +30,11 @@ export function fetchDatasetRole(datasetId) { export const RECEIVE_FILE_ROLE = "RECEIVE_FILE_ROLE"; -export function fetchFileRole(fileId) { +export function fetchFileRole(fileId, adminMode) { return (dispatch) => { - return V2.AuthorizationService.getFileRoleApiV2AuthorizationsFilesFileIdRoleGet( - fileId + return V2.AuthorizationService.getFileRoleApiV2AuthorizationsFilesFileIdRoleAdminModeAdminModeGet( + fileId, + adminMode ) .then((json) => { dispatch({ @@ -44,17 +47,18 @@ export function 
fetchFileRole(fileId) { dispatch(resetFailedReason()); }) .catch((reason) => { - dispatch(handleErrorsAuthorization(reason, fetchFileRole(fileId))); + dispatch(handleErrorsAuthorization(reason, fetchFileRole(fileId, adminMode))); }); }; } export const RECEIVE_GROUP_ROLE = "RECEIVE_GROUP_ROLE"; -export function fetchGroupRole(groupId) { +export function fetchGroupRole(groupId, adminMode) { return (dispatch) => { - return V2.AuthorizationService.getGroupRoleApiV2AuthorizationsGroupsGroupIdRoleGet( - groupId + return V2.AuthorizationService.getGroupRoleApiV2AuthorizationsGroupsGroupIdRoleAdminModeAdminModeGet( + groupId, + adminMode ) .then((json) => { dispatch({ @@ -67,7 +71,7 @@ export function fetchGroupRole(groupId) { dispatch(resetFailedReason()); }) .catch((reason) => { - dispatch(handleErrorsAuthorization(reason, fetchGroupRole(groupId))); + dispatch(handleErrorsAuthorization(reason, fetchGroupRole(groupId, adminMode))); }); }; } diff --git a/frontend/src/actions/dataset.js b/frontend/src/actions/dataset.js index aa970045d..4f917fcf7 100644 --- a/frontend/src/actions/dataset.js +++ b/frontend/src/actions/dataset.js @@ -8,12 +8,13 @@ import { export const SET_DATASET_GROUP_ROLE = "SET_DATASET_GROUP_ROLE"; -export function setDatasetGroupRole(datasetId, groupId, roleType) { +export function setDatasetGroupRole(datasetId, groupId, roleType, adminMode) { return (dispatch) => { return V2.AuthorizationService.setDatasetGroupRoleApiV2AuthorizationsDatasetsDatasetIdGroupRoleGroupIdRolePost( datasetId, groupId, - roleType + roleType, + adminMode ) .then((json) => { dispatch({ @@ -25,7 +26,7 @@ export function setDatasetGroupRole(datasetId, groupId, roleType) { dispatch( handleErrors( reason, - setDatasetGroupRole(datasetId, groupId, roleType) + setDatasetGroupRole(datasetId, groupId, roleType, adminMode) ) ); }); @@ -34,12 +35,13 @@ export function setDatasetGroupRole(datasetId, groupId, roleType) { export const SET_DATASET_USER_ROLE = "SET_DATASET_USER_ROLE"; -export 
function setDatasetUserRole(datasetId, username, roleType) { +export function setDatasetUserRole(datasetId, username, roleType, adminMode) { return (dispatch) => { return V2.AuthorizationService.setDatasetUserRoleApiV2AuthorizationsDatasetsDatasetIdUserRoleUsernameRolePost( datasetId, username, - roleType + roleType, + adminMode ) .then((json) => { dispatch({ @@ -51,7 +53,7 @@ export function setDatasetUserRole(datasetId, username, roleType) { dispatch( handleErrorsInline( reason, - setDatasetUserRole(datasetId, username, roleType) + setDatasetUserRole(datasetId, username, roleType, adminMode) ) ); }); @@ -60,11 +62,12 @@ export function setDatasetUserRole(datasetId, username, roleType) { export const REMOVE_DATASET_GROUP_ROLE = "REMOVE_DATASET_GROUP_ROLE"; -export function removeDatasetGroupRole(datasetId, groupId) { +export function removeDatasetGroupRole(datasetId, groupId, adminMode) { return (dispatch) => { return V2.AuthorizationService.removeDatasetGroupRoleApiV2AuthorizationsDatasetsDatasetIdGroupRoleGroupIdDelete( datasetId, - groupId + groupId, + adminMode ) .then((json) => { dispatch({ @@ -74,7 +77,7 @@ export function removeDatasetGroupRole(datasetId, groupId) { }) .catch((reason) => { dispatch( - handleErrors(reason, removeDatasetGroupRole(datasetId, groupId)) + handleErrors(reason, removeDatasetGroupRole(datasetId, groupId, adminMode)) ); }); }; @@ -82,11 +85,12 @@ export function removeDatasetGroupRole(datasetId, groupId) { export const REMOVE_DATASET_USER_ROLE = "REMOVE_DATASET_USER_ROLE"; -export function removeDatasetUserRole(datasetId, username) { +export function removeDatasetUserRole(datasetId, username, adminMode) { return (dispatch) => { return V2.AuthorizationService.removeDatasetUserRoleApiV2AuthorizationsDatasetsDatasetIdUserRoleUsernameDelete( datasetId, - username + username, + adminMode ) .then((json) => { dispatch({ @@ -96,7 +100,7 @@ export function removeDatasetUserRole(datasetId, username) { }) .catch((reason) => { dispatch( - 
handleErrors(reason, removeDatasetUserRole(datasetId, username)) + handleErrors(reason, removeDatasetUserRole(datasetId, username, adminMode)) ); }); }; @@ -104,14 +108,14 @@ export function removeDatasetUserRole(datasetId, username) { export const RECEIVE_FILES_IN_DATASET = "RECEIVE_FILES_IN_DATASET"; -export function fetchFilesInDataset(datasetId, folderId, skip, limit) { +export function fetchFilesInDataset(datasetId, folderId, skip, limit, adminMode) { return (dispatch) => { return V2.DatasetsService.getDatasetFilesApiV2DatasetsDatasetIdFilesGet( datasetId, + adminMode, folderId, skip, - limit - ) + limit) .then((json) => { dispatch({ type: RECEIVE_FILES_IN_DATASET, files: json, reset: true, }); }) .catch((reason) => { dispatch( - handleErrors(reason, fetchFilesInDataset(datasetId, folderId, skip, limit)) + handleErrors(reason, fetchFilesInDataset(datasetId, folderId, skip, limit, adminMode)) ); }); }; @@ -129,10 +133,11 @@ export function fetchFilesInDataset(datasetId, folderId, skip, limit) { export const RECEIVE_FOLDERS_IN_DATASET = "RECEIVE_FOLDERS_IN_DATASET"; -export function fetchFoldersInDataset(datasetId, parentFolder, skip, limit) { +export function fetchFoldersInDataset(datasetId, parentFolder, skip, limit, adminMode) { return (dispatch) => { return V2.DatasetsService.getDatasetFoldersApiV2DatasetsDatasetIdFoldersGet( datasetId, + adminMode, parentFolder, skip, limit @@ -146,7 +151,7 @@ export function fetchFoldersInDataset(datasetId, parentFolder, skip, limit) { }) .catch((reason) => { dispatch( - handleErrors(reason, fetchFoldersInDataset(datasetId, parentFolder, skip, limit)) + handleErrors(reason, fetchFoldersInDataset(datasetId, parentFolder, skip, limit, adminMode)) ); }); }; @@ -157,12 +162,14 @@ export const SUBMIT_DATASET_EXTRACTION = "SUBMIT_DATASET_EXTRACTION"; export function submitDatasetExtractionAction( datasetId, extractorName, + adminMode, requestBody ) { return
(dispatch) => { return V2.DatasetsService.getDatasetExtractApiV2DatasetsDatasetIdExtractPost( datasetId, extractorName, + adminMode, requestBody ) .then((json) => { @@ -176,7 +183,7 @@ export function submitDatasetExtractionAction( dispatch( handleErrors( reason, - submitDatasetExtractionAction(datasetId, extractorName, requestBody) + submitDatasetExtractionAction(datasetId, extractorName,adminMode, requestBody) ) ); }); @@ -185,11 +192,12 @@ export function submitDatasetExtractionAction( export const UPDATE_DATASET = "UPDATE_DATASET"; -export function updateDataset(datasetId, formData) { +export function updateDataset(datasetId, formData, adminMode) { return (dispatch) => { return V2.DatasetsService.patchDatasetApiV2DatasetsDatasetIdPatch( datasetId, - formData + formData, + adminMode ) .then((json) => { dispatch({ @@ -199,16 +207,16 @@ export function updateDataset(datasetId, formData) { }); }) .catch((reason) => { - dispatch(handleErrors(reason, updateDataset(datasetId, formData))); + dispatch(handleErrors(reason, updateDataset(datasetId, formData, adminMode))); }); }; } export const RECEIVE_DATASET_ABOUT = "RECEIVE_DATASET_ABOUT"; -export function fetchDatasetAbout(id) { +export function fetchDatasetAbout(id, adminMode) { return (dispatch) => { - return V2.DatasetsService.getDatasetApiV2DatasetsDatasetIdGet(id) + return V2.DatasetsService.getDatasetApiV2DatasetsDatasetIdGet(id, adminMode) .then((json) => { dispatch({ type: RECEIVE_DATASET_ABOUT, @@ -217,17 +225,17 @@ export function fetchDatasetAbout(id) { }); }) .catch((reason) => { - dispatch(handleErrors(reason, fetchDatasetAbout(id))); + dispatch(handleErrors(reason, fetchDatasetAbout(id, adminMode))); }); }; } export const RECEIVE_DATASETS = "RECEIVE_DATASETS"; -export function fetchDatasets(skip = 0, limit = 21, mine = false) { +export function fetchDatasets(skip = 0, limit = 21, mine = false, adminMode = false) { return (dispatch) => { // TODO: Parameters for dates? paging? 
- return V2.DatasetsService.getDatasetsApiV2DatasetsGet(skip, limit, mine) + return V2.DatasetsService.getDatasetsApiV2DatasetsGet(adminMode, skip, limit, mine) .then((json) => { dispatch({ type: RECEIVE_DATASETS, @@ -236,7 +244,7 @@ export function fetchDatasets(skip = 0, limit = 21, mine = false) { }); }) .catch((reason) => { - dispatch(handleErrors(reason, fetchDatasets(skip, limit, mine))); + dispatch(handleErrors(reason, fetchDatasets(skip, limit, mine, adminMode))); }); }; } @@ -272,10 +280,11 @@ export function resetDatsetCreated() { export const DELETE_DATASET = "DELETE_DATASET"; -export function datasetDeleted(datasetId) { +export function datasetDeleted(datasetId, adminMode) { return (dispatch) => { return V2.DatasetsService.deleteDatasetApiV2DatasetsDatasetIdDelete( - datasetId + datasetId, + adminMode ) .then((json) => { dispatch({ @@ -285,18 +294,19 @@ export function datasetDeleted(datasetId) { }); }) .catch((reason) => { - dispatch(handleErrors(reason, datasetDeleted(datasetId))); + dispatch(handleErrors(reason, datasetDeleted(datasetId, adminMode))); }); }; } export const FOLDER_ADDED = "FOLDER_ADDED"; -export function folderAdded(datasetId, folderName, parentFolder = null) { +export function folderAdded(datasetId, adminMode, folderName, parentFolder = null) { return (dispatch) => { const folder = { name: folderName, parent_folder: parentFolder }; return V2.DatasetsService.addFolderApiV2DatasetsDatasetIdFoldersPost( datasetId, + adminMode, folder ) .then((json) => { @@ -308,7 +318,7 @@ export function folderAdded(datasetId, folderName, parentFolder = null) { }) .catch((reason) => { dispatch( - handleErrors(reason, folderAdded(datasetId, folderName, parentFolder)) + handleErrors(reason, folderAdded(datasetId, adminMode, folderName, parentFolder)) ); }); }; @@ -316,11 +326,12 @@ export function folderAdded(datasetId, folderName, parentFolder = null) { export const GET_FOLDER_PATH = "GET_FOLDER_PATH"; -export function fetchFolderPath(folderId) { 
+export function fetchFolderPath(folderId, adminMode) { return (dispatch) => { if (folderId != null) { return V2.FoldersService.downloadFolderApiV2FoldersFolderIdPathGet( - folderId + folderId, + adminMode ) .then((json) => { dispatch({ @@ -330,7 +341,7 @@ export function fetchFolderPath(folderId) { }); }) .catch((reason) => { - dispatch(handleErrors(reason, fetchFolderPath(folderId))); + dispatch(handleErrors(reason, fetchFolderPath(folderId, adminMode))); }); } else { dispatch({ @@ -344,10 +355,12 @@ export function fetchFolderPath(folderId) { export const RECEIVE_DATASET_ROLES = "RECEIVE_DATASET_ROLES"; -export function fetchDatasetRoles(datasetId) { +export function fetchDatasetRoles(datasetId, adminMode) { return (dispatch) => { + console.log("adminMode: ", adminMode); return V2.AuthorizationService.getDatasetRolesApiV2AuthorizationsDatasetsDatasetIdRolesGet( - datasetId + datasetId, + adminMode ) .then((json) => { dispatch({ @@ -361,7 +374,7 @@ export function fetchDatasetRoles(datasetId) { }) .catch((reason) => { dispatch( - handleErrorsAuthorization(reason, fetchDatasetRoles(datasetId)) + handleErrorsAuthorization(reason, fetchDatasetRoles(datasetId, adminMode)) ); }); }; diff --git a/frontend/src/actions/file.js b/frontend/src/actions/file.js index 14e98e0b6..b756334ee 100644 --- a/frontend/src/actions/file.js +++ b/frontend/src/actions/file.js @@ -11,7 +11,7 @@ export const RECEIVE_FILE_EXTRACTED_METADATA = export function fetchFileExtractedMetadata(id) { const url = `${config.hostname}/api/v2/files/${id}/metadata`; return (dispatch) => { - return fetch(url, { mode: "cors", headers: getHeader() }) + return fetch(url, { mode: "cors", headers: getHeader()}) .then((response) => { if (response.status === 200) { response.json().then((json) => { @@ -33,9 +33,9 @@ export function fetchFileExtractedMetadata(id) { export const RECEIVE_FILE_SUMMARY = "RECEIVE_FILE_SUMMARY"; -export function fetchFileSummary(id) { +export function fetchFileSummary(id, admin_mode) { 
return (dispatch) => { - return V2.FilesService.getFileSummaryApiV2FilesFileIdSummaryGet(id) + return V2.FilesService.getFileSummaryApiV2FilesFileIdSummaryGet(id, admin_mode) .then((json) => { dispatch({ type: RECEIVE_FILE_SUMMARY, @@ -44,7 +44,7 @@ export function fetchFileSummary(id) { }); }) .catch((reason) => { - dispatch(handleErrors(reason, fetchFileSummary(id))); + dispatch(handleErrors(reason, fetchFileSummary(id, admin_mode))); }); }; } @@ -101,9 +101,9 @@ export function fetchFilePreviews(id) { export const DELETE_FILE = "DELETE_FILE"; -export function fileDeleted(fileId) { +export function fileDeleted(fileId, adminMode) { return (dispatch) => { - return V2.FilesService.deleteFileApiV2FilesFileIdDelete(fileId) + return V2.FilesService.deleteFileApiV2FilesFileIdDelete(fileId, adminMode) .then((json) => { dispatch({ type: DELETE_FILE, @@ -112,21 +112,22 @@ export function fileDeleted(fileId) { }); }) .catch((reason) => { - dispatch(handleErrors(reason, fileDeleted(fileId))); + dispatch(handleErrors(reason, fileDeleted(fileId, adminMode))); }); }; } export const CREATE_FILE = "CREATE_FILE"; -export function createFile(selectedDatasetId, folderId, selectedFile) { +export function createFile(selectedDatasetId, selectedFile, folderId, adminMode) { return (dispatch) => { const formData = new FormData(); formData["file"] = selectedFile; return V2.DatasetsService.saveFileApiV2DatasetsDatasetIdFilesPost( selectedDatasetId, formData, - folderId + folderId, + adminMode ) .then((file) => { dispatch({ @@ -139,7 +140,7 @@ export function createFile(selectedDatasetId, folderId, selectedFile) { dispatch( handleErrors( reason, - createFile(selectedDatasetId, folderId, selectedFile) + createFile(selectedDatasetId, selectedFile, folderId, adminMode) ) ); }); @@ -148,7 +149,7 @@ export function createFile(selectedDatasetId, folderId, selectedFile) { export const CREATE_FILES = "CREATE_FILES"; -export function createFiles(selectedDatasetId, selectedFiles, folderId) { +export 
function createFiles(selectedDatasetId, adminMode, selectedFiles, folderId) { return (dispatch) => { let formData = new FormData(); let tmp = []; @@ -162,6 +163,7 @@ export function createFiles(selectedDatasetId, selectedFiles, folderId) { return V2.DatasetsService.saveFilesApiV2DatasetsDatasetIdFilesMultiplePost( selectedDatasetId, formData, + adminMode, folderId ) .then((files) => { @@ -175,7 +177,7 @@ export function createFiles(selectedDatasetId, selectedFiles, folderId) { dispatch( handleErrors( reason, - createFiles(selectedDatasetId, selectedFiles, folderId) + createFiles(selectedDatasetId, adminMode, selectedFiles, folderId) ) ); }); @@ -206,11 +208,11 @@ export function resetFilesCreated() { export const UPDATE_FILE = "UPDATE_FILE"; -export function updateFile(selectedFile, fileId) { +export function updateFile(selectedFile, fileId, adminMode) { return (dispatch) => { const formData = new FormData(); formData["file"] = selectedFile; - return V2.FilesService.updateFileApiV2FilesFileIdPut(fileId, formData) + return V2.FilesService.updateFileApiV2FilesFileIdPut(fileId, adminMode, formData) .then((file) => { dispatch({ type: UPDATE_FILE, @@ -219,7 +221,7 @@ export function updateFile(selectedFile, fileId) { }); }) .catch((reason) => { - dispatch(handleErrors(reason, updateFile(selectedFile, fileId))); + dispatch(handleErrors(reason, updateFile(selectedFile, fileId, adminMode))); }); }; } @@ -239,9 +241,9 @@ export function changeSelectedVersion(fileId, selectedVersion) { export const RECEIVE_VERSIONS = "RECEIVE_VERSIONS"; -export function fetchFileVersions(fileId) { +export function fetchFileVersions(fileId, adminMode) { return (dispatch) => { - return V2.FilesService.getFileVersionsApiV2FilesFileIdVersionsGet(fileId) + return V2.FilesService.getFileVersionsApiV2FilesFileIdVersionsGet(fileId, adminMode) .then((json) => { // sort by decending order const version = json.sort( @@ -254,7 +256,7 @@ export function fetchFileVersions(fileId) { }); }) .catch((reason) 
=> { - dispatch(handleErrors(reason, fetchFileVersions(fileId))); + dispatch(handleErrors(reason, fetchFileVersions(fileId, adminMode))); }); }; } @@ -315,12 +317,14 @@ export const RESET_FILE_PRESIGNED_URL = "RESET_FILE_PRESIGNED_URL"; export function generateFilePresignedUrl( fileId, + adminMode, fileVersionNum = null, expiresInSeconds = 7 * 24 * 3600 ) { return async (dispatch) => { return V2.FilesService.downloadFileUrlApiV2FilesFileIdUrlGet( fileId, + adminMode, fileVersionNum, expiresInSeconds ) @@ -335,7 +339,7 @@ export function generateFilePresignedUrl( dispatch( handleErrors( reason, - generateFilePresignedUrl(fileId, fileVersionNum, expiresInSeconds) + generateFilePresignedUrl(fileId, adminMode, fileVersionNum, expiresInSeconds) ) ); }); @@ -344,11 +348,12 @@ export function generateFilePresignedUrl( export const SUBMIT_FILE_EXTRACTION = "SUBMIT_FILE_EXTRACTION"; -export function submitFileExtractionAction(fileId, extractorName, requestBody) { +export function submitFileExtractionAction(fileId, extractorName, adminMode, requestBody) { return (dispatch) => { return V2.FilesService.postFileExtractApiV2FilesFileIdExtractPost( fileId, extractorName, + adminMode, requestBody ) .then((json) => { @@ -362,7 +367,7 @@ export function submitFileExtractionAction(fileId, extractorName, requestBody) { dispatch( handleErrors( reason, - submitFileExtractionAction(fileId, extractorName, requestBody) + submitFileExtractionAction(fileId, extractorName, adminMode, requestBody) ) ); }); diff --git a/frontend/src/actions/folder.js b/frontend/src/actions/folder.js index 348305f38..057d95ea3 100644 --- a/frontend/src/actions/folder.js +++ b/frontend/src/actions/folder.js @@ -2,10 +2,10 @@ import {V2} from "../openapi"; import {handleErrors} from "./common"; export const FOLDER_ADDED = "FOLDER_ADDED"; -export function folderAdded(datasetId, folderName, parentFolder = null){ +export function folderAdded(datasetId, adminMode, folderName, parentFolder = null){ return (dispatch) => 
{ const folder = {"name": folderName, "parent_folder": parentFolder} - return V2.DatasetsService.addFolderApiV2DatasetsDatasetIdFoldersPost(datasetId, folder) + return V2.DatasetsService.addFolderApiV2DatasetsDatasetIdFoldersPost(datasetId, folder, adminMode) .then(json => { dispatch({ type: FOLDER_ADDED, @@ -14,7 +14,7 @@ export function folderAdded(datasetId, folderName, parentFolder = null){ }); }) .catch(reason => { - dispatch(handleErrors(reason, folderAdded(datasetId, folderName, parentFolder))); + dispatch(handleErrors(reason, folderAdded(datasetId, adminMode, folderName, parentFolder))); }); }; } diff --git a/frontend/src/actions/group.js b/frontend/src/actions/group.js index 7f17818db..91051e5ae 100644 --- a/frontend/src/actions/group.js +++ b/frontend/src/actions/group.js @@ -39,9 +39,9 @@ export function fetchGroups(skip = 0, limit = 21) { export const DELETE_GROUP = "DELETE_GROUP"; -export function deleteGroup(groupId) { +export function deleteGroup(groupId, adminMode) { return (dispatch) => { - return V2.GroupsService.deleteGroupApiV2GroupsGroupIdDelete(groupId) + return V2.GroupsService.deleteGroupApiV2GroupsGroupIdDelete(groupId, adminMode) .then((json) => { dispatch({ type: DELETE_GROUP, @@ -50,7 +50,7 @@ export function deleteGroup(groupId) { }); }) .catch((reason) => { - dispatch(handleErrors(reason, deleteGroup(groupId))); + dispatch(handleErrors(reason, deleteGroup(groupId, adminMode))); }); }; } @@ -79,9 +79,9 @@ export function searchGroups(searchTerm, skip = 0, limit = 21) { export const RECEIVE_GROUP_ABOUT = "RECEIVE_GROUP_ABOUT"; -export function fetchGroupAbout(id) { +export function fetchGroupAbout(id, adminMode) { return (dispatch) => { - return V2.GroupsService.getGroupApiV2GroupsGroupIdGet(id) + return V2.GroupsService.getGroupApiV2GroupsGroupIdGet(id, adminMode) .then((json) => { dispatch({ type: RECEIVE_GROUP_ABOUT, @@ -90,18 +90,19 @@ export function fetchGroupAbout(id) { }); }) .catch((reason) => { - dispatch(handleErrors(reason, 
fetchGroupAbout(id))); + dispatch(handleErrors(reason, fetchGroupAbout(id, adminMode))); }); }; } export const DELETE_GROUP_MEMBER = "DELETE_GROUP_MEMBER"; -export function deleteGroupMember(groupId, username) { +export function deleteGroupMember(groupId, username, adminMode) { return (dispatch) => { return V2.GroupsService.removeMemberApiV2GroupsGroupIdRemoveUsernamePost( groupId, - username + username, + adminMode ) .then((json) => { dispatch({ @@ -111,18 +112,19 @@ export function deleteGroupMember(groupId, username) { }); }) .catch((reason) => { - dispatch(handleErrors(reason, deleteGroupMember(groupId, username))); + dispatch(handleErrors(reason, deleteGroupMember(groupId, username, adminMode))); }); }; } export const ADD_GROUP_MEMBER = "ADD_GROUP_MEMBER"; -export function addGroupMember(groupId, username, role = "viewer") { +export function addGroupMember(groupId, username, adminMode, role = "viewer") { return (dispatch) => { return V2.GroupsService.addMemberApiV2GroupsGroupIdAddUsernamePost( groupId, username, + adminMode, role ) .then((json) => { @@ -134,7 +136,7 @@ export function addGroupMember(groupId, username, role = "viewer") { }) .catch((reason) => { dispatch( - handleErrorsInline(reason, addGroupMember(groupId, username, role)) + handleErrorsInline(reason, addGroupMember(groupId, username, adminMode, role)) ); }); }; @@ -142,12 +144,13 @@ export function addGroupMember(groupId, username, role = "viewer") { export const ASSIGN_GROUP_MEMBER_ROLE = "ASSIGN_GROUP_MEMBER_ROLE"; -export function assignGroupMemberRole(groupId, username, role = "viewer") { +export function assignGroupMemberRole(groupId, username, role = "viewer", adminMode) { return (dispatch) => { return V2.GroupsService.updateMemberApiV2GroupsGroupIdUpdateUsernamePut( groupId, username, - role + role, + adminMode ) .then((json) => { dispatch({ @@ -158,7 +161,7 @@ export function assignGroupMemberRole(groupId, username, role = "viewer") { }) .catch((reason) => { dispatch( - 
handleErrors(reason, assignGroupMemberRole(groupId, username, role)) + handleErrors(reason, assignGroupMemberRole(groupId, username, role, adminMode)) ); }); }; @@ -166,10 +169,11 @@ export function assignGroupMemberRole(groupId, username, role = "viewer") { export const UPDATE_GROUP = "UPDATE_GROUP"; -export function updateGroup(groupId, formData) { +export function updateGroup(groupId, adminMode, formData) { return (dispatch) => { return V2.GroupsService.editGroupApiV2GroupsGroupIdPut( groupId, + adminMode, formData ) .then((json) => { @@ -180,7 +184,7 @@ export function updateGroup(groupId, formData) { }); }) .catch((reason) => { - dispatch(handleErrors(reason, updateGroup(groupId, formData))); + dispatch(handleErrors(reason, updateGroup(groupId, adminMode, formData))); }); }; } diff --git a/frontend/src/actions/metadata.js b/frontend/src/actions/metadata.js index 1a341a149..ddf426d5a 100644 --- a/frontend/src/actions/metadata.js +++ b/frontend/src/actions/metadata.js @@ -120,10 +120,11 @@ export function searchMetadataDefinitions(searchTerm, skip, limit) { export const RECEIVE_DATASET_METADATA = "RECEIVE_DATASET_METADATA"; -export function fetchDatasetMetadata(datasetId) { +export function fetchDatasetMetadata(datasetId, adminMode) { return (dispatch) => { return V2.MetadataService.getDatasetMetadataApiV2DatasetsDatasetIdMetadataGet( - datasetId + datasetId, + adminMode ) .then((json) => { dispatch({ @@ -133,17 +134,18 @@ export function fetchDatasetMetadata(datasetId) { }); }) .catch((reason) => { - dispatch(handleErrors(reason, fetchDatasetMetadata(datasetId))); + dispatch(handleErrors(reason, fetchDatasetMetadata(datasetId, adminMode))); }); }; } export const RECEIVE_FILE_METADATA = "RECEIVE_FILE_METADATA"; -export function fetchFileMetadata(fileId, version) { +export function fetchFileMetadata(fileId, adminMode, version) { return (dispatch) => { return V2.MetadataService.getFileMetadataApiV2FilesFileIdMetadataGet( fileId, + adminMode, version, false ) @@ 
-155,17 +157,18 @@ export function fetchFileMetadata(fileId, version) { }); }) .catch((reason) => { - dispatch(handleErrors(reason, fetchFileMetadata(fileId, version))); + dispatch(handleErrors(reason, fetchFileMetadata(fileId, adminMode, version))); }); }; } export const POST_DATASET_METADATA = "POST_DATASET_METADATA"; -export function postDatasetMetadata(datasetId, metadata) { +export function postDatasetMetadata(datasetId, adminMode, metadata) { return (dispatch) => { return V2.MetadataService.addDatasetMetadataApiV2DatasetsDatasetIdMetadataPost( datasetId, + adminMode, metadata ) .then((json) => { @@ -177,7 +180,7 @@ export function postDatasetMetadata(datasetId, metadata) { }) .catch((reason) => { dispatch( - handleErrors(reason, postDatasetMetadata(datasetId, metadata)) + handleErrors(reason, postDatasetMetadata(datasetId, adminMode, metadata)) ); }); }; @@ -185,10 +188,11 @@ export function postDatasetMetadata(datasetId, metadata) { export const POST_FILE_METADATA = "POST_FILE_METADATA"; -export function postFileMetadata(fileId, metadata) { +export function postFileMetadata(fileId, adminMode, metadata) { return (dispatch) => { return V2.MetadataService.addFileMetadataApiV2FilesFileIdMetadataPost( fileId, + adminMode, metadata ) .then((json) => { @@ -199,17 +203,18 @@ export function postFileMetadata(fileId, metadata) { }); }) .catch((reason) => { - dispatch(handleErrors(reason, postFileMetadata(fileId, metadata))); + dispatch(handleErrors(reason, postFileMetadata(fileId, adminMode, metadata))); }); }; } export const DELETE_DATASET_METADATA = "DELETE_DATASET_METADATA"; -export function deleteDatasetMetadata(datasetId, metadata) { +export function deleteDatasetMetadata(datasetId, adminMode, metadata) { return (dispatch) => { return V2.MetadataService.deleteDatasetMetadataApiV2DatasetsDatasetIdMetadataDelete( datasetId, + adminMode, metadata ) .then((json) => { @@ -221,7 +226,7 @@ export function deleteDatasetMetadata(datasetId, metadata) { }) .catch((reason) 
=> { dispatch( - handleErrors(reason, deleteDatasetMetadata(datasetId, metadata)) + handleErrors(reason, deleteDatasetMetadata(datasetId, adminMode, metadata)) ); }); }; @@ -229,10 +234,11 @@ export function deleteDatasetMetadata(datasetId, metadata) { export const DELETE_FILE_METADATA = "DELETE_FILE_METADATA"; -export function deleteFileMetadata(fileId, metadata) { +export function deleteFileMetadata(fileId, adminMode, metadata) { return (dispatch) => { return V2.MetadataService.deleteFileMetadataApiV2FilesFileIdMetadataDelete( fileId, + adminMode, metadata ) .then((json) => { @@ -243,17 +249,18 @@ export function deleteFileMetadata(fileId, metadata) { }); }) .catch((reason) => { - dispatch(handleErrors(reason, deleteFileMetadata(fileId, metadata))); + dispatch(handleErrors(reason, deleteFileMetadata(fileId, adminMode, metadata))); }); }; } export const UPDATE_DATASET_METADATA = "UPDATE_DATASET_METADATA"; -export function patchDatasetMetadata(datasetId, metadata) { +export function patchDatasetMetadata(datasetId, adminMode, metadata) { return (dispatch) => { return V2.MetadataService.updateDatasetMetadataApiV2DatasetsDatasetIdMetadataPatch( datasetId, + adminMode, metadata ) .then((json) => { @@ -265,7 +272,7 @@ export function patchDatasetMetadata(datasetId, metadata) { }) .catch((reason) => { dispatch( - handleErrors(reason, patchDatasetMetadata(datasetId, metadata)) + handleErrors(reason, patchDatasetMetadata(datasetId, adminMode, metadata)) ); }); }; @@ -273,10 +280,11 @@ export function patchDatasetMetadata(datasetId, metadata) { export const UPDATE_FILE_METADATA = "UPDATE_FILE_METADATA"; -export function patchFileMetadata(fileId, metadata) { +export function patchFileMetadata(fileId, adminMode, metadata) { return (dispatch) => { return V2.MetadataService.updateFileMetadataApiV2FilesFileIdMetadataPatch( fileId, + adminMode, metadata ) .then((json) => { @@ -287,7 +295,7 @@ export function patchFileMetadata(fileId, metadata) { }); }) .catch((reason) => { - 
dispatch(handleErrors(reason, patchFileMetadata(fileId, metadata))); + dispatch(handleErrors(reason, patchFileMetadata(fileId, adminMode, metadata))); }); }; } diff --git a/frontend/src/actions/user.js b/frontend/src/actions/user.js index 9e46b03e8..b13a05082 100644 --- a/frontend/src/actions/user.js +++ b/frontend/src/actions/user.js @@ -2,6 +2,8 @@ import { V2 } from "../openapi"; import Cookies from "universal-cookie"; import config from "../app.config"; import { handleErrors } from "./common"; +import {fetchAdmin} from "./authorization"; +import {fetchDatasets} from "./dataset"; const cookies = new Cookies(); @@ -53,6 +55,8 @@ export const SET_USER = "SET_USER"; export const REGISTER_USER = "REGISTER_USER"; export const REGISTER_ERROR = "REGISTER_ERROR"; export const LOGOUT = "LOGOUT"; +export const ADMIN = "ADMIN" +export const ADMIN_MODE = "ADMIN_MODE" export function _legacy_login(email, password) { return async (dispatch) => { @@ -127,6 +131,23 @@ export function fetchAllUsers(skip = 0, limit = 101) { }; } +export const setAdmin = () => { + return async (dispatch) => { + try { + dispatch({type: ADMIN, admin: await V2.LoginService.getAdminApiV2AdminGet()}); + } catch (error) { + dispatch({type: ADMIN, admin: false}); + } + }; +}; + +export function toggleAdminMode(currentAdminMode) { + return (dispatch) => { + dispatch({type: ADMIN_MODE, adminMode: !currentAdminMode}); + dispatch(fetchDatasets(0, 21, false, !currentAdminMode)); + }; +} + export const PREFIX_SEARCH_USERS = "PREFIX_SEARCH_USERS"; export function prefixSearchAllUsers(text = "", skip = 0, limit = 101) { diff --git a/frontend/src/components/Explore.tsx b/frontend/src/components/Explore.tsx index 0538b43e0..7a011fefc 100644 --- a/frontend/src/components/Explore.tsx +++ b/frontend/src/components/Explore.tsx @@ -9,7 +9,7 @@ import { a11yProps, TabPanel } from "./tabs/TabComponent"; import DatasetCard from "./datasets/DatasetCard"; import { ArrowBack, ArrowForward } from "@material-ui/icons"; import 
Layout from "./Layout"; -import { Link as RouterLink, useLocation } from "react-router-dom"; +import {Link as RouterLink, useLocation, useParams} from "react-router-dom"; import { Listeners } from "./listeners/Listeners"; import { ErrorModal } from "./errors/ErrorModal"; @@ -23,11 +23,15 @@ const tab = { export const Explore = (): JSX.Element => { // Redux connect equivalent const dispatch = useDispatch(); + const adminMode = useSelector( + (state: RootState) => state.user.adminMode + ); const listDatasets = ( skip: number | undefined, limit: number | undefined, - mine: boolean | undefined - ) => dispatch(fetchDatasets(skip, limit, mine)); + mine: boolean | undefined, + adminMode: boolean| undefined + ) => dispatch(fetchDatasets(skip, limit, mine, adminMode)); const datasets = useSelector((state: RootState) => state.dataset.datasets); // TODO add option to determine limit number; default show 5 datasets each time @@ -35,14 +39,16 @@ export const Explore = (): JSX.Element => { const [limit] = useState(20); const [skip, setSkip] = useState(); // TODO add switch to turn on and off "mine" dataset - const [mine] = useState(false); + const [mine] = useState(true); const [prevDisabled, setPrevDisabled] = useState(true); const [nextDisabled, setNextDisabled] = useState(false); const [selectedTabIndex, setSelectedTabIndex] = useState(0); const [errorOpen, setErrorOpen] = useState(false); + // component did mount useEffect(() => { + console.log("Rendered: ", adminMode); listDatasets(0, limit, mine); }, []); @@ -61,6 +67,7 @@ export const Explore = (): JSX.Element => { setSelectedTabIndex(newTabIndex); }; + // for pagination keep flipping until the return dataset is less than the limit const previous = () => { if (currPageNum - 1 >= 0) { diff --git a/frontend/src/components/Layout.tsx b/frontend/src/components/Layout.tsx index e13da0d6e..df098a903 100644 --- a/frontend/src/components/Layout.tsx +++ b/frontend/src/components/Layout.tsx @@ -19,9 +19,9 @@ import ListItemIcon 
from "@mui/material/ListItemIcon"; import ListItemText from "@mui/material/ListItemText"; import { Link, Menu, MenuItem, MenuList } from "@mui/material"; import { Link as RouterLink, useLocation } from "react-router-dom"; -import { useSelector } from "react-redux"; +import {useDispatch, useSelector} from "react-redux"; import { RootState } from "../types/data"; -import { AddBox, Explore } from "@material-ui/icons"; +import {AddBox, Explore, SupervisorAccount} from "@material-ui/icons"; import HistoryIcon from "@mui/icons-material/History"; import GroupIcon from "@mui/icons-material/Group"; import Gravatar from "react-gravatar"; @@ -31,6 +31,7 @@ import { getCurrEmail } from "../utils/common"; import VpnKeyIcon from "@mui/icons-material/VpnKey"; import LogoutIcon from "@mui/icons-material/Logout"; import { EmbeddedSearch } from "./search/EmbeddedSearch"; +import {setAdmin, toggleAdminMode} from "../actions/user"; const drawerWidth = 240; @@ -98,12 +99,27 @@ const link = { }; export default function PersistentDrawerLeft(props) { + const dispatch = useDispatch(); const { children } = props; const theme = useTheme(); const [open, setOpen] = React.useState(false); const [embeddedSearchHidden, setEmbeddedSearchHidden] = React.useState(false); const [anchorEl, setAnchorEl] = React.useState(null); const isMenuOpen = Boolean(anchorEl); + const admin = useSelector( + (state: RootState) => state.user.admin + ); + const adminMode = useSelector( + (state: RootState) => state.user.adminMode + ); + + useEffect(() => { + dispatch(setAdmin()); + }, [dispatch]); + + const handleAdminMode = () => { + dispatch(toggleAdminMode(adminMode)); + }; const handleDrawerOpen = () => { setOpen(true); @@ -218,6 +234,22 @@ export default function PersistentDrawerLeft(props) { User Profile + {admin && !adminMode?
+ + + + + + Admin Mode +
: <>} + {admin && adminMode?
+ + + + + + Normal Mode +
: <>} diff --git a/frontend/src/components/datasets/ChangeDatasetRoleModal.tsx b/frontend/src/components/datasets/ChangeDatasetRoleModal.tsx index 4f9d7363d..a8abfa0bd 100644 --- a/frontend/src/components/datasets/ChangeDatasetRoleModal.tsx +++ b/frontend/src/components/datasets/ChangeDatasetRoleModal.tsx @@ -2,8 +2,9 @@ import React, { useEffect, useState } from "react"; import { Alert, Autocomplete, Button, Collapse, Container, Dialog, DialogActions, DialogContent, DialogTitle, Divider, FormControl, IconButton, InputLabel, MenuItem, Select, TextField, Typography } from "@mui/material"; import {useParams} from "react-router-dom"; import { setDatasetUserRole } from "../../actions/dataset"; -import { useDispatch } from "react-redux"; +import {useDispatch, useSelector} from "react-redux"; import CloseIcon from "@mui/icons-material/Close"; +import {RootState} from "../../types/data"; type ChangeDatasetRoleProps = { @@ -22,8 +23,11 @@ export default function ChangeDatasetRoleModal(props: ChangeDatasetRoleProps) { const [email, setEmail] = useState(currentUser); const [role, setRole] = useState(currentRole); const [showSuccessAlert, setShowSuccessAlert] = useState(false); + const adminMode = useSelector( + (state: RootState) => state.user.adminMode + ); - const setUserRole = (datasetId: string, username: string, role: string) => dispatch(setDatasetUserRole(datasetId, username, role)); + const setUserRole = (datasetId: string, username: string, role: string) => dispatch(setDatasetUserRole(datasetId, username, role, adminMode)); // component did mount useEffect(() => { diff --git a/frontend/src/components/datasets/ChangeGroupDatasetRoleModal.tsx b/frontend/src/components/datasets/ChangeGroupDatasetRoleModal.tsx index 2e562fad3..081704e87 100644 --- a/frontend/src/components/datasets/ChangeGroupDatasetRoleModal.tsx +++ b/frontend/src/components/datasets/ChangeGroupDatasetRoleModal.tsx @@ -1,6 +1,6 @@ import React, { useState } from "react"; -import { useDispatch } from 
"react-redux"; +import {useDispatch, useSelector} from "react-redux"; import { Alert, Button, @@ -21,6 +21,7 @@ import { import { setDatasetGroupRole } from "../../actions/dataset"; import { useParams } from "react-router-dom"; import CloseIcon from "@mui/icons-material/Close"; +import {RootState} from "../../types/data"; type ChangeGroupDatasetRoleModalProps = { open: boolean; @@ -47,8 +48,11 @@ export default function ChangeGroupDatasetRoleModal( const [group, setGroup] = useState(); const [showSuccessAlert, setShowSuccessAlert] = useState(false); const dispatch = useDispatch(); + const adminMode = useSelector( + (state: RootState) => state.user.adminMode + ); const setGroupRole = (datasetId: string, groupId: string, role: string) => - dispatch(setDatasetGroupRole(datasetId, groupId, role)); + dispatch(setDatasetGroupRole(datasetId, groupId, role, adminMode)); const onShare = () => { setGroupRole(datasetId, currentGroupId, role); diff --git a/frontend/src/components/datasets/CreateDataset.tsx b/frontend/src/components/datasets/CreateDataset.tsx index c1545aa0c..1023c1645 100644 --- a/frontend/src/components/datasets/CreateDataset.tsx +++ b/frontend/src/components/datasets/CreateDataset.tsx @@ -26,6 +26,7 @@ import { ErrorModal } from "../errors/ErrorModal"; export const CreateDataset = (): JSX.Element => { const dispatch = useDispatch(); + const adminMode = useSelector((state : RootState) => state.user.adminMode); // @ts-ignore const getMetadatDefinitions = ( name: string | null, @@ -35,7 +36,7 @@ export const CreateDataset = (): JSX.Element => { const createDatasetMetadata = ( datasetId: string | undefined, metadata: MetadataIn - ) => dispatch(postDatasetMetadata(datasetId, metadata)); + ) => dispatch(postDatasetMetadata(datasetId, adminMode, metadata)); const createDataset = (formData: FormData) => dispatch(datasetCreated(formData)); const newDataset = useSelector( diff --git a/frontend/src/components/datasets/Dataset.tsx 
b/frontend/src/components/datasets/Dataset.tsx index e43c88bcb..fe47b8b77 100644 --- a/frontend/src/components/datasets/Dataset.tsx +++ b/frontend/src/components/datasets/Dataset.tsx @@ -50,6 +50,8 @@ export const Dataset = (): JSX.Element => { // search parameters const [searchParams] = useSearchParams(); const folderId = searchParams.get("folder"); + + const adminMode = useSelector((state:RootState) => state.user.adminMode); // Redux connect equivalent const dispatch = useDispatch(); const updateDatasetMetadata = ( @@ -59,26 +61,26 @@ export const Dataset = (): JSX.Element => { const createDatasetMetadata = ( datasetId: string | undefined, metadata: MetadataIn - ) => dispatch(postDatasetMetadata(datasetId, metadata)); + ) => dispatch(postDatasetMetadata(datasetId, adminMode, metadata)); const deleteDatasetMetadata = ( datasetId: string | undefined, metadata: object - ) => dispatch(deleteDatasetMetadataAction(datasetId, metadata)); + ) => dispatch(deleteDatasetMetadataAction(datasetId, adminMode, metadata)); const getFolderPath = (folderId: string | null) => dispatch(fetchFolderPath(folderId)); const listFilesInDataset = ( datasetId: string | undefined, folderId: string | null - , skip: number | undefined, limit: number | undefined) => dispatch(fetchFilesInDataset(datasetId, folderId, skip, limit)); + , skip: number | undefined, limit: number | undefined) => dispatch(fetchFilesInDataset(datasetId, folderId, skip, limit, adminMode)); const listFoldersInDataset = ( datasetId: string | undefined, parentFolder: string | null, skip: number | undefined, limit: number | undefined - ) => dispatch(fetchFoldersInDataset(datasetId, parentFolder, skip, limit)); + ) => dispatch(fetchFoldersInDataset(datasetId, parentFolder, skip, limit, adminMode)); const listDatasetAbout = (datasetId: string | undefined) => - dispatch(fetchDatasetAbout(datasetId)); + dispatch(fetchDatasetAbout(datasetId, adminMode)); const listDatasetMetadata = (datasetId: string | undefined) => - 
dispatch(fetchDatasetMetadata(datasetId)); + dispatch(fetchDatasetMetadata(datasetId, adminMode)); const getMetadatDefinitions = (name:string|null, skip:number, limit:number) => dispatch(fetchMetadataDefinitions(name, skip,limit)); diff --git a/frontend/src/components/datasets/ShareDatasetModal.tsx b/frontend/src/components/datasets/ShareDatasetModal.tsx index e1952372e..2a669dbc2 100644 --- a/frontend/src/components/datasets/ShareDatasetModal.tsx +++ b/frontend/src/components/datasets/ShareDatasetModal.tsx @@ -45,6 +45,7 @@ export default function ShareDatasetModal(props: ShareDatasetModalProps) { const [showSuccessAlert, setShowSuccessAlert] = useState(false); const [options, setOptions] = useState([]); const users = useSelector((state: RootState) => state.group.users); + const adminMode = useSelector((state: RootState) => state.user.adminMode); const setUserRole = async ( datasetId: string, @@ -53,7 +54,7 @@ export default function ShareDatasetModal(props: ShareDatasetModalProps) { ) => dispatch(setDatasetUserRole(datasetId, username, role)); const getRoles = (datasetId: string | undefined) => - dispatch(fetchDatasetRoles(datasetId)); + dispatch(fetchDatasetRoles(datasetId, adminMode)); useEffect(() => { prefixSearchAllUsers("", 0, 10); diff --git a/frontend/src/components/datasets/ShareGroupDatasetModal.tsx b/frontend/src/components/datasets/ShareGroupDatasetModal.tsx index a3c9e1fc7..f1ca298af 100644 --- a/frontend/src/components/datasets/ShareGroupDatasetModal.tsx +++ b/frontend/src/components/datasets/ShareGroupDatasetModal.tsx @@ -44,14 +44,17 @@ export default function ShareGroupDatasetModal( const dispatch = useDispatch(); const listGroups = () => dispatch(fetchGroups(0, 21)); const groups = useSelector((state: RootState) => state.group.groups); + const adminMode = useSelector( + (state: RootState) => state.user.adminMode + ); const setGroupRole = async ( datasetId: string, groupId: string, role: string - ) => dispatch(setDatasetGroupRole(datasetId, 
groupId, role)); + ) => dispatch(setDatasetGroupRole(datasetId, groupId, role, adminMode)); const getRoles = (datasetId: string | undefined) => - dispatch(fetchDatasetRoles(datasetId)); + dispatch(fetchDatasetRoles(datasetId, adminMode)); // component did mount useEffect(() => { diff --git a/frontend/src/components/files/File.tsx b/frontend/src/components/files/File.tsx index 15444f937..351c44c64 100644 --- a/frontend/src/components/files/File.tsx +++ b/frontend/src/components/files/File.tsx @@ -60,23 +60,25 @@ export const File = (): JSX.Element => { const folderId = searchParams.get("folder"); const datasetId = searchParams.get("dataset"); + const adminMode = useSelector((state: RootState) => state.user.adminMode) + const listDatasetAbout = (datasetId: string | undefined) => - dispatch(fetchDatasetAbout(datasetId)); + dispatch(fetchDatasetAbout(datasetId, adminMode)); const about = useSelector((state: RootState) => state.dataset.about); const dispatch = useDispatch(); const listFileSummary = (fileId: string | undefined) => - dispatch(fetchFileSummary(fileId)); + dispatch(fetchFileSummary(fileId, adminMode)); const listFileVersions = (fileId: string | undefined) => - dispatch(fetchFileVersions(fileId)); + dispatch(fetchFileVersions(fileId, adminMode)); const listFileMetadata = (fileId: string | undefined) => - dispatch(fetchFileMetadata(fileId)); + dispatch(fetchFileMetadata(fileId, adminMode)); const createFileMetadata = (fileId: string | undefined, metadata: object) => dispatch(createFileMetadataAction(fileId, metadata)); const updateFileMetadata = (fileId: string | undefined, metadata: object) => dispatch(patchFileMetadataAction(fileId, metadata)); const deleteFileMetadata = (fileId: string | undefined, metadata: object) => - dispatch(deleteFileMetadataAction(fileId, metadata)); + dispatch(deleteFileMetadataAction(fileId, adminMode, metadata)); const getFolderPath = (folderId: string | null) => dispatch(fetchFolderPath(folderId)); diff --git 
a/frontend/src/components/files/FileActionsMenu.tsx b/frontend/src/components/files/FileActionsMenu.tsx index 10374a267..658f5067f 100644 --- a/frontend/src/components/files/FileActionsMenu.tsx +++ b/frontend/src/components/files/FileActionsMenu.tsx @@ -41,6 +41,7 @@ export const FileActionsMenu = (props: FileActionsMenuProps): JSX.Element => { const [fileShareModalOpen, setFileShareModalOpen] = useState(false); const fileRole = useSelector((state: RootState) => state.file.fileRole); + const adminMode = useSelector((state: RootState) => state.user.adminMode); const open = Boolean(anchorEl); @@ -57,6 +58,7 @@ export const FileActionsMenu = (props: FileActionsMenuProps): JSX.Element => { const generateFilePresignedUrl = ( fileId: string | undefined, + adminMode: boolean | undefined, fileVersionNum: number | undefined | null, expiresInSeconds: number | undefined | null ) => @@ -68,9 +70,9 @@ export const FileActionsMenu = (props: FileActionsMenuProps): JSX.Element => { ); const deleteFile = (fileId: string | undefined) => - dispatch(fileDeleted(fileId)); + dispatch(fileDeleted(fileId, adminMode)); const listFileSummary = (fileId: string | undefined) => - dispatch(fetchFileSummary(fileId)); + dispatch(fetchFileSummary(fileId, adminMode)); const history = useNavigate(); const [confirmationOpen, setConfirmationOpen] = useState(false); @@ -86,7 +88,7 @@ export const FileActionsMenu = (props: FileActionsMenuProps): JSX.Element => { history(`/datasets/${datasetId}`); }; const handleShareLinkClick = () => { - generateFilePresignedUrl(fileId, null, 7 * 24 * 3600); + generateFilePresignedUrl(fileId, adminMode, null, 7 * 24 * 3600); setFileShareModalOpen(true); }; const setFileShareModalClose = () => { diff --git a/frontend/src/components/files/FileMenu.tsx b/frontend/src/components/files/FileMenu.tsx index 9a0e6a8b6..deb9a3e8a 100644 --- a/frontend/src/components/files/FileMenu.tsx +++ b/frontend/src/components/files/FileMenu.tsx @@ -33,10 +33,12 @@ export default function 
FileMenu(props: FileMenuProps) { setAnchorEl(null); }; + const adminMode = useSelector((state: RootState) => state.user.adminMode); + // confirmation dialog const dispatch = useDispatch(); const deleteFile = (fileId: string | undefined) => - dispatch(fileDeleted(fileId)); + dispatch(fileDeleted(fileId, adminMode)); const datasetRole = useSelector( (state: RootState) => state.dataset.datasetRole ); diff --git a/frontend/src/components/files/UpdateFile.tsx b/frontend/src/components/files/UpdateFile.tsx index 9fcad8853..6fa930ae6 100644 --- a/frontend/src/components/files/UpdateFile.tsx +++ b/frontend/src/components/files/UpdateFile.tsx @@ -22,17 +22,18 @@ export const UpdateFile: React.FC = ( props: UpdateFileProps ) => { const dispatch = useDispatch(); + const adminMode = useSelector((state : RootState) => state.user.adminMode); const updateFile = async (file: File, fileId: string | undefined) => dispatch(updateFileAction(file, fileId)); const listFileVersions = (fileId: string | undefined) => - dispatch(fetchFileVersions(fileId)); + dispatch(fetchFileVersions(fileId, adminMode)); const listFileMetadata = async (fileId: string | undefined) => - dispatch(fetchFileMetadata(fileId)); + dispatch(fetchFileMetadata(fileId, adminMode)); const createFileMetadata = ( fileId: string | undefined, metadata: MetadataIn - ) => dispatch(postFileMetadata(fileId, metadata)); + ) => dispatch(postFileMetadata(fileId, adminMode, metadata)); const fileMetadataList = useSelector( (state: RootState) => state.metadata.fileMetadataList ); @@ -56,7 +57,7 @@ export const UpdateFile: React.FC = ( const onSave = async (file: File) => { setLoading(true); // TODO: if this fails, the metadata update will also fail - await updateFile(file, fileId); + await updateFile(file, fileId, adminMode); setLoading(false); setOpen(false); diff --git a/frontend/src/components/files/UploadFile.tsx b/frontend/src/components/files/UploadFile.tsx index bb7ace434..3537bf40e 100644 --- 
a/frontend/src/components/files/UploadFile.tsx +++ b/frontend/src/components/files/UploadFile.tsx @@ -42,6 +42,7 @@ export const UploadFile: React.FC = ( const [loading, setLoading] = useState(false); const dispatch = useDispatch(); + const adminMode = useSelector((state : RootState) => state.user.adminMode); // @ts-ignore const getMetadatDefinitions = ( name: string | null, @@ -50,15 +51,16 @@ export const UploadFile: React.FC = ( ) => dispatch(fetchMetadataDefinitions(name, skip, limit)); const createFileMetadata = ( fileId: string | undefined, + adminMode: boolean, metadata: MetadataIn - ) => dispatch(postFileMetadata(fileId, metadata)); + ) => dispatch(postFileMetadata(fileId, adminMode, metadata)); const uploadFile = ( selectedDatasetId: string | undefined, selectedFolderId: string | undefined, selectedFile: File ) => dispatch( - createFileAction(selectedDatasetId, selectedFolderId, selectedFile) + createFileAction(selectedDatasetId, selectedFile, selectedFolderId, adminMode) ); const newFile = useSelector((state: RootState) => state.dataset.newFile); const metadataDefinitionList = useSelector( diff --git a/frontend/src/components/files/UploadFileMultiple.tsx b/frontend/src/components/files/UploadFileMultiple.tsx index 190f8eb25..c9e48e92d 100644 --- a/frontend/src/components/files/UploadFileMultiple.tsx +++ b/frontend/src/components/files/UploadFileMultiple.tsx @@ -48,6 +48,7 @@ export const UploadFileMultiple: React.FC = ( const [allFilled, setAllFilled] = React.useState(false); const [loading, setLoading] = useState(false); + const adminMode = useSelector((state:RootState) => state.user.adminMode); const dispatch = useDispatch(); // @ts-ignore @@ -59,7 +60,7 @@ export const UploadFileMultiple: React.FC = ( const createFileMetadata = ( fileId: string | undefined, metadata: MetadataIn - ) => dispatch(postFileMetadata(fileId, metadata)); + ) => dispatch(postFileMetadata(fileId, adminMode, metadata)); const uploadFiles = ( selectedDatasetId: string | undefined, 
@@ -67,7 +68,7 @@ export const UploadFileMultiple: React.FC = ( selectedFolderId: string | undefined ) => dispatch( - createFilesAction(selectedDatasetId, selectedFiles, selectedFolderId) + createFilesAction(selectedDatasetId, adminMode, selectedFiles, selectedFolderId) ); const getFolderPath = (folderId: string | null) => @@ -77,15 +78,15 @@ export const UploadFileMultiple: React.FC = ( folderId: string | null, skip: number | undefined, limit: number | undefined - ) => dispatch(fetchFilesInDataset(datasetId, folderId, skip, limit)); + ) => dispatch(fetchFilesInDataset(datasetId, folderId, skip, limit, adminMode)); const listFoldersInDataset = ( datasetId: string | undefined, parentFolder: string | null, skip: number | undefined, limit: number | undefined - ) => dispatch(fetchFoldersInDataset(datasetId, parentFolder, skip, limit)); + ) => dispatch(fetchFoldersInDataset(datasetId, parentFolder, skip, limit, adminMode)); const listDatasetAbout = (datasetId: string | undefined) => - dispatch(fetchDatasetAbout(datasetId)); + dispatch(fetchDatasetAbout(datasetId, adminMode)); const newFiles = useSelector((state: RootState) => state.dataset.newFiles); const metadataDefinitionList = useSelector( diff --git a/frontend/src/components/folders/CreateFolder.tsx b/frontend/src/components/folders/CreateFolder.tsx index 01e1758ec..b5b12e45a 100644 --- a/frontend/src/components/folders/CreateFolder.tsx +++ b/frontend/src/components/folders/CreateFolder.tsx @@ -10,8 +10,9 @@ import { import LoadingOverlay from "react-loading-overlay-ts"; -import {useDispatch,} from "react-redux"; +import {useDispatch, useSelector,} from "react-redux"; import {folderAdded} from "../../actions/folder"; +import {RootState} from "../../types/data"; type CreateFolderProps = { @@ -23,7 +24,9 @@ type CreateFolderProps = { export const CreateFolder: React.FC = (props: CreateFolderProps) => { const dispatch = useDispatch(); - const addFolder = (datasetId:string|undefined, folderName:string, 
parentFolder:string|null) => dispatch(folderAdded(datasetId, folderName, parentFolder)); + + const adminMode = useSelector((state: RootState) => state.user.adminMode); + const addFolder = (datasetId:string|undefined, folderName:string, parentFolder:string|null) => dispatch(folderAdded(datasetId, adminMode, folderName, parentFolder)); const {datasetId, parentFolder, open, handleClose} = props; const [loading, setLoading] = useState(false); diff --git a/frontend/src/components/groups/AddMemberModal.tsx b/frontend/src/components/groups/AddMemberModal.tsx index 4fd6479c4..84dc50423 100644 --- a/frontend/src/components/groups/AddMemberModal.tsx +++ b/frontend/src/components/groups/AddMemberModal.tsx @@ -33,10 +33,12 @@ export default function AddMemberModal(props: AddMemberModalProps) { const prefixSearchAllUsers = (text: string, skip: number, limit: number) => dispatch(prefixSearchAllUsersAction(text, skip, limit)); + const adminMode = useSelector((state : RootState) => state.user.adminMode); + const groupMemberAdded = ( groupId: string | undefined, username: string | undefined - ) => dispatch(addGroupMember(groupId, username)); + ) => dispatch(addGroupMember(groupId, username, adminMode)); const users = useSelector((state: RootState) => state.group.users); const [email, setEmail] = useState(""); diff --git a/frontend/src/components/groups/DeleteGroupModal.tsx b/frontend/src/components/groups/DeleteGroupModal.tsx index a551394a5..2ee1c3fd8 100644 --- a/frontend/src/components/groups/DeleteGroupModal.tsx +++ b/frontend/src/components/groups/DeleteGroupModal.tsx @@ -3,7 +3,8 @@ import React from "react"; import { ActionModal } from "../dialog/ActionModal"; import { deleteGroup } from "../../actions/group"; import { useNavigate } from "react-router-dom"; -import { useDispatch } from "react-redux"; +import {useDispatch, useSelector} from "react-redux"; +import {RootState} from "../../types/data"; type DeleteGroupModalProps = { deleteGroupConfirmOpen: any; @@ -15,8 +16,10 
@@ export default function DeleteGroupModal(props: DeleteGroupModalProps) { const { deleteGroupConfirmOpen, setDeleteGroupConfirmOpen, groupId } = props; const history = useNavigate(); const dispatch = useDispatch(); + + const adminMode = useSelector((state: RootState) => state.user.adminMode); const groupDeleted = (groupId: string | undefined) => - dispatch(deleteGroup(groupId)); + dispatch(deleteGroup(groupId, adminMode)); return ( state.user.adminMode); const editGroup = (groupId: string | undefined, formData: GroupIn) => - dispatch(updateGroup(groupId, formData)); + dispatch(updateGroup(groupId, formData, adminMode)); const groupAbout = useSelector((state: RootState) => state.group.about); const about = useSelector((state: RootState) => state.dataset.about); diff --git a/frontend/src/components/groups/EditNameModal.tsx b/frontend/src/components/groups/EditNameModal.tsx index 06b9ab3d8..2be35143f 100644 --- a/frontend/src/components/groups/EditNameModal.tsx +++ b/frontend/src/components/groups/EditNameModal.tsx @@ -26,8 +26,9 @@ type EditNameModalProps = { export default function EditNameModal(props: EditNameModalProps) { const { open, handleClose, groupName, groupDescription, groupId } = props; const dispatch = useDispatch(); + const adminMode = useSelector((state : RootState) => state.user.adminMode); const editGroup = (groupId: string | undefined, formData: GroupIn) => - dispatch(updateGroup(groupId, formData)); + dispatch(updateGroup(groupId, formData, adminMode)); const groupAbout = useSelector((state: RootState) => state.group.about); diff --git a/frontend/src/components/groups/Group.tsx b/frontend/src/components/groups/Group.tsx index de461e1b3..53d2300a0 100644 --- a/frontend/src/components/groups/Group.tsx +++ b/frontend/src/components/groups/Group.tsx @@ -23,8 +23,10 @@ export function Group() { // Redux connect equivalent const dispatch = useDispatch(); + + const adminMode = useSelector((state : RootState) => state.user.adminMode); const 
fetchGroupInfo = (groupId: string | undefined) => - dispatch(fetchGroupAbout(groupId)); + dispatch(fetchGroupAbout(groupId, adminMode)); const fetchCurrentGroupRole = (groupId: string | undefined) => dispatch(fetchGroupRole(groupId)); diff --git a/frontend/src/components/groups/MembersTable.tsx b/frontend/src/components/groups/MembersTable.tsx index e6ee6b696..3d3a1baee 100644 --- a/frontend/src/components/groups/MembersTable.tsx +++ b/frontend/src/components/groups/MembersTable.tsx @@ -26,7 +26,9 @@ export default function MembersTable(props: MembersTableProps) { const groupCreatorEmail = useSelector((state: RootState) => state.group.about.creator) // dispatch const dispatch = useDispatch(); - const groupMemberDeleted = (groupId: string|undefined, username: string|undefined) => dispatch(deleteGroupMember(groupId, username)) + + const adminMode = useSelector((state : RootState) => state.user.adminMode); + const groupMemberDeleted = (groupId: string|undefined, username: string|undefined) => dispatch(deleteGroupMember(groupId, username, adminMode)) const [deleteMemberConfirmOpen, setDeleteMemberConfirmOpen] = useState(false); const [selectMemberUsername, setSelectMemberUsername] = useState(); diff --git a/frontend/src/components/groups/MembersTableUserEntry.tsx b/frontend/src/components/groups/MembersTableUserEntry.tsx index c089e75da..ad8d2b937 100644 --- a/frontend/src/components/groups/MembersTableUserEntry.tsx +++ b/frontend/src/components/groups/MembersTableUserEntry.tsx @@ -35,8 +35,9 @@ export function MembersTableUserEntry(props: MembersTableUserEntryProps) { const {groupId, member, creatorEmail, setDeleteMemberConfirmOpen, setSelectMemberUsername} = props; const dispatch = useDispatch(); + const adminMode = useSelector((state : RootState) => state.user.adminMode); const groupMemberRoleAssigned = (groupId: string|undefined, username: string|undefined, - role: string|undefined) => dispatch(assignGroupMemberRole(groupId, username, role)); + role: 
string|undefined) => dispatch(assignGroupMemberRole(groupId, username, role, adminMode)); const role = useSelector((state: RootState) => state.group.role); const [selectedRole, setSelectedRole] = useState(member.editor ? "editor": "member"); diff --git a/frontend/src/components/listeners/SubmitExtraction.tsx b/frontend/src/components/listeners/SubmitExtraction.tsx index bb85db712..cb55a329a 100644 --- a/frontend/src/components/listeners/SubmitExtraction.tsx +++ b/frontend/src/components/listeners/SubmitExtraction.tsx @@ -42,12 +42,13 @@ export default function SubmitExtraction(props: SubmitExtractionProps) { const {fileId, datasetId, open, handleClose, selectedExtractor} = props; const dispatch = useDispatch(); + const adminMode = useSelector((state: RootState) => state.user.adminMode); const submitFileExtraction = - (fileId: string | undefined, extractorName: string | undefined, requestBody: FormData) => dispatch(submitFileExtractionAction(fileId, extractorName, requestBody)); + (fileId: string | undefined, extractorName: string | undefined, requestBody: FormData) => dispatch(submitFileExtractionAction(fileId, extractorName, adminMode, requestBody)); const submitDatasetExtraction = (datasetId: string | undefined, extractorName: string | undefined, requestBody: FormData) => dispatch(submitDatasetExtractionAction(datasetId, extractorName, requestBody)); - + const job_id = useSelector((state: RootState) => state.listener.currJobId); const onSubmit = (formData: FormData) => { diff --git a/frontend/src/components/metadata/DisplayListenerMetadata.tsx b/frontend/src/components/metadata/DisplayListenerMetadata.tsx index b1d94a8c8..3791c33f6 100644 --- a/frontend/src/components/metadata/DisplayListenerMetadata.tsx +++ b/frontend/src/components/metadata/DisplayListenerMetadata.tsx @@ -30,8 +30,9 @@ export const DisplayListenerMetadata = (props: MetadataType) => { const getMetadatDefinitions = (name: string | null, skip: number, limit: number) => 
dispatch(fetchMetadataDefinitions(name, skip, limit)); const metadataDefinitionList = useSelector((state: RootState) => state.metadata.metadataDefinitionList); - const listDatasetMetadata = (datasetId: string | undefined) => dispatch(fetchDatasetMetadata(datasetId)); - const listFileMetadata = (fileId: string | undefined, version: number | undefined) => dispatch(fetchFileMetadata(fileId, version)); + const adminMode = useSelector((state : RootState) => state.user.adminMode); + const listDatasetMetadata = (datasetId: string | undefined) => dispatch(fetchDatasetMetadata(datasetId, adminMode)); + const listFileMetadata = (fileId: string | undefined, version: number | undefined) => dispatch(fetchFileMetadata(fileId, adminMode, version)); const datasetMetadataList = useSelector((state: RootState) => state.metadata.datasetMetadataList); const fileMetadataList = useSelector((state: RootState) => state.metadata.fileMetadataList); diff --git a/frontend/src/components/metadata/DisplayMetadata.tsx b/frontend/src/components/metadata/DisplayMetadata.tsx index 5e100b77e..ca97e4d9c 100644 --- a/frontend/src/components/metadata/DisplayMetadata.tsx +++ b/frontend/src/components/metadata/DisplayMetadata.tsx @@ -26,8 +26,9 @@ export const DisplayMetadata = (props: MetadataType) => { const getMetadatDefinitions = (name:string|null, skip:number, limit:number) => dispatch(fetchMetadataDefinitions(name, skip,limit)); const metadataDefinitionList = useSelector((state: RootState) => state.metadata.metadataDefinitionList); - const listDatasetMetadata = (datasetId: string | undefined) => dispatch(fetchDatasetMetadata(datasetId)); - const listFileMetadata = (fileId: string | undefined) => dispatch(fetchFileMetadata(fileId)); + const adminMode = useSelector((state : RootState) => state.user.adminMode); + const listDatasetMetadata = (datasetId: string | undefined) => dispatch(fetchDatasetMetadata(datasetId, adminMode)); + const listFileMetadata = (fileId: string | undefined) => 
dispatch(fetchFileMetadata(fileId, adminMode)); const datasetMetadataList = useSelector((state: RootState) => state.metadata.datasetMetadataList); const fileMetadataList = useSelector((state: RootState) => state.metadata.fileMetadataList); diff --git a/frontend/src/components/metadata/EditMetadata.tsx b/frontend/src/components/metadata/EditMetadata.tsx index 015c073fe..b23551c93 100644 --- a/frontend/src/components/metadata/EditMetadata.tsx +++ b/frontend/src/components/metadata/EditMetadata.tsx @@ -23,8 +23,9 @@ export const EditMetadata = (props: MetadataType) => { const dispatch = useDispatch(); const getMetadatDefinitions = (name:string|null, skip:number, limit:number) => dispatch(fetchMetadataDefinitions(name, skip,limit)); const metadataDefinitionList = useSelector((state: RootState) => state.metadata.metadataDefinitionList); - const listDatasetMetadata = (datasetId: string | undefined) => dispatch(fetchDatasetMetadata(datasetId)); - const listFileMetadata = (fileId: string | undefined) => dispatch(fetchFileMetadata(fileId)); + const adminMode = useSelector((state : RootState) => state.user.adminMode); + const listDatasetMetadata = (datasetId: string | undefined) => dispatch(fetchDatasetMetadata(datasetId, adminMode)); + const listFileMetadata = (fileId: string | undefined) => dispatch(fetchFileMetadata(fileId, adminMode)); const datasetMetadataList = useSelector((state: RootState) => state.metadata.datasetMetadataList); const fileMetadataList = useSelector((state: RootState) => state.metadata.fileMetadataList); diff --git a/frontend/src/components/sharing/GroupAndRoleTableEntry.tsx b/frontend/src/components/sharing/GroupAndRoleTableEntry.tsx index 3f0fc73cf..30ecac339 100644 --- a/frontend/src/components/sharing/GroupAndRoleTableEntry.tsx +++ b/frontend/src/components/sharing/GroupAndRoleTableEntry.tsx @@ -54,12 +54,15 @@ export function GroupAndRoleTableEntry(props: GroupAndRoleTableEntryProps) { ); const [expand, setExpand] = React.useState(false); + const 
adminMode = useSelector( + (state: RootState) => state.user.adminMode + ); const groupRoleAssigned = ( dataset_id: string | undefined, group_id: string | undefined, role: string | undefined - ) => dispatch(setDatasetGroupRole(dataset_id, group_id, role)); + ) => dispatch(setDatasetGroupRole(dataset_id, group_id, role, adminMode)); const removeGroupRole = async ( dataset_id: string | undefined, @@ -86,7 +89,7 @@ export function GroupAndRoleTableEntry(props: GroupAndRoleTableEntryProps) { }; const getRoles = (datasetId: string | undefined) => - dispatch(fetchDatasetRoles(datasetId)); + dispatch(fetchDatasetRoles(datasetId, adminMode)); const handleRoleDelete = async () => { await removeGroupRole(datasetId, group_role.group.id); diff --git a/frontend/src/components/sharing/SharingTab.tsx b/frontend/src/components/sharing/SharingTab.tsx index 9367896d5..11e3bc21c 100644 --- a/frontend/src/components/sharing/SharingTab.tsx +++ b/frontend/src/components/sharing/SharingTab.tsx @@ -1,19 +1,21 @@ import React, { useEffect } from "react"; import Card from "@mui/material/Card"; import { fetchDatasetRoles } from "../../actions/dataset"; -import { useDispatch } from "react-redux"; +import {useDispatch, useSelector} from "react-redux"; import { useParams } from "react-router-dom"; import { UserAndRoleTable } from "./UserAndRoleTable"; import { Box, CardContent } from "@mui/material"; import Typography from "@mui/material/Typography"; +import {RootState} from "../../types/data"; export const SharingTab = (): JSX.Element => { const { datasetId } = useParams<{ datasetId?: string }>(); const dispatch = useDispatch(); + const adminMode = useSelector((state: RootState) => state.user.adminMode) const getRoles = (datasetId: string | undefined) => - dispatch(fetchDatasetRoles(datasetId)); + dispatch(fetchDatasetRoles(datasetId, adminMode)); useEffect(() => { getRoles(datasetId); diff --git a/frontend/src/components/sharing/UserAndRoleTableEntry.tsx 
b/frontend/src/components/sharing/UserAndRoleTableEntry.tsx index e184e7120..407e81f23 100644 --- a/frontend/src/components/sharing/UserAndRoleTableEntry.tsx +++ b/frontend/src/components/sharing/UserAndRoleTableEntry.tsx @@ -49,20 +49,23 @@ export function UserAndRoleTableEntry(props: UserAndRoleTableEntryProps) { const datasetRole = useSelector( (state: RootState) => state.dataset.datasetRole ); + const adminMode = useSelector( + (state: RootState) => state.user.adminMode + ); const userRoleAssigned = ( dataset_id: string | undefined, username: string | undefined, role: string | undefined - ) => dispatch(setDatasetUserRole(dataset_id, username, role)); + ) => dispatch(setDatasetUserRole(dataset_id, username, role, adminMode)); const removeUserRole = async ( dataset_id: string | undefined, username: string | undefined - ) => dispatch(removeDatasetUserRole(dataset_id, username)); + ) => dispatch(removeDatasetUserRole(dataset_id, username, adminMode)); const getRoles = (datasetId: string | undefined) => - dispatch(fetchDatasetRoles(datasetId)); + dispatch(fetchDatasetRoles(datasetId, adminMode)); const [selectedRole, setSelectedRole] = useState(user_role.role); const [editRoleOn, setEditRoleOn] = useState(false); diff --git a/frontend/src/components/users/Profile.tsx b/frontend/src/components/users/Profile.tsx index ebed22427..b77738c73 100644 --- a/frontend/src/components/users/Profile.tsx +++ b/frontend/src/components/users/Profile.tsx @@ -42,7 +42,7 @@ export const Profile = (): JSX.Element => { {profile.first_name} {profile.last_name} {profile.email} - {"false"} + {user.admin? 
Admin: Not admin} diff --git a/frontend/src/components/visualizations/Visualization.tsx b/frontend/src/components/visualizations/Visualization.tsx index 6d886e4a7..a78d5348a 100644 --- a/frontend/src/components/visualizations/Visualization.tsx +++ b/frontend/src/components/visualizations/Visualization.tsx @@ -28,10 +28,13 @@ export const Visualization = (props: previewProps) => { const visConfig = useSelector( (state: RootState) => state.visualization.visConfig ); + const adminMode = useSelector( + (state: RootState) => state.user.adminMode + ); const dispatch = useDispatch(); const listFileSummary = (fileId: string | undefined) => - dispatch(fetchFileSummary(fileId)); + dispatch(fetchFileSummary(fileId, adminMode)); const getVisConfig = (resourceId: string | undefined) => dispatch(getVisConfigAction(resourceId)); diff --git a/frontend/src/openapi/v2/services/AuthorizationService.ts b/frontend/src/openapi/v2/services/AuthorizationService.ts index a44faa63a..8855c4f02 100644 --- a/frontend/src/openapi/v2/services/AuthorizationService.ts +++ b/frontend/src/openapi/v2/services/AuthorizationService.ts @@ -16,16 +16,21 @@ export class AuthorizationService { * Save authorization info in Mongo. This is a triple of dataset_id/user_id/role/group_id. * @param datasetId * @param requestBody + * @param adminMode * @returns AuthorizationOut Successful Response * @throws ApiError */ public static saveAuthorizationApiV2AuthorizationsDatasetsDatasetIdPost( datasetId: string, requestBody: AuthorizationBase, + adminMode: boolean = false, ): CancelablePromise { return __request({ method: 'POST', path: `/api/v2/authorizations/datasets/${datasetId}`, + query: { + 'admin_mode': adminMode, + }, body: requestBody, mediaType: 'application/json', errors: { @@ -38,15 +43,20 @@ export class AuthorizationService { * Get Dataset Role * Retrieve role of user for a specific dataset. 
 * @param datasetId + * @param adminMode * @returns AuthorizationOut Successful Response * @throws ApiError */ public static getDatasetRoleApiV2AuthorizationsDatasetsDatasetIdRoleGet( datasetId: string, + adminMode: boolean = false, ): CancelablePromise { return __request({ method: 'GET', path: `/api/v2/authorizations/datasets/${datasetId}/role`, + query: { + 'admin_mode': adminMode, + }, errors: { 422: `Validation Error`, }, @@ -58,15 +68,20 @@ export class AuthorizationService { * Used for testing only. Returns true if user has viewer permission on dataset, otherwise throws a 403 Forbidden HTTP exception. * See `routers/authorization.py` for more info. * @param datasetId + * @param adminMode + * @returns any Successful Response * @throws ApiError */ public static getDatasetRoleViewerApiV2AuthorizationsDatasetsDatasetIdRoleViewerGet( datasetId: string, + adminMode: boolean = false, ): CancelablePromise { return __request({ method: 'GET', - path: `/api/v2/authorizations/datasets/${datasetId}/role/viewer`, + path: `/api/v2/authorizations/datasets/${datasetId}/role/viewer`, + query: { + 'admin_mode': adminMode, + }, errors: { 422: `Validation Error`, }, @@ -78,15 +93,20 @@ export class AuthorizationService { * Used for testing only. Returns true if user has owner permission on dataset, otherwise throws a 403 Forbidden HTTP exception. * See `routers/authorization.py` for more info. 
 * @param datasetId + * @param adminMode * @returns any Successful Response * @throws ApiError */ public static getDatasetRoleOwnerApiV2AuthorizationsDatasetsDatasetIdRoleOwnerGet( datasetId: string, + adminMode: boolean = false, ): CancelablePromise { return __request({ method: 'GET', - path: `/api/v2/authorizations/datasets/${datasetId}/role/owner`, + path: `/api/v2/authorizations/datasets/${datasetId}/role/owner`, + query: { + 'admin_mode': adminMode, + }, errors: { 422: `Validation Error`, }, @@ -96,18 +116,21 @@ export class AuthorizationService { /** * Get File Role * @param fileId + * @param adminMode * @param datasetId * @returns RoleType Successful Response * @throws ApiError */ public static getFileRoleApiV2AuthorizationsFilesFileIdRoleGet( fileId: string, + adminMode: boolean = false, datasetId?: string, ): CancelablePromise { return __request({ method: 'GET', - path: `/api/v2/authorizations/files/${fileId}/role`, + path: `/api/v2/authorizations/files/${fileId}/role`, query: { + 'admin_mode': adminMode, 'dataset_id': datasetId, }, errors: { @@ -119,18 +142,21 @@ export class AuthorizationService { /** * Get Metadata Role * @param metadataId + * @param adminMode * @param datasetId * @returns AuthorizationMetadata Successful Response * @throws ApiError */ public static getMetadataRoleApiV2AuthorizationsMetadataMetadataIdRoleGet( metadataId: string, + adminMode: boolean = false, datasetId?: string, ): CancelablePromise { return __request({ method: 'GET', - path: `/api/v2/authorizations/metadata/${metadataId}/role`, + path: `/api/v2/authorizations/metadata/${metadataId}/role`, query: { + 'admin_mode': adminMode, 'dataset_id': datasetId, }, errors: { @@ -142,18 +168,21 @@ export class AuthorizationService { /** * Get Group Role * @param groupId + * @param adminMode * @param datasetId * @returns RoleType Successful Response * @throws ApiError */ public static getGroupRoleApiV2AuthorizationsGroupsGroupIdRoleGet( groupId: string, + adminMode: boolean = false, 
 datasetId?: string, ): CancelablePromise { return __request({ method: 'GET', - path: `/api/v2/authorizations/groups/${groupId}/role`, + path: `/api/v2/authorizations/groups/${groupId}/role`, query: { + 'admin_mode': adminMode, 'dataset_id': datasetId, }, errors: { @@ -168,6 +197,7 @@ export class AuthorizationService { * @param datasetId * @param groupId * @param role + * @param adminMode * @returns AuthorizationOut Successful Response * @throws ApiError */ @@ -175,10 +205,14 @@ export class AuthorizationService { datasetId: string, groupId: string, role: RoleType, + adminMode: boolean = false, ): CancelablePromise { return __request({ method: 'POST', path: `/api/v2/authorizations/datasets/${datasetId}/group_role/${groupId}/${role}`, + query: { + 'admin_mode': adminMode, + }, errors: { 422: `Validation Error`, }, @@ -191,6 +225,7 @@ export class AuthorizationService { * @param datasetId * @param username * @param role + * @param adminMode * @returns AuthorizationOut Successful Response * @throws ApiError */ @@ -198,10 +233,14 @@ export class AuthorizationService { datasetId: string, username: string, role: RoleType, + adminMode: boolean = false, ): CancelablePromise { return __request({ method: 'POST', path: `/api/v2/authorizations/datasets/${datasetId}/user_role/${username}/${role}`, + query: { + 'admin_mode': adminMode, + }, errors: { 422: `Validation Error`, }, @@ -213,16 +252,21 @@ export class AuthorizationService { * Remove any role the group has with a specific dataset. 
* @param datasetId * @param groupId + * @param adminMode * @returns AuthorizationOut Successful Response * @throws ApiError */ public static removeDatasetGroupRoleApiV2AuthorizationsDatasetsDatasetIdGroupRoleGroupIdDelete( datasetId: string, groupId: string, + adminMode: boolean = false, ): CancelablePromise { return __request({ method: 'DELETE', path: `/api/v2/authorizations/datasets/${datasetId}/group_role/${groupId}`, + query: { + 'admin_mode': adminMode, + }, errors: { 422: `Validation Error`, }, @@ -234,16 +278,21 @@ export class AuthorizationService { * Remove any role the user has with a specific dataset. * @param datasetId * @param username + * @param adminMode * @returns AuthorizationOut Successful Response * @throws ApiError */ public static removeDatasetUserRoleApiV2AuthorizationsDatasetsDatasetIdUserRoleUsernameDelete( datasetId: string, username: string, + adminMode: boolean = false, ): CancelablePromise { return __request({ method: 'DELETE', path: `/api/v2/authorizations/datasets/${datasetId}/user_role/${username}`, + query: { + 'admin_mode': adminMode, + }, errors: { 422: `Validation Error`, }, @@ -254,15 +303,20 @@ export class AuthorizationService { * Get Dataset Roles * Get a list of all users and groups that have assigned roles on this dataset. 
 * @param datasetId + * @param adminMode * @returns DatasetRoles Successful Response * @throws ApiError */ public static getDatasetRolesApiV2AuthorizationsDatasetsDatasetIdRolesGet( datasetId: string, + adminMode: boolean = false, ): CancelablePromise { return __request({ method: 'GET', - path: `/api/v2/authorizations/datasets/${datasetId}/roles`, + path: `/api/v2/authorizations/datasets/${datasetId}/roles`, + query: { + 'admin_mode': adminMode, + }, errors: { 422: `Validation Error`, }, diff --git a/frontend/src/openapi/v2/services/DatasetsService.ts b/frontend/src/openapi/v2/services/DatasetsService.ts index 0f39baa7b..3ec983dce 100644 --- a/frontend/src/openapi/v2/services/DatasetsService.ts +++ b/frontend/src/openapi/v2/services/DatasetsService.ts @@ -18,6 +18,7 @@ export class DatasetsService { /** * Get Datasets + * @param adminMode * @param skip * @param limit * @param mine * @returns DatasetOut Successful Response * @throws ApiError */ public static getDatasetsApiV2DatasetsGet( + adminMode: boolean = false, skip?: number, limit: number = 10, mine: boolean = false, @@ -35,6 +37,7 @@ method: 'GET', path: `/api/v2/datasets`, query: { + 'admin_mode': adminMode, 'skip': skip, 'limit': limit, 'mine': mine, @@ -69,15 +72,20 @@ /** * Get Dataset * @param datasetId + * @param adminMode * @returns DatasetOut Successful Response * @throws ApiError */ public static getDatasetApiV2DatasetsDatasetIdGet( datasetId: string, + adminMode: boolean = false, ): CancelablePromise { return __request({ method: 'GET', path: `/api/v2/datasets/${datasetId}`, + query: { + 'admin_mode': adminMode, + }, errors: { 422: `Validation Error`, }, @@ -88,16 +96,21 @@ * Edit Dataset * @param datasetId * @param requestBody + * @param adminMode * @returns DatasetOut Successful Response * @throws ApiError */ public static editDatasetApiV2DatasetsDatasetIdPut( datasetId: string, requestBody: 
DatasetBase, + adminMode: boolean = false, ): CancelablePromise { return __request({ method: 'PUT', path: `/api/v2/datasets/${datasetId}`, + query: { + 'admin_mode': adminMode, + }, body: requestBody, mediaType: 'application/json', errors: { @@ -109,15 +122,20 @@ export class DatasetsService { /** * Delete Dataset * @param datasetId + * @param adminMode * @returns any Successful Response * @throws ApiError */ public static deleteDatasetApiV2DatasetsDatasetIdDelete( datasetId: string, + adminMode: boolean = false, ): CancelablePromise { return __request({ method: 'DELETE', path: `/api/v2/datasets/${datasetId}`, + query: { + 'admin_mode': adminMode, + }, errors: { 422: `Validation Error`, }, @@ -128,16 +146,21 @@ export class DatasetsService { * Patch Dataset * @param datasetId * @param requestBody + * @param adminMode * @returns DatasetOut Successful Response * @throws ApiError */ public static patchDatasetApiV2DatasetsDatasetIdPatch( datasetId: string, requestBody: DatasetPatch, + adminMode: boolean = false, ): CancelablePromise { return __request({ method: 'PATCH', path: `/api/v2/datasets/${datasetId}`, + query: { + 'admin_mode': adminMode, + }, body: requestBody, mediaType: 'application/json', errors: { @@ -149,6 +172,7 @@ export class DatasetsService { /** * Get Dataset Files * @param datasetId + * @param adminMode * @param folderId * @param skip * @param limit @@ -157,6 +181,7 @@ export class DatasetsService { */ public static getDatasetFilesApiV2DatasetsDatasetIdFilesGet( datasetId: string, + adminMode: boolean = false, folderId?: string, skip?: number, limit: number = 10, @@ -165,6 +190,7 @@ export class DatasetsService { method: 'GET', path: `/api/v2/datasets/${datasetId}/files`, query: { + 'admin_mode': adminMode, 'folder_id': folderId, 'skip': skip, 'limit': limit, @@ -180,6 +206,7 @@ export class DatasetsService { * @param datasetId * @param formData * @param folderId + * @param adminMode * @returns FileOut Successful Response * @throws ApiError */ @@ 
-187,12 +214,14 @@ export class DatasetsService { datasetId: string, formData: Body_save_file_api_v2_datasets__dataset_id__files_post, folderId?: string, + adminMode: boolean = false, ): CancelablePromise { return __request({ method: 'POST', path: `/api/v2/datasets/${datasetId}/files`, query: { 'folder_id': folderId, + 'admin_mode': adminMode, }, formData: formData, mediaType: 'multipart/form-data', @@ -205,6 +234,7 @@ export class DatasetsService { /** * Get Dataset Folders * @param datasetId + * @param adminMode * @param parentFolder * @param skip * @param limit @@ -213,6 +243,7 @@ export class DatasetsService { */ public static getDatasetFoldersApiV2DatasetsDatasetIdFoldersGet( datasetId: string, + adminMode: boolean = false, parentFolder?: string, skip?: number, limit: number = 10, @@ -221,6 +252,7 @@ export class DatasetsService { method: 'GET', path: `/api/v2/datasets/${datasetId}/folders`, query: { + 'admin_mode': adminMode, 'parent_folder': parentFolder, 'skip': skip, 'limit': limit, @@ -235,16 +267,21 @@ export class DatasetsService { * Add Folder * @param datasetId * @param requestBody + * @param adminMode * @returns FolderOut Successful Response * @throws ApiError */ public static addFolderApiV2DatasetsDatasetIdFoldersPost( datasetId: string, requestBody: FolderIn, + adminMode: boolean = false, ): CancelablePromise { return __request({ method: 'POST', path: `/api/v2/datasets/${datasetId}/folders`, + query: { + 'admin_mode': adminMode, + }, body: requestBody, mediaType: 'application/json', errors: { @@ -257,16 +294,21 @@ export class DatasetsService { * Delete Folder * @param datasetId * @param folderId + * @param adminMode * @returns any Successful Response * @throws ApiError */ public static deleteFolderApiV2DatasetsDatasetIdFoldersFolderIdDelete( datasetId: string, folderId: string, + adminMode: boolean = false, ): CancelablePromise { return __request({ method: 'DELETE', path: `/api/v2/datasets/${datasetId}/folders/${folderId}`, + query: { + 
'admin_mode': adminMode, + }, errors: { 422: `Validation Error`, }, @@ -277,6 +319,7 @@ export class DatasetsService { * Save Files * @param datasetId * @param formData + * @param adminMode * @param folderId * @returns FileOut Successful Response * @throws ApiError @@ -284,12 +327,14 @@ export class DatasetsService { public static saveFilesApiV2DatasetsDatasetIdFilesMultiplePost( datasetId: string, formData: Body_save_files_api_v2_datasets__dataset_id__filesMultiple_post, + adminMode: boolean = false, folderId?: string, ): CancelablePromise> { return __request({ method: 'POST', path: `/api/v2/datasets/${datasetId}/filesMultiple`, query: { + 'admin_mode': adminMode, 'folder_id': folderId, }, formData: formData, @@ -323,15 +368,20 @@ export class DatasetsService { /** * Download Dataset * @param datasetId + * @param adminMode * @returns DatasetOut Successful Response * @throws ApiError */ public static downloadDatasetApiV2DatasetsDatasetIdDownloadGet( datasetId: string, + adminMode: boolean = false, ): CancelablePromise { return __request({ method: 'GET', path: `/api/v2/datasets/${datasetId}/download`, + query: { + 'admin_mode': adminMode, + }, errors: { 422: `Validation Error`, }, @@ -342,6 +392,7 @@ export class DatasetsService { * Get Dataset Extract * @param datasetId * @param extractorName + * @param adminMode * @param requestBody * @returns any Successful Response * @throws ApiError @@ -349,6 +400,7 @@ export class DatasetsService { public static getDatasetExtractApiV2DatasetsDatasetIdExtractPost( datasetId: string, extractorName: string, + adminMode: boolean = false, requestBody?: any, ): CancelablePromise { return __request({ @@ -356,6 +408,7 @@ export class DatasetsService { path: `/api/v2/datasets/${datasetId}/extract`, query: { 'extractorName': extractorName, + 'admin_mode': adminMode, }, body: requestBody, mediaType: 'application/json', @@ -368,15 +421,20 @@ export class DatasetsService { /** * Download Dataset Thumbnail * @param datasetId + * @param 
adminMode * @returns any Successful Response * @throws ApiError */ public static downloadDatasetThumbnailApiV2DatasetsDatasetIdThumbnailGet( datasetId: string, + adminMode: boolean = false, ): CancelablePromise { return __request({ method: 'GET', path: `/api/v2/datasets/${datasetId}/thumbnail`, + query: { + 'admin_mode': adminMode, + }, errors: { 422: `Validation Error`, }, @@ -387,16 +445,21 @@ export class DatasetsService { * Add Dataset Thumbnail * @param datasetId * @param thumbnailId + * @param adminMode * @returns DatasetOut Successful Response * @throws ApiError */ public static addDatasetThumbnailApiV2DatasetsDatasetIdThumbnailThumbnailIdPatch( datasetId: string, thumbnailId: string, + adminMode: boolean = false, ): CancelablePromise { return __request({ method: 'PATCH', path: `/api/v2/datasets/${datasetId}/thumbnail/${thumbnailId}`, + query: { + 'admin_mode': adminMode, + }, errors: { 422: `Validation Error`, }, diff --git a/frontend/src/openapi/v2/services/FilesService.ts b/frontend/src/openapi/v2/services/FilesService.ts index 6a48df5f9..2566d480e 100644 --- a/frontend/src/openapi/v2/services/FilesService.ts +++ b/frontend/src/openapi/v2/services/FilesService.ts @@ -12,6 +12,7 @@ export class FilesService { /** * Download File * @param fileId + * @param adminMode * @param version * @param increment * @param datasetId @@ -20,6 +21,7 @@ export class FilesService { */ public static downloadFileApiV2FilesFileIdGet( fileId: string, + adminMode: boolean = false, version?: number, increment: boolean = true, datasetId?: string, @@ -28,6 +30,7 @@ export class FilesService { method: 'GET', path: `/api/v2/files/${fileId}`, query: { + 'admin_mode': adminMode, 'version': version, 'increment': increment, 'dataset_id': datasetId, @@ -42,6 +45,7 @@ export class FilesService { * Update File * @param fileId * @param formData + * @param adminMode * @param datasetId * @returns FileOut Successful Response * @throws ApiError @@ -49,12 +53,14 @@ export class FilesService { 
public static updateFileApiV2FilesFileIdPut( fileId: string, formData: Body_update_file_api_v2_files__file_id__put, + adminMode: boolean = false, datasetId?: string, ): CancelablePromise { return __request({ method: 'PUT', path: `/api/v2/files/${fileId}`, query: { + 'admin_mode': adminMode, 'dataset_id': datasetId, }, formData: formData, @@ -68,18 +74,21 @@ export class FilesService { /** * Delete File * @param fileId + * @param adminMode * @param datasetId * @returns any Successful Response * @throws ApiError */ public static deleteFileApiV2FilesFileIdDelete( fileId: string, + adminMode: boolean = false, datasetId?: string, ): CancelablePromise { return __request({ method: 'DELETE', path: `/api/v2/files/${fileId}`, query: { + 'admin_mode': adminMode, 'dataset_id': datasetId, }, errors: { @@ -91,6 +100,7 @@ export class FilesService { /** * Download File Url * @param fileId + * @param adminMode * @param version * @param expiresInSeconds * @param datasetId @@ -99,6 +109,7 @@ export class FilesService { */ public static downloadFileUrlApiV2FilesFileIdUrlGet( fileId: string, + adminMode: boolean = false, version?: number, expiresInSeconds: number = 3600, datasetId?: string, @@ -107,6 +118,7 @@ export class FilesService { method: 'GET', path: `/api/v2/files/${fileId}/url/`, query: { + 'admin_mode': adminMode, 'version': version, 'expires_in_seconds': expiresInSeconds, 'dataset_id': datasetId, @@ -120,18 +132,21 @@ export class FilesService { /** * Get File Summary * @param fileId + * @param adminMode * @param datasetId * @returns FileOut Successful Response * @throws ApiError */ public static getFileSummaryApiV2FilesFileIdSummaryGet( fileId: string, + adminMode: boolean = false, datasetId?: string, ): CancelablePromise { return __request({ method: 'GET', path: `/api/v2/files/${fileId}/summary`, query: { + 'admin_mode': adminMode, 'dataset_id': datasetId, }, errors: { @@ -143,6 +158,7 @@ export class FilesService { /** * Get File Version Details * @param fileId + * 
@param adminMode * @param versionNum * @param datasetId * @returns FileOut Successful Response @@ -150,6 +166,7 @@ export class FilesService { */ public static getFileVersionDetailsApiV2FilesFileIdVersionDetailsGet( fileId: string, + adminMode: boolean = false, versionNum?: number, datasetId?: string, ): CancelablePromise { @@ -157,6 +174,7 @@ export class FilesService { method: 'GET', path: `/api/v2/files/${fileId}/version_details`, query: { + 'admin_mode': adminMode, 'version_num': versionNum, 'dataset_id': datasetId, }, @@ -169,6 +187,7 @@ export class FilesService { /** * Get File Versions * @param fileId + * @param adminMode * @param skip * @param limit * @param datasetId @@ -177,6 +196,7 @@ export class FilesService { */ public static getFileVersionsApiV2FilesFileIdVersionsGet( fileId: string, + adminMode: boolean = false, skip?: number, limit: number = 20, datasetId?: string, @@ -185,6 +205,7 @@ export class FilesService { method: 'GET', path: `/api/v2/files/${fileId}/versions`, query: { + 'admin_mode': adminMode, 'skip': skip, 'limit': limit, 'dataset_id': datasetId, @@ -199,6 +220,7 @@ export class FilesService { * Post File Extract * @param fileId * @param extractorName + * @param adminMode * @param datasetId * @param requestBody * @returns any Successful Response @@ -207,6 +229,7 @@ export class FilesService { public static postFileExtractApiV2FilesFileIdExtractPost( fileId: string, extractorName: string, + adminMode: boolean = false, datasetId?: string, requestBody?: any, ): CancelablePromise { @@ -215,6 +238,7 @@ export class FilesService { path: `/api/v2/files/${fileId}/extract`, query: { 'extractorName': extractorName, + 'admin_mode': adminMode, 'dataset_id': datasetId, }, body: requestBody, @@ -236,18 +260,21 @@ export class FilesService { * credentials: credentials of logged in user * rabbitmq_client: Rabbitmq Client * @param fileId + * @param adminMode * @param datasetId * @returns any Successful Response * @throws ApiError */ public static 
resubmitFileExtractionsApiV2FilesFileIdResubmitExtractPost( fileId: string, + adminMode: boolean = false, datasetId?: string, ): CancelablePromise { return __request({ method: 'POST', path: `/api/v2/files/${fileId}/resubmit_extract`, query: { + 'admin_mode': adminMode, 'dataset_id': datasetId, }, errors: { @@ -259,18 +286,21 @@ export class FilesService { /** * Download File Thumbnail * @param fileId + * @param adminMode * @param datasetId * @returns any Successful Response * @throws ApiError */ public static downloadFileThumbnailApiV2FilesFileIdThumbnailGet( fileId: string, + adminMode: boolean = false, datasetId?: string, ): CancelablePromise { return __request({ method: 'GET', path: `/api/v2/files/${fileId}/thumbnail`, query: { + 'admin_mode': adminMode, 'dataset_id': datasetId, }, errors: { @@ -283,6 +313,7 @@ export class FilesService { * Add File Thumbnail * @param fileId * @param thumbnailId + * @param adminMode * @param datasetId * @returns FileOut Successful Response * @throws ApiError @@ -290,12 +321,14 @@ export class FilesService { public static addFileThumbnailApiV2FilesFileIdThumbnailThumbnailIdPatch( fileId: string, thumbnailId: string, + adminMode: boolean = false, datasetId?: string, ): CancelablePromise { return __request({ method: 'PATCH', path: `/api/v2/files/${fileId}/thumbnail/${thumbnailId}`, query: { + 'admin_mode': adminMode, 'dataset_id': datasetId, }, errors: { diff --git a/frontend/src/openapi/v2/services/GroupsService.ts b/frontend/src/openapi/v2/services/GroupsService.ts index 6c20074ce..9d5e7d46b 100644 --- a/frontend/src/openapi/v2/services/GroupsService.ts +++ b/frontend/src/openapi/v2/services/GroupsService.ts @@ -93,18 +93,21 @@ export class GroupsService { /** * Get Group * @param groupId + * @param adminMode * @param datasetId * @returns GroupOut Successful Response * @throws ApiError */ public static getGroupApiV2GroupsGroupIdGet( groupId: string, + adminMode: boolean = false, datasetId?: string, ): CancelablePromise { return 
__request({ method: 'GET', path: `/api/v2/groups/${groupId}`, query: { + 'admin_mode': adminMode, 'dataset_id': datasetId, }, errors: { @@ -117,6 +120,7 @@ export class GroupsService { * Edit Group * @param groupId * @param requestBody + * @param adminMode * @param datasetId * @returns GroupOut Successful Response * @throws ApiError @@ -124,12 +128,14 @@ export class GroupsService { public static editGroupApiV2GroupsGroupIdPut( groupId: string, requestBody: GroupBase, + adminMode: boolean = false, datasetId?: string, ): CancelablePromise { return __request({ method: 'PUT', path: `/api/v2/groups/${groupId}`, query: { + 'admin_mode': adminMode, 'dataset_id': datasetId, }, body: requestBody, @@ -143,18 +149,21 @@ export class GroupsService { /** * Delete Group * @param groupId + * @param adminMode * @param datasetId * @returns GroupOut Successful Response * @throws ApiError */ public static deleteGroupApiV2GroupsGroupIdDelete( groupId: string, + adminMode: boolean = false, datasetId?: string, ): CancelablePromise { return __request({ method: 'DELETE', path: `/api/v2/groups/${groupId}`, query: { + 'admin_mode': adminMode, 'dataset_id': datasetId, }, errors: { @@ -168,6 +177,7 @@ export class GroupsService { * Add a new user to a group. * @param groupId * @param username + * @param adminMode * @param role * @param datasetId * @returns GroupOut Successful Response @@ -176,6 +186,7 @@ export class GroupsService { public static addMemberApiV2GroupsGroupIdAddUsernamePost( groupId: string, username: string, + adminMode: boolean = false, role?: string, datasetId?: string, ): CancelablePromise { @@ -183,6 +194,7 @@ export class GroupsService { method: 'POST', path: `/api/v2/groups/${groupId}/add/${username}`, query: { + 'admin_mode': adminMode, 'role': role, 'dataset_id': datasetId, }, @@ -197,6 +209,7 @@ export class GroupsService { * Remove a user from a group. 
* @param groupId * @param username + * @param adminMode * @param datasetId * @returns GroupOut Successful Response * @throws ApiError @@ -204,12 +217,14 @@ export class GroupsService { public static removeMemberApiV2GroupsGroupIdRemoveUsernamePost( groupId: string, username: string, + adminMode: boolean = false, datasetId?: string, ): CancelablePromise { return __request({ method: 'POST', path: `/api/v2/groups/${groupId}/remove/${username}`, query: { + 'admin_mode': adminMode, 'dataset_id': datasetId, }, errors: { @@ -224,6 +239,7 @@ export class GroupsService { * @param groupId * @param username * @param role + * @param adminMode * @param datasetId * @returns GroupOut Successful Response * @throws ApiError @@ -232,6 +248,7 @@ export class GroupsService { groupId: string, username: string, role: string, + adminMode: boolean = false, datasetId?: string, ): CancelablePromise { return __request({ @@ -239,6 +256,7 @@ export class GroupsService { path: `/api/v2/groups/${groupId}/update/${username}`, query: { 'role': role, + 'admin_mode': adminMode, 'dataset_id': datasetId, }, errors: { diff --git a/frontend/src/openapi/v2/services/LoginService.ts b/frontend/src/openapi/v2/services/LoginService.ts index 7d982cc2d..2a1bdd772 100644 --- a/frontend/src/openapi/v2/services/LoginService.ts +++ b/frontend/src/openapi/v2/services/LoginService.ts @@ -49,6 +49,27 @@ export class LoginService { }); } + /** + * Get Admin + * @param datasetId + * @returns any Successful Response + * @throws ApiError + */ + public static getAdminApiV2AdminGet( + datasetId?: string, + ): CancelablePromise { + return __request({ + method: 'GET', + path: `/api/v2/admin`, + query: { + 'dataset_id': datasetId, + }, + errors: { + 422: `Validation Error`, + }, + }); + } + /** * Set Admin * @param useremail diff --git a/frontend/src/openapi/v2/services/MetadataService.ts b/frontend/src/openapi/v2/services/MetadataService.ts index 3090fc9c6..145a3b783 100644 --- 
a/frontend/src/openapi/v2/services/MetadataService.ts +++ b/frontend/src/openapi/v2/services/MetadataService.ts @@ -134,18 +134,21 @@ export class MetadataService { * Delete Metadata * Delete metadata by specific ID. * @param metadataId + * @param adminMode * @param datasetId * @returns any Successful Response * @throws ApiError */ public static deleteMetadataApiV2MetadataMetadataIdDelete( metadataId: string, + adminMode: boolean = false, datasetId?: string, ): CancelablePromise { return __request({ method: 'DELETE', path: `/api/v2/metadata/${metadataId}`, query: { + 'admin_mode': adminMode, 'dataset_id': datasetId, }, errors: { @@ -163,6 +166,7 @@ export class MetadataService { * Metadata document that was updated * @param metadataId * @param requestBody + * @param adminMode * @param datasetId * @returns MetadataOut Successful Response * @throws ApiError @@ -170,12 +174,14 @@ export class MetadataService { public static updateMetadataApiV2MetadataMetadataIdPatch( metadataId: string, requestBody: MetadataPatch, + adminMode: boolean = false, datasetId?: string, ): CancelablePromise { return __request({ method: 'PATCH', path: `/api/v2/metadata/${metadataId}`, query: { + 'admin_mode': adminMode, 'dataset_id': datasetId, }, body: requestBody, @@ -190,6 +196,7 @@ export class MetadataService { * Get File Metadata * Get file metadata. 
* @param fileId + * @param adminMode * @param version * @param allVersions * @param datasetId @@ -199,6 +206,7 @@ export class MetadataService { */ public static getFileMetadataApiV2FilesFileIdMetadataGet( fileId: string, + adminMode: boolean = false, version?: number, allVersions: boolean = false, datasetId?: string, @@ -208,6 +216,7 @@ export class MetadataService { method: 'GET', path: `/api/v2/files/${fileId}/metadata`, query: { + 'admin_mode': adminMode, 'version': version, 'all_versions': allVersions, 'dataset_id': datasetId, @@ -228,6 +237,7 @@ export class MetadataService { * Metadata document that was updated * @param fileId * @param requestBody + * @param adminMode * @param datasetId * @returns MetadataOut Successful Response * @throws ApiError @@ -235,12 +245,14 @@ export class MetadataService { public static replaceFileMetadataApiV2FilesFileIdMetadataPut( fileId: string, requestBody: MetadataPatch, + adminMode: boolean = false, datasetId?: string, ): CancelablePromise { return __request({ method: 'PUT', path: `/api/v2/files/${fileId}/metadata`, query: { + 'admin_mode': adminMode, 'dataset_id': datasetId, }, body: requestBody, @@ -260,6 +272,7 @@ export class MetadataService { * Metadata document that was added to database * @param fileId * @param requestBody + * @param adminMode * @param datasetId * @returns MetadataOut Successful Response * @throws ApiError @@ -267,12 +280,14 @@ export class MetadataService { public static addFileMetadataApiV2FilesFileIdMetadataPost( fileId: string, requestBody: MetadataIn, + adminMode: boolean = false, datasetId?: string, ): CancelablePromise { return __request({ method: 'POST', path: `/api/v2/files/${fileId}/metadata`, query: { + 'admin_mode': adminMode, 'dataset_id': datasetId, }, body: requestBody, @@ -287,6 +302,7 @@ export class MetadataService { * Delete File Metadata * @param fileId * @param requestBody + * @param adminMode * @param datasetId * @returns MetadataOut Successful Response * @throws ApiError @@ 
-294,12 +310,14 @@ export class MetadataService { public static deleteFileMetadataApiV2FilesFileIdMetadataDelete( fileId: string, requestBody: MetadataDelete, + adminMode: boolean = false, datasetId?: string, ): CancelablePromise { return __request({ method: 'DELETE', path: `/api/v2/files/${fileId}/metadata`, query: { + 'admin_mode': adminMode, 'dataset_id': datasetId, }, body: requestBody, @@ -319,6 +337,7 @@ export class MetadataService { * Metadata document that was updated * @param fileId * @param requestBody + * @param adminMode * @param datasetId * @returns MetadataOut Successful Response * @throws ApiError @@ -326,12 +345,14 @@ export class MetadataService { public static updateFileMetadataApiV2FilesFileIdMetadataPatch( fileId: string, requestBody: MetadataPatch, + adminMode: boolean = false, datasetId?: string, ): CancelablePromise { return __request({ method: 'PATCH', path: `/api/v2/files/${fileId}/metadata`, query: { + 'admin_mode': adminMode, 'dataset_id': datasetId, }, body: requestBody, @@ -345,17 +366,22 @@ export class MetadataService { /** * Get Dataset Metadata * @param datasetId + * @param adminMode * @param formData * @returns MetadataOut Successful Response * @throws ApiError */ public static getDatasetMetadataApiV2DatasetsDatasetIdMetadataGet( datasetId: string, + adminMode: boolean = false, formData?: Body_get_dataset_metadata_api_v2_datasets__dataset_id__metadata_get, ): CancelablePromise> { return __request({ method: 'GET', path: `/api/v2/datasets/${datasetId}/metadata`, + query: { + 'admin_mode': adminMode, + }, formData: formData, mediaType: 'application/x-www-form-urlencoded', errors: { @@ -373,16 +399,21 @@ export class MetadataService { * Metadata document that was updated * @param datasetId * @param requestBody + * @param adminMode * @returns MetadataOut Successful Response * @throws ApiError */ public static replaceDatasetMetadataApiV2DatasetsDatasetIdMetadataPut( datasetId: string, requestBody: MetadataIn, + adminMode: boolean = 
false, ): CancelablePromise { return __request({ method: 'PUT', path: `/api/v2/datasets/${datasetId}/metadata`, + query: { + 'admin_mode': adminMode, + }, body: requestBody, mediaType: 'application/json', errors: { @@ -400,16 +431,21 @@ export class MetadataService { * Metadata document that was added to database * @param datasetId * @param requestBody + * @param adminMode * @returns MetadataOut Successful Response * @throws ApiError */ public static addDatasetMetadataApiV2DatasetsDatasetIdMetadataPost( datasetId: string, requestBody: MetadataIn, + adminMode: boolean = false, ): CancelablePromise { return __request({ method: 'POST', path: `/api/v2/datasets/${datasetId}/metadata`, + query: { + 'admin_mode': adminMode, + }, body: requestBody, mediaType: 'application/json', errors: { @@ -422,16 +458,21 @@ export class MetadataService { * Delete Dataset Metadata * @param datasetId * @param requestBody + * @param adminMode * @returns MetadataOut Successful Response * @throws ApiError */ public static deleteDatasetMetadataApiV2DatasetsDatasetIdMetadataDelete( datasetId: string, requestBody: MetadataDelete, + adminMode: boolean = false, ): CancelablePromise { return __request({ method: 'DELETE', path: `/api/v2/datasets/${datasetId}/metadata`, + query: { + 'admin_mode': adminMode, + }, body: requestBody, mediaType: 'application/json', errors: { @@ -449,16 +490,21 @@ export class MetadataService { * Metadata document that was updated * @param datasetId * @param requestBody + * @param adminMode * @returns MetadataOut Successful Response * @throws ApiError */ public static updateDatasetMetadataApiV2DatasetsDatasetIdMetadataPatch( datasetId: string, requestBody: MetadataPatch, + adminMode: boolean = false, ): CancelablePromise { return __request({ method: 'PATCH', path: `/api/v2/datasets/${datasetId}/metadata`, + query: { + 'admin_mode': adminMode, + }, body: requestBody, mediaType: 'application/json', errors: { diff --git a/frontend/src/reducers/user.ts 
b/frontend/src/reducers/user.ts index 2a83e5984..f4ce76271 100644 --- a/frontend/src/reducers/user.ts +++ b/frontend/src/reducers/user.ts @@ -1,4 +1,5 @@ import { + ADMIN, ADMIN_MODE, DELETE_API_KEY, GENERATE_API_KEY, LIST_API_KEYS, @@ -15,6 +16,8 @@ import { DataAction } from "../types/action"; const defaultState: UserState = { Authorization: null, loginError: false, + admin: false, + adminMode: false, registerSucceeded: false, errorMsg: "", hashedKey: "", @@ -24,6 +27,15 @@ const defaultState: UserState = { const user = (state = defaultState, action: DataAction) => { switch (action.type) { + case ADMIN_MODE: + console.log("the action is called: ", action.adminMode) +; return Object.assign({}, state, { + adminMode: action.adminMode + }); + case ADMIN: + return Object.assign({}, state, { + admin: action.admin + }); case SET_USER: return Object.assign({}, state, { Authorization: action.Authorization, diff --git a/frontend/src/routes.tsx b/frontend/src/routes.tsx index 2cdd433ef..b91b83485 100644 --- a/frontend/src/routes.tsx +++ b/frontend/src/routes.tsx @@ -43,10 +43,11 @@ const PrivateRoute = (props): JSX.Element => { const loggedOut = useSelector((state: RootState) => state.error.loggedOut); const reason = useSelector((state: RootState) => state.error.reason); + const adminMode = useSelector((state: RootState) => state.user.adminMode); const dismissLogout = () => dispatch(resetLogout()); const listDatasetRole = (datasetId: string | undefined) => - dispatch(fetchDatasetRole(datasetId)); + dispatch(fetchDatasetRole(datasetId, adminMode)); const listFileRole = (fileId: string | undefined) => dispatch(fetchFileRole(fileId)); const { datasetId } = useParams<{ datasetId?: string }>(); @@ -122,6 +123,14 @@ export const AppRoutes = (): JSX.Element => { } /> + + + + } + /> Date: Fri, 17 Nov 2023 11:57:56 -0600 Subject: [PATCH 15/43] removing redundant things --- frontend/src/components/Explore.tsx | 7 +++---- frontend/src/components/Layout.tsx | 9 +++++---- 
frontend/src/components/datasets/Dataset.tsx | 2 +- frontend/src/reducers/user.ts | 3 +-- frontend/src/routes.tsx | 8 -------- 5 files changed, 10 insertions(+), 19 deletions(-) diff --git a/frontend/src/components/Explore.tsx b/frontend/src/components/Explore.tsx index 7a011fefc..591386922 100644 --- a/frontend/src/components/Explore.tsx +++ b/frontend/src/components/Explore.tsx @@ -39,7 +39,7 @@ export const Explore = (): JSX.Element => { const [limit] = useState(20); const [skip, setSkip] = useState(); // TODO add switch to turn on and off "mine" dataset - const [mine] = useState(true); + const [mine] = useState(false); const [prevDisabled, setPrevDisabled] = useState(true); const [nextDisabled, setNextDisabled] = useState(false); const [selectedTabIndex, setSelectedTabIndex] = useState(0); @@ -48,8 +48,7 @@ export const Explore = (): JSX.Element => { // component did mount useEffect(() => { - console.log("Rendered: ", adminMode); - listDatasets(0, limit, mine); + listDatasets(0, limit, mine, adminMode); }, []); // fetch thumbnails from each individual dataset/id calls @@ -83,7 +82,7 @@ export const Explore = (): JSX.Element => { }; useEffect(() => { if (skip !== null && skip !== undefined) { - listDatasets(skip, limit, mine); + listDatasets(skip, limit, mine, adminMode); if (skip === 0) setPrevDisabled(true); else setPrevDisabled(false); } diff --git a/frontend/src/components/Layout.tsx b/frontend/src/components/Layout.tsx index df098a903..19642f199 100644 --- a/frontend/src/components/Layout.tsx +++ b/frontend/src/components/Layout.tsx @@ -21,7 +21,7 @@ import { Link, Menu, MenuItem, MenuList } from "@mui/material"; import { Link as RouterLink, useLocation } from "react-router-dom"; import {useDispatch, useSelector} from "react-redux"; import { RootState } from "../types/data"; -import {AddBox, Explore, SupervisorAccount} from "@material-ui/icons"; +import {AddBox, Explore} from "@material-ui/icons"; import HistoryIcon from "@mui/icons-material/History"; 
import GroupIcon from "@mui/icons-material/Group"; import Gravatar from "react-gravatar"; @@ -32,6 +32,7 @@ import VpnKeyIcon from "@mui/icons-material/VpnKey"; import LogoutIcon from "@mui/icons-material/Logout"; import { EmbeddedSearch } from "./search/EmbeddedSearch"; import {setAdmin, toggleAdminMode} from "../actions/user"; +import {AdminPanelSettings} from "@mui/icons-material"; const drawerWidth = 240; @@ -238,7 +239,7 @@ export default function PersistentDrawerLeft(props) { - + Admin Mode : <>} @@ -246,9 +247,9 @@ export default function PersistentDrawerLeft(props) { - + - Normal Mode + Drop Admin Mode : <>} diff --git a/frontend/src/components/datasets/Dataset.tsx b/frontend/src/components/datasets/Dataset.tsx index fe47b8b77..92d30e36c 100644 --- a/frontend/src/components/datasets/Dataset.tsx +++ b/frontend/src/components/datasets/Dataset.tsx @@ -123,7 +123,7 @@ export const Dataset = (): JSX.Element => { listFoldersInDataset(datasetId, folderId, skip, limit); listDatasetAbout(datasetId); getFolderPath(folderId); - }, [searchParams]); + }, [searchParams, adminMode]); useEffect(() => { getMetadatDefinitions(null, 0, 100); diff --git a/frontend/src/reducers/user.ts b/frontend/src/reducers/user.ts index f4ce76271..5af98bb04 100644 --- a/frontend/src/reducers/user.ts +++ b/frontend/src/reducers/user.ts @@ -28,8 +28,7 @@ const defaultState: UserState = { const user = (state = defaultState, action: DataAction) => { switch (action.type) { case ADMIN_MODE: - console.log("the action is called: ", action.adminMode) -; return Object.assign({}, state, { + return Object.assign({}, state, { adminMode: action.adminMode }); case ADMIN: diff --git a/frontend/src/routes.tsx b/frontend/src/routes.tsx index b91b83485..6b1638dec 100644 --- a/frontend/src/routes.tsx +++ b/frontend/src/routes.tsx @@ -123,14 +123,6 @@ export const AppRoutes = (): JSX.Element => { } /> - - - - } - /> Date: Wed, 22 Nov 2023 13:41:58 -0600 Subject: [PATCH 16/43] small fix. 
the name of the method for file was off so there was an error getting file role --- frontend/src/actions/authorization.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/src/actions/authorization.js b/frontend/src/actions/authorization.js index 8ede08f3d..59f2ae8f7 100644 --- a/frontend/src/actions/authorization.js +++ b/frontend/src/actions/authorization.js @@ -32,7 +32,7 @@ export const RECEIVE_FILE_ROLE = "RECEIVE_FILE_ROLE"; export function fetchFileRole(fileId, adminMode) { return (dispatch) => { - return V2.AuthorizationService.getFileRoleApiV2AuthorizationsFilesFileIdRoleAdminModeAdminModeGet( + return V2.AuthorizationService.getFileRoleApiV2AuthorizationsFilesFileIdRoleGet( fileId, adminMode ) From a2b9004add56e31e4afda87f05b48433445fab58 Mon Sep 17 00:00:00 2001 From: Dipannita Dey Date: Wed, 22 Nov 2023 13:55:33 -0600 Subject: [PATCH 17/43] fix to visualization rendering --- frontend/src/actions/authorization.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/src/actions/authorization.js b/frontend/src/actions/authorization.js index 59f2ae8f7..453333c09 100644 --- a/frontend/src/actions/authorization.js +++ b/frontend/src/actions/authorization.js @@ -56,7 +56,7 @@ export const RECEIVE_GROUP_ROLE = "RECEIVE_GROUP_ROLE"; export function fetchGroupRole(groupId, adminMode) { return (dispatch) => { - return V2.AuthorizationService.getGroupRoleApiV2AuthorizationsGroupsGroupIdRoleAdminModeAdminModeGet( + return V2.AuthorizationService.getGroupRoleApiV2AuthorizationsGroupsGroupIdRoleGet( groupId, adminMode ) From 444bc1ba647a52674c39efe742b73d7306f9edf6 Mon Sep 17 00:00:00 2001 From: Chen Wang Date: Mon, 11 Dec 2023 14:21:11 -0600 Subject: [PATCH 18/43] backend change to depends --- backend/app/deps/authorization_deps.py | 10 +- backend/app/keycloak_auth.py | 74 +++++----- backend/app/routers/authorization.py | 26 ++-- backend/app/routers/datasets.py | 32 ++--- backend/app/routers/elasticsearch.py | 6 +- 
backend/app/routers/files.py | 176 +++++++++++------------ backend/app/routers/groups.py | 72 +++++----- backend/app/routers/metadata.py | 54 +++---- backend/app/routers/metadata_datasets.py | 90 ++++++------ backend/app/routers/metadata_files.py | 138 +++++++++--------- 10 files changed, 345 insertions(+), 333 deletions(-) diff --git a/backend/app/deps/authorization_deps.py b/backend/app/deps/authorization_deps.py index b196d530c..5924ed4ae 100644 --- a/backend/app/deps/authorization_deps.py +++ b/backend/app/deps/authorization_deps.py @@ -2,7 +2,7 @@ from beanie.operators import Or from fastapi import Depends, HTTPException -from app.keycloak_auth import get_current_username +from app.keycloak_auth import get_current_username, get_admin_mode from app.models.authorization import RoleType, AuthorizationDB from app.models.datasets import DatasetDB, DatasetStatus from app.models.files import FileDB @@ -146,7 +146,7 @@ def __init__(self, role: str): async def __call__( self, dataset_id: str, - admin_mode: bool = False, + admin_mode: bool = Depends(get_admin_mode), current_user: str = Depends(get_current_username), admin: bool = Depends(get_admin), ): @@ -203,7 +203,7 @@ def __init__(self, role: str): async def __call__( self, file_id: str, - admin_mode: bool = False, + admin_mode: bool = Depends(get_admin_mode), current_user: str = Depends(get_current_username), admin: bool = Depends(get_admin), ): @@ -241,7 +241,7 @@ def __init__(self, role: str): async def __call__( self, metadata_id: str, - admin_mode: bool = False, + admin_mode: bool = Depends(get_admin_mode), current_user: str = Depends(get_current_username), admin: bool = Depends(get_admin), ): @@ -308,7 +308,7 @@ def __init__(self, role: str): async def __call__( self, group_id: str, - admin_mode: bool = False, + admin_mode: bool = Depends(get_admin_mode), current_user: str = Depends(get_current_username), admin: bool = Depends(get_admin), ): diff --git a/backend/app/keycloak_auth.py 
b/backend/app/keycloak_auth.py index a19645de7..3e252deca 100644 --- a/backend/app/keycloak_auth.py +++ b/backend/app/keycloak_auth.py @@ -49,13 +49,16 @@ async def get_idp_public_key(): # Passing in API key via header. `auto_error=False` makes it so `get_current_user()` runs even if it doesn't find it api_key_header = APIKeyHeader(name="X-API-KEY", auto_error=False) +# Passing in admin mode via header. +admin_mode_header = APIKeyHeader(name="X-ADMIN-MODE", auto_error=False) + # Passing in JWT token via cookie. `auto_error=False` makes it so `get_current_user()` runs even if it doesn't find it. jwt_header = APIKeyCookie(name="Authorization", auto_error=False) async def get_token( - token: str = Security(oauth2_scheme), - api_key: str = Security(api_key_header), + token: str = Security(oauth2_scheme), + api_key: str = Security(api_key_header), ) -> Json: """Decode token. Use to secure endpoints.""" if token: @@ -91,17 +94,17 @@ async def get_token( payload = serializer.loads(api_key) # Key is valid, check expiration date in database if ( - await ListenerAPIKeyDB.find_one( - ListenerAPIKeyDB.user == payload["user"], - ListenerAPIKeyDB.key == payload["key"], - ) + await ListenerAPIKeyDB.find_one( + ListenerAPIKeyDB.user == payload["user"], + ListenerAPIKeyDB.key == payload["key"], + ) ) is not None: return {"preferred_username": payload["user"]} elif ( - key := await UserAPIKeyDB.find_one( - UserAPIKeyDB.user == payload["user"], - UserAPIKeyDB.key == payload["key"], - ) + key := await UserAPIKeyDB.find_one( + UserAPIKeyDB.user == payload["user"], + UserAPIKeyDB.key == payload["key"], + ) ) is not None: current_time = datetime.utcnow() if key.expires is not None and current_time >= key.expires: @@ -140,9 +143,9 @@ async def get_user(identity: Json = Depends(get_token)): async def get_current_user( - token: str = Security(oauth2_scheme), - api_key: str = Security(api_key_header), - token_cookie: str = Security(jwt_header), + token: str = Security(oauth2_scheme), + 
api_key: str = Security(api_key_header), + token_cookie: str = Security(jwt_header), ) -> UserOut: """Retrieve the user object from Mongo by first getting user id from JWT and then querying Mongo. Potentially expensive. Use `get_current_username` if all you need is user name. @@ -178,18 +181,18 @@ async def get_current_user( payload = serializer.loads(api_key) # Key is valid, check expiration date in database if ( - key := await ListenerAPIKeyDB.find_one( - ListenerAPIKeyDB.user == payload["user"], - ListenerAPIKeyDB.key == payload["key"], - ) + key := await ListenerAPIKeyDB.find_one( + ListenerAPIKeyDB.user == payload["user"], + ListenerAPIKeyDB.key == payload["key"], + ) ) is not None: user = await UserDB.find_one(UserDB.email == key.user) return UserOut(**user.dict()) elif ( - key := await UserAPIKeyDB.find_one( - UserAPIKeyDB.user == payload["user"], - UserAPIKeyDB.key == payload["key"], - ) + key := await UserAPIKeyDB.find_one( + UserAPIKeyDB.user == payload["user"], + UserAPIKeyDB.key == payload["key"], + ) ) is not None: current_time = datetime.utcnow() @@ -224,10 +227,17 @@ async def get_current_user( ) +async def get_admin_mode( + admin_mode: bool = Security(admin_mode_header), +) -> bool: + """Get Admin mode from Header.""" + return admin_mode + + async def get_current_username( - token: str = Security(oauth2_scheme), - api_key: str = Security(api_key_header), - token_cookie: str = Security(jwt_header), + token: str = Security(oauth2_scheme), + api_key: str = Security(api_key_header), + token_cookie: str = Security(jwt_header), ) -> str: """Retrieve the user id from the JWT token. 
Does not query MongoDB.""" if token: @@ -260,18 +270,18 @@ async def get_current_username( payload = serializer.loads(api_key) # Key is valid, check expiration date in database if ( - key := await ListenerAPIKeyDB.find_one( - ListenerAPIKeyDB.user == payload["user"], - ListenerAPIKeyDB.key == payload["key"], - ) + key := await ListenerAPIKeyDB.find_one( + ListenerAPIKeyDB.user == payload["user"], + ListenerAPIKeyDB.key == payload["key"], + ) ) is not None: # Key is coming from a listener job return key.user elif ( - key := await UserAPIKeyDB.find_one( - UserAPIKeyDB.user == payload["user"], - UserAPIKeyDB.key == payload["key"], - ) + key := await UserAPIKeyDB.find_one( + UserAPIKeyDB.user == payload["user"], + UserAPIKeyDB.key == payload["key"], + ) ) is not None: # Key is coming from a user request current_time = datetime.utcnow() diff --git a/backend/app/routers/authorization.py b/backend/app/routers/authorization.py index 23062c263..3bb5f7329 100644 --- a/backend/app/routers/authorization.py +++ b/backend/app/routers/authorization.py @@ -11,7 +11,7 @@ get_role_by_metadata, get_role_by_group, ) -from app.keycloak_auth import get_current_username, get_user +from app.keycloak_auth import get_current_username, get_user, get_admin_mode from app.models.authorization import ( AuthorizationBase, AuthorizationMetadata, @@ -40,7 +40,7 @@ async def save_authorization( dataset_id: str, authorization_in: AuthorizationBase, - admin_mode: bool = False, + admin_mode: bool = Depends(get_admin_mode), user=Depends(get_current_username), allow: bool = Depends(Authorization("editor")), ): @@ -70,7 +70,7 @@ async def save_authorization( @router.get("/datasets/{dataset_id}/role", response_model=AuthorizationOut) async def get_dataset_role( dataset_id: str, - admin_mode: bool = False, + admin_mode: bool = Depends(get_admin_mode), current_user=Depends(get_current_username), admin=Depends(get_admin), ): @@ -113,7 +113,7 @@ async def get_dataset_role( 
@router.get("/datasets/{dataset_id}/role/viewer}") async def get_dataset_role_viewer( dataset_id: str, - admin_mode: bool = False, + admin_mode: bool = Depends(get_admin_mode), allow: bool = Depends(Authorization("viewer")), ): """Used for testing only. Returns true if user has viewer permission on dataset, otherwise throws a 403 Forbidden HTTP exception. @@ -124,7 +124,7 @@ async def get_dataset_role_viewer( @router.get("/datasets/{dataset_id}/role/owner}") async def get_dataset_role_owner( dataset_id: str, - admin_mode: bool = False, + admin_mode: bool = Depends(get_admin_mode), allow: bool = Depends(Authorization("owner")), ): """Used for testing only. Returns true if user has owner permission on dataset, otherwise throws a 403 Forbidden HTTP exception. @@ -135,7 +135,7 @@ async def get_dataset_role_owner( @router.get("/files/{file_id}/role}", response_model=RoleType) async def get_file_role( file_id: str, - admin_mode: bool = False, + admin_mode: bool = Depends(get_admin_mode), current_user=Depends(get_current_username), role: RoleType = Depends(get_role_by_file), admin=Depends(get_admin), @@ -150,7 +150,7 @@ async def get_file_role( @router.get("/metadata/{metadata_id}/role}", response_model=AuthorizationMetadata) async def get_metadata_role( metadata_id: str, - admin_mode: bool = False, + admin_mode: bool = Depends(get_admin_mode), current_user=Depends(get_current_username), role: RoleType = Depends(get_role_by_metadata), admin=Depends(get_admin), @@ -165,7 +165,7 @@ async def get_metadata_role( @router.get("/groups/{group_id}/role}", response_model=RoleType) async def get_group_role( group_id: str, - admin_mode: bool = False, + admin_mode: bool = Depends(get_admin_mode), current_user=Depends(get_current_username), role: RoleType = Depends(get_role_by_group), admin=Depends(get_admin), @@ -185,7 +185,7 @@ async def set_dataset_group_role( dataset_id: PydanticObjectId, group_id: PydanticObjectId, role: RoleType, - admin_mode: bool = False, + admin_mode: bool = 
Depends(get_admin_mode), es=Depends(get_elasticsearchclient), user_id=Depends(get_user), allow: bool = Depends(Authorization("editor")), @@ -239,7 +239,7 @@ async def set_dataset_user_role( dataset_id: str, username: str, role: RoleType, - admin_mode: bool = False, + admin_mode: bool = Depends(get_admin_mode), es=Depends(get_elasticsearchclient), user_id=Depends(get_user), allow: bool = Depends(Authorization("editor")), @@ -298,7 +298,7 @@ async def set_dataset_user_role( async def remove_dataset_group_role( dataset_id: PydanticObjectId, group_id: PydanticObjectId, - admin_mode: bool = False, + admin_mode: bool = Depends(get_admin_mode), es=Depends(get_elasticsearchclient), user_id=Depends(get_user), allow: bool = Depends(Authorization("editor")), @@ -334,7 +334,7 @@ async def remove_dataset_group_role( async def remove_dataset_user_role( dataset_id: str, username: str, - admin_mode: bool = False, + admin_mode: bool = Depends(get_admin_mode), es=Depends(get_elasticsearchclient), user_id=Depends(get_user), allow: bool = Depends(Authorization("editor")), @@ -363,7 +363,7 @@ async def remove_dataset_user_role( @router.get("/datasets/{dataset_id}/roles}", response_model=DatasetRoles) async def get_dataset_roles( dataset_id: str, - admin_mode: bool = False, + admin_mode: bool = Depends(get_admin_mode), allow: bool = Depends(Authorization("editor")), ): """Get a list of all users and groups that have assigned roles on this dataset.""" diff --git a/backend/app/routers/datasets.py b/backend/app/routers/datasets.py index eb44da57a..e3c883b9a 100644 --- a/backend/app/routers/datasets.py +++ b/backend/app/routers/datasets.py @@ -35,7 +35,7 @@ from app.keycloak_auth import ( get_token, get_user, - get_current_user, + get_current_user, get_admin_mode, ) from app.models.authorization import AuthorizationDB, RoleType from app.models.datasets import ( @@ -207,7 +207,7 @@ async def save_dataset( @router.get("", response_model=List[DatasetOut]) async def get_datasets( - admin_mode: 
bool = False, + admin_mode: bool = Depends(get_admin_mode), user_id=Depends(get_user), skip: int = 0, limit: int = 10, @@ -246,7 +246,7 @@ async def get_datasets( async def get_dataset( dataset_id: str, authenticated: bool = Depends(CheckStatus("AUTHENTICATED")), - admin_mode: bool = False, + admin_mode: bool = Depends(get_admin_mode), allow: bool = Depends(Authorization("viewer")), ): if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: @@ -257,7 +257,7 @@ async def get_dataset( @router.get("/{dataset_id}/files", response_model=List[FileOut]) async def get_dataset_files( dataset_id: str, - admin_mode: bool = False, + admin_mode: bool = Depends(get_admin_mode), folder_id: Optional[str] = None, authenticated: bool = Depends(CheckStatus("AUTHENTICATED")), user_id=Depends(get_user), @@ -287,7 +287,7 @@ async def get_dataset_files( async def edit_dataset( dataset_id: str, dataset_info: DatasetBase, - admin_mode: bool = False, + admin_mode: bool = Depends(get_admin_mode), user=Depends(get_current_user), es=Depends(dependencies.get_elasticsearchclient), allow: bool = Depends(Authorization("editor")), @@ -308,7 +308,7 @@ async def edit_dataset( async def patch_dataset( dataset_id: str, dataset_info: DatasetPatch, - admin_mode: bool = False, + admin_mode: bool = Depends(get_admin_mode), user=Depends(get_current_user), es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), allow: bool = Depends(Authorization("editor")), @@ -332,7 +332,7 @@ async def patch_dataset( @router.delete("/{dataset_id}") async def delete_dataset( dataset_id: str, - admin_mode: bool = False, + admin_mode: bool = Depends(get_admin_mode), fs: Minio = Depends(dependencies.get_fs), es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), allow: bool = Depends(Authorization("editor")), @@ -363,7 +363,7 @@ async def delete_dataset( async def add_folder( dataset_id: str, folder_in: FolderIn, - admin_mode: bool = False, + admin_mode: bool = 
Depends(get_admin_mode), user=Depends(get_current_user), allow: bool = Depends(Authorization("uploader")), ): @@ -385,7 +385,7 @@ async def add_folder( @router.get("/{dataset_id}/folders", response_model=List[FolderOut]) async def get_dataset_folders( dataset_id: str, - admin_mode: bool = False, + admin_mode: bool = Depends(get_admin_mode), parent_folder: Optional[str] = None, user_id=Depends(get_user), authenticated: bool = Depends(CheckStatus("authenticated")), @@ -419,7 +419,7 @@ async def get_dataset_folders( async def delete_folder( dataset_id: str, folder_id: str, - admin_mode: bool = False, + admin_mode: bool = Depends(get_admin_mode), fs: Minio = Depends(dependencies.get_fs), es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), allow: bool = Depends(Authorization("editor")), @@ -466,7 +466,7 @@ async def save_file( user=Depends(get_current_user), fs: Minio = Depends(dependencies.get_fs), file: UploadFile = File(...), - admin_mode: bool = False, + admin_mode: bool = Depends(get_admin_mode), es=Depends(dependencies.get_elasticsearchclient), rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), allow: bool = Depends(Authorization("uploader")), @@ -504,7 +504,7 @@ async def save_file( async def save_files( dataset_id: str, files: List[UploadFile], - admin_mode: bool = False, + admin_mode: bool = Depends(get_admin_mode), folder_id: Optional[str] = None, user=Depends(get_current_user), fs: Minio = Depends(dependencies.get_fs), @@ -680,7 +680,7 @@ async def create_dataset_from_zip( @router.get("/{dataset_id}/download", response_model=DatasetOut) async def download_dataset( dataset_id: str, - admin_mode: bool = False, + admin_mode: bool = Depends(get_admin_mode), user=Depends(get_current_user), fs: Minio = Depends(dependencies.get_fs), allow: bool = Depends(Authorization("viewer")), @@ -841,7 +841,7 @@ async def get_dataset_extract( dataset_id: str, extractorName: str, request: Request, - admin_mode: bool = False, + admin_mode: bool = 
Depends(get_admin_mode), # parameters don't have a fixed model shape parameters: dict = None, user=Depends(get_current_user), @@ -867,7 +867,7 @@ async def get_dataset_extract( @router.get("/{dataset_id}/thumbnail") async def download_dataset_thumbnail( dataset_id: str, - admin_mode: bool = False, + admin_mode: bool = Depends(get_admin_mode), fs: Minio = Depends(dependencies.get_fs), allow: bool = Depends(Authorization("viewer")), ): @@ -896,7 +896,7 @@ async def download_dataset_thumbnail( async def add_dataset_thumbnail( dataset_id: str, thumbnail_id: str, - admin_mode: bool = False, + admin_mode: bool = Depends(get_admin_mode), allow: bool = Depends(Authorization("editor")), ): if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: diff --git a/backend/app/routers/elasticsearch.py b/backend/app/routers/elasticsearch.py index f9db94190..1b38b7a10 100644 --- a/backend/app/routers/elasticsearch.py +++ b/backend/app/routers/elasticsearch.py @@ -4,7 +4,7 @@ from fastapi.routing import APIRouter, Request from app.config import settings -from app.keycloak_auth import get_current_username +from app.keycloak_auth import get_current_username, get_admin_mode from app.routers.authentication import get_admin from app.search.connect import connect_elasticsearch, search_index @@ -12,7 +12,9 @@ def _add_permissions_clause( - query, username: str, admin_mode: bool = False, admin: bool = Depends(get_admin) + query, username: str, + admin_mode: bool = Depends(get_admin_mode), + admin: bool = Depends(get_admin) ): """Append filter to Elasticsearch object that restricts permissions based on the requesting user.""" # TODO: Add public filter once added diff --git a/backend/app/routers/files.py b/backend/app/routers/files.py index 3d7791565..5f3b11053 100644 --- a/backend/app/routers/files.py +++ b/backend/app/routers/files.py @@ -24,7 +24,7 @@ from app import dependencies from app.config import settings from app.deps.authorization_deps import FileAuthorization 
-from app.keycloak_auth import get_current_user, get_token +from app.keycloak_auth import get_current_user, get_token, get_admin_mode from app.models.files import FileOut, FileVersion, FileDB, FileVersionDB, StorageType from app.models.metadata import MetadataDB from app.models.thumbnails import ThumbnailDB @@ -44,10 +44,10 @@ async def _resubmit_file_extractors( - file: FileOut, - rabbitmq_client: BlockingChannel, - user: UserOut, - credentials: HTTPAuthorizationCredentials = Security(security), + file: FileOut, + rabbitmq_client: BlockingChannel, + user: UserOut, + credentials: HTTPAuthorizationCredentials = Security(security), ): """This helper method will check metadata. We get the extractors run from metadata from extractors. Then they are resubmitted. At present parameters are not stored. This will change once Jobs are @@ -61,8 +61,8 @@ async def _resubmit_file_extractors( """ resubmitted_jobs = [] async for job in EventListenerJobDB.find( - EventListenerJobDB.resource_ref.resource_id == ObjectId(file.id), - EventListenerJobDB.resource_ref.version == file.version_num - 1, + EventListenerJobDB.resource_ref.resource_id == ObjectId(file.id), + EventListenerJobDB.resource_ref.version == file.version_num - 1, ): resubmitted_job = {"listener_id": job.listener_id, "parameters": job.parameters} try: @@ -86,13 +86,13 @@ async def _resubmit_file_extractors( # TODO: Move this to MongoDB middle layer async def add_file_entry( - new_file: FileDB, - user: UserOut, - fs: Minio, - es: Elasticsearch, - rabbitmq_client: BlockingChannel, - file: Optional[io.BytesIO] = None, - content_type: Optional[str] = None, + new_file: FileDB, + user: UserOut, + fs: Minio, + es: Elasticsearch, + rabbitmq_client: BlockingChannel, + file: Optional[io.BytesIO] = None, + content_type: Optional[str] = None, ): """Insert FileDB object into MongoDB (makes Clowder ID), then Minio (makes version ID), then update MongoDB with the version ID from Minio. 
@@ -151,11 +151,11 @@ async def add_file_entry( async def add_local_file_entry( - new_file: FileDB, - user: UserOut, - es: Elasticsearch, - rabbitmq_client: BlockingChannel, - content_type: Optional[str] = None, + new_file: FileDB, + user: UserOut, + es: Elasticsearch, + rabbitmq_client: BlockingChannel, + content_type: Optional[str] = None, ): """Insert FileDB object into MongoDB (makes Clowder ID). Bytes are not stored in DB and versioning not supported for local files.""" @@ -181,7 +181,7 @@ async def add_local_file_entry( # TODO: Move this to MongoDB middle layer async def remove_file_entry( - file_id: Union[str, ObjectId], fs: Minio, es: Elasticsearch + file_id: Union[str, ObjectId], fs: Minio, es: Elasticsearch ): """Remove FileDB object into MongoDB, Minio, and associated metadata and version information.""" # TODO: Deleting individual versions will require updating version_id in mongo, or deleting entire document @@ -208,16 +208,16 @@ async def remove_local_file_entry(file_id: Union[str, ObjectId], es: Elasticsear @router.put("/{file_id}", response_model=FileOut) async def update_file( - file_id: str, - admin_mode: bool = False, - token=Depends(get_token), - user=Depends(get_current_user), - fs: Minio = Depends(dependencies.get_fs), - file: UploadFile = File(...), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), - credentials: HTTPAuthorizationCredentials = Security(security), - rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), - allow: bool = Depends(FileAuthorization("uploader")), + file_id: str, + admin_mode: bool = Depends(get_admin_mode), + token=Depends(get_token), + user=Depends(get_current_user), + fs: Minio = Depends(dependencies.get_fs), + file: UploadFile = File(...), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + credentials: HTTPAuthorizationCredentials = Security(security), + rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), + allow: bool = 
Depends(FileAuthorization("uploader")), ): # Check all connection and abort if any one of them is not available if fs is None or es is None: @@ -226,8 +226,8 @@ async def update_file( if (updated_file := await FileDB.get(PydanticObjectId(file_id))) is not None: if ( - file.filename != updated_file.name - or file.content_type != updated_file.content_type.content_type + file.filename != updated_file.name + or file.content_type != updated_file.content_type.content_type ): raise HTTPException( status_code=400, @@ -299,12 +299,12 @@ async def update_file( @router.get("/{file_id}") async def download_file( - file_id: str, - admin_mode: bool = False, - version: Optional[int] = None, - increment: Optional[bool] = True, - fs: Minio = Depends(dependencies.get_fs), - allow: bool = Depends(FileAuthorization("viewer")), + file_id: str, + admin_mode: bool = Depends(get_admin_mode), + version: Optional[int] = None, + increment: Optional[bool] = True, + fs: Minio = Depends(dependencies.get_fs), + allow: bool = Depends(FileAuthorization("viewer")), ): # If file exists in MongoDB, download from Minio if (file := await FileDB.get(PydanticObjectId(file_id))) is not None: @@ -334,7 +334,7 @@ async def download_file( content.stream(settings.MINIO_UPLOAD_CHUNK_SIZE) ) response.headers["Content-Disposition"] = ( - "attachment; filename=%s" % file.name + "attachment; filename=%s" % file.name ) elif file.storage_type == StorageType.LOCAL: @@ -361,12 +361,12 @@ async def download_file( @router.get("/{file_id}/url/") async def download_file_url( - file_id: str, - admin_mode: bool = False, - version: Optional[int] = None, - expires_in_seconds: Optional[int] = 3600, - external_fs: Minio = Depends(dependencies.get_external_fs), - allow: bool = Depends(FileAuthorization("viewer")), + file_id: str, + admin_mode: bool = Depends(get_admin_mode), + version: Optional[int] = None, + expires_in_seconds: Optional[int] = 3600, + external_fs: Minio = Depends(dependencies.get_external_fs), + allow: bool = 
Depends(FileAuthorization("viewer")), ): # If file exists in MongoDB, download from Minio if (file := await FileDB.get(PydanticObjectId(file_id))) is not None: @@ -414,11 +414,11 @@ async def download_file_url( @router.delete("/{file_id}") async def delete_file( - file_id: str, - admin_mode: bool = False, - fs: Minio = Depends(dependencies.get_fs), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), - allow: bool = Depends(FileAuthorization("editor")), + file_id: str, + admin_mode: bool = Depends(get_admin_mode), + fs: Minio = Depends(dependencies.get_fs), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + allow: bool = Depends(FileAuthorization("editor")), ): if (file := await FileDB.get(PydanticObjectId(file_id))) is not None: if file.storage_type == StorageType.LOCAL: @@ -432,9 +432,9 @@ async def delete_file( @router.get("/{file_id}/summary", response_model=FileOut) async def get_file_summary( - file_id: str, - admin_mode: bool = False, - allow: bool = Depends(FileAuthorization("viewer")), + file_id: str, + admin_mode: bool = Depends(get_admin_mode), + allow: bool = Depends(FileAuthorization("viewer")), ): if (file := await FileDB.get(PydanticObjectId(file_id))) is not None: # TODO: Incrementing too often (3x per page view) @@ -447,10 +447,10 @@ async def get_file_summary( @router.get("/{file_id}/version_details", response_model=FileOut) async def get_file_version_details( - file_id: str, - admin_mode: bool = False, - version_num: Optional[int] = 0, - allow: bool = Depends(FileAuthorization("viewer")), + file_id: str, + admin_mode: bool = Depends(get_admin_mode), + version_num: Optional[int] = 0, + allow: bool = Depends(FileAuthorization("viewer")), ): if (file := await FileDB.get(PydanticObjectId(file_id))) is not None: # TODO: Incrementing too often (3x per page view) @@ -470,17 +470,17 @@ async def get_file_version_details( @router.get("/{file_id}/versions", response_model=List[FileVersion]) async def get_file_versions( - 
file_id: str, - admin_mode: bool = False, - skip: int = 0, - limit: int = 20, - allow: bool = Depends(FileAuthorization("viewer")), + file_id: str, + admin_mode: bool = Depends(get_admin_mode), + skip: int = 0, + limit: int = 20, + allow: bool = Depends(FileAuthorization("viewer")), ): if (file := await FileDB.get(PydanticObjectId(file_id))) is not None: mongo_versions = [] if file.storage_type == StorageType.MINIO: async for ver in FileVersionDB.find( - FileVersionDB.file_id == ObjectId(file_id) + FileVersionDB.file_id == ObjectId(file_id) ).sort(-FileVersionDB.created).skip(skip).limit(limit): mongo_versions.append(FileVersion(**ver.dict())) return mongo_versions @@ -491,15 +491,15 @@ async def get_file_versions( # submits file to extractor @router.post("/{file_id}/extract") async def post_file_extract( - file_id: str, - extractorName: str, - admin_mode: bool = False, - # parameters don't have a fixed model shape - parameters: dict = None, - user=Depends(get_current_user), - credentials: HTTPAuthorizationCredentials = Security(security), - rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), - allow: bool = Depends(FileAuthorization("uploader")), + file_id: str, + extractorName: str, + admin_mode: bool = Depends(get_admin_mode), + # parameters don't have a fixed model shape + parameters: dict = None, + user=Depends(get_current_user), + credentials: HTTPAuthorizationCredentials = Security(security), + rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), + allow: bool = Depends(FileAuthorization("uploader")), ): if extractorName is None: raise HTTPException(status_code=400, detail=f"No extractorName specified") @@ -524,12 +524,12 @@ async def post_file_extract( @router.post("/{file_id}/resubmit_extract") async def resubmit_file_extractions( - file_id: str, - admin_mode: bool = False, - user=Depends(get_current_user), - credentials: HTTPAuthorizationCredentials = Security(security), - rabbitmq_client: BlockingChannel = 
Depends(dependencies.get_rabbitmq), - allow: bool = Depends(FileAuthorization("editor")), + file_id: str, + admin_mode: bool = Depends(get_admin_mode), + user=Depends(get_current_user), + credentials: HTTPAuthorizationCredentials = Security(security), + rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), + allow: bool = Depends(FileAuthorization("editor")), ): """This route will check metadata. We get the extractors run from metadata from extractors. Then they are resubmitted. At present parameters are not stored. This will change once Jobs are @@ -551,10 +551,10 @@ async def resubmit_file_extractions( @router.get("/{file_id}/thumbnail") async def download_file_thumbnail( - file_id: str, - admin_mode: bool = False, - fs: Minio = Depends(dependencies.get_fs), - allow: bool = Depends(FileAuthorization("viewer")), + file_id: str, + admin_mode: bool = Depends(get_admin_mode), + fs: Minio = Depends(dependencies.get_fs), + allow: bool = Depends(FileAuthorization("viewer")), ): # If file exists in MongoDB, download from Minio if (file := await FileDB.get(PydanticObjectId(file_id))) is not None: @@ -576,15 +576,15 @@ async def download_file_thumbnail( @router.patch("/{file_id}/thumbnail/{thumbnail_id}", response_model=FileOut) async def add_file_thumbnail( - file_id: str, - thumbnail_id: str, - admin_mode: bool = False, - allow: bool = Depends(FileAuthorization("editor")), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + file_id: str, + thumbnail_id: str, + admin_mode: bool = Depends(get_admin_mode), + allow: bool = Depends(FileAuthorization("editor")), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), ): if (file := await FileDB.get(PydanticObjectId(file_id))) is not None: if ( - thumbnail := await ThumbnailDB.get(PydanticObjectId(thumbnail_id)) + thumbnail := await ThumbnailDB.get(PydanticObjectId(thumbnail_id)) ) is not None: # TODO: Should we garbage collect existing thumbnail if nothing else points to it? 
file.thumbnail_id = thumbnail_id diff --git a/backend/app/routers/groups.py b/backend/app/routers/groups.py index 9819ebe63..0032e915b 100644 --- a/backend/app/routers/groups.py +++ b/backend/app/routers/groups.py @@ -7,7 +7,7 @@ from fastapi import HTTPException, Depends, APIRouter from app.deps.authorization_deps import AuthorizationDB, GroupAuthorization -from app.keycloak_auth import get_current_user, get_user +from app.keycloak_auth import get_current_user, get_user, get_admin_mode from app.models.authorization import RoleType from app.models.groups import GroupOut, GroupIn, GroupDB, GroupBase, Member from app.models.users import UserOut, UserDB @@ -17,8 +17,8 @@ @router.post("", response_model=GroupOut) async def save_group( - group_in: GroupIn, - user=Depends(get_current_user), + group_in: GroupIn, + user=Depends(get_current_user), ): group_db = GroupDB(**group_in.dict(), creator=user.email) user_member = Member(user=user, editor=True) @@ -30,9 +30,9 @@ async def save_group( @router.get("", response_model=List[GroupOut]) async def get_groups( - user_id=Depends(get_user), - skip: int = 0, - limit: int = 10, + user_id=Depends(get_user), + skip: int = 0, + limit: int = 10, ): """Get a list of all Groups in the db the user is a member/owner of. @@ -56,10 +56,10 @@ async def get_groups( @router.get("/search/{search_term}", response_model=List[GroupOut]) async def search_group( - search_term: str, - user_id=Depends(get_user), - skip: int = 0, - limit: int = 10, + search_term: str, + user_id=Depends(get_user), + skip: int = 0, + limit: int = 10, ): """Search all groups in the db based on text. 
@@ -85,9 +85,9 @@ async def search_group( @router.get("/{group_id}", response_model=GroupOut) async def get_group( - group_id: str, - admin_mode: bool = False, - allow: bool = Depends(GroupAuthorization("viewer")), + group_id: str, + admin_mode: bool = Depends(get_admin_mode), + allow: bool = Depends(GroupAuthorization("viewer")), ): if (group := await GroupDB.get(PydanticObjectId(group_id))) is not None: return group.dict() @@ -96,11 +96,11 @@ async def get_group( @router.put("/{group_id}", response_model=GroupOut) async def edit_group( - group_id: str, - group_info: GroupBase, - admin_mode: bool = False, - user_id=Depends(get_user), - allow: bool = Depends(GroupAuthorization("editor")), + group_id: str, + group_info: GroupBase, + admin_mode: bool = Depends(get_admin_mode), + user_id=Depends(get_user), + allow: bool = Depends(GroupAuthorization("editor")), ): if (group := await GroupDB.get(PydanticObjectId(group_id))) is not None: group_dict = dict(group_info) if group_info is not None else {} @@ -123,7 +123,7 @@ async def edit_group( if original_user not in groups_users: # remove them from auth async for auth in AuthorizationDB.find( - {"group_ids": ObjectId(group_id)} + {"group_ids": ObjectId(group_id)} ): auth.user_ids.remove(original_user.user.email) await auth.replace() @@ -167,9 +167,9 @@ async def edit_group( @router.delete("/{group_id}", response_model=GroupOut) async def delete_group( - group_id: str, - admin_mode: bool = False, - allow: bool = Depends(GroupAuthorization("owner")), + group_id: str, + admin_mode: bool = Depends(get_admin_mode), + allow: bool = Depends(GroupAuthorization("owner")), ): if (group := await GroupDB.get(PydanticObjectId(group_id))) is not None: await group.delete() @@ -180,11 +180,11 @@ async def delete_group( @router.post("/{group_id}/add/{username}", response_model=GroupOut) async def add_member( - group_id: str, - username: str, - admin_mode: bool = False, - role: Optional[str] = None, - allow: bool = 
Depends(GroupAuthorization("editor")), + group_id: str, + username: str, + admin_mode: bool = Depends(get_admin_mode), + role: Optional[str] = None, + allow: bool = Depends(GroupAuthorization("editor")), ): """Add a new user to a group.""" if (user := await UserDB.find_one(UserDB.email == username)) is not None: @@ -218,10 +218,10 @@ async def add_member( @router.post("/{group_id}/remove/{username}", response_model=GroupOut) async def remove_member( - group_id: str, - username: str, - admin_mode: bool = False, - allow: bool = Depends(GroupAuthorization("editor")), + group_id: str, + username: str, + admin_mode: bool = Depends(get_admin_mode), + allow: bool = Depends(GroupAuthorization("editor")), ): """Remove a user from a group.""" @@ -251,11 +251,11 @@ async def remove_member( @router.put("/{group_id}/update/{username}", response_model=GroupOut) async def update_member( - group_id: str, - username: str, - role: str, - admin_mode: bool = False, - allow: bool = Depends(GroupAuthorization("editor")), + group_id: str, + username: str, + role: str, + admin_mode: bool = Depends(get_admin_mode), + allow: bool = Depends(GroupAuthorization("editor")), ): """Update user role.""" if (user := await UserDB.find_one({"email": username})) is not None: diff --git a/backend/app/routers/metadata.py b/backend/app/routers/metadata.py index d5cfd4de9..a0d5c8958 100644 --- a/backend/app/routers/metadata.py +++ b/backend/app/routers/metadata.py @@ -12,7 +12,7 @@ from app import dependencies from app.deps.authorization_deps import MetadataAuthorization -from app.keycloak_auth import get_current_user +from app.keycloak_auth import get_current_user, get_admin_mode from app.models.metadata import ( MetadataDefinitionIn, MetadataDefinitionDB, @@ -29,8 +29,8 @@ @router.post("/definition", response_model=MetadataDefinitionOut) async def save_metadata_definition( - definition_in: MetadataDefinitionIn, - user=Depends(get_current_user), + definition_in: MetadataDefinitionIn, + 
user=Depends(get_current_user), ): existing = await MetadataDefinitionDB.find_one( MetadataDefinitionDB.name == definition_in.name @@ -48,10 +48,10 @@ async def save_metadata_definition( @router.get("/definition", response_model=List[MetadataDefinitionOut]) async def get_metadata_definition_list( - name: Optional[str] = None, - user=Depends(get_current_user), - skip: int = 0, - limit: int = 2, + name: Optional[str] = None, + user=Depends(get_current_user), + skip: int = 0, + limit: int = 2, ): if name is None: defs = await MetadataDefinitionDB.find( @@ -71,11 +71,11 @@ async def get_metadata_definition_list( "/definition/{metadata_definition_id}", response_model=MetadataDefinitionOut ) async def get_metadata_definition( - metadata_definition_id: str, - user=Depends(get_current_user), + metadata_definition_id: str, + user=Depends(get_current_user), ): if ( - mdd := await MetadataDefinitionDB.get(PydanticObjectId(metadata_definition_id)) + mdd := await MetadataDefinitionDB.get(PydanticObjectId(metadata_definition_id)) ) is not None: return mdd.dict() raise HTTPException( @@ -88,8 +88,8 @@ async def get_metadata_definition( "/definition/{metadata_definition_id}", response_model=MetadataDefinitionOut ) async def delete_metadata_definition( - metadata_definition_id: str, - user=Depends(get_current_user), + metadata_definition_id: str, + user=Depends(get_current_user), ): """Delete metadata definition by specific ID.""" mdd = await MetadataDefinitionDB.find_one( @@ -105,7 +105,7 @@ async def delete_metadata_definition( raise HTTPException( status_code=400, detail=f"Metadata definition: {mdd.name} ({metadata_definition_id}) in use. " - f"You cannot delete it until all metadata records using it are deleted.", + f"You cannot delete it until all metadata records using it are deleted.", ) # TODO: Refactor this with permissions checks etc. 
@@ -122,10 +122,10 @@ async def delete_metadata_definition( "/definition/search/{search_term}", response_model=List[MetadataDefinitionOut] ) async def search_metadata_definition( - search_term: str, - skip: int = 0, - limit: int = 10, - user=Depends(get_current_user), + search_term: str, + skip: int = 0, + limit: int = 10, + user=Depends(get_current_user), ): """Search all metadata definition in the db based on text. @@ -151,12 +151,12 @@ async def search_metadata_definition( @router.patch("/{metadata_id}", response_model=MetadataOut) async def update_metadata( - metadata_in: MetadataPatch, - metadata_id: str, - admin_mode: bool = False, - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), - user=Depends(get_current_user), - allow: bool = Depends(MetadataAuthorization("editor")), + metadata_in: MetadataPatch, + metadata_id: str, + admin_mode: bool = Depends(get_admin_mode), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + user=Depends(get_current_user), + allow: bool = Depends(MetadataAuthorization("editor")), ): """Update metadata. Any fields provided in the contents JSON will be added or updated in the metadata. If context or agent should be changed, use PUT. 
@@ -174,10 +174,10 @@ async def update_metadata( @router.delete("/{metadata_id}") async def delete_metadata( - metadata_id: str, - admin_mode: bool = False, - user=Depends(get_current_user), - allow: bool = Depends(MetadataAuthorization("editor")), + metadata_id: str, + admin_mode: bool = Depends(get_admin_mode), + user=Depends(get_current_user), + allow: bool = Depends(MetadataAuthorization("editor")), ): """Delete metadata by specific ID.""" md = await MetadataDB.find_one(MetadataDB.id == PyObjectId(metadata_id)) diff --git a/backend/app/routers/metadata_datasets.py b/backend/app/routers/metadata_datasets.py index 217f9c468..8987c7bc4 100644 --- a/backend/app/routers/metadata_datasets.py +++ b/backend/app/routers/metadata_datasets.py @@ -10,9 +10,9 @@ from app import dependencies from app.config import settings from app.deps.authorization_deps import Authorization -from app.keycloak_auth import get_current_user, UserOut +from app.keycloak_auth import get_current_user, UserOut, get_admin_mode from app.models.datasets import DatasetOut, DatasetDB -from app.models.listeners import LegacyEventListenerIn, EventListenerDB +from app.models.listeners import EventListenerDB from app.models.metadata import ( MongoDBRef, MetadataAgent, @@ -34,10 +34,10 @@ async def _build_metadata_db_obj( - metadata_in: MetadataIn, - dataset: DatasetOut, - user: UserOut, - agent: MetadataAgent = None, + metadata_in: MetadataIn, + dataset: DatasetOut, + user: UserOut, + agent: MetadataAgent = None, ): """Convenience function for converting MetadataIn to MetadataDB object.""" content = await validate_context( @@ -69,12 +69,12 @@ async def _build_metadata_db_obj( @router.post("/{dataset_id}/metadata", response_model=MetadataOut) async def add_dataset_metadata( - metadata_in: MetadataIn, - dataset_id: str, - user=Depends(get_current_user), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), - admin_mode: bool = False, - allow: bool = Depends(Authorization("uploader")), + 
metadata_in: MetadataIn, + dataset_id: str, + user=Depends(get_current_user), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + admin_mode: bool = Depends(get_admin_mode), + allow: bool = Depends(Authorization("uploader")), ): """Attach new metadata to a dataset. The body must include a contents field with the JSON metadata, and either a context JSON-LD object, context_url, or definition (name of a metadata definition) to be valid. @@ -119,12 +119,12 @@ async def add_dataset_metadata( @router.put("/{dataset_id}/metadata", response_model=MetadataOut) async def replace_dataset_metadata( - metadata_in: MetadataIn, - dataset_id: str, - user=Depends(get_current_user), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), - admin_mode: bool = False, - allow: bool = Depends(Authorization("editor")), + metadata_in: MetadataIn, + dataset_id: str, + user=Depends(get_current_user), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + admin_mode: bool = Depends(get_admin_mode), + allow: bool = Depends(Authorization("editor")), ): """Update metadata. Any fields provided in the contents JSON will be added or updated in the metadata. If context or agent should be changed, use PUT. @@ -174,12 +174,12 @@ async def replace_dataset_metadata( @router.patch("/{dataset_id}/metadata", response_model=MetadataOut) async def update_dataset_metadata( - metadata_in: MetadataPatch, - dataset_id: str, - user=Depends(get_current_user), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), - admin_mode: bool = False, - allow: bool = Depends(Authorization("editor")), + metadata_in: MetadataPatch, + dataset_id: str, + user=Depends(get_current_user), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + admin_mode: bool = Depends(get_admin_mode), + allow: bool = Depends(Authorization("editor")), ): """Update metadata. Any fields provided in the contents JSON will be added or updated in the metadata. 
If context or agent should be changed, use PUT. @@ -194,9 +194,9 @@ async def update_dataset_metadata( if metadata_in.metadata_id is not None: # If a specific metadata_id is provided, validate the patch against existing context if ( - existing := await MetadataDB.get( - PydanticObjectId(metadata_in.metadata_id) - ) + existing := await MetadataDB.get( + PydanticObjectId(metadata_in.metadata_id) + ) ) is not None: content = await validate_context( metadata_in.content, @@ -244,12 +244,12 @@ async def update_dataset_metadata( @router.get("/{dataset_id}/metadata", response_model=List[MetadataOut]) async def get_dataset_metadata( - dataset_id: str, - listener_name: Optional[str] = Form(None), - listener_version: Optional[float] = Form(None), - user=Depends(get_current_user), - admin_mode: bool = False, - allow: bool = Depends(Authorization("viewer")), + dataset_id: str, + listener_name: Optional[str] = Form(None), + listener_version: Optional[float] = Form(None), + user=Depends(get_current_user), + admin_mode: bool = Depends(get_admin_mode), + allow: bool = Depends(Authorization("viewer")), ): if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: query = [MetadataDB.resource.resource_id == ObjectId(dataset_id)] @@ -263,9 +263,9 @@ async def get_dataset_metadata( async for md in MetadataDB.find(*query): if md.definition is not None: if ( - md_def := await MetadataDefinitionDB.find_one( - MetadataDefinitionDB.name == md.definition - ) + md_def := await MetadataDefinitionDB.find_one( + MetadataDefinitionDB.name == md.definition + ) ) is not None: md.description = md_def.description metadata.append(md) @@ -276,12 +276,12 @@ async def get_dataset_metadata( @router.delete("/{dataset_id}/metadata", response_model=MetadataOut) async def delete_dataset_metadata( - metadata_in: MetadataDelete, - dataset_id: str, - user=Depends(get_current_user), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), - admin_mode: bool = False, - allow: bool = 
Depends(Authorization("editor")), + metadata_in: MetadataDelete, + dataset_id: str, + user=Depends(get_current_user), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + admin_mode: bool = Depends(get_admin_mode), + allow: bool = Depends(Authorization("editor")), ): if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: # filter by metadata_id or definition @@ -289,9 +289,9 @@ async def delete_dataset_metadata( if metadata_in.metadata_id is not None: # If a specific metadata_id is provided, delete the matching entry if ( - existing_md := await MetadataDB.find_one( - MetadataDB.metadata_id == ObjectId(metadata_in.metadata_id) - ) + existing_md := await MetadataDB.find_one( + MetadataDB.metadata_id == ObjectId(metadata_in.metadata_id) + ) ) is not None: query.append(MetadataDB.metadata_id == metadata_in.metadata_id) else: diff --git a/backend/app/routers/metadata_files.py b/backend/app/routers/metadata_files.py index f3802b29e..21cd1af4d 100644 --- a/backend/app/routers/metadata_files.py +++ b/backend/app/routers/metadata_files.py @@ -13,7 +13,7 @@ from app import dependencies from app.config import settings from app.deps.authorization_deps import FileAuthorization -from app.keycloak_auth import get_current_user, UserOut +from app.keycloak_auth import get_current_user, UserOut, get_admin_mode from app.models.files import FileOut, FileDB, FileVersionDB from app.models.listeners import EventListenerDB from app.models.metadata import ( @@ -36,11 +36,11 @@ async def _build_metadata_db_obj( - metadata_in: MetadataIn, - file: FileOut, - user: UserOut, - agent: MetadataAgent = None, - version: int = None, + metadata_in: MetadataIn, + file: FileOut, + user: UserOut, + agent: MetadataAgent = None, + version: int = None, ): """Convenience function for building a MetadataDB object from incoming metadata plus a file. 
Agent and file version will be determined based on inputs if they are not provided directly.""" @@ -56,10 +56,10 @@ async def _build_metadata_db_obj( file_version = metadata_in.file_version if file_version is not None and file_version > 0: if ( - await FileVersionDB.find_one( - FileVersionDB.file_id == file.id, - FileVersionDB.version_num == file_version, - ) + await FileVersionDB.find_one( + FileVersionDB.file_id == file.id, + FileVersionDB.version_num == file_version, + ) ) is None: raise HTTPException( status_code=404, @@ -103,12 +103,12 @@ async def _build_metadata_db_obj( @router.post("/{file_id}/metadata", response_model=MetadataOut) async def add_file_metadata( - metadata_in: MetadataIn, - file_id: str, - admin_mode: bool = False, - user=Depends(get_current_user), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), - allow: bool = Depends(FileAuthorization("uploader")), + metadata_in: MetadataIn, + file_id: str, + admin_mode: bool = Depends(get_admin_mode), + user=Depends(get_current_user), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + allow: bool = Depends(FileAuthorization("uploader")), ): """Attach new metadata to a file. The body must include a contents field with the JSON metadata, and either a context JSON-LD object, context_url, or definition (name of a metadata definition) to be valid. 
@@ -156,12 +156,12 @@ async def add_file_metadata( @router.put("/{file_id}/metadata", response_model=MetadataOut) async def replace_file_metadata( - metadata_in: MetadataPatch, - file_id: str, - admin_mode: bool = False, - user=Depends(get_current_user), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), - allow: bool = Depends(FileAuthorization("editor")), + metadata_in: MetadataPatch, + file_id: str, + admin_mode: bool = Depends(get_admin_mode), + user=Depends(get_current_user), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + allow: bool = Depends(FileAuthorization("editor")), ): """Replace metadata, including agent and context. If only metadata contents should be updated, use PATCH instead. @@ -175,10 +175,10 @@ async def replace_file_metadata( version = metadata_in.file_version if version is not None: if ( - await FileVersionDB.find_one( - FileVersionDB.file_id == ObjectId(file_id), - FileVersionDB.version_num == version, - ) + await FileVersionDB.find_one( + FileVersionDB.file_id == ObjectId(file_id), + FileVersionDB.version_num == version, + ) ) is None: raise HTTPException( status_code=404, @@ -230,12 +230,12 @@ async def replace_file_metadata( @router.patch("/{file_id}/metadata", response_model=MetadataOut) async def update_file_metadata( - metadata_in: MetadataPatch, - file_id: str, - admin_mode: bool = False, - user=Depends(get_current_user), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), - allow: bool = Depends(FileAuthorization("editor")), + metadata_in: MetadataPatch, + file_id: str, + admin_mode: bool = Depends(get_admin_mode), + user=Depends(get_current_user), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + allow: bool = Depends(FileAuthorization("editor")), ): """Update metadata. Any fields provided in the contents JSON will be added or updated in the metadata. If context or agent should be changed, use PUT. 
@@ -246,10 +246,10 @@ async def update_file_metadata( # check if metadata with file version exists, replace metadata if none exists if ( - await MetadataDB.find_one( - MetadataDB.resource.resource_id == ObjectId(file_id), - MetadataDB.resource.version == metadata_in.file_version, - ) + await MetadataDB.find_one( + MetadataDB.resource.resource_id == ObjectId(file_id), + MetadataDB.resource.version == metadata_in.file_version, + ) ) is None: result = await replace_file_metadata(metadata_in, file_id, user, es) return result @@ -261,9 +261,9 @@ async def update_file_metadata( if metadata_in.metadata_id is not None: # If a specific metadata_id is provided, validate the patch against existing context if ( - existing_md := await MetadataDB.find_one( - MetadataDB.id == ObjectId(metadata_in.metadata_id) - ) + existing_md := await MetadataDB.find_one( + MetadataDB.id == ObjectId(metadata_in.metadata_id) + ) ) is not None: content = await validate_context( metadata_in.content, @@ -281,10 +281,10 @@ async def update_file_metadata( if metadata_in.file_version is not None: if ( - await FileVersionDB.find_one( - FileVersionDB.file_id == ObjectId(file_id), - FileVersionDB.version_num == metadata_in.file_version, - ) + await FileVersionDB.find_one( + FileVersionDB.file_id == ObjectId(file_id), + FileVersionDB.version_num == metadata_in.file_version, + ) ) is None: raise HTTPException( status_code=404, @@ -328,15 +328,15 @@ async def update_file_metadata( @router.get("/{file_id}/metadata", response_model=List[MetadataOut]) async def get_file_metadata( - file_id: str, - admin_mode: bool = False, - version: Optional[int] = None, - all_versions: Optional[bool] = False, - definition: Optional[str] = Form(None), - listener_name: Optional[str] = Form(None), - listener_version: Optional[float] = Form(None), - user=Depends(get_current_user), - allow: bool = Depends(FileAuthorization("viewer")), + file_id: str, + admin_mode: bool = Depends(get_admin_mode), + version: Optional[int] = None, + 
all_versions: Optional[bool] = False, + definition: Optional[str] = Form(None), + listener_name: Optional[str] = Form(None), + listener_version: Optional[float] = Form(None), + user=Depends(get_current_user), + allow: bool = Depends(FileAuthorization("viewer")), ): """Get file metadata.""" if (file := await FileDB.get(PydanticObjectId(file_id))) is not None: @@ -346,10 +346,10 @@ async def get_file_metadata( if not all_versions: if version is not None and version > 0: if ( - await FileVersionDB.find_one( - FileVersionDB.file_id == ObjectId(file_id), - FileVersionDB.version_num == version, - ) + await FileVersionDB.find_one( + FileVersionDB.file_id == ObjectId(file_id), + FileVersionDB.version_num == version, + ) ) is None: raise HTTPException( status_code=404, @@ -373,9 +373,9 @@ async def get_file_metadata( async for md in MetadataDB.find(*query): if md.definition is not None: if ( - md_def := await MetadataDefinitionDB.find_one( - MetadataDefinitionDB.name == md.definition - ) + md_def := await MetadataDefinitionDB.find_one( + MetadataDefinitionDB.name == md.definition + ) ) is not None: md_def = MetadataDefinitionOut(**md_def.dict()) md.description = md_def.description @@ -387,13 +387,13 @@ async def get_file_metadata( @router.delete("/{file_id}/metadata", response_model=MetadataOut) async def delete_file_metadata( - metadata_in: MetadataDelete, - file_id: str, - admin_mode: bool = False, - # version: Optional[int] = Form(None), - user=Depends(get_current_user), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), - allow: bool = Depends(FileAuthorization("editor")), + metadata_in: MetadataDelete, + file_id: str, + admin_mode: bool = Depends(get_admin_mode), + # version: Optional[int] = Form(None), + user=Depends(get_current_user), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + allow: bool = Depends(FileAuthorization("editor")), ): if (file := await FileDB.get(PydanticObjectId(file_id))) is not None: query = 
[MetadataDB.resource.resource_id == ObjectId(file_id)] @@ -419,9 +419,9 @@ async def delete_file_metadata( if metadata_in.metadata_id is not None: # If a specific metadata_id is provided, delete the matching entry if ( - await MetadataDB.find_one( - MetadataDB.metadata_id == ObjectId(metadata_in.metadata_id) - ) + await MetadataDB.find_one( + MetadataDB.metadata_id == ObjectId(metadata_in.metadata_id) + ) ) is not None: query.append(MetadataDB.metadata_id == metadata_in.metadata_id) else: From 8e87b1268e0111fa3642f0dd8391a6246fe40033 Mon Sep 17 00:00:00 2001 From: Chen Wang Date: Mon, 11 Dec 2023 14:38:05 -0600 Subject: [PATCH 19/43] codegen --- .../v2/services/AuthorizationService.ts | 54 --------------- .../openapi/v2/services/DatasetsService.ts | 66 ------------------- .../src/openapi/v2/services/FilesService.ts | 33 ---------- .../src/openapi/v2/services/GroupsService.ts | 18 ----- .../openapi/v2/services/MetadataService.ts | 46 ------------- 5 files changed, 217 deletions(-) diff --git a/frontend/src/openapi/v2/services/AuthorizationService.ts b/frontend/src/openapi/v2/services/AuthorizationService.ts index 8855c4f02..22e2fe267 100644 --- a/frontend/src/openapi/v2/services/AuthorizationService.ts +++ b/frontend/src/openapi/v2/services/AuthorizationService.ts @@ -16,21 +16,16 @@ export class AuthorizationService { * Save authorization info in Mongo. This is a triple of dataset_id/user_id/role/group_id. 
* @param datasetId * @param requestBody - * @param adminMode * @returns AuthorizationOut Successful Response * @throws ApiError */ public static saveAuthorizationApiV2AuthorizationsDatasetsDatasetIdPost( datasetId: string, requestBody: AuthorizationBase, - adminMode: boolean = false, ): CancelablePromise { return __request({ method: 'POST', path: `/api/v2/authorizations/datasets/${datasetId}`, - query: { - 'admin_mode': adminMode, - }, body: requestBody, mediaType: 'application/json', errors: { @@ -43,20 +38,15 @@ export class AuthorizationService { * Get Dataset Role * Retrieve role of user for a specific dataset. * @param datasetId - * @param adminMode * @returns AuthorizationOut Successful Response * @throws ApiError */ public static getDatasetRoleApiV2AuthorizationsDatasetsDatasetIdRoleGet( datasetId: string, - adminMode: boolean = false, ): CancelablePromise { return __request({ method: 'GET', path: `/api/v2/authorizations/datasets/${datasetId}/role`, - query: { - 'admin_mode': adminMode, - }, errors: { 422: `Validation Error`, }, @@ -68,20 +58,15 @@ export class AuthorizationService { * Used for testing only. Returns true if user has viewer permission on dataset, otherwise throws a 403 Forbidden HTTP exception. * See `routers/authorization.py` for more info. * @param datasetId - * @param adminMode * @returns any Successful Response * @throws ApiError */ public static getDatasetRoleViewerApiV2AuthorizationsDatasetsDatasetIdRoleViewerGet( datasetId: string, - adminMode: boolean = false, ): CancelablePromise { return __request({ method: 'GET', path: `/api/v2/authorizations/datasets/${datasetId}/role/viewer}`, - query: { - 'admin_mode': adminMode, - }, errors: { 422: `Validation Error`, }, @@ -93,20 +78,15 @@ export class AuthorizationService { * Used for testing only. Returns true if user has owner permission on dataset, otherwise throws a 403 Forbidden HTTP exception. * See `routers/authorization.py` for more info. 
* @param datasetId - * @param adminMode * @returns any Successful Response * @throws ApiError */ public static getDatasetRoleOwnerApiV2AuthorizationsDatasetsDatasetIdRoleOwnerGet( datasetId: string, - adminMode: boolean = false, ): CancelablePromise { return __request({ method: 'GET', path: `/api/v2/authorizations/datasets/${datasetId}/role/owner}`, - query: { - 'admin_mode': adminMode, - }, errors: { 422: `Validation Error`, }, @@ -116,21 +96,18 @@ export class AuthorizationService { /** * Get File Role * @param fileId - * @param adminMode * @param datasetId * @returns RoleType Successful Response * @throws ApiError */ public static getFileRoleApiV2AuthorizationsFilesFileIdRoleGet( fileId: string, - adminMode: boolean = false, datasetId?: string, ): CancelablePromise { return __request({ method: 'GET', path: `/api/v2/authorizations/files/${fileId}/role}`, query: { - 'admin_mode': adminMode, 'dataset_id': datasetId, }, errors: { @@ -142,21 +119,18 @@ export class AuthorizationService { /** * Get Metadata Role * @param metadataId - * @param adminMode * @param datasetId * @returns AuthorizationMetadata Successful Response * @throws ApiError */ public static getMetadataRoleApiV2AuthorizationsMetadataMetadataIdRoleGet( metadataId: string, - adminMode: boolean = false, datasetId?: string, ): CancelablePromise { return __request({ method: 'GET', path: `/api/v2/authorizations/metadata/${metadataId}/role}`, query: { - 'admin_mode': adminMode, 'dataset_id': datasetId, }, errors: { @@ -168,21 +142,18 @@ export class AuthorizationService { /** * Get Group Role * @param groupId - * @param adminMode * @param datasetId * @returns RoleType Successful Response * @throws ApiError */ public static getGroupRoleApiV2AuthorizationsGroupsGroupIdRoleGet( groupId: string, - adminMode: boolean = false, datasetId?: string, ): CancelablePromise { return __request({ method: 'GET', path: `/api/v2/authorizations/groups/${groupId}/role}`, query: { - 'admin_mode': adminMode, 'dataset_id': 
datasetId, }, errors: { @@ -197,7 +168,6 @@ export class AuthorizationService { * @param datasetId * @param groupId * @param role - * @param adminMode * @returns AuthorizationOut Successful Response * @throws ApiError */ @@ -205,14 +175,10 @@ export class AuthorizationService { datasetId: string, groupId: string, role: RoleType, - adminMode: boolean = false, ): CancelablePromise { return __request({ method: 'POST', path: `/api/v2/authorizations/datasets/${datasetId}/group_role/${groupId}/${role}`, - query: { - 'admin_mode': adminMode, - }, errors: { 422: `Validation Error`, }, @@ -225,7 +191,6 @@ export class AuthorizationService { * @param datasetId * @param username * @param role - * @param adminMode * @returns AuthorizationOut Successful Response * @throws ApiError */ @@ -233,14 +198,10 @@ export class AuthorizationService { datasetId: string, username: string, role: RoleType, - adminMode: boolean = false, ): CancelablePromise { return __request({ method: 'POST', path: `/api/v2/authorizations/datasets/${datasetId}/user_role/${username}/${role}`, - query: { - 'admin_mode': adminMode, - }, errors: { 422: `Validation Error`, }, @@ -252,21 +213,16 @@ export class AuthorizationService { * Remove any role the group has with a specific dataset. * @param datasetId * @param groupId - * @param adminMode * @returns AuthorizationOut Successful Response * @throws ApiError */ public static removeDatasetGroupRoleApiV2AuthorizationsDatasetsDatasetIdGroupRoleGroupIdDelete( datasetId: string, groupId: string, - adminMode: boolean = false, ): CancelablePromise { return __request({ method: 'DELETE', path: `/api/v2/authorizations/datasets/${datasetId}/group_role/${groupId}`, - query: { - 'admin_mode': adminMode, - }, errors: { 422: `Validation Error`, }, @@ -278,21 +234,16 @@ export class AuthorizationService { * Remove any role the user has with a specific dataset. 
* @param datasetId * @param username - * @param adminMode * @returns AuthorizationOut Successful Response * @throws ApiError */ public static removeDatasetUserRoleApiV2AuthorizationsDatasetsDatasetIdUserRoleUsernameDelete( datasetId: string, username: string, - adminMode: boolean = false, ): CancelablePromise { return __request({ method: 'DELETE', path: `/api/v2/authorizations/datasets/${datasetId}/user_role/${username}`, - query: { - 'admin_mode': adminMode, - }, errors: { 422: `Validation Error`, }, @@ -303,20 +254,15 @@ export class AuthorizationService { * Get Dataset Roles * Get a list of all users and groups that have assigned roles on this dataset. * @param datasetId - * @param adminMode * @returns DatasetRoles Successful Response * @throws ApiError */ public static getDatasetRolesApiV2AuthorizationsDatasetsDatasetIdRolesGet( datasetId: string, - adminMode: boolean = false, ): CancelablePromise { return __request({ method: 'GET', path: `/api/v2/authorizations/datasets/${datasetId}/roles}`, - query: { - 'admin_mode': adminMode, - }, errors: { 422: `Validation Error`, }, diff --git a/frontend/src/openapi/v2/services/DatasetsService.ts b/frontend/src/openapi/v2/services/DatasetsService.ts index 6dad25bee..e4c725d43 100644 --- a/frontend/src/openapi/v2/services/DatasetsService.ts +++ b/frontend/src/openapi/v2/services/DatasetsService.ts @@ -19,7 +19,6 @@ export class DatasetsService { /** * Get Datasets - * @param adminMode * @param skip * @param limit * @param mine @@ -28,7 +27,6 @@ export class DatasetsService { * @throws ApiError */ public static getDatasetsApiV2DatasetsGet( - adminMode: boolean = false, skip?: number, limit: number = 10, mine: boolean = false, @@ -38,7 +36,6 @@ export class DatasetsService { method: 'GET', path: `/api/v2/datasets`, query: { - 'admin_mode': adminMode, 'skip': skip, 'limit': limit, 'mine': mine, @@ -73,20 +70,15 @@ export class DatasetsService { /** * Get Dataset * @param datasetId - * @param adminMode * @returns DatasetOut 
Successful Response * @throws ApiError */ public static getDatasetApiV2DatasetsDatasetIdGet( datasetId: string, - adminMode: boolean = false, ): CancelablePromise { return __request({ method: 'GET', path: `/api/v2/datasets/${datasetId}`, - query: { - 'admin_mode': adminMode, - }, errors: { 422: `Validation Error`, }, @@ -97,21 +89,16 @@ export class DatasetsService { * Edit Dataset * @param datasetId * @param requestBody - * @param adminMode * @returns DatasetOut Successful Response * @throws ApiError */ public static editDatasetApiV2DatasetsDatasetIdPut( datasetId: string, requestBody: DatasetBase, - adminMode: boolean = false, ): CancelablePromise { return __request({ method: 'PUT', path: `/api/v2/datasets/${datasetId}`, - query: { - 'admin_mode': adminMode, - }, body: requestBody, mediaType: 'application/json', errors: { @@ -123,20 +110,15 @@ export class DatasetsService { /** * Delete Dataset * @param datasetId - * @param adminMode * @returns any Successful Response * @throws ApiError */ public static deleteDatasetApiV2DatasetsDatasetIdDelete( datasetId: string, - adminMode: boolean = false, ): CancelablePromise { return __request({ method: 'DELETE', path: `/api/v2/datasets/${datasetId}`, - query: { - 'admin_mode': adminMode, - }, errors: { 422: `Validation Error`, }, @@ -147,21 +129,16 @@ export class DatasetsService { * Patch Dataset * @param datasetId * @param requestBody - * @param adminMode * @returns DatasetOut Successful Response * @throws ApiError */ public static patchDatasetApiV2DatasetsDatasetIdPatch( datasetId: string, requestBody: DatasetPatch, - adminMode: boolean = false, ): CancelablePromise { return __request({ method: 'PATCH', path: `/api/v2/datasets/${datasetId}`, - query: { - 'admin_mode': adminMode, - }, body: requestBody, mediaType: 'application/json', errors: { @@ -173,7 +150,6 @@ export class DatasetsService { /** * Get Dataset Files * @param datasetId - * @param adminMode * @param folderId * @param skip * @param limit @@ -182,7 +158,6 
@@ export class DatasetsService { */ public static getDatasetFilesApiV2DatasetsDatasetIdFilesGet( datasetId: string, - adminMode: boolean = false, folderId?: string, skip?: number, limit: number = 10, @@ -191,7 +166,6 @@ export class DatasetsService { method: 'GET', path: `/api/v2/datasets/${datasetId}/files`, query: { - 'admin_mode': adminMode, 'folder_id': folderId, 'skip': skip, 'limit': limit, @@ -207,7 +181,6 @@ export class DatasetsService { * @param datasetId * @param formData * @param folderId - * @param adminMode * @returns FileOut Successful Response * @throws ApiError */ @@ -215,14 +188,12 @@ export class DatasetsService { datasetId: string, formData: Body_save_file_api_v2_datasets__dataset_id__files_post, folderId?: string, - adminMode: boolean = false, ): CancelablePromise { return __request({ method: 'POST', path: `/api/v2/datasets/${datasetId}/files`, query: { 'folder_id': folderId, - 'admin_mode': adminMode, }, formData: formData, mediaType: 'multipart/form-data', @@ -235,7 +206,6 @@ export class DatasetsService { /** * Get Dataset Folders * @param datasetId - * @param adminMode * @param parentFolder * @param skip * @param limit @@ -244,7 +214,6 @@ export class DatasetsService { */ public static getDatasetFoldersApiV2DatasetsDatasetIdFoldersGet( datasetId: string, - adminMode: boolean = false, parentFolder?: string, skip?: number, limit: number = 10, @@ -253,7 +222,6 @@ export class DatasetsService { method: 'GET', path: `/api/v2/datasets/${datasetId}/folders`, query: { - 'admin_mode': adminMode, 'parent_folder': parentFolder, 'skip': skip, 'limit': limit, @@ -268,21 +236,16 @@ export class DatasetsService { * Add Folder * @param datasetId * @param requestBody - * @param adminMode * @returns FolderOut Successful Response * @throws ApiError */ public static addFolderApiV2DatasetsDatasetIdFoldersPost( datasetId: string, requestBody: FolderIn, - adminMode: boolean = false, ): CancelablePromise { return __request({ method: 'POST', path: 
`/api/v2/datasets/${datasetId}/folders`, - query: { - 'admin_mode': adminMode, - }, body: requestBody, mediaType: 'application/json', errors: { @@ -295,21 +258,16 @@ export class DatasetsService { * Delete Folder * @param datasetId * @param folderId - * @param adminMode * @returns any Successful Response * @throws ApiError */ public static deleteFolderApiV2DatasetsDatasetIdFoldersFolderIdDelete( datasetId: string, folderId: string, - adminMode: boolean = false, ): CancelablePromise { return __request({ method: 'DELETE', path: `/api/v2/datasets/${datasetId}/folders/${folderId}`, - query: { - 'admin_mode': adminMode, - }, errors: { 422: `Validation Error`, }, @@ -320,7 +278,6 @@ export class DatasetsService { * Save Files * @param datasetId * @param formData - * @param adminMode * @param folderId * @returns FileOut Successful Response * @throws ApiError @@ -328,14 +285,12 @@ export class DatasetsService { public static saveFilesApiV2DatasetsDatasetIdFilesMultiplePost( datasetId: string, formData: Body_save_files_api_v2_datasets__dataset_id__filesMultiple_post, - adminMode: boolean = false, folderId?: string, ): CancelablePromise> { return __request({ method: 'POST', path: `/api/v2/datasets/${datasetId}/filesMultiple`, query: { - 'admin_mode': adminMode, 'folder_id': folderId, }, formData: formData, @@ -351,7 +306,6 @@ export class DatasetsService { * @param datasetId * @param requestBody * @param folderId - * @param adminMode * @returns FileOut Successful Response * @throws ApiError */ @@ -359,14 +313,12 @@ export class DatasetsService { datasetId: string, requestBody: LocalFileIn, folderId?: string, - adminMode: boolean = false, ): CancelablePromise { return __request({ method: 'POST', path: `/api/v2/datasets/${datasetId}/local_files`, query: { 'folder_id': folderId, - 'admin_mode': adminMode, }, body: requestBody, mediaType: 'application/json', @@ -399,20 +351,15 @@ export class DatasetsService { /** * Download Dataset * @param datasetId - * @param adminMode * 
@returns DatasetOut Successful Response * @throws ApiError */ public static downloadDatasetApiV2DatasetsDatasetIdDownloadGet( datasetId: string, - adminMode: boolean = false, ): CancelablePromise { return __request({ method: 'GET', path: `/api/v2/datasets/${datasetId}/download`, - query: { - 'admin_mode': adminMode, - }, errors: { 422: `Validation Error`, }, @@ -423,7 +370,6 @@ export class DatasetsService { * Get Dataset Extract * @param datasetId * @param extractorName - * @param adminMode * @param requestBody * @returns any Successful Response * @throws ApiError @@ -431,7 +377,6 @@ export class DatasetsService { public static getDatasetExtractApiV2DatasetsDatasetIdExtractPost( datasetId: string, extractorName: string, - adminMode: boolean = false, requestBody?: any, ): CancelablePromise { return __request({ @@ -439,7 +384,6 @@ export class DatasetsService { path: `/api/v2/datasets/${datasetId}/extract`, query: { 'extractorName': extractorName, - 'admin_mode': adminMode, }, body: requestBody, mediaType: 'application/json', @@ -452,20 +396,15 @@ export class DatasetsService { /** * Download Dataset Thumbnail * @param datasetId - * @param adminMode * @returns any Successful Response * @throws ApiError */ public static downloadDatasetThumbnailApiV2DatasetsDatasetIdThumbnailGet( datasetId: string, - adminMode: boolean = false, ): CancelablePromise { return __request({ method: 'GET', path: `/api/v2/datasets/${datasetId}/thumbnail`, - query: { - 'admin_mode': adminMode, - }, errors: { 422: `Validation Error`, }, @@ -476,21 +415,16 @@ export class DatasetsService { * Add Dataset Thumbnail * @param datasetId * @param thumbnailId - * @param adminMode * @returns DatasetOut Successful Response * @throws ApiError */ public static addDatasetThumbnailApiV2DatasetsDatasetIdThumbnailThumbnailIdPatch( datasetId: string, thumbnailId: string, - adminMode: boolean = false, ): CancelablePromise { return __request({ method: 'PATCH', path: 
`/api/v2/datasets/${datasetId}/thumbnail/${thumbnailId}`, - query: { - 'admin_mode': adminMode, - }, errors: { 422: `Validation Error`, }, diff --git a/frontend/src/openapi/v2/services/FilesService.ts b/frontend/src/openapi/v2/services/FilesService.ts index 2566d480e..6a48df5f9 100644 --- a/frontend/src/openapi/v2/services/FilesService.ts +++ b/frontend/src/openapi/v2/services/FilesService.ts @@ -12,7 +12,6 @@ export class FilesService { /** * Download File * @param fileId - * @param adminMode * @param version * @param increment * @param datasetId @@ -21,7 +20,6 @@ export class FilesService { */ public static downloadFileApiV2FilesFileIdGet( fileId: string, - adminMode: boolean = false, version?: number, increment: boolean = true, datasetId?: string, @@ -30,7 +28,6 @@ export class FilesService { method: 'GET', path: `/api/v2/files/${fileId}`, query: { - 'admin_mode': adminMode, 'version': version, 'increment': increment, 'dataset_id': datasetId, @@ -45,7 +42,6 @@ export class FilesService { * Update File * @param fileId * @param formData - * @param adminMode * @param datasetId * @returns FileOut Successful Response * @throws ApiError @@ -53,14 +49,12 @@ export class FilesService { public static updateFileApiV2FilesFileIdPut( fileId: string, formData: Body_update_file_api_v2_files__file_id__put, - adminMode: boolean = false, datasetId?: string, ): CancelablePromise { return __request({ method: 'PUT', path: `/api/v2/files/${fileId}`, query: { - 'admin_mode': adminMode, 'dataset_id': datasetId, }, formData: formData, @@ -74,21 +68,18 @@ export class FilesService { /** * Delete File * @param fileId - * @param adminMode * @param datasetId * @returns any Successful Response * @throws ApiError */ public static deleteFileApiV2FilesFileIdDelete( fileId: string, - adminMode: boolean = false, datasetId?: string, ): CancelablePromise { return __request({ method: 'DELETE', path: `/api/v2/files/${fileId}`, query: { - 'admin_mode': adminMode, 'dataset_id': datasetId, }, errors: { 
@@ -100,7 +91,6 @@ export class FilesService { /** * Download File Url * @param fileId - * @param adminMode * @param version * @param expiresInSeconds * @param datasetId @@ -109,7 +99,6 @@ export class FilesService { */ public static downloadFileUrlApiV2FilesFileIdUrlGet( fileId: string, - adminMode: boolean = false, version?: number, expiresInSeconds: number = 3600, datasetId?: string, @@ -118,7 +107,6 @@ export class FilesService { method: 'GET', path: `/api/v2/files/${fileId}/url/`, query: { - 'admin_mode': adminMode, 'version': version, 'expires_in_seconds': expiresInSeconds, 'dataset_id': datasetId, @@ -132,21 +120,18 @@ export class FilesService { /** * Get File Summary * @param fileId - * @param adminMode * @param datasetId * @returns FileOut Successful Response * @throws ApiError */ public static getFileSummaryApiV2FilesFileIdSummaryGet( fileId: string, - adminMode: boolean = false, datasetId?: string, ): CancelablePromise { return __request({ method: 'GET', path: `/api/v2/files/${fileId}/summary`, query: { - 'admin_mode': adminMode, 'dataset_id': datasetId, }, errors: { @@ -158,7 +143,6 @@ export class FilesService { /** * Get File Version Details * @param fileId - * @param adminMode * @param versionNum * @param datasetId * @returns FileOut Successful Response @@ -166,7 +150,6 @@ export class FilesService { */ public static getFileVersionDetailsApiV2FilesFileIdVersionDetailsGet( fileId: string, - adminMode: boolean = false, versionNum?: number, datasetId?: string, ): CancelablePromise { @@ -174,7 +157,6 @@ export class FilesService { method: 'GET', path: `/api/v2/files/${fileId}/version_details`, query: { - 'admin_mode': adminMode, 'version_num': versionNum, 'dataset_id': datasetId, }, @@ -187,7 +169,6 @@ export class FilesService { /** * Get File Versions * @param fileId - * @param adminMode * @param skip * @param limit * @param datasetId @@ -196,7 +177,6 @@ export class FilesService { */ public static getFileVersionsApiV2FilesFileIdVersionsGet( fileId: 
string, - adminMode: boolean = false, skip?: number, limit: number = 20, datasetId?: string, @@ -205,7 +185,6 @@ export class FilesService { method: 'GET', path: `/api/v2/files/${fileId}/versions`, query: { - 'admin_mode': adminMode, 'skip': skip, 'limit': limit, 'dataset_id': datasetId, @@ -220,7 +199,6 @@ export class FilesService { * Post File Extract * @param fileId * @param extractorName - * @param adminMode * @param datasetId * @param requestBody * @returns any Successful Response @@ -229,7 +207,6 @@ export class FilesService { public static postFileExtractApiV2FilesFileIdExtractPost( fileId: string, extractorName: string, - adminMode: boolean = false, datasetId?: string, requestBody?: any, ): CancelablePromise { @@ -238,7 +215,6 @@ export class FilesService { path: `/api/v2/files/${fileId}/extract`, query: { 'extractorName': extractorName, - 'admin_mode': adminMode, 'dataset_id': datasetId, }, body: requestBody, @@ -260,21 +236,18 @@ export class FilesService { * credentials: credentials of logged in user * rabbitmq_client: Rabbitmq Client * @param fileId - * @param adminMode * @param datasetId * @returns any Successful Response * @throws ApiError */ public static resubmitFileExtractionsApiV2FilesFileIdResubmitExtractPost( fileId: string, - adminMode: boolean = false, datasetId?: string, ): CancelablePromise { return __request({ method: 'POST', path: `/api/v2/files/${fileId}/resubmit_extract`, query: { - 'admin_mode': adminMode, 'dataset_id': datasetId, }, errors: { @@ -286,21 +259,18 @@ export class FilesService { /** * Download File Thumbnail * @param fileId - * @param adminMode * @param datasetId * @returns any Successful Response * @throws ApiError */ public static downloadFileThumbnailApiV2FilesFileIdThumbnailGet( fileId: string, - adminMode: boolean = false, datasetId?: string, ): CancelablePromise { return __request({ method: 'GET', path: `/api/v2/files/${fileId}/thumbnail`, query: { - 'admin_mode': adminMode, 'dataset_id': datasetId, }, errors: { @@ 
-313,7 +283,6 @@ export class FilesService { * Add File Thumbnail * @param fileId * @param thumbnailId - * @param adminMode * @param datasetId * @returns FileOut Successful Response * @throws ApiError @@ -321,14 +290,12 @@ export class FilesService { public static addFileThumbnailApiV2FilesFileIdThumbnailThumbnailIdPatch( fileId: string, thumbnailId: string, - adminMode: boolean = false, datasetId?: string, ): CancelablePromise { return __request({ method: 'PATCH', path: `/api/v2/files/${fileId}/thumbnail/${thumbnailId}`, query: { - 'admin_mode': adminMode, 'dataset_id': datasetId, }, errors: { diff --git a/frontend/src/openapi/v2/services/GroupsService.ts b/frontend/src/openapi/v2/services/GroupsService.ts index 9d5e7d46b..6c20074ce 100644 --- a/frontend/src/openapi/v2/services/GroupsService.ts +++ b/frontend/src/openapi/v2/services/GroupsService.ts @@ -93,21 +93,18 @@ export class GroupsService { /** * Get Group * @param groupId - * @param adminMode * @param datasetId * @returns GroupOut Successful Response * @throws ApiError */ public static getGroupApiV2GroupsGroupIdGet( groupId: string, - adminMode: boolean = false, datasetId?: string, ): CancelablePromise { return __request({ method: 'GET', path: `/api/v2/groups/${groupId}`, query: { - 'admin_mode': adminMode, 'dataset_id': datasetId, }, errors: { @@ -120,7 +117,6 @@ export class GroupsService { * Edit Group * @param groupId * @param requestBody - * @param adminMode * @param datasetId * @returns GroupOut Successful Response * @throws ApiError @@ -128,14 +124,12 @@ export class GroupsService { public static editGroupApiV2GroupsGroupIdPut( groupId: string, requestBody: GroupBase, - adminMode: boolean = false, datasetId?: string, ): CancelablePromise { return __request({ method: 'PUT', path: `/api/v2/groups/${groupId}`, query: { - 'admin_mode': adminMode, 'dataset_id': datasetId, }, body: requestBody, @@ -149,21 +143,18 @@ export class GroupsService { /** * Delete Group * @param groupId - * @param adminMode * 
@param datasetId * @returns GroupOut Successful Response * @throws ApiError */ public static deleteGroupApiV2GroupsGroupIdDelete( groupId: string, - adminMode: boolean = false, datasetId?: string, ): CancelablePromise { return __request({ method: 'DELETE', path: `/api/v2/groups/${groupId}`, query: { - 'admin_mode': adminMode, 'dataset_id': datasetId, }, errors: { @@ -177,7 +168,6 @@ export class GroupsService { * Add a new user to a group. * @param groupId * @param username - * @param adminMode * @param role * @param datasetId * @returns GroupOut Successful Response @@ -186,7 +176,6 @@ export class GroupsService { public static addMemberApiV2GroupsGroupIdAddUsernamePost( groupId: string, username: string, - adminMode: boolean = false, role?: string, datasetId?: string, ): CancelablePromise { @@ -194,7 +183,6 @@ export class GroupsService { method: 'POST', path: `/api/v2/groups/${groupId}/add/${username}`, query: { - 'admin_mode': adminMode, 'role': role, 'dataset_id': datasetId, }, @@ -209,7 +197,6 @@ export class GroupsService { * Remove a user from a group. 
* @param groupId * @param username - * @param adminMode * @param datasetId * @returns GroupOut Successful Response * @throws ApiError @@ -217,14 +204,12 @@ export class GroupsService { public static removeMemberApiV2GroupsGroupIdRemoveUsernamePost( groupId: string, username: string, - adminMode: boolean = false, datasetId?: string, ): CancelablePromise { return __request({ method: 'POST', path: `/api/v2/groups/${groupId}/remove/${username}`, query: { - 'admin_mode': adminMode, 'dataset_id': datasetId, }, errors: { @@ -239,7 +224,6 @@ export class GroupsService { * @param groupId * @param username * @param role - * @param adminMode * @param datasetId * @returns GroupOut Successful Response * @throws ApiError @@ -248,7 +232,6 @@ export class GroupsService { groupId: string, username: string, role: string, - adminMode: boolean = false, datasetId?: string, ): CancelablePromise { return __request({ @@ -256,7 +239,6 @@ export class GroupsService { path: `/api/v2/groups/${groupId}/update/${username}`, query: { 'role': role, - 'admin_mode': adminMode, 'dataset_id': datasetId, }, errors: { diff --git a/frontend/src/openapi/v2/services/MetadataService.ts b/frontend/src/openapi/v2/services/MetadataService.ts index 145a3b783..3090fc9c6 100644 --- a/frontend/src/openapi/v2/services/MetadataService.ts +++ b/frontend/src/openapi/v2/services/MetadataService.ts @@ -134,21 +134,18 @@ export class MetadataService { * Delete Metadata * Delete metadata by specific ID. 
* @param metadataId - * @param adminMode * @param datasetId * @returns any Successful Response * @throws ApiError */ public static deleteMetadataApiV2MetadataMetadataIdDelete( metadataId: string, - adminMode: boolean = false, datasetId?: string, ): CancelablePromise { return __request({ method: 'DELETE', path: `/api/v2/metadata/${metadataId}`, query: { - 'admin_mode': adminMode, 'dataset_id': datasetId, }, errors: { @@ -166,7 +163,6 @@ export class MetadataService { * Metadata document that was updated * @param metadataId * @param requestBody - * @param adminMode * @param datasetId * @returns MetadataOut Successful Response * @throws ApiError @@ -174,14 +170,12 @@ export class MetadataService { public static updateMetadataApiV2MetadataMetadataIdPatch( metadataId: string, requestBody: MetadataPatch, - adminMode: boolean = false, datasetId?: string, ): CancelablePromise { return __request({ method: 'PATCH', path: `/api/v2/metadata/${metadataId}`, query: { - 'admin_mode': adminMode, 'dataset_id': datasetId, }, body: requestBody, @@ -196,7 +190,6 @@ export class MetadataService { * Get File Metadata * Get file metadata. 
* @param fileId - * @param adminMode * @param version * @param allVersions * @param datasetId @@ -206,7 +199,6 @@ export class MetadataService { */ public static getFileMetadataApiV2FilesFileIdMetadataGet( fileId: string, - adminMode: boolean = false, version?: number, allVersions: boolean = false, datasetId?: string, @@ -216,7 +208,6 @@ export class MetadataService { method: 'GET', path: `/api/v2/files/${fileId}/metadata`, query: { - 'admin_mode': adminMode, 'version': version, 'all_versions': allVersions, 'dataset_id': datasetId, @@ -237,7 +228,6 @@ export class MetadataService { * Metadata document that was updated * @param fileId * @param requestBody - * @param adminMode * @param datasetId * @returns MetadataOut Successful Response * @throws ApiError @@ -245,14 +235,12 @@ export class MetadataService { public static replaceFileMetadataApiV2FilesFileIdMetadataPut( fileId: string, requestBody: MetadataPatch, - adminMode: boolean = false, datasetId?: string, ): CancelablePromise { return __request({ method: 'PUT', path: `/api/v2/files/${fileId}/metadata`, query: { - 'admin_mode': adminMode, 'dataset_id': datasetId, }, body: requestBody, @@ -272,7 +260,6 @@ export class MetadataService { * Metadata document that was added to database * @param fileId * @param requestBody - * @param adminMode * @param datasetId * @returns MetadataOut Successful Response * @throws ApiError @@ -280,14 +267,12 @@ export class MetadataService { public static addFileMetadataApiV2FilesFileIdMetadataPost( fileId: string, requestBody: MetadataIn, - adminMode: boolean = false, datasetId?: string, ): CancelablePromise { return __request({ method: 'POST', path: `/api/v2/files/${fileId}/metadata`, query: { - 'admin_mode': adminMode, 'dataset_id': datasetId, }, body: requestBody, @@ -302,7 +287,6 @@ export class MetadataService { * Delete File Metadata * @param fileId * @param requestBody - * @param adminMode * @param datasetId * @returns MetadataOut Successful Response * @throws ApiError @@ 
-310,14 +294,12 @@ export class MetadataService { public static deleteFileMetadataApiV2FilesFileIdMetadataDelete( fileId: string, requestBody: MetadataDelete, - adminMode: boolean = false, datasetId?: string, ): CancelablePromise { return __request({ method: 'DELETE', path: `/api/v2/files/${fileId}/metadata`, query: { - 'admin_mode': adminMode, 'dataset_id': datasetId, }, body: requestBody, @@ -337,7 +319,6 @@ export class MetadataService { * Metadata document that was updated * @param fileId * @param requestBody - * @param adminMode * @param datasetId * @returns MetadataOut Successful Response * @throws ApiError @@ -345,14 +326,12 @@ export class MetadataService { public static updateFileMetadataApiV2FilesFileIdMetadataPatch( fileId: string, requestBody: MetadataPatch, - adminMode: boolean = false, datasetId?: string, ): CancelablePromise { return __request({ method: 'PATCH', path: `/api/v2/files/${fileId}/metadata`, query: { - 'admin_mode': adminMode, 'dataset_id': datasetId, }, body: requestBody, @@ -366,22 +345,17 @@ export class MetadataService { /** * Get Dataset Metadata * @param datasetId - * @param adminMode * @param formData * @returns MetadataOut Successful Response * @throws ApiError */ public static getDatasetMetadataApiV2DatasetsDatasetIdMetadataGet( datasetId: string, - adminMode: boolean = false, formData?: Body_get_dataset_metadata_api_v2_datasets__dataset_id__metadata_get, ): CancelablePromise> { return __request({ method: 'GET', path: `/api/v2/datasets/${datasetId}/metadata`, - query: { - 'admin_mode': adminMode, - }, formData: formData, mediaType: 'application/x-www-form-urlencoded', errors: { @@ -399,21 +373,16 @@ export class MetadataService { * Metadata document that was updated * @param datasetId * @param requestBody - * @param adminMode * @returns MetadataOut Successful Response * @throws ApiError */ public static replaceDatasetMetadataApiV2DatasetsDatasetIdMetadataPut( datasetId: string, requestBody: MetadataIn, - adminMode: boolean = 
false, ): CancelablePromise { return __request({ method: 'PUT', path: `/api/v2/datasets/${datasetId}/metadata`, - query: { - 'admin_mode': adminMode, - }, body: requestBody, mediaType: 'application/json', errors: { @@ -431,21 +400,16 @@ export class MetadataService { * Metadata document that was added to database * @param datasetId * @param requestBody - * @param adminMode * @returns MetadataOut Successful Response * @throws ApiError */ public static addDatasetMetadataApiV2DatasetsDatasetIdMetadataPost( datasetId: string, requestBody: MetadataIn, - adminMode: boolean = false, ): CancelablePromise { return __request({ method: 'POST', path: `/api/v2/datasets/${datasetId}/metadata`, - query: { - 'admin_mode': adminMode, - }, body: requestBody, mediaType: 'application/json', errors: { @@ -458,21 +422,16 @@ export class MetadataService { * Delete Dataset Metadata * @param datasetId * @param requestBody - * @param adminMode * @returns MetadataOut Successful Response * @throws ApiError */ public static deleteDatasetMetadataApiV2DatasetsDatasetIdMetadataDelete( datasetId: string, requestBody: MetadataDelete, - adminMode: boolean = false, ): CancelablePromise { return __request({ method: 'DELETE', path: `/api/v2/datasets/${datasetId}/metadata`, - query: { - 'admin_mode': adminMode, - }, body: requestBody, mediaType: 'application/json', errors: { @@ -490,21 +449,16 @@ export class MetadataService { * Metadata document that was updated * @param datasetId * @param requestBody - * @param adminMode * @returns MetadataOut Successful Response * @throws ApiError */ public static updateDatasetMetadataApiV2DatasetsDatasetIdMetadataPatch( datasetId: string, requestBody: MetadataPatch, - adminMode: boolean = false, ): CancelablePromise { return __request({ method: 'PATCH', path: `/api/v2/datasets/${datasetId}/metadata`, - query: { - 'admin_mode': adminMode, - }, body: requestBody, mediaType: 'application/json', errors: { From 7a32a1bf2041bcf159d7d91056cf9156d73823d4 Mon Sep 17 00:00:00 
2001 From: Chen Wang Date: Mon, 11 Dec 2023 15:02:23 -0600 Subject: [PATCH 20/43] remove all the adminMode parameter --- frontend/src/actions/authorization.js | 22 +- frontend/src/actions/dataset.js | 97 ++++---- frontend/src/actions/file.js | 47 ++-- frontend/src/actions/folder.js | 47 ++-- frontend/src/actions/group.js | 41 ++-- frontend/src/actions/metadata.js | 52 ++--- frontend/src/components/Explore.tsx | 19 +- frontend/src/components/Layout.tsx | 60 ++--- .../datasets/ChangeDatasetRoleModal.tsx | 76 ++++-- .../datasets/ChangeGroupDatasetRoleModal.tsx | 8 +- .../src/components/datasets/CreateDataset.tsx | 3 +- frontend/src/components/datasets/Dataset.tsx | 134 ++++++----- .../components/datasets/ShareDatasetModal.tsx | 3 +- .../datasets/ShareGroupDatasetModal.tsx | 7 +- frontend/src/components/files/File.tsx | 14 +- .../src/components/files/FileActionsMenu.tsx | 11 +- frontend/src/components/files/FileMenu.tsx | 4 +- frontend/src/components/files/UpdateFile.tsx | 10 +- frontend/src/components/files/UploadFile.tsx | 6 +- .../components/files/UploadFileMultiple.tsx | 11 +- .../src/components/folders/CreateFolder.tsx | 68 +++--- .../src/components/groups/AddMemberModal.tsx | 5 +- .../components/groups/DeleteGroupModal.tsx | 6 +- .../groups/EditDescriptionModal.tsx | 3 +- .../src/components/groups/EditNameModal.tsx | 3 +- frontend/src/components/groups/Group.tsx | 3 +- .../src/components/groups/MembersTable.tsx | 103 +++++---- .../groups/MembersTableUserEntry.tsx | 198 +++++++++------- .../components/listeners/SubmitExtraction.tsx | 191 ++++++++------- .../metadata/DisplayListenerMetadata.tsx | 117 ++++++---- .../components/metadata/DisplayMetadata.tsx | 173 +++++++------- .../src/components/metadata/EditMetadata.tsx | 217 ++++++++++-------- .../sharing/GroupAndRoleTableEntry.tsx | 7 +- .../src/components/sharing/SharingTab.tsx | 6 +- .../sharing/UserAndRoleTableEntry.tsx | 9 +- .../visualizations/Visualization.tsx | 26 ++- frontend/src/routes.tsx | 3 +- 37 files 
changed, 968 insertions(+), 842 deletions(-) diff --git a/frontend/src/actions/authorization.js b/frontend/src/actions/authorization.js index 453333c09..48bcb4123 100644 --- a/frontend/src/actions/authorization.js +++ b/frontend/src/actions/authorization.js @@ -1,14 +1,12 @@ import { V2 } from "../openapi"; import { handleErrorsAuthorization, resetFailedReason } from "./common"; -import {fetchDatasets} from "./dataset"; export const RECEIVE_DATASET_ROLE = "RECEIVE_DATASET_ROLE"; -export function fetchDatasetRole(datasetId, adminMode) { +export function fetchDatasetRole(datasetId) { return (dispatch) => { return V2.AuthorizationService.getDatasetRoleApiV2AuthorizationsDatasetsDatasetIdRoleGet( - datasetId, - adminMode + datasetId ) .then((json) => { dispatch({ @@ -22,7 +20,7 @@ export function fetchDatasetRole(datasetId, adminMode) { }) .catch((reason) => { dispatch( - handleErrorsAuthorization(reason, fetchDatasetRole(datasetId, adminMode)) + handleErrorsAuthorization(reason, fetchDatasetRole(datasetId)) ); }); }; @@ -30,11 +28,10 @@ export function fetchDatasetRole(datasetId, adminMode) { export const RECEIVE_FILE_ROLE = "RECEIVE_FILE_ROLE"; -export function fetchFileRole(fileId, adminMode) { +export function fetchFileRole(fileId) { return (dispatch) => { return V2.AuthorizationService.getFileRoleApiV2AuthorizationsFilesFileIdRoleGet( - fileId, - adminMode + fileId ) .then((json) => { dispatch({ @@ -47,18 +44,17 @@ export function fetchFileRole(fileId, adminMode) { dispatch(resetFailedReason()); }) .catch((reason) => { - dispatch(handleErrorsAuthorization(reason, fetchFileRole(fileId, adminMode))); + dispatch(handleErrorsAuthorization(reason, fetchFileRole(fileId))); }); }; } export const RECEIVE_GROUP_ROLE = "RECEIVE_GROUP_ROLE"; -export function fetchGroupRole(groupId, adminMode) { +export function fetchGroupRole(groupId) { return (dispatch) => { return V2.AuthorizationService.getGroupRoleApiV2AuthorizationsGroupsGroupIdRoleGet( - groupId, - adminMode + groupId 
) .then((json) => { dispatch({ @@ -71,7 +67,7 @@ export function fetchGroupRole(groupId, adminMode) { dispatch(resetFailedReason()); }) .catch((reason) => { - dispatch(handleErrorsAuthorization(reason, fetchGroupRole(groupId, adminMode))); + dispatch(handleErrorsAuthorization(reason, fetchGroupRole(groupId))); }); }; } diff --git a/frontend/src/actions/dataset.js b/frontend/src/actions/dataset.js index 4f917fcf7..2024eefca 100644 --- a/frontend/src/actions/dataset.js +++ b/frontend/src/actions/dataset.js @@ -8,13 +8,12 @@ import { export const SET_DATASET_GROUP_ROLE = "SET_DATASET_GROUP_ROLE"; -export function setDatasetGroupRole(datasetId, groupId, roleType, adminMode) { +export function setDatasetGroupRole(datasetId, groupId, roleType) { return (dispatch) => { return V2.AuthorizationService.setDatasetGroupRoleApiV2AuthorizationsDatasetsDatasetIdGroupRoleGroupIdRolePost( datasetId, groupId, - roleType, - adminMode + roleType ) .then((json) => { dispatch({ @@ -26,7 +25,7 @@ export function setDatasetGroupRole(datasetId, groupId, roleType, adminMode) { dispatch( handleErrors( reason, - setDatasetGroupRole(datasetId, groupId, roleType, adminMode) + setDatasetGroupRole(datasetId, groupId, roleType) ) ); }); @@ -35,13 +34,12 @@ export function setDatasetGroupRole(datasetId, groupId, roleType, adminMode) { export const SET_DATASET_USER_ROLE = "SET_DATASET_USER_ROLE"; -export function setDatasetUserRole(datasetId, username, roleType, adminMode) { +export function setDatasetUserRole(datasetId, username, roleType) { return (dispatch) => { return V2.AuthorizationService.setDatasetUserRoleApiV2AuthorizationsDatasetsDatasetIdUserRoleUsernameRolePost( datasetId, username, - roleType, - adminMode + roleType ) .then((json) => { dispatch({ @@ -53,7 +51,7 @@ export function setDatasetUserRole(datasetId, username, roleType, adminMode) { dispatch( handleErrorsInline( reason, - setDatasetUserRole(datasetId, username, roleType, adminMode) + setDatasetUserRole(datasetId, username, 
roleType) ) ); }); @@ -62,12 +60,11 @@ export function setDatasetUserRole(datasetId, username, roleType, adminMode) { export const REMOVE_DATASET_GROUP_ROLE = "REMOVE_DATASET_GROUP_ROLE"; -export function removeDatasetGroupRole(datasetId, groupId, adminMode) { +export function removeDatasetGroupRole(datasetId, groupId) { return (dispatch) => { return V2.AuthorizationService.removeDatasetGroupRoleApiV2AuthorizationsDatasetsDatasetIdGroupRoleGroupIdDelete( datasetId, - groupId, - adminMode + groupId ) .then((json) => { dispatch({ @@ -77,7 +74,7 @@ export function removeDatasetGroupRole(datasetId, groupId, adminMode) { }) .catch((reason) => { dispatch( - handleErrors(reason, removeDatasetGroupRole(datasetId, groupId, adminMode)) + handleErrors(reason, removeDatasetGroupRole(datasetId, groupId)) ); }); }; @@ -85,12 +82,11 @@ export function removeDatasetGroupRole(datasetId, groupId, adminMode) { export const REMOVE_DATASET_USER_ROLE = "REMOVE_DATASET_USER_ROLE"; -export function removeDatasetUserRole(datasetId, username, adminMode) { +export function removeDatasetUserRole(datasetId, username) { return (dispatch) => { return V2.AuthorizationService.removeDatasetUserRoleApiV2AuthorizationsDatasetsDatasetIdUserRoleUsernameDelete( datasetId, - username, - adminMode + username ) .then((json) => { dispatch({ @@ -100,7 +96,7 @@ export function removeDatasetUserRole(datasetId, username, adminMode) { }) .catch((reason) => { dispatch( - handleErrors(reason, removeDatasetUserRole(datasetId, username, adminMode)) + handleErrors(reason, removeDatasetUserRole(datasetId, username)) ); }); }; @@ -108,14 +104,14 @@ export function removeDatasetUserRole(datasetId, username, adminMode) { export const RECEIVE_FILES_IN_DATASET = "RECEIVE_FILES_IN_DATASET"; -export function fetchFilesInDataset(datasetId, folderId, skip, limit, adminMode) { +export function fetchFilesInDataset(datasetId, folderId, skip, limit) { return (dispatch) => { return 
V2.DatasetsService.getDatasetFilesApiV2DatasetsDatasetIdFilesGet( datasetId, - adminMode, folderId, skip, - limit) + limit + ) .then((json) => { dispatch({ type: RECEIVE_FILES_IN_DATASET, @@ -125,7 +121,10 @@ export function fetchFilesInDataset(datasetId, folderId, skip, limit, adminMode) }) .catch((reason) => { dispatch( - handleErrors(reason, fetchFilesInDataset(datasetId, adminMode, folderId, skip, limit, adminMode)) + handleErrors( + reason, + fetchFilesInDataset(datasetId, folderId, skip, limit) + ) ); }); }; @@ -133,11 +132,10 @@ export function fetchFilesInDataset(datasetId, folderId, skip, limit, adminMode) export const RECEIVE_FOLDERS_IN_DATASET = "RECEIVE_FOLDERS_IN_DATASET"; -export function fetchFoldersInDataset(datasetId, parentFolder, skip, limit, adminMode) { +export function fetchFoldersInDataset(datasetId, parentFolder, skip, limit) { return (dispatch) => { return V2.DatasetsService.getDatasetFoldersApiV2DatasetsDatasetIdFoldersGet( datasetId, - adminMode, parentFolder, skip, limit @@ -151,7 +149,10 @@ export function fetchFoldersInDataset(datasetId, parentFolder, skip, limit, admi }) .catch((reason) => { dispatch( - handleErrors(reason, fetchFoldersInDataset(datasetId, parentFolder, skip, limit, adminMode)) + handleErrors( + reason, + fetchFoldersInDataset(datasetId, parentFolder, skip, limit) + ) ); }); }; @@ -162,14 +163,12 @@ export const SUBMIT_DATASET_EXTRACTION = "SUBMIT_DATASET_EXTRACTION"; export function submitDatasetExtractionAction( datasetId, extractorName, - adminMode, requestBody ) { return (dispatch) => { return V2.DatasetsService.getDatasetExtractApiV2DatasetsDatasetIdExtractPost( datasetId, extractorName, - adminMode, requestBody ) .then((json) => { @@ -183,7 +182,7 @@ export function submitDatasetExtractionAction( dispatch( handleErrors( reason, - submitDatasetExtractionAction(datasetId, extractorName,adminMode, requestBody) + submitDatasetExtractionAction(datasetId, extractorName, requestBody) ) ); }); @@ -192,12 +191,11 @@ 
export function submitDatasetExtractionAction( export const UPDATE_DATASET = "UPDATE_DATASET"; -export function updateDataset(datasetId, formData, adminMode) { +export function updateDataset(datasetId, formData) { return (dispatch) => { return V2.DatasetsService.patchDatasetApiV2DatasetsDatasetIdPatch( datasetId, - formData, - adminMode + formData ) .then((json) => { dispatch({ @@ -207,16 +205,16 @@ export function updateDataset(datasetId, formData, adminMode) { }); }) .catch((reason) => { - dispatch(handleErrors(reason, updateDataset(datasetId, formData, adminMode))); + dispatch(handleErrors(reason, updateDataset(datasetId, formData))); }); }; } export const RECEIVE_DATASET_ABOUT = "RECEIVE_DATASET_ABOUT"; -export function fetchDatasetAbout(id, adminMode) { +export function fetchDatasetAbout(id) { return (dispatch) => { - return V2.DatasetsService.getDatasetApiV2DatasetsDatasetIdGet(id, adminMode) + return V2.DatasetsService.getDatasetApiV2DatasetsDatasetIdGet(id) .then((json) => { dispatch({ type: RECEIVE_DATASET_ABOUT, @@ -225,17 +223,17 @@ export function fetchDatasetAbout(id, adminMode) { }); }) .catch((reason) => { - dispatch(handleErrors(reason, fetchDatasetAbout(id, adminMode))); + dispatch(handleErrors(reason, fetchDatasetAbout(id))); }); }; } export const RECEIVE_DATASETS = "RECEIVE_DATASETS"; -export function fetchDatasets(skip = 0, limit = 21, mine = false, adminMode = false) { +export function fetchDatasets(skip = 0, limit = 21, mine = false) { return (dispatch) => { // TODO: Parameters for dates? paging? 
- return V2.DatasetsService.getDatasetsApiV2DatasetsGet(adminMode, skip, limit, mine) + return V2.DatasetsService.getDatasetsApiV2DatasetsGet(skip, limit, mine) .then((json) => { dispatch({ type: RECEIVE_DATASETS, @@ -244,7 +242,7 @@ export function fetchDatasets(skip = 0, limit = 21, mine = false, adminMode = fa }); }) .catch((reason) => { - dispatch(handleErrors(reason, fetchDatasets(skip, limit, mine, adminMode))); + dispatch(handleErrors(reason, fetchDatasets(skip, limit, mine))); }); }; } @@ -280,11 +278,10 @@ export function resetDatsetCreated() { export const DELETE_DATASET = "DELETE_DATASET"; -export function datasetDeleted(datasetId, adminMode) { +export function datasetDeleted(datasetId) { return (dispatch) => { return V2.DatasetsService.deleteDatasetApiV2DatasetsDatasetIdDelete( - datasetId, - adminMode + datasetId ) .then((json) => { dispatch({ @@ -294,19 +291,18 @@ export function datasetDeleted(datasetId, adminMode) { }); }) .catch((reason) => { - dispatch(handleErrors(reason, datasetDeleted(datasetId, adminMode))); + dispatch(handleErrors(reason, datasetDeleted(datasetId))); }); }; } export const FOLDER_ADDED = "FOLDER_ADDED"; -export function folderAdded(datasetId, adminMode, folderName, parentFolder = null) { +export function folderAdded(datasetId, folderName, parentFolder = null) { return (dispatch) => { const folder = { name: folderName, parent_folder: parentFolder }; return V2.DatasetsService.addFolderApiV2DatasetsDatasetIdFoldersPost( datasetId, - adminMode, folder ) .then((json) => { @@ -318,7 +314,7 @@ export function folderAdded(datasetId, adminMode, folderName, parentFolder = nul }) .catch((reason) => { dispatch( - handleErrors(reason, folderAdded(datasetId, adminMode, folderName, parentFolder)) + handleErrors(reason, folderAdded(datasetId, folderName, parentFolder)) ); }); }; @@ -326,12 +322,11 @@ export function folderAdded(datasetId, adminMode, folderName, parentFolder = nul export const GET_FOLDER_PATH = "GET_FOLDER_PATH"; -export 
function fetchFolderPath(folderId, adminMode) { +export function fetchFolderPath(folderId) { return (dispatch) => { if (folderId != null) { return V2.FoldersService.downloadFolderApiV2FoldersFolderIdPathGet( - folderId, - adminMode + folderId ) .then((json) => { dispatch({ @@ -341,7 +336,7 @@ export function fetchFolderPath(folderId, adminMode) { }); }) .catch((reason) => { - dispatch(handleErrors(reason, fetchFolderPath(folderId, adminMode))); + dispatch(handleErrors(reason, fetchFolderPath(folderId))); }); } else { dispatch({ @@ -355,12 +350,10 @@ export function fetchFolderPath(folderId, adminMode) { export const RECEIVE_DATASET_ROLES = "RECEIVE_DATASET_ROLES"; -export function fetchDatasetRoles(datasetId, adminMode) { +export function fetchDatasetRoles(datasetId) { return (dispatch) => { - console.log("adminMode: ", adminMode); return V2.AuthorizationService.getDatasetRolesApiV2AuthorizationsDatasetsDatasetIdRolesGet( - datasetId, - adminMode + datasetId ) .then((json) => { dispatch({ @@ -374,7 +367,7 @@ export function fetchDatasetRoles(datasetId, adminMode) { }) .catch((reason) => { dispatch( - handleErrorsAuthorization(reason, fetchDatasetRoles(datasetId, adminMode)) + handleErrorsAuthorization(reason, fetchDatasetRoles(datasetId)) ); }); }; diff --git a/frontend/src/actions/file.js b/frontend/src/actions/file.js index b756334ee..f3d58510f 100644 --- a/frontend/src/actions/file.js +++ b/frontend/src/actions/file.js @@ -11,7 +11,7 @@ export const RECEIVE_FILE_EXTRACTED_METADATA = export function fetchFileExtractedMetadata(id) { const url = `${config.hostname}/api/v2/files/${id}/metadata`; return (dispatch) => { - return fetch(url, { mode: "cors", headers: getHeader()}) + return fetch(url, { mode: "cors", headers: getHeader() }) .then((response) => { if (response.status === 200) { response.json().then((json) => { @@ -33,9 +33,9 @@ export function fetchFileExtractedMetadata(id) { export const RECEIVE_FILE_SUMMARY = "RECEIVE_FILE_SUMMARY"; -export function 
fetchFileSummary(id, admin_mode) { +export function fetchFileSummary(id) { return (dispatch) => { - return V2.FilesService.getFileSummaryApiV2FilesFileIdSummaryGet(id, admin_mode) + return V2.FilesService.getFileSummaryApiV2FilesFileIdSummaryGet(id) .then((json) => { dispatch({ type: RECEIVE_FILE_SUMMARY, @@ -44,7 +44,7 @@ export function fetchFileSummary(id, admin_mode) { }); }) .catch((reason) => { - dispatch(handleErrors(reason, fetchFileSummary(id, admin_mode))); + dispatch(handleErrors(reason, fetchFileSummary(id))); }); }; } @@ -101,9 +101,9 @@ export function fetchFilePreviews(id) { export const DELETE_FILE = "DELETE_FILE"; -export function fileDeleted(fileId, adminMode) { +export function fileDeleted(fileId) { return (dispatch) => { - return V2.FilesService.deleteFileApiV2FilesFileIdDelete(fileId, adminMode) + return V2.FilesService.deleteFileApiV2FilesFileIdDelete(fileId) .then((json) => { dispatch({ type: DELETE_FILE, @@ -112,22 +112,21 @@ export function fileDeleted(fileId, adminMode) { }); }) .catch((reason) => { - dispatch(handleErrors(reason, fileDeleted(fileId, adminMode))); + dispatch(handleErrors(reason, fileDeleted(fileId))); }); }; } export const CREATE_FILE = "CREATE_FILE"; -export function createFile(selectedDatasetId, selectedFile, folderId, adminMode) { +export function createFile(selectedDatasetId, selectedFile, folderId) { return (dispatch) => { const formData = new FormData(); formData["file"] = selectedFile; return V2.DatasetsService.saveFileApiV2DatasetsDatasetIdFilesPost( selectedDatasetId, formData, - folderId, - adminMode + folderId ) .then((file) => { dispatch({ @@ -140,7 +139,7 @@ export function createFile(selectedDatasetId, selectedFile, folderId, adminMode) dispatch( handleErrors( reason, - createFile(selectedDatasetId, selectedFile, folderId, adminMode) + createFile(selectedDatasetId, selectedFile, folderId) ) ); }); @@ -149,7 +148,7 @@ export function createFile(selectedDatasetId, selectedFile, folderId, adminMode) export const 
CREATE_FILES = "CREATE_FILES"; -export function createFiles(selectedDatasetId, adminMode, selectedFiles, folderId) { +export function createFiles(selectedDatasetId, selectedFiles, folderId) { return (dispatch) => { let formData = new FormData(); let tmp = []; @@ -163,7 +162,6 @@ export function createFiles(selectedDatasetId, adminMode, selectedFiles, folderI return V2.DatasetsService.saveFilesApiV2DatasetsDatasetIdFilesMultiplePost( selectedDatasetId, formData, - adminMode, folderId ) .then((files) => { @@ -177,7 +175,7 @@ export function createFiles(selectedDatasetId, adminMode, selectedFiles, folderI dispatch( handleErrors( reason, - createFiles(selectedDatasetId, adminMode, selectedFiles, folderId) + createFiles(selectedDatasetId, selectedFiles, folderId) ) ); }); @@ -208,11 +206,11 @@ export function resetFilesCreated() { export const UPDATE_FILE = "UPDATE_FILE"; -export function updateFile(selectedFile, fileId, adminMode) { +export function updateFile(selectedFile, fileId) { return (dispatch) => { const formData = new FormData(); formData["file"] = selectedFile; - return V2.FilesService.updateFileApiV2FilesFileIdPut(fileId, adminMode, formData) + return V2.FilesService.updateFileApiV2FilesFileIdPut(fileId, formData) .then((file) => { dispatch({ type: UPDATE_FILE, @@ -221,7 +219,7 @@ export function updateFile(selectedFile, fileId, adminMode) { }); }) .catch((reason) => { - dispatch(handleErrors(reason, updateFile(selectedFile, fileId, adminMode))); + dispatch(handleErrors(reason, updateFile(selectedFile, fileId))); }); }; } @@ -241,9 +239,9 @@ export function changeSelectedVersion(fileId, selectedVersion) { export const RECEIVE_VERSIONS = "RECEIVE_VERSIONS"; -export function fetchFileVersions(fileId, adminMode) { +export function fetchFileVersions(fileId) { return (dispatch) => { - return V2.FilesService.getFileVersionsApiV2FilesFileIdVersionsGet(fileId, adminMode) + return V2.FilesService.getFileVersionsApiV2FilesFileIdVersionsGet(fileId) .then((json) => { // 
sort by decending order const version = json.sort( @@ -256,7 +254,7 @@ export function fetchFileVersions(fileId, adminMode) { }); }) .catch((reason) => { - dispatch(handleErrors(reason, fetchFileVersions(fileId, adminMode))); + dispatch(handleErrors(reason, fetchFileVersions(fileId))); }); }; } @@ -317,14 +315,12 @@ export const RESET_FILE_PRESIGNED_URL = "RESET_FILE_PRESIGNED_URL"; export function generateFilePresignedUrl( fileId, - adminMode, fileVersionNum = null, expiresInSeconds = 7 * 24 * 3600 ) { return async (dispatch) => { return V2.FilesService.downloadFileUrlApiV2FilesFileIdUrlGet( fileId, - adminMode, fileVersionNum, expiresInSeconds ) @@ -339,7 +335,7 @@ export function generateFilePresignedUrl( dispatch( handleErrors( reason, - generateFilePresignedUrl(fileId, adminMode, fileVersionNum, expiresInSeconds) + generateFilePresignedUrl(fileId, fileVersionNum, expiresInSeconds) ) ); }); @@ -348,12 +344,11 @@ export function generateFilePresignedUrl( export const SUBMIT_FILE_EXTRACTION = "SUBMIT_FILE_EXTRACTION"; -export function submitFileExtractionAction(fileId, extractorName, adminMode, requestBody) { +export function submitFileExtractionAction(fileId, extractorName, requestBody) { return (dispatch) => { return V2.FilesService.postFileExtractApiV2FilesFileIdExtractPost( fileId, extractorName, - adminMode, requestBody ) .then((json) => { @@ -367,7 +362,7 @@ export function submitFileExtractionAction(fileId, extractorName, adminMode, req dispatch( handleErrors( reason, - submitFileExtractionAction(fileId, extractorName, adminMode, requestBody) + submitFileExtractionAction(fileId, extractorName, requestBody) ) ); }); diff --git a/frontend/src/actions/folder.js b/frontend/src/actions/folder.js index 057d95ea3..35ce92d21 100644 --- a/frontend/src/actions/folder.js +++ b/frontend/src/actions/folder.js @@ -1,37 +1,46 @@ -import {V2} from "../openapi"; -import {handleErrors} from "./common"; +import { V2 } from "../openapi"; +import { handleErrors } from 
"./common"; export const FOLDER_ADDED = "FOLDER_ADDED"; -export function folderAdded(datasetId, adminMode, folderName, parentFolder = null){ + +export function folderAdded(datasetId, folderName, parentFolder = null) { return (dispatch) => { - const folder = {"name": folderName, "parent_folder": parentFolder} - return V2.DatasetsService.addFolderApiV2DatasetsDatasetIdFoldersPost(datasetId, folder, adminMode) - .then(json => { + const folder = { name: folderName, parent_folder: parentFolder }; + return V2.DatasetsService.addFolderApiV2DatasetsDatasetIdFoldersPost( + datasetId, + folder + ) + .then((json) => { dispatch({ type: FOLDER_ADDED, folder: json, receivedAt: Date.now(), }); }) - .catch(reason => { - dispatch(handleErrors(reason, folderAdded(datasetId, adminMode, folderName, parentFolder))); + .catch((reason) => { + dispatch( + handleErrors(reason, folderAdded(datasetId, folderName, parentFolder)) + ); }); }; } export const GET_FOLDER_PATH = "GET_FOLDER_PATH"; -export function fetchFolderPath(folderId){ + +export function fetchFolderPath(folderId) { return (dispatch) => { if (folderId != null) { - return V2.FoldersService.downloadFolderApiV2FoldersFolderIdPathGet(folderId) - .then(json => { + return V2.FoldersService.downloadFolderApiV2FoldersFolderIdPathGet( + folderId + ) + .then((json) => { dispatch({ type: GET_FOLDER_PATH, folderPath: json, receivedAt: Date.now(), }); }) - .catch(reason => { + .catch((reason) => { dispatch(handleErrors(reason, fetchFolderPath(folderId))); }); } else { @@ -45,17 +54,21 @@ export function fetchFolderPath(folderId){ } export const FOLDER_DELETED = "FOLDER_DELETED"; -export function folderDeleted(datasetId, folderId){ + +export function folderDeleted(datasetId, folderId) { return (dispatch) => { - return V2.DatasetsService.deleteFolderApiV2DatasetsDatasetIdFoldersFolderIdDelete(datasetId, folderId) - .then(json => { + return V2.DatasetsService.deleteFolderApiV2DatasetsDatasetIdFoldersFolderIdDelete( + datasetId, + folderId + ) 
+ .then((json) => { dispatch({ type: FOLDER_DELETED, - folder: {"id":folderId}, + folder: { id: folderId }, receivedAt: Date.now(), }); }) - .catch(reason => { + .catch((reason) => { dispatch(handleErrors(reason, folderDeleted(datasetId, folderId))); }); }; diff --git a/frontend/src/actions/group.js b/frontend/src/actions/group.js index 91051e5ae..01070b691 100644 --- a/frontend/src/actions/group.js +++ b/frontend/src/actions/group.js @@ -39,9 +39,9 @@ export function fetchGroups(skip = 0, limit = 21) { export const DELETE_GROUP = "DELETE_GROUP"; -export function deleteGroup(groupId, adminMode) { +export function deleteGroup(groupId) { return (dispatch) => { - return V2.GroupsService.deleteGroupApiV2GroupsGroupIdDelete(groupId, adminMode) + return V2.GroupsService.deleteGroupApiV2GroupsGroupIdDelete(groupId) .then((json) => { dispatch({ type: DELETE_GROUP, @@ -50,7 +50,7 @@ export function deleteGroup(groupId, adminMode) { }); }) .catch((reason) => { - dispatch(handleErrors(reason, deleteGroup(groupId, adminMode))); + dispatch(handleErrors(reason, deleteGroup(groupId))); }); }; } @@ -79,9 +79,9 @@ export function searchGroups(searchTerm, skip = 0, limit = 21) { export const RECEIVE_GROUP_ABOUT = "RECEIVE_GROUP_ABOUT"; -export function fetchGroupAbout(id, adminMode) { +export function fetchGroupAbout(id) { return (dispatch) => { - return V2.GroupsService.getGroupApiV2GroupsGroupIdGet(id, adminMode) + return V2.GroupsService.getGroupApiV2GroupsGroupIdGet(id) .then((json) => { dispatch({ type: RECEIVE_GROUP_ABOUT, @@ -90,19 +90,18 @@ export function fetchGroupAbout(id, adminMode) { }); }) .catch((reason) => { - dispatch(handleErrors(reason, fetchGroupAbout(id, adminMode))); + dispatch(handleErrors(reason, fetchGroupAbout(id))); }); }; } export const DELETE_GROUP_MEMBER = "DELETE_GROUP_MEMBER"; -export function deleteGroupMember(groupId, username, adminMode) { +export function deleteGroupMember(groupId, username) { return (dispatch) => { return 
V2.GroupsService.removeMemberApiV2GroupsGroupIdRemoveUsernamePost( groupId, - username, - adminMode + username ) .then((json) => { dispatch({ @@ -112,19 +111,18 @@ export function deleteGroupMember(groupId, username, adminMode) { }); }) .catch((reason) => { - dispatch(handleErrors(reason, deleteGroupMember(groupId, username, adminMode))); + dispatch(handleErrors(reason, deleteGroupMember(groupId, username))); }); }; } export const ADD_GROUP_MEMBER = "ADD_GROUP_MEMBER"; -export function addGroupMember(groupId, username, adminMode, role = "viewer") { +export function addGroupMember(groupId, username, role = "viewer") { return (dispatch) => { return V2.GroupsService.addMemberApiV2GroupsGroupIdAddUsernamePost( groupId, username, - adminMode, role ) .then((json) => { @@ -136,7 +134,7 @@ export function addGroupMember(groupId, username, adminMode, role = "viewer") { }) .catch((reason) => { dispatch( - handleErrorsInline(reason, addGroupMember(groupId, username, adminMode, role)) + handleErrorsInline(reason, addGroupMember(groupId, username, role)) ); }); }; @@ -144,13 +142,12 @@ export function addGroupMember(groupId, username, adminMode, role = "viewer") { export const ASSIGN_GROUP_MEMBER_ROLE = "ASSIGN_GROUP_MEMBER_ROLE"; -export function assignGroupMemberRole(groupId, username, role = "viewer", adminMode) { +export function assignGroupMemberRole(groupId, username, role = "viewer") { return (dispatch) => { return V2.GroupsService.updateMemberApiV2GroupsGroupIdUpdateUsernamePut( groupId, username, - role, - adminMode + role ) .then((json) => { dispatch({ @@ -161,7 +158,7 @@ export function assignGroupMemberRole(groupId, username, role = "viewer", adminM }) .catch((reason) => { dispatch( - handleErrors(reason, assignGroupMemberRole(groupId, username, role, adminMode)) + handleErrors(reason, assignGroupMemberRole(groupId, username, role)) ); }); }; @@ -169,13 +166,9 @@ export function assignGroupMemberRole(groupId, username, role = "viewer", adminM export const 
UPDATE_GROUP = "UPDATE_GROUP"; -export function updateGroup(groupId, adminMode, formData) { +export function updateGroup(groupId, formData) { return (dispatch) => { - return V2.GroupsService.editGroupApiV2GroupsGroupIdPut( - groupId, - adminMode, - formData - ) + return V2.GroupsService.editGroupApiV2GroupsGroupIdPut(groupId, formData) .then((json) => { dispatch({ type: UPDATE_GROUP, @@ -184,7 +177,7 @@ export function updateGroup(groupId, adminMode, formData) { }); }) .catch((reason) => { - dispatch(handleErrors(reason, updateGroup(groupId, adminMode, formData))); + dispatch(handleErrors(reason, updateGroup(groupId, formData))); }); }; } diff --git a/frontend/src/actions/metadata.js b/frontend/src/actions/metadata.js index 1bcf69355..8314de07d 100644 --- a/frontend/src/actions/metadata.js +++ b/frontend/src/actions/metadata.js @@ -94,11 +94,11 @@ export function deleteMetadataDefinition(metadataDefinitionId) { export const SEARCH_METADATA_DEFINITIONS = "SEARCH_METADATA_DEFINITIONS"; export function searchMetadataDefinitions(searchTerm, skip, limit) { - if (searchTerm.trim() === '') { - // Search term is empty. - console.log('Please enter a search term'); - return; - } + if (searchTerm.trim() === "") { + // Search term is empty. 
+ console.log("Please enter a search term"); + return; + } return (dispatch) => { return V2.MetadataService.searchMetadataDefinitionApiV2MetadataDefinitionSearchSearchTermGet( searchTerm, @@ -125,11 +125,10 @@ export function searchMetadataDefinitions(searchTerm, skip, limit) { export const RECEIVE_DATASET_METADATA = "RECEIVE_DATASET_METADATA"; -export function fetchDatasetMetadata(datasetId, adminMode) { +export function fetchDatasetMetadata(datasetId) { return (dispatch) => { return V2.MetadataService.getDatasetMetadataApiV2DatasetsDatasetIdMetadataGet( - datasetId, - adminMode + datasetId ) .then((json) => { dispatch({ @@ -139,18 +138,17 @@ export function fetchDatasetMetadata(datasetId, adminMode) { }); }) .catch((reason) => { - dispatch(handleErrors(reason, fetchDatasetMetadata(datasetId, adminMode))); + dispatch(handleErrors(reason, fetchDatasetMetadata(datasetId))); }); }; } export const RECEIVE_FILE_METADATA = "RECEIVE_FILE_METADATA"; -export function fetchFileMetadata(fileId, adminMode, version) { +export function fetchFileMetadata(fileId, version) { return (dispatch) => { return V2.MetadataService.getFileMetadataApiV2FilesFileIdMetadataGet( fileId, - adminMode, version, false ) @@ -162,18 +160,17 @@ export function fetchFileMetadata(fileId, adminMode, version) { }); }) .catch((reason) => { - dispatch(handleErrors(reason, fetchFileMetadata(fileId, adminMode, version))); + dispatch(handleErrors(reason, fetchFileMetadata(fileId, version))); }); }; } export const POST_DATASET_METADATA = "POST_DATASET_METADATA"; -export function postDatasetMetadata(datasetId, adminMode, metadata) { +export function postDatasetMetadata(datasetId, metadata) { return (dispatch) => { return V2.MetadataService.addDatasetMetadataApiV2DatasetsDatasetIdMetadataPost( datasetId, - adminMode, metadata ) .then((json) => { @@ -185,7 +182,7 @@ export function postDatasetMetadata(datasetId, adminMode, metadata) { }) .catch((reason) => { dispatch( - handleErrors(reason, 
postDatasetMetadata(datasetId, adminMode, metadata)) + handleErrors(reason, postDatasetMetadata(datasetId, metadata)) ); }); }; @@ -193,11 +190,10 @@ export function postDatasetMetadata(datasetId, adminMode, metadata) { export const POST_FILE_METADATA = "POST_FILE_METADATA"; -export function postFileMetadata(fileId, adminMode, metadata) { +export function postFileMetadata(fileId, metadata) { return (dispatch) => { return V2.MetadataService.addFileMetadataApiV2FilesFileIdMetadataPost( fileId, - adminMode, metadata ) .then((json) => { @@ -208,18 +204,17 @@ export function postFileMetadata(fileId, adminMode, metadata) { }); }) .catch((reason) => { - dispatch(handleErrors(reason, postFileMetadata(fileId, adminMode, metadata))); + dispatch(handleErrors(reason, postFileMetadata(fileId, metadata))); }); }; } export const DELETE_DATASET_METADATA = "DELETE_DATASET_METADATA"; -export function deleteDatasetMetadata(datasetId, adminMode, metadata) { +export function deleteDatasetMetadata(datasetId, metadata) { return (dispatch) => { return V2.MetadataService.deleteDatasetMetadataApiV2DatasetsDatasetIdMetadataDelete( datasetId, - adminMode, metadata ) .then((json) => { @@ -231,7 +226,7 @@ export function deleteDatasetMetadata(datasetId, adminMode, metadata) { }) .catch((reason) => { dispatch( - handleErrors(reason, deleteDatasetMetadata(datasetId, adminMode, metadata)) + handleErrors(reason, deleteDatasetMetadata(datasetId, metadata)) ); }); }; @@ -239,11 +234,10 @@ export function deleteDatasetMetadata(datasetId, adminMode, metadata) { export const DELETE_FILE_METADATA = "DELETE_FILE_METADATA"; -export function deleteFileMetadata(fileId, adminMode, metadata) { +export function deleteFileMetadata(fileId, metadata) { return (dispatch) => { return V2.MetadataService.deleteFileMetadataApiV2FilesFileIdMetadataDelete( fileId, - adminMode, metadata ) .then((json) => { @@ -254,18 +248,17 @@ export function deleteFileMetadata(fileId, adminMode, metadata) { }); }) .catch((reason) => { - 
dispatch(handleErrors(reason, deleteFileMetadata(fileId, adminMode, metadata))); + dispatch(handleErrors(reason, deleteFileMetadata(fileId, metadata))); }); }; } export const UPDATE_DATASET_METADATA = "UPDATE_DATASET_METADATA"; -export function patchDatasetMetadata(datasetId, adminMode, metadata) { +export function patchDatasetMetadata(datasetId, metadata) { return (dispatch) => { return V2.MetadataService.updateDatasetMetadataApiV2DatasetsDatasetIdMetadataPatch( datasetId, - adminMode, metadata ) .then((json) => { @@ -277,7 +270,7 @@ export function patchDatasetMetadata(datasetId, adminMode, metadata) { }) .catch((reason) => { dispatch( - handleErrors(reason, patchDatasetMetadata(datasetId, adminMode, metadata)) + handleErrors(reason, patchDatasetMetadata(datasetId, metadata)) ); }); }; @@ -285,11 +278,10 @@ export function patchDatasetMetadata(datasetId, adminMode, metadata) { export const UPDATE_FILE_METADATA = "UPDATE_FILE_METADATA"; -export function patchFileMetadata(fileId, adminMode, metadata) { +export function patchFileMetadata(fileId, metadata) { return (dispatch) => { return V2.MetadataService.updateFileMetadataApiV2FilesFileIdMetadataPatch( fileId, - adminMode, metadata ) .then((json) => { @@ -300,7 +292,7 @@ export function patchFileMetadata(fileId, adminMode, metadata) { }); }) .catch((reason) => { - dispatch(handleErrors(reason, patchFileMetadata(fileId, adminMode, metadata))); + dispatch(handleErrors(reason, patchFileMetadata(fileId, metadata))); }); }; } diff --git a/frontend/src/components/Explore.tsx b/frontend/src/components/Explore.tsx index 74c04e779..d28b67021 100644 --- a/frontend/src/components/Explore.tsx +++ b/frontend/src/components/Explore.tsx @@ -9,12 +9,7 @@ import { a11yProps, TabPanel } from "./tabs/TabComponent"; import DatasetCard from "./datasets/DatasetCard"; import { ArrowBack, ArrowForward } from "@material-ui/icons"; import Layout from "./Layout"; -<<<<<<< HEAD import { Link as RouterLink } from "react-router-dom"; -======= 
-import {Link as RouterLink, useLocation, useParams} from "react-router-dom"; -import { Listeners } from "./listeners/Listeners"; ->>>>>>> 830-create-superadmin-mode import { ErrorModal } from "./errors/ErrorModal"; const tab = { @@ -27,15 +22,11 @@ const tab = { export const Explore = (): JSX.Element => { // Redux connect equivalent const dispatch = useDispatch(); - const adminMode = useSelector( - (state: RootState) => state.user.adminMode - ); const listDatasets = ( skip: number | undefined, limit: number | undefined, - mine: boolean | undefined, - adminMode: boolean| undefined - ) => dispatch(fetchDatasets(skip, limit, mine, adminMode)); + mine: boolean | undefined + ) => dispatch(fetchDatasets(skip, limit, mine)); const datasets = useSelector((state: RootState) => state.dataset.datasets); // TODO add option to determine limit number; default show 5 datasets each time @@ -49,10 +40,9 @@ export const Explore = (): JSX.Element => { const [selectedTabIndex, setSelectedTabIndex] = useState(0); const [errorOpen, setErrorOpen] = useState(false); - // component did mount useEffect(() => { - listDatasets(0, limit, mine, adminMode); + listDatasets(0, limit, mine); }, []); // fetch thumbnails from each individual dataset/id calls @@ -70,7 +60,6 @@ export const Explore = (): JSX.Element => { setSelectedTabIndex(newTabIndex); }; - // for pagination keep flipping until the return dataset is less than the limit const previous = () => { if (currPageNum - 1 >= 0) { @@ -86,7 +75,7 @@ export const Explore = (): JSX.Element => { }; useEffect(() => { if (skip !== null && skip !== undefined) { - listDatasets(skip, limit, mine, adminMode); + listDatasets(skip, limit, mine); if (skip === 0) setPrevDisabled(true); else setPrevDisabled(false); } diff --git a/frontend/src/components/Layout.tsx b/frontend/src/components/Layout.tsx index ed5add01b..7f301e3f0 100644 --- a/frontend/src/components/Layout.tsx +++ b/frontend/src/components/Layout.tsx @@ -18,9 +18,9 @@ import ListItemIcon from 
"@mui/material/ListItemIcon"; import ListItemText from "@mui/material/ListItemText"; import { Link, Menu, MenuItem, MenuList, Typography } from "@mui/material"; import { Link as RouterLink, useLocation } from "react-router-dom"; -import {useDispatch, useSelector} from "react-redux"; +import { useDispatch, useSelector } from "react-redux"; import { RootState } from "../types/data"; -import {AddBox, Explore} from "@material-ui/icons"; +import { AddBox, Explore } from "@material-ui/icons"; import HistoryIcon from "@mui/icons-material/History"; import GroupIcon from "@mui/icons-material/Group"; import Gravatar from "react-gravatar"; @@ -30,8 +30,8 @@ import { getCurrEmail } from "../utils/common"; import VpnKeyIcon from "@mui/icons-material/VpnKey"; import LogoutIcon from "@mui/icons-material/Logout"; import { EmbeddedSearch } from "./search/EmbeddedSearch"; -import {setAdmin, toggleAdminMode} from "../actions/user"; -import {AdminPanelSettings} from "@mui/icons-material"; +import { setAdmin, toggleAdminMode } from "../actions/user"; +import { AdminPanelSettings } from "@mui/icons-material"; const drawerWidth = 240; @@ -106,16 +106,10 @@ export default function PersistentDrawerLeft(props) { const [embeddedSearchHidden, setEmbeddedSearchHidden] = React.useState(false); const [anchorEl, setAnchorEl] = React.useState(null); const isMenuOpen = Boolean(anchorEl); - const admin = useSelector( - (state: RootState) => state.user.admin - ); - const adminMode = useSelector( - (state: RootState) => state.user.adminMode - ); - + const admin = useSelector((state: RootState) => state.user.admin); useEffect(() => { dispatch(setAdmin()); - }, [dispatch]); + }, [dispatch]); const handleAdminMode = () => { dispatch(toggleAdminMode(adminMode)); @@ -233,22 +227,32 @@ export default function PersistentDrawerLeft(props) { User Profile - {admin && !adminMode?
- - - - - - Admin Mode -
: <>} - {admin && adminMode?
- - - - - - Drop Admin Mode -
: <>} + {admin && !adminMode ? ( +
+ + + + + + Admin Mode + +
+ ) : ( + <> + )} + {admin && adminMode ? ( +
+ + + + + + Drop Admin Mode + +
+ ) : ( + <> + )} diff --git a/frontend/src/components/datasets/ChangeDatasetRoleModal.tsx b/frontend/src/components/datasets/ChangeDatasetRoleModal.tsx index a8abfa0bd..7009cd9bc 100644 --- a/frontend/src/components/datasets/ChangeDatasetRoleModal.tsx +++ b/frontend/src/components/datasets/ChangeDatasetRoleModal.tsx @@ -1,33 +1,45 @@ import React, { useEffect, useState } from "react"; -import { Alert, Autocomplete, Button, Collapse, Container, Dialog, DialogActions, DialogContent, DialogTitle, Divider, FormControl, IconButton, InputLabel, MenuItem, Select, TextField, Typography } from "@mui/material"; -import {useParams} from "react-router-dom"; +import { + Alert, + Button, + Collapse, + Container, + Dialog, + DialogActions, + DialogContent, + DialogTitle, + Divider, + FormControl, + IconButton, + InputLabel, + MenuItem, + Select, + Typography, +} from "@mui/material"; +import { useParams } from "react-router-dom"; import { setDatasetUserRole } from "../../actions/dataset"; -import {useDispatch, useSelector} from "react-redux"; +import { useDispatch } from "react-redux"; import CloseIcon from "@mui/icons-material/Close"; -import {RootState} from "../../types/data"; - type ChangeDatasetRoleProps = { - open: boolean, - handleClose: any, - datasetName: string, - currentRole: string, + open: boolean; + handleClose: any; + datasetName: string; + currentRole: string; currentUser: string; -} +}; export default function ChangeDatasetRoleModal(props: ChangeDatasetRoleProps) { const dispatch = useDispatch(); - const { open, handleClose, datasetName, currentRole , currentUser} = props; - const {datasetId} = useParams<{ datasetId?: string }>(); + const { open, handleClose, datasetName, currentRole, currentUser } = props; + const { datasetId } = useParams<{ datasetId?: string }>(); const [email, setEmail] = useState(currentUser); const [role, setRole] = useState(currentRole); const [showSuccessAlert, setShowSuccessAlert] = useState(false); - const adminMode = useSelector( - 
(state: RootState) => state.user.adminMode - ); - const setUserRole = (datasetId: string, username: string, role: string) => dispatch(setDatasetUserRole(datasetId, username, role, adminMode)); + const setUserRole = (datasetId: string, username: string, role: string) => + dispatch(setDatasetUserRole(datasetId, username, role)); // component did mount useEffect(() => { @@ -43,20 +55,27 @@ export default function ChangeDatasetRoleModal(props: ChangeDatasetRoleProps) { return ( - + }} + > Share dataset '{datasetName}' Change role for user {currentUser} -
+
Status Role + - - : - member.editor !== undefined && member.editor ? - "Editor" : "Member" - } - {/*only owner or editor are allowed to modify roles of the member*/} - - { - editRoleOn ? + + + ) : member.editor !== undefined && member.editor ? ( + "Editor" + ) : ( + "Member" + )} + {/*only owner or editor are allowed to modify roles of the member*/} + + {editRoleOn ? ( - - + + - - + + - : - { - setEditRoleOn(true); - }}> + ) : ( + { + setEditRoleOn(true); + }} + > - } - - - } - {member.user.email == creatorEmail && - + )} + - } - {member.user.email != creatorEmail && + )} + {member.user.email == creatorEmail && ( + + )} + {member.user.email != creatorEmail && ( - {/*only owner or editor are allowed to delete*/} - - { - setSelectMemberUsername(member.user.email) - setDeleteMemberConfirmOpen(true); - }}> - - - - - } + {/*only owner or editor are allowed to delete*/} + + { + setSelectMemberUsername(member.user.email); + setDeleteMemberConfirmOpen(true); + }} + > + + + + + )} - ) + ); } diff --git a/frontend/src/components/listeners/SubmitExtraction.tsx b/frontend/src/components/listeners/SubmitExtraction.tsx index cb55a329a..f027854ec 100644 --- a/frontend/src/components/listeners/SubmitExtraction.tsx +++ b/frontend/src/components/listeners/SubmitExtraction.tsx @@ -1,5 +1,5 @@ -import React, {useState} from "react"; -import {useDispatch, useSelector} from "react-redux"; +import React, { useState } from "react"; +import { useDispatch, useSelector } from "react-redux"; import { Box, Button, @@ -15,44 +15,51 @@ import { Stepper, } from "@mui/material"; -import {ListenerInfo} from "./ListenerInfo"; +import { ListenerInfo } from "./ListenerInfo"; import Form from "@rjsf/material-ui"; -import {FormProps} from "@rjsf/core"; -import {submitFileExtractionAction} from "../../actions/file"; -import {submitDatasetExtractionAction} from "../../actions/dataset"; -import {Extractor, RootState} from "../../types/data"; -import {ClowderRjsfSelectWidget} from 
"../styledComponents/ClowderRjsfSelectWidget"; -import {ClowderRjsfTextWidget} from "../styledComponents/ClowderRjsfTextWidget"; +import { FormProps } from "@rjsf/core"; +import { submitFileExtractionAction } from "../../actions/file"; +import { submitDatasetExtractionAction } from "../../actions/dataset"; +import { Extractor, RootState } from "../../types/data"; +import { ClowderRjsfSelectWidget } from "../styledComponents/ClowderRjsfSelectWidget"; +import { ClowderRjsfTextWidget } from "../styledComponents/ClowderRjsfTextWidget"; import ExtractorStatus from "./ExtractorStatus"; type SubmitExtractionProps = { - fileId: string, - datasetId: string, - open: boolean, - handleClose: any, - selectedExtractor: Extractor -} + fileId: string; + datasetId: string; + open: boolean; + handleClose: any; + selectedExtractor: Extractor; +}; const widgets = { - TextWidget: ClowderRjsfTextWidget, - SelectWidget: ClowderRjsfSelectWidget - }; + TextWidget: ClowderRjsfTextWidget, + SelectWidget: ClowderRjsfSelectWidget, +}; export default function SubmitExtraction(props: SubmitExtractionProps) { - - const {fileId, datasetId, open, handleClose, selectedExtractor} = props; + const { fileId, datasetId, open, handleClose, selectedExtractor } = props; const dispatch = useDispatch(); - const adminMode = useSelector((state: RootState) => state.user.adminMode); - const submitFileExtraction = - (fileId: string | undefined, extractorName: string | undefined, requestBody: FormData) => dispatch(submitFileExtractionAction(fileId, extractorName, adminMode, requestBody)); - const submitDatasetExtraction = - (datasetId: string | undefined, extractorName: string | undefined, requestBody: FormData) => dispatch(submitDatasetExtractionAction(datasetId, extractorName, requestBody)); + const submitFileExtraction = ( + fileId: string | undefined, + extractorName: string | undefined, + requestBody: FormData + ) => dispatch(submitFileExtractionAction(fileId, extractorName, requestBody)); + const 
submitDatasetExtraction = ( + datasetId: string | undefined, + extractorName: string | undefined, + requestBody: FormData + ) => + dispatch( + submitDatasetExtractionAction(datasetId, extractorName, requestBody) + ); const job_id = useSelector((state: RootState) => state.listener.currJobId); const onSubmit = (formData: FormData) => { - const extractorName = selectedExtractor.name + const extractorName = selectedExtractor.name; if (fileId === undefined && datasetId !== undefined) { submitDatasetExtraction(datasetId, extractorName, formData); handleNext(); @@ -60,7 +67,7 @@ export default function SubmitExtraction(props: SubmitExtractionProps) { submitFileExtraction(fileId, extractorName, formData); handleNext(); } - } + }; const [activeStep, setActiveStep] = useState(0); const handleNext = () => { @@ -68,76 +75,100 @@ export default function SubmitExtraction(props: SubmitExtractionProps) { }; const handleBack = () => { setActiveStep((prevActiveStep) => prevActiveStep - 1); - } + }; const handleFinish = () => { setActiveStep(0); - } + }; const onClose = () => { handleClose(); setActiveStep(0); - } + }; return ( // TODO replace this with submit extraction content - - - + + + + + {/*step 1 fill in parameters and submit extractions*/} Submit Extractions - { - selectedExtractor && - selectedExtractor["properties"] - && selectedExtractor["properties"]["parameters"] - && selectedExtractor["properties"]["parameters"]["schema"] ? - -
["schema"]}} - onSubmit={({formData}) => { - onSubmit(formData); - }}> - - - -
-
- : - -
{ - onSubmit(formData); - }}> - - - -
-
- } + {selectedExtractor && + selectedExtractor["properties"] && + selectedExtractor["properties"]["parameters"] && + selectedExtractor["properties"]["parameters"]["schema"] ? ( + +
["schema"], + }} + onSubmit={({ formData }) => { + onSubmit(formData); + }} + > + + + +
+
+ ) : ( + +
{ + onSubmit(formData); + }} + > + + + +
+
+ )}
{/*step 2 status*/} Extraction Status - + {/*buttons*/} - - @@ -148,12 +179,16 @@ export default function SubmitExtraction(props: SubmitExtractionProps) { Extracted Results {/*buttons*/} - + <> - - diff --git a/frontend/src/components/metadata/DisplayListenerMetadata.tsx b/frontend/src/components/metadata/DisplayListenerMetadata.tsx index 3791c33f6..e899157f1 100644 --- a/frontend/src/components/metadata/DisplayListenerMetadata.tsx +++ b/frontend/src/components/metadata/DisplayListenerMetadata.tsx @@ -1,40 +1,54 @@ -import React, {useEffect, useState} from "react"; -import {Box, Grid, Typography} from "@mui/material"; -import {metadataConfig} from "../../metadata.config"; -import {useSelector, useDispatch} from "react-redux"; -import {RootState} from "../../types/data"; -import {fetchDatasetMetadata, fetchFileMetadata, fetchMetadataDefinitions} from "../../actions/metadata"; -import {Agent} from "./Agent"; -import {MetadataDeleteButton} from "./widgets/MetadataDeleteButton"; -import {ListenerMetadataEntry} from "../metadata/ListenerMetadataEntry"; +import React, { useEffect } from "react"; +import { Grid } from "@mui/material"; +import { useDispatch, useSelector } from "react-redux"; +import { RootState } from "../../types/data"; +import { + fetchDatasetMetadata, + fetchFileMetadata, + fetchMetadataDefinitions, +} from "../../actions/metadata"; +import { ListenerMetadataEntry } from "../metadata/ListenerMetadataEntry"; import Card from "@mui/material/Card"; import CardContent from "@mui/material/CardContent"; type MetadataType = { - updateMetadata: any, - deleteMetadata: any, - resourceType: string | undefined, - resourceId: string | undefined, - version: number | undefined, -} + updateMetadata: any; + deleteMetadata: any; + resourceType: string | undefined; + resourceId: string | undefined; + version: number | undefined; +}; /* This is the interface displayed already created metadata and allow eidts Uses only the list of metadata */ export const 
DisplayListenerMetadata = (props: MetadataType) => { - - const {updateMetadata, deleteMetadata, resourceType, resourceId, version} = props; + const { updateMetadata, deleteMetadata, resourceType, resourceId, version } = + props; const dispatch = useDispatch(); - const getMetadatDefinitions = (name: string | null, skip: number, limit: number) => dispatch(fetchMetadataDefinitions(name, skip, limit)); - const metadataDefinitionList = useSelector((state: RootState) => state.metadata.metadataDefinitionList); - const adminMode = useSelector((state : RootState) => state.user.adminMode); - const listDatasetMetadata = (datasetId: string | undefined) => dispatch(fetchDatasetMetadata(datasetId, adminMode)); - const listFileMetadata = (fileId: string | undefined, version: number | undefined) => dispatch(fetchFileMetadata(fileId, adminMode, version)); - const datasetMetadataList = useSelector((state: RootState) => state.metadata.datasetMetadataList); - const fileMetadataList = useSelector((state: RootState) => state.metadata.fileMetadataList); + const getMetadatDefinitions = ( + name: string | null, + skip: number, + limit: number + ) => dispatch(fetchMetadataDefinitions(name, skip, limit)); + const metadataDefinitionList = useSelector( + (state: RootState) => state.metadata.metadataDefinitionList + ); + const listDatasetMetadata = (datasetId: string | undefined) => + dispatch(fetchDatasetMetadata(datasetId)); + const listFileMetadata = ( + fileId: string | undefined, + version: number | undefined + ) => dispatch(fetchFileMetadata(fileId, version)); + const datasetMetadataList = useSelector( + (state: RootState) => state.metadata.datasetMetadataList + ); + const fileMetadataList = useSelector( + (state: RootState) => state.metadata.fileMetadataList + ); useEffect(() => { getMetadatDefinitions(null, 0, 100); @@ -51,32 +65,37 @@ export const DisplayListenerMetadata = (props: MetadataType) => { return ( <> - { - (() => { - let metadataList = []; - if (resourceType === "dataset") 
metadataList = datasetMetadataList; - else if (resourceType === "file") metadataList = fileMetadataList; - let listenerMetadataList = []; - let listenerMetadataContent = []; + {(() => { + let metadataList = []; + if (resourceType === "dataset") metadataList = datasetMetadataList; + else if (resourceType === "file") metadataList = fileMetadataList; + let listenerMetadataList = []; + let listenerMetadataContent = []; - return ( + return ( + {metadataList.map((metadata, idx) => { - if (metadata.agent.listener !== null) { - return ( - - - - ); - } + if (metadata.agent.listener !== null) { + return ( + + + + + + + + ); + } })} - ); - })() - } + + ); + })()} - ) -} + ); +}; diff --git a/frontend/src/components/metadata/DisplayMetadata.tsx b/frontend/src/components/metadata/DisplayMetadata.tsx index 40cd5a119..32ad33c70 100644 --- a/frontend/src/components/metadata/DisplayMetadata.tsx +++ b/frontend/src/components/metadata/DisplayMetadata.tsx @@ -1,111 +1,130 @@ -import React, {useEffect} from "react"; -import {Box, Grid, Typography} from "@mui/material"; -import {metadataConfig} from "../../metadata.config"; -import {useSelector, useDispatch} from "react-redux"; -import {RootState} from "../../types/data"; -import {fetchDatasetMetadata, fetchFileMetadata, fetchMetadataDefinitions} from "../../actions/metadata"; -import {Agent} from "./Agent"; -import {MetadataDeleteButton} from "./widgets/MetadataDeleteButton"; +import React, { useEffect } from "react"; +import { Box, Grid, Typography } from "@mui/material"; +import { metadataConfig } from "../../metadata.config"; +import { useDispatch, useSelector } from "react-redux"; +import { RootState } from "../../types/data"; +import { + fetchDatasetMetadata, + fetchFileMetadata, + fetchMetadataDefinitions, +} from "../../actions/metadata"; +import { Agent } from "./Agent"; +import { MetadataDeleteButton } from "./widgets/MetadataDeleteButton"; type MetadataType = { - updateMetadata: any, - deleteMetadata: any, - 
resourceType:string|undefined, - resourceId:string|undefined, -} + updateMetadata: any; + deleteMetadata: any; + resourceType: string | undefined; + resourceId: string | undefined; +}; /* This is the interface displayed already created metadata and allow eidts Uses only the list of metadata */ export const DisplayMetadata = (props: MetadataType) => { - - const {updateMetadata, deleteMetadata, resourceType, resourceId} = props; + const { updateMetadata, deleteMetadata, resourceType, resourceId } = props; const dispatch = useDispatch(); - const getMetadatDefinitions = (name:string|null, skip:number, limit:number) => dispatch(fetchMetadataDefinitions(name, skip,limit)); - const metadataDefinitionList = useSelector((state: RootState) => state.metadata.metadataDefinitionList); - const adminMode = useSelector((state : RootState) => state.user.adminMode); - const listDatasetMetadata = (datasetId: string | undefined) => dispatch(fetchDatasetMetadata(datasetId, adminMode)); - const listFileMetadata = (fileId: string | undefined) => dispatch(fetchFileMetadata(fileId, adminMode)); - const datasetMetadataList = useSelector((state: RootState) => state.metadata.datasetMetadataList); - const fileMetadataList = useSelector((state: RootState) => state.metadata.fileMetadataList); + const getMetadatDefinitions = ( + name: string | null, + skip: number, + limit: number + ) => dispatch(fetchMetadataDefinitions(name, skip, limit)); + const metadataDefinitionList = useSelector( + (state: RootState) => state.metadata.metadataDefinitionList + ); + const listDatasetMetadata = (datasetId: string | undefined) => + dispatch(fetchDatasetMetadata(datasetId)); + const listFileMetadata = (fileId: string | undefined) => + dispatch(fetchFileMetadata(fileId)); + const datasetMetadataList = useSelector( + (state: RootState) => state.metadata.datasetMetadataList + ); + const fileMetadataList = useSelector( + (state: RootState) => state.metadata.fileMetadataList + ); const datasetRole = useSelector( 
(state: RootState) => state.dataset.datasetRole ); - console.log(updateMetadata, 'updateMetadataDisplay'); + console.log(updateMetadata, "updateMetadataDisplay"); useEffect(() => { getMetadatDefinitions(null, 0, 100); }, []); // complete metadata list with both definition and values useEffect(() => { - if (resourceType === "dataset"){ + if (resourceType === "dataset") { listDatasetMetadata(resourceId); - } - else if (resourceType === "file"){ + } else if (resourceType === "file") { listFileMetadata(resourceId); } }, [resourceType, resourceId]); return ( <> - { - (() => { - let metadataList = []; - if (resourceType === "dataset") metadataList = datasetMetadataList; - else if (resourceType === "file") metadataList = fileMetadataList; + {(() => { + let metadataList = []; + if (resourceType === "dataset") metadataList = datasetMetadataList; + else if (resourceType === "file") metadataList = fileMetadataList; - return metadataDefinitionList.map((metadataDef) => { - return metadataList.map((metadata,idx) => { - if (metadataDef.name === metadata.definition) { - return ( - - {metadata.definition} - {metadata.description} - { - // construct metadata using its definition - metadataDef.fields.map((field,idxx) => { - return React.cloneElement( - metadataConfig[field.widgetType ?? "NA"] ?? metadataConfig["NA"], - { - widgetName: metadataDef.name, - fieldName: field.name, - options: field.config.options ?? [], - updateMetadata: updateMetadata, - initialReadOnly: true, - resourceId: resourceId, - content: metadata.content ?? null, - metadataId: metadata.id ?? null, - isRequired: field.required, - key:idxx, - datasetRole: datasetRole - } - ); - }) - } - - - - {datasetRole.role !== undefined && datasetRole.role !== "viewer" ? 
- : - <> + return metadataDefinitionList.map((metadataDef) => { + return metadataList.map((metadata, idx) => { + if (metadataDef.name === metadata.definition) { + return ( + + {metadata.definition} + + {metadata.description} + + { + // construct metadata using its definition + metadataDef.fields.map((field, idxx) => { + return React.cloneElement( + metadataConfig[field.widgetType ?? "NA"] ?? + metadataConfig["NA"], + { + widgetName: metadataDef.name, + fieldName: field.name, + options: field.config.options ?? [], + updateMetadata: updateMetadata, + initialReadOnly: true, + resourceId: resourceId, + content: metadata.content ?? null, + metadataId: metadata.id ?? null, + isRequired: field.required, + key: idxx, + datasetRole: datasetRole, } - + ); + }) + } + + + + {datasetRole.role !== undefined && + datasetRole.role !== "viewer" ? ( + + ) : ( + <> + )} - - ); - } - }); - + + + ); + } }); - })() - } + }); + })()} ); }; diff --git a/frontend/src/components/metadata/EditMetadata.tsx b/frontend/src/components/metadata/EditMetadata.tsx index 0f9bea469..f17d576fa 100644 --- a/frontend/src/components/metadata/EditMetadata.tsx +++ b/frontend/src/components/metadata/EditMetadata.tsx @@ -1,136 +1,153 @@ -import React, {useEffect} from "react"; -import {Box, Typography} from "@mui/material"; -import {metadataConfig} from "../../metadata.config"; -import {useSelector, useDispatch} from "react-redux"; -import {RootState} from "../../types/data"; -import {fetchDatasetMetadata, fetchFileMetadata, fetchMetadataDefinitions} from "../../actions/metadata"; -import {Agent} from "./Agent"; +import React, { useEffect } from "react"; +import { Box, Typography } from "@mui/material"; +import { metadataConfig } from "../../metadata.config"; +import { useDispatch, useSelector } from "react-redux"; +import { RootState } from "../../types/data"; +import { + fetchDatasetMetadata, + fetchFileMetadata, + fetchMetadataDefinitions, +} from "../../actions/metadata"; +import { Agent } from "./Agent"; 
type MetadataType = { - setMetadata: any, - resourceType: string, - resourceId: string|undefined, -} + setMetadata: any; + resourceType: string; + resourceId: string | undefined; +}; /* This is the interface add more metadata on a existing resource Uses metadata definition as well as created metadata */ export const EditMetadata = (props: MetadataType) => { - - const {setMetadata, resourceType, resourceId} = props; + const { setMetadata, resourceType, resourceId } = props; const dispatch = useDispatch(); - const getMetadatDefinitions = (name:string|null, skip:number, limit:number) => dispatch(fetchMetadataDefinitions(name, skip,limit)); - const metadataDefinitionList = useSelector((state: RootState) => state.metadata.metadataDefinitionList); - const adminMode = useSelector((state : RootState) => state.user.adminMode); - const listDatasetMetadata = (datasetId: string | undefined) => dispatch(fetchDatasetMetadata(datasetId, adminMode)); - const listFileMetadata = (fileId: string | undefined) => dispatch(fetchFileMetadata(fileId, adminMode)); - const datasetMetadataList = useSelector((state: RootState) => state.metadata.datasetMetadataList); - const fileMetadataList = useSelector((state: RootState) => state.metadata.fileMetadataList); + const getMetadatDefinitions = ( + name: string | null, + skip: number, + limit: number + ) => dispatch(fetchMetadataDefinitions(name, skip, limit)); + const metadataDefinitionList = useSelector( + (state: RootState) => state.metadata.metadataDefinitionList + ); + const listDatasetMetadata = (datasetId: string | undefined) => + dispatch(fetchDatasetMetadata(datasetId)); + const listFileMetadata = (fileId: string | undefined) => + dispatch(fetchFileMetadata(fileId)); + const datasetMetadataList = useSelector( + (state: RootState) => state.metadata.datasetMetadataList + ); + const fileMetadataList = useSelector( + (state: RootState) => state.metadata.fileMetadataList + ); const datasetRole = useSelector( (state: RootState) => 
state.dataset.datasetRole ); - useEffect(() => { getMetadatDefinitions(null, 0, 100); }, []); // complete metadata list with both definition and values useEffect(() => { - if (resourceType === "dataset"){ + if (resourceType === "dataset") { listDatasetMetadata(resourceId); - } - else if (resourceType === "file"){ + } else if (resourceType === "file") { listFileMetadata(resourceId); } }, [resourceType, resourceId]); return ( <> - { - (() => { - let metadataList = []; - let metadataNameList = []; - if (resourceType === "dataset"){ - metadataList = datasetMetadataList; - metadataNameList = datasetMetadataList.reduce((list:string[], item) => { + {(() => { + let metadataList = []; + let metadataNameList = []; + if (resourceType === "dataset") { + metadataList = datasetMetadataList; + metadataNameList = datasetMetadataList.reduce( + (list: string[], item) => { return [...list, item.definition]; - }, []); - } - else if (resourceType === "file") { - metadataList = fileMetadataList; - metadataNameList = fileMetadataList.reduce((list:string[], item) => { - return [...list, item.definition]; - }, []); - } + }, + [] + ); + } else if (resourceType === "file") { + metadataList = fileMetadataList; + metadataNameList = fileMetadataList.reduce((list: string[], item) => { + return [...list, item.definition]; + }, []); + } - return metadataDefinitionList.map((metadataDef) => { - // filter and only show those do not already created - if (!metadataNameList.includes(metadataDef.name)) { - return ( - - {metadataDef.name} - {metadataDef.description} - - { - // construct metadata using its definition - metadataDef.fields.map(field => { - return React.cloneElement( - metadataConfig[field.widgetType ?? "NA"] ?? metadataConfig["NA"], - { - widgetName: metadataDef.name, - fieldName: field.name, - options: field.config.options ?? 
[], - setMetadata: setMetadata, - initialReadOnly: false, - isRequired: field.required, - datasetRole: datasetRole - } - ); - }) - } - - ); - } - else{ - return metadataList.map((metadata, idx) => { - if (metadataDef.name === metadata.definition) { - return ( - - {metadata.definition} - {metadata.description} + return metadataDefinitionList.map((metadataDef) => { + // filter and only show those do not already created + if (!metadataNameList.includes(metadataDef.name)) { + return ( + + {metadataDef.name} + + {metadataDef.description} + + { + // construct metadata using its definition + metadataDef.fields.map((field) => { + return React.cloneElement( + metadataConfig[field.widgetType ?? "NA"] ?? + metadataConfig["NA"], { - // construct metadata using its definition - metadataDef.fields.map((field, idxx) => { - return React.cloneElement( - metadataConfig[field.widgetType ?? "NA"] ?? metadataConfig["NA"], - { - widgetName: metadataDef.name, - fieldName: field.name, - options: field.config.options ?? [], - setMetadata: setMetadata, - initialReadOnly: false, - resourceId: resourceId, - content: metadata.content ?? null, - metadataId: metadata.id ?? null, - isRequired: field.required, - key:idxx - } - ); - }) + widgetName: metadataDef.name, + fieldName: field.name, + options: field.config.options ?? [], + setMetadata: setMetadata, + initialReadOnly: false, + isRequired: field.required, + datasetRole: datasetRole, } - - - ); + ); + }) } - }); - } - }); - })() - } + + ); + } else { + return metadataList.map((metadata, idx) => { + if (metadataDef.name === metadata.definition) { + return ( + + {metadata.definition} + + {metadata.description} + + + { + // construct metadata using its definition + metadataDef.fields.map((field, idxx) => { + return React.cloneElement( + metadataConfig[field.widgetType ?? "NA"] ?? + metadataConfig["NA"], + { + widgetName: metadataDef.name, + fieldName: field.name, + options: field.config.options ?? 
[], + setMetadata: setMetadata, + initialReadOnly: false, + resourceId: resourceId, + content: metadata.content ?? null, + metadataId: metadata.id ?? null, + isRequired: field.required, + key: idxx, + } + ); + }) + } + + + ); + } + }); + } + }); + })()} ); }; diff --git a/frontend/src/components/sharing/GroupAndRoleTableEntry.tsx b/frontend/src/components/sharing/GroupAndRoleTableEntry.tsx index 30ecac339..3f0fc73cf 100644 --- a/frontend/src/components/sharing/GroupAndRoleTableEntry.tsx +++ b/frontend/src/components/sharing/GroupAndRoleTableEntry.tsx @@ -54,15 +54,12 @@ export function GroupAndRoleTableEntry(props: GroupAndRoleTableEntryProps) { ); const [expand, setExpand] = React.useState(false); - const adminMode = useSelector( - (state: RootState) => state.user.adminMode - ); const groupRoleAssigned = ( dataset_id: string | undefined, group_id: string | undefined, role: string | undefined - ) => dispatch(setDatasetGroupRole(dataset_id, group_id, role, adminMode)); + ) => dispatch(setDatasetGroupRole(dataset_id, group_id, role)); const removeGroupRole = async ( dataset_id: string | undefined, @@ -89,7 +86,7 @@ export function GroupAndRoleTableEntry(props: GroupAndRoleTableEntryProps) { }; const getRoles = (datasetId: string | undefined) => - dispatch(fetchDatasetRoles(datasetId, adminMode)); + dispatch(fetchDatasetRoles(datasetId)); const handleRoleDelete = async () => { await removeGroupRole(datasetId, group_role.group.id); diff --git a/frontend/src/components/sharing/SharingTab.tsx b/frontend/src/components/sharing/SharingTab.tsx index 11e3bc21c..9367896d5 100644 --- a/frontend/src/components/sharing/SharingTab.tsx +++ b/frontend/src/components/sharing/SharingTab.tsx @@ -1,21 +1,19 @@ import React, { useEffect } from "react"; import Card from "@mui/material/Card"; import { fetchDatasetRoles } from "../../actions/dataset"; -import {useDispatch, useSelector} from "react-redux"; +import { useDispatch } from "react-redux"; import { useParams } from 
"react-router-dom"; import { UserAndRoleTable } from "./UserAndRoleTable"; import { Box, CardContent } from "@mui/material"; import Typography from "@mui/material/Typography"; -import {RootState} from "../../types/data"; export const SharingTab = (): JSX.Element => { const { datasetId } = useParams<{ datasetId?: string }>(); const dispatch = useDispatch(); - const adminMode = useSelector((state: RootState) => state.user.adminMode) const getRoles = (datasetId: string | undefined) => - dispatch(fetchDatasetRoles(datasetId, adminMode)); + dispatch(fetchDatasetRoles(datasetId)); useEffect(() => { getRoles(datasetId); diff --git a/frontend/src/components/sharing/UserAndRoleTableEntry.tsx b/frontend/src/components/sharing/UserAndRoleTableEntry.tsx index 407e81f23..e184e7120 100644 --- a/frontend/src/components/sharing/UserAndRoleTableEntry.tsx +++ b/frontend/src/components/sharing/UserAndRoleTableEntry.tsx @@ -49,23 +49,20 @@ export function UserAndRoleTableEntry(props: UserAndRoleTableEntryProps) { const datasetRole = useSelector( (state: RootState) => state.dataset.datasetRole ); - const adminMode = useSelector( - (state: RootState) => state.user.adminMode - ); const userRoleAssigned = ( dataset_id: string | undefined, username: string | undefined, role: string | undefined - ) => dispatch(setDatasetUserRole(dataset_id, username, role, adminMode)); + ) => dispatch(setDatasetUserRole(dataset_id, username, role)); const removeUserRole = async ( dataset_id: string | undefined, username: string | undefined - ) => dispatch(removeDatasetUserRole(dataset_id, username, adminMode)); + ) => dispatch(removeDatasetUserRole(dataset_id, username)); const getRoles = (datasetId: string | undefined) => - dispatch(fetchDatasetRoles(datasetId, adminMode)); + dispatch(fetchDatasetRoles(datasetId)); const [selectedRole, setSelectedRole] = useState(user_role.role); const [editRoleOn, setEditRoleOn] = useState(false); diff --git a/frontend/src/components/visualizations/Visualization.tsx 
b/frontend/src/components/visualizations/Visualization.tsx index a78d5348a..8a96fcb74 100644 --- a/frontend/src/components/visualizations/Visualization.tsx +++ b/frontend/src/components/visualizations/Visualization.tsx @@ -28,13 +28,10 @@ export const Visualization = (props: previewProps) => { const visConfig = useSelector( (state: RootState) => state.visualization.visConfig ); - const adminMode = useSelector( - (state: RootState) => state.user.adminMode - ); const dispatch = useDispatch(); const listFileSummary = (fileId: string | undefined) => - dispatch(fetchFileSummary(fileId, adminMode)); + dispatch(fetchFileSummary(fileId)); const getVisConfig = (resourceId: string | undefined) => dispatch(getVisConfigAction(resourceId)); @@ -66,11 +63,13 @@ export const Visualization = (props: previewProps) => { // if raw type supported if ( fileSummary && - ((fileSummary.content_type && fileSummary.content_type.content_type !== undefined && - // @ts-ignore + ((fileSummary.content_type && + fileSummary.content_type.content_type !== undefined && + // @ts-ignore supportedMimeType.includes(fileSummary.content_type.content_type)) || - (fileSummary.content_type && fileSummary.content_type.main_type !== undefined && - // @ts-ignore + (fileSummary.content_type && + fileSummary.content_type.main_type !== undefined && + // @ts-ignore supportedMimeType.includes(fileSummary.content_type.main_type))) ) { setIsRawDataSupported(true); @@ -78,9 +77,12 @@ export const Visualization = (props: previewProps) => { setIsRawDataSupported(false); } - if (fileSummary && - fileSummary.bytes && fileSummary.bytes >= config["rawDataVisualizationThreshold"]) { - setIsVisDataGreaterThanMaxSize(true); + if ( + fileSummary && + fileSummary.bytes && + fileSummary.bytes >= config["rawDataVisualizationThreshold"] + ) { + setIsVisDataGreaterThanMaxSize(true); } else { setIsVisDataGreaterThanMaxSize(false); } @@ -90,7 +92,7 @@ export const Visualization = (props: previewProps) => { return ( - {isEmptyVisData && 
!isRawDataSupported? ( + {isEmptyVisData && !isRawDataSupported ? (
No visualization data or parameters available. Incomplete visualization configuration. diff --git a/frontend/src/routes.tsx b/frontend/src/routes.tsx index 6b1638dec..2cdd433ef 100644 --- a/frontend/src/routes.tsx +++ b/frontend/src/routes.tsx @@ -43,11 +43,10 @@ const PrivateRoute = (props): JSX.Element => { const loggedOut = useSelector((state: RootState) => state.error.loggedOut); const reason = useSelector((state: RootState) => state.error.reason); - const adminMode = useSelector((state: RootState) => state.user.adminMode); const dismissLogout = () => dispatch(resetLogout()); const listDatasetRole = (datasetId: string | undefined) => - dispatch(fetchDatasetRole(datasetId, adminMode)); + dispatch(fetchDatasetRole(datasetId)); const listFileRole = (fileId: string | undefined) => dispatch(fetchFileRole(fileId)); const { datasetId } = useParams<{ datasetId?: string }>(); From 6a86cacba71edf3512904a3ea68b277d0e65d1e0 Mon Sep 17 00:00:00 2001 From: Chen Wang Date: Mon, 11 Dec 2023 15:06:16 -0600 Subject: [PATCH 21/43] fix bug --- frontend/src/actions/file.js | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/frontend/src/actions/file.js b/frontend/src/actions/file.js index f3d58510f..14e98e0b6 100644 --- a/frontend/src/actions/file.js +++ b/frontend/src/actions/file.js @@ -119,7 +119,7 @@ export function fileDeleted(fileId) { export const CREATE_FILE = "CREATE_FILE"; -export function createFile(selectedDatasetId, selectedFile, folderId) { +export function createFile(selectedDatasetId, folderId, selectedFile) { return (dispatch) => { const formData = new FormData(); formData["file"] = selectedFile; @@ -139,7 +139,7 @@ export function createFile(selectedDatasetId, selectedFile, folderId) { dispatch( handleErrors( reason, - createFile(selectedDatasetId, selectedFile, folderId) + createFile(selectedDatasetId, folderId, selectedFile) ) ); }); From 84e78a44e0381651e6a2dc4fc7425b8d012f9a33 Mon Sep 17 00:00:00 2001 From: Chen Wang Date: Mon, 11 Dec 2023 
15:06:43 -0600 Subject: [PATCH 22/43] black --- backend/app/deps/authorization_deps.py | 84 ++++---- backend/app/keycloak_auth.py | 66 +++--- backend/app/routers/authorization.py | 150 ++++++------- backend/app/routers/datasets.py | 257 ++++++++++++----------- backend/app/routers/elasticsearch.py | 11 +- backend/app/routers/files.py | 174 +++++++-------- backend/app/routers/groups.py | 70 +++--- backend/app/routers/metadata.py | 52 ++--- backend/app/routers/metadata_datasets.py | 86 ++++---- backend/app/routers/metadata_files.py | 136 ++++++------ 10 files changed, 544 insertions(+), 542 deletions(-) diff --git a/backend/app/deps/authorization_deps.py b/backend/app/deps/authorization_deps.py index 5924ed4ae..501e03c24 100644 --- a/backend/app/deps/authorization_deps.py +++ b/backend/app/deps/authorization_deps.py @@ -13,8 +13,8 @@ async def get_role( - dataset_id: str, - current_user=Depends(get_current_username), + dataset_id: str, + current_user=Depends(get_current_username), ) -> RoleType: """Returns the role a specific user has on a dataset. 
If the user is a creator (owner), they are not listed in the user_ids list.""" @@ -29,8 +29,8 @@ async def get_role( async def get_role_by_file( - file_id: str, - current_user=Depends(get_current_username), + file_id: str, + current_user=Depends(get_current_username), ) -> RoleType: if (file := await FileDB.get(PydanticObjectId(file_id))) is not None: authorization = await AuthorizationDB.find_one( @@ -42,7 +42,7 @@ async def get_role_by_file( ) if authorization is None: if ( - dataset := await DatasetDB.get(PydanticObjectId(file.dataset_id)) + dataset := await DatasetDB.get(PydanticObjectId(file.dataset_id)) ) is not None: if dataset.status == DatasetStatus.AUTHENTICATED.name: auth_dict = { @@ -63,8 +63,8 @@ async def get_role_by_file( async def get_role_by_metadata( - metadata_id: str, - current_user=Depends(get_current_username), + metadata_id: str, + current_user=Depends(get_current_username), ) -> RoleType: if (md_out := await MetadataDB.get(PydanticObjectId(metadata_id))) is not None: resource_type = md_out.resource.collection @@ -81,7 +81,7 @@ async def get_role_by_metadata( return authorization.role elif resource_type == "datasets": if ( - dataset := await DatasetDB.get(PydanticObjectId(resource_id)) + dataset := await DatasetDB.get(PydanticObjectId(resource_id)) ) is not None: authorization = await AuthorizationDB.find_one( AuthorizationDB.dataset_id == dataset.id, @@ -94,8 +94,8 @@ async def get_role_by_metadata( async def get_role_by_group( - group_id: str, - current_user=Depends(get_current_username), + group_id: str, + current_user=Depends(get_current_username), ) -> RoleType: if (group := await GroupDB.get(group_id)) is not None: if group.creator == current_user: @@ -115,7 +115,7 @@ async def get_role_by_group( async def is_public_dataset( - dataset_id: str, + dataset_id: str, ) -> bool: """Checks if a dataset is public.""" if (dataset_out := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: @@ -126,7 +126,7 @@ async def 
is_public_dataset( async def is_authenticated_dataset( - dataset_id: str, + dataset_id: str, ) -> bool: """Checks if a dataset is authenticated.""" if (dataset_out := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: @@ -144,11 +144,11 @@ def __init__(self, role: str): self.role = role async def __call__( - self, - dataset_id: str, - admin_mode: bool = Depends(get_admin_mode), - current_user: str = Depends(get_current_username), - admin: bool = Depends(get_admin), + self, + dataset_id: str, + admin_mode: bool = Depends(get_admin_mode), + current_user: str = Depends(get_current_username), + admin: bool = Depends(get_admin), ): # TODO: Make sure we enforce only one role per user per dataset, or find_one could yield wrong answer here. @@ -174,11 +174,11 @@ async def __call__( ) else: if ( - current_dataset := await DatasetDB.get(PydanticObjectId(dataset_id)) + current_dataset := await DatasetDB.get(PydanticObjectId(dataset_id)) ) is not None: if ( - current_dataset.status == DatasetStatus.AUTHENTICATED.name - and self.role == "viewer" + current_dataset.status == DatasetStatus.AUTHENTICATED.name + and self.role == "viewer" ): return True else: @@ -201,11 +201,11 @@ def __init__(self, role: str): self.role = role async def __call__( - self, - file_id: str, - admin_mode: bool = Depends(get_admin_mode), - current_user: str = Depends(get_current_username), - admin: bool = Depends(get_admin), + self, + file_id: str, + admin_mode: bool = Depends(get_admin_mode), + current_user: str = Depends(get_current_username), + admin: bool = Depends(get_admin), ): # If the current user is admin and has turned on admin_mode, user has access irrespective of any role assigned if admin and admin_mode: @@ -239,11 +239,11 @@ def __init__(self, role: str): self.role = role async def __call__( - self, - metadata_id: str, - admin_mode: bool = Depends(get_admin_mode), - current_user: str = Depends(get_current_username), - admin: bool = Depends(get_admin), + self, + metadata_id: str, 
+ admin_mode: bool = Depends(get_admin_mode), + current_user: str = Depends(get_current_username), + admin: bool = Depends(get_admin), ): # If the current user is admin and has turned on admin_mode, user has access irrespective of any role assigned if admin and admin_mode: @@ -255,7 +255,7 @@ async def __call__( resource_id = md_out.resource.resource_id if resource_type == "files": if ( - file := await FileDB.get(PydanticObjectId(resource_id)) + file := await FileDB.get(PydanticObjectId(resource_id)) ) is not None: authorization = await AuthorizationDB.find_one( AuthorizationDB.dataset_id == file.dataset_id, @@ -277,7 +277,7 @@ async def __call__( ) elif resource_type == "datasets": if ( - dataset := await DatasetDB.get(PydanticObjectId(resource_id)) + dataset := await DatasetDB.get(PydanticObjectId(resource_id)) ) is not None: authorization = await AuthorizationDB.find_one( AuthorizationDB.dataset_id == dataset.id, @@ -306,11 +306,11 @@ def __init__(self, role: str): self.role = role async def __call__( - self, - group_id: str, - admin_mode: bool = Depends(get_admin_mode), - current_user: str = Depends(get_current_username), - admin: bool = Depends(get_admin), + self, + group_id: str, + admin_mode: bool = Depends(get_admin_mode), + current_user: str = Depends(get_current_username), + admin: bool = Depends(get_admin), ): # If the current user is admin and has turned on admin_mode, user has access irrespective of any role assigned if admin and admin_mode: @@ -342,8 +342,8 @@ def __init__(self, status: str): self.status = status async def __call__( - self, - dataset_id: str, + self, + dataset_id: str, ): if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: if dataset.status == self.status: @@ -362,13 +362,13 @@ def __init__(self, status: str): self.status = status async def __call__( - self, - file_id: str, + self, + file_id: str, ): if (file_out := await FileDB.get(PydanticObjectId(file_id))) is not None: dataset_id = file_out.dataset_id if 
( - dataset := await DatasetDB.get(PydanticObjectId(dataset_id)) + dataset := await DatasetDB.get(PydanticObjectId(dataset_id)) ) is not None: if dataset.status == self.status: return True diff --git a/backend/app/keycloak_auth.py b/backend/app/keycloak_auth.py index 3e252deca..24d2e5a54 100644 --- a/backend/app/keycloak_auth.py +++ b/backend/app/keycloak_auth.py @@ -57,8 +57,8 @@ async def get_idp_public_key(): async def get_token( - token: str = Security(oauth2_scheme), - api_key: str = Security(api_key_header), + token: str = Security(oauth2_scheme), + api_key: str = Security(api_key_header), ) -> Json: """Decode token. Use to secure endpoints.""" if token: @@ -94,17 +94,17 @@ async def get_token( payload = serializer.loads(api_key) # Key is valid, check expiration date in database if ( - await ListenerAPIKeyDB.find_one( - ListenerAPIKeyDB.user == payload["user"], - ListenerAPIKeyDB.key == payload["key"], - ) + await ListenerAPIKeyDB.find_one( + ListenerAPIKeyDB.user == payload["user"], + ListenerAPIKeyDB.key == payload["key"], + ) ) is not None: return {"preferred_username": payload["user"]} elif ( - key := await UserAPIKeyDB.find_one( - UserAPIKeyDB.user == payload["user"], - UserAPIKeyDB.key == payload["key"], - ) + key := await UserAPIKeyDB.find_one( + UserAPIKeyDB.user == payload["user"], + UserAPIKeyDB.key == payload["key"], + ) ) is not None: current_time = datetime.utcnow() if key.expires is not None and current_time >= key.expires: @@ -143,9 +143,9 @@ async def get_user(identity: Json = Depends(get_token)): async def get_current_user( - token: str = Security(oauth2_scheme), - api_key: str = Security(api_key_header), - token_cookie: str = Security(jwt_header), + token: str = Security(oauth2_scheme), + api_key: str = Security(api_key_header), + token_cookie: str = Security(jwt_header), ) -> UserOut: """Retrieve the user object from Mongo by first getting user id from JWT and then querying Mongo. Potentially expensive. 
Use `get_current_username` if all you need is user name. @@ -181,18 +181,18 @@ async def get_current_user( payload = serializer.loads(api_key) # Key is valid, check expiration date in database if ( - key := await ListenerAPIKeyDB.find_one( - ListenerAPIKeyDB.user == payload["user"], - ListenerAPIKeyDB.key == payload["key"], - ) + key := await ListenerAPIKeyDB.find_one( + ListenerAPIKeyDB.user == payload["user"], + ListenerAPIKeyDB.key == payload["key"], + ) ) is not None: user = await UserDB.find_one(UserDB.email == key.user) return UserOut(**user.dict()) elif ( - key := await UserAPIKeyDB.find_one( - UserAPIKeyDB.user == payload["user"], - UserAPIKeyDB.key == payload["key"], - ) + key := await UserAPIKeyDB.find_one( + UserAPIKeyDB.user == payload["user"], + UserAPIKeyDB.key == payload["key"], + ) ) is not None: current_time = datetime.utcnow() @@ -228,16 +228,16 @@ async def get_current_user( async def get_admin_mode( - admin_mode: bool = Security(admin_mode_header), + admin_mode: bool = Security(admin_mode_header), ) -> bool: """Get Admin mode from Header.""" return admin_mode async def get_current_username( - token: str = Security(oauth2_scheme), - api_key: str = Security(api_key_header), - token_cookie: str = Security(jwt_header), + token: str = Security(oauth2_scheme), + api_key: str = Security(api_key_header), + token_cookie: str = Security(jwt_header), ) -> str: """Retrieve the user id from the JWT token. 
Does not query MongoDB.""" if token: @@ -270,18 +270,18 @@ async def get_current_username( payload = serializer.loads(api_key) # Key is valid, check expiration date in database if ( - key := await ListenerAPIKeyDB.find_one( - ListenerAPIKeyDB.user == payload["user"], - ListenerAPIKeyDB.key == payload["key"], - ) + key := await ListenerAPIKeyDB.find_one( + ListenerAPIKeyDB.user == payload["user"], + ListenerAPIKeyDB.key == payload["key"], + ) ) is not None: # Key is coming from a listener job return key.user elif ( - key := await UserAPIKeyDB.find_one( - UserAPIKeyDB.user == payload["user"], - UserAPIKeyDB.key == payload["key"], - ) + key := await UserAPIKeyDB.find_one( + UserAPIKeyDB.user == payload["user"], + UserAPIKeyDB.key == payload["key"], + ) ) is not None: # Key is coming from a user request current_time = datetime.utcnow() diff --git a/backend/app/routers/authorization.py b/backend/app/routers/authorization.py index 3bb5f7329..e30e23178 100644 --- a/backend/app/routers/authorization.py +++ b/backend/app/routers/authorization.py @@ -38,11 +38,11 @@ @router.post("/datasets/{dataset_id}", response_model=AuthorizationOut) async def save_authorization( - dataset_id: str, - authorization_in: AuthorizationBase, - admin_mode: bool = Depends(get_admin_mode), - user=Depends(get_current_username), - allow: bool = Depends(Authorization("editor")), + dataset_id: str, + authorization_in: AuthorizationBase, + admin_mode: bool = Depends(get_admin_mode), + user=Depends(get_current_username), + allow: bool = Depends(Authorization("editor")), ): """Save authorization info in Mongo. 
This is a triple of dataset_id/user_id/role/group_id.""" @@ -69,10 +69,10 @@ async def save_authorization( @router.get("/datasets/{dataset_id}/role", response_model=AuthorizationOut) async def get_dataset_role( - dataset_id: str, - admin_mode: bool = Depends(get_admin_mode), - current_user=Depends(get_current_username), - admin=Depends(get_admin), + dataset_id: str, + admin_mode: bool = Depends(get_admin_mode), + current_user=Depends(get_current_username), + admin=Depends(get_admin), ): """Retrieve role of user for a specific dataset.""" # Get group id and the associated users from authorization @@ -90,7 +90,7 @@ async def get_dataset_role( ) if auth_db is None: if ( - current_dataset := await DatasetDB.get(PydanticObjectId(dataset_id)) + current_dataset := await DatasetDB.get(PydanticObjectId(dataset_id)) ) is not None: if current_dataset.status == DatasetStatus.AUTHENTICATED.name: public_authorization_in = { @@ -112,9 +112,9 @@ async def get_dataset_role( @router.get("/datasets/{dataset_id}/role/viewer}") async def get_dataset_role_viewer( - dataset_id: str, - admin_mode: bool = Depends(get_admin_mode), - allow: bool = Depends(Authorization("viewer")), + dataset_id: str, + admin_mode: bool = Depends(get_admin_mode), + allow: bool = Depends(Authorization("viewer")), ): """Used for testing only. Returns true if user has viewer permission on dataset, otherwise throws a 403 Forbidden HTTP exception. See `routers/authorization.py` for more info.""" @@ -123,9 +123,9 @@ async def get_dataset_role_viewer( @router.get("/datasets/{dataset_id}/role/owner}") async def get_dataset_role_owner( - dataset_id: str, - admin_mode: bool = Depends(get_admin_mode), - allow: bool = Depends(Authorization("owner")), + dataset_id: str, + admin_mode: bool = Depends(get_admin_mode), + allow: bool = Depends(Authorization("owner")), ): """Used for testing only. Returns true if user has owner permission on dataset, otherwise throws a 403 Forbidden HTTP exception. 
See `routers/authorization.py` for more info.""" @@ -134,11 +134,11 @@ async def get_dataset_role_owner( @router.get("/files/{file_id}/role}", response_model=RoleType) async def get_file_role( - file_id: str, - admin_mode: bool = Depends(get_admin_mode), - current_user=Depends(get_current_username), - role: RoleType = Depends(get_role_by_file), - admin=Depends(get_admin), + file_id: str, + admin_mode: bool = Depends(get_admin_mode), + current_user=Depends(get_current_username), + role: RoleType = Depends(get_role_by_file), + admin=Depends(get_admin), ): # admin is a superuser and has all the privileges, only show if the user has turned on the admin mode if admin and admin_mode: @@ -149,11 +149,11 @@ async def get_file_role( @router.get("/metadata/{metadata_id}/role}", response_model=AuthorizationMetadata) async def get_metadata_role( - metadata_id: str, - admin_mode: bool = Depends(get_admin_mode), - current_user=Depends(get_current_username), - role: RoleType = Depends(get_role_by_metadata), - admin=Depends(get_admin), + metadata_id: str, + admin_mode: bool = Depends(get_admin_mode), + current_user=Depends(get_current_username), + role: RoleType = Depends(get_role_by_metadata), + admin=Depends(get_admin), ): # admin is a superuser and has all the privileges, only show if the user has turned on the admin mode if admin and admin_mode: @@ -164,11 +164,11 @@ async def get_metadata_role( @router.get("/groups/{group_id}/role}", response_model=RoleType) async def get_group_role( - group_id: str, - admin_mode: bool = Depends(get_admin_mode), - current_user=Depends(get_current_username), - role: RoleType = Depends(get_role_by_group), - admin=Depends(get_admin), + group_id: str, + admin_mode: bool = Depends(get_admin_mode), + current_user=Depends(get_current_username), + role: RoleType = Depends(get_role_by_group), + admin=Depends(get_admin), ): # admin is a superuser and has all the privileges, only show if the user has turned on the admin mode if admin and admin_mode: @@ 
-182,13 +182,13 @@ async def get_group_role( response_model=AuthorizationOut, ) async def set_dataset_group_role( - dataset_id: PydanticObjectId, - group_id: PydanticObjectId, - role: RoleType, - admin_mode: bool = Depends(get_admin_mode), - es=Depends(get_elasticsearchclient), - user_id=Depends(get_user), - allow: bool = Depends(Authorization("editor")), + dataset_id: PydanticObjectId, + group_id: PydanticObjectId, + role: RoleType, + admin_mode: bool = Depends(get_admin_mode), + es=Depends(get_elasticsearchclient), + user_id=Depends(get_user), + allow: bool = Depends(Authorization("editor")), ): """Assign an entire group a specific role for a dataset.""" if (dataset := await DatasetDB.get(dataset_id)) is not None: @@ -198,10 +198,10 @@ async def set_dataset_group_role( dataset_id, group_id, admin_mode, es, user_id, allow ) if ( - auth_db := await AuthorizationDB.find_one( - AuthorizationDB.dataset_id == PyObjectId(dataset_id), - AuthorizationDB.role == role, - ) + auth_db := await AuthorizationDB.find_one( + AuthorizationDB.dataset_id == PyObjectId(dataset_id), + AuthorizationDB.role == role, + ) ) is not None: if group_id not in auth_db.group_ids: auth_db.group_ids.append(group_id) @@ -236,13 +236,13 @@ async def set_dataset_group_role( response_model=AuthorizationOut, ) async def set_dataset_user_role( - dataset_id: str, - username: str, - role: RoleType, - admin_mode: bool = Depends(get_admin_mode), - es=Depends(get_elasticsearchclient), - user_id=Depends(get_user), - allow: bool = Depends(Authorization("editor")), + dataset_id: str, + username: str, + role: RoleType, + admin_mode: bool = Depends(get_admin_mode), + es=Depends(get_elasticsearchclient), + user_id=Depends(get_user), + allow: bool = Depends(Authorization("editor")), ): """Assign a single user a specific role for a dataset.""" @@ -296,22 +296,22 @@ async def set_dataset_user_role( response_model=AuthorizationOut, ) async def remove_dataset_group_role( - dataset_id: PydanticObjectId, - group_id: 
PydanticObjectId, - admin_mode: bool = Depends(get_admin_mode), - es=Depends(get_elasticsearchclient), - user_id=Depends(get_user), - allow: bool = Depends(Authorization("editor")), + dataset_id: PydanticObjectId, + group_id: PydanticObjectId, + admin_mode: bool = Depends(get_admin_mode), + es=Depends(get_elasticsearchclient), + user_id=Depends(get_user), + allow: bool = Depends(Authorization("editor")), ): """Remove any role the group has with a specific dataset.""" if (dataset := await DatasetDB.get(dataset_id)) is not None: if (group := await GroupDB.get(group_id)) is not None: if ( - auth_db := await AuthorizationDB.find_one( - AuthorizationDB.dataset_id == dataset_id, - AuthorizationDB.group_ids == group_id, - ) + auth_db := await AuthorizationDB.find_one( + AuthorizationDB.dataset_id == dataset_id, + AuthorizationDB.group_ids == group_id, + ) ) is not None: auth_db.group_ids.remove(PyObjectId(group_id)) for u in group.users: @@ -332,22 +332,22 @@ async def remove_dataset_group_role( response_model=AuthorizationOut, ) async def remove_dataset_user_role( - dataset_id: str, - username: str, - admin_mode: bool = Depends(get_admin_mode), - es=Depends(get_elasticsearchclient), - user_id=Depends(get_user), - allow: bool = Depends(Authorization("editor")), + dataset_id: str, + username: str, + admin_mode: bool = Depends(get_admin_mode), + es=Depends(get_elasticsearchclient), + user_id=Depends(get_user), + allow: bool = Depends(Authorization("editor")), ): """Remove any role the user has with a specific dataset.""" if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: if (await UserDB.find_one(UserDB.email == username)) is not None: if ( - auth_db := await AuthorizationDB.find_one( - AuthorizationDB.dataset_id == PyObjectId(dataset_id), - AuthorizationDB.user_ids == username, - ) + auth_db := await AuthorizationDB.find_one( + AuthorizationDB.dataset_id == PyObjectId(dataset_id), + AuthorizationDB.user_ids == username, + ) ) is not None: 
auth_db.user_ids.remove(username) await auth_db.save() @@ -362,16 +362,16 @@ async def remove_dataset_user_role( @router.get("/datasets/{dataset_id}/roles}", response_model=DatasetRoles) async def get_dataset_roles( - dataset_id: str, - admin_mode: bool = Depends(get_admin_mode), - allow: bool = Depends(Authorization("editor")), + dataset_id: str, + admin_mode: bool = Depends(get_admin_mode), + allow: bool = Depends(Authorization("editor")), ): """Get a list of all users and groups that have assigned roles on this dataset.""" if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: roles = DatasetRoles(dataset_id=str(dataset.id)) async for auth in AuthorizationDB.find( - AuthorizationDB.dataset_id == ObjectId(dataset_id) + AuthorizationDB.dataset_id == ObjectId(dataset_id) ): # First, fetch all groups that have a role on the dataset group_user_counts = {} @@ -389,8 +389,8 @@ async def get_dataset_roles( # Next, get all users but omit those that are included in a group above async for user in UserDB.find(In(UserDB.email, auth.user_ids)): if ( - user.email in group_user_counts - and auth.user_ids.count(user.email) == group_user_counts[user.email] + user.email in group_user_counts + and auth.user_ids.count(user.email) == group_user_counts[user.email] ): continue # TODO: Why is this necessary here but not on root-level ObjectIDs? 
diff --git a/backend/app/routers/datasets.py b/backend/app/routers/datasets.py index e3c883b9a..a623bd916 100644 --- a/backend/app/routers/datasets.py +++ b/backend/app/routers/datasets.py @@ -35,7 +35,8 @@ from app.keycloak_auth import ( get_token, get_user, - get_current_user, get_admin_mode, + get_current_user, + get_admin_mode, ) from app.models.authorization import AuthorizationDB, RoleType from app.models.datasets import ( @@ -133,12 +134,12 @@ def nested_update(target_dict, update_dict): async def _create_folder_structure( - dataset_id: str, - contents: dict, - folder_path: str, - folder_lookup: dict, - user: UserOut, - parent_folder_id: Optional[str] = None, + dataset_id: str, + contents: dict, + folder_path: str, + folder_lookup: dict, + user: UserOut, + parent_folder_id: Optional[str] = None, ): """Recursively create folders encountered in folder_path until the target folder is created. Arguments: @@ -173,8 +174,8 @@ async def _create_folder_structure( async def _get_folder_hierarchy( - folder_id: str, - hierarchy: str, + folder_id: str, + hierarchy: str, ): """Generate a string of nested path to folder for use in zip file creation.""" folder = await FolderDB.get(PydanticObjectId(folder_id)) @@ -186,9 +187,9 @@ async def _get_folder_hierarchy( @router.post("", response_model=DatasetOut) async def save_dataset( - dataset_in: DatasetIn, - user=Depends(get_current_user), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + dataset_in: DatasetIn, + user=Depends(get_current_user), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), ): dataset = DatasetDB(**dataset_in.dict(), creator=user) await dataset.insert() @@ -207,12 +208,12 @@ async def save_dataset( @router.get("", response_model=List[DatasetOut]) async def get_datasets( - admin_mode: bool = Depends(get_admin_mode), - user_id=Depends(get_user), - skip: int = 0, - limit: int = 10, - mine: bool = False, - admin=Depends(get_admin), + admin_mode: bool = 
Depends(get_admin_mode), + user_id=Depends(get_user), + skip: int = 0, + limit: int = 10, + mine: bool = False, + admin=Depends(get_admin), ): if admin_mode and admin: datasets = await DatasetDBViewList.find( @@ -244,10 +245,10 @@ async def get_datasets( @router.get("/{dataset_id}", response_model=DatasetOut) async def get_dataset( - dataset_id: str, - authenticated: bool = Depends(CheckStatus("AUTHENTICATED")), - admin_mode: bool = Depends(get_admin_mode), - allow: bool = Depends(Authorization("viewer")), + dataset_id: str, + authenticated: bool = Depends(CheckStatus("AUTHENTICATED")), + admin_mode: bool = Depends(get_admin_mode), + allow: bool = Depends(Authorization("viewer")), ): if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: return dataset.dict() @@ -256,14 +257,14 @@ async def get_dataset( @router.get("/{dataset_id}/files", response_model=List[FileOut]) async def get_dataset_files( - dataset_id: str, - admin_mode: bool = Depends(get_admin_mode), - folder_id: Optional[str] = None, - authenticated: bool = Depends(CheckStatus("AUTHENTICATED")), - user_id=Depends(get_user), - skip: int = 0, - limit: int = 10, - allow: bool = Depends(Authorization("viewer")), + dataset_id: str, + admin_mode: bool = Depends(get_admin_mode), + folder_id: Optional[str] = None, + authenticated: bool = Depends(CheckStatus("AUTHENTICATED")), + user_id=Depends(get_user), + skip: int = 0, + limit: int = 10, + allow: bool = Depends(Authorization("viewer")), ): if authenticated: query = [ @@ -285,12 +286,12 @@ async def get_dataset_files( @router.put("/{dataset_id}", response_model=DatasetOut) async def edit_dataset( - dataset_id: str, - dataset_info: DatasetBase, - admin_mode: bool = Depends(get_admin_mode), - user=Depends(get_current_user), - es=Depends(dependencies.get_elasticsearchclient), - allow: bool = Depends(Authorization("editor")), + dataset_id: str, + dataset_info: DatasetBase, + admin_mode: bool = Depends(get_admin_mode), + 
user=Depends(get_current_user), + es=Depends(dependencies.get_elasticsearchclient), + allow: bool = Depends(Authorization("editor")), ): if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: # TODO: Refactor this with permissions checks etc. @@ -306,12 +307,12 @@ async def edit_dataset( @router.patch("/{dataset_id}", response_model=DatasetOut) async def patch_dataset( - dataset_id: str, - dataset_info: DatasetPatch, - admin_mode: bool = Depends(get_admin_mode), - user=Depends(get_current_user), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), - allow: bool = Depends(Authorization("editor")), + dataset_id: str, + dataset_info: DatasetPatch, + admin_mode: bool = Depends(get_admin_mode), + user=Depends(get_current_user), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + allow: bool = Depends(Authorization("editor")), ): if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: # TODO: Update method not working properly @@ -331,11 +332,11 @@ async def patch_dataset( @router.delete("/{dataset_id}") async def delete_dataset( - dataset_id: str, - admin_mode: bool = Depends(get_admin_mode), - fs: Minio = Depends(dependencies.get_fs), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), - allow: bool = Depends(Authorization("editor")), + dataset_id: str, + admin_mode: bool = Depends(get_admin_mode), + fs: Minio = Depends(dependencies.get_fs), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + allow: bool = Depends(Authorization("editor")), ): if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: # delete from elasticsearch @@ -346,7 +347,7 @@ async def delete_dataset( MetadataDB.resource.resource_id == PydanticObjectId(dataset_id) ).delete() async for file in FileDB.find( - FileDB.dataset_id == PydanticObjectId(dataset_id) + FileDB.dataset_id == PydanticObjectId(dataset_id) ): await remove_file_entry(file.id, fs, es) await 
FolderDB.find( @@ -361,11 +362,11 @@ async def delete_dataset( @router.post("/{dataset_id}/folders", response_model=FolderOut) async def add_folder( - dataset_id: str, - folder_in: FolderIn, - admin_mode: bool = Depends(get_admin_mode), - user=Depends(get_current_user), - allow: bool = Depends(Authorization("uploader")), + dataset_id: str, + folder_in: FolderIn, + admin_mode: bool = Depends(get_admin_mode), + user=Depends(get_current_user), + allow: bool = Depends(Authorization("uploader")), ): if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: parent_folder = folder_in.parent_folder @@ -384,14 +385,14 @@ async def add_folder( @router.get("/{dataset_id}/folders", response_model=List[FolderOut]) async def get_dataset_folders( - dataset_id: str, - admin_mode: bool = Depends(get_admin_mode), - parent_folder: Optional[str] = None, - user_id=Depends(get_user), - authenticated: bool = Depends(CheckStatus("authenticated")), - skip: int = 0, - limit: int = 10, - allow: bool = Depends(Authorization("viewer")), + dataset_id: str, + admin_mode: bool = Depends(get_admin_mode), + parent_folder: Optional[str] = None, + user_id=Depends(get_user), + authenticated: bool = Depends(CheckStatus("authenticated")), + skip: int = 0, + limit: int = 10, + allow: bool = Depends(Authorization("viewer")), ): if (await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: if authenticated: @@ -417,12 +418,12 @@ async def get_dataset_folders( @router.delete("/{dataset_id}/folders/{folder_id}") async def delete_folder( - dataset_id: str, - folder_id: str, - admin_mode: bool = Depends(get_admin_mode), - fs: Minio = Depends(dependencies.get_fs), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), - allow: bool = Depends(Authorization("editor")), + dataset_id: str, + folder_id: str, + admin_mode: bool = Depends(get_admin_mode), + fs: Minio = Depends(dependencies.get_fs), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + allow: 
bool = Depends(Authorization("editor")), ): if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: if (folder := await FolderDB.get(PydanticObjectId(folder_id))) is not None: @@ -433,14 +434,14 @@ async def delete_folder( # recursively delete child folder and files async def _delete_nested_folders(parent_folder_id): while ( - await FolderDB.find_one( - FolderDB.dataset_id == ObjectId(dataset_id), - FolderDB.parent_folder == ObjectId(parent_folder_id), - ) + await FolderDB.find_one( + FolderDB.dataset_id == ObjectId(dataset_id), + FolderDB.parent_folder == ObjectId(parent_folder_id), + ) ) is not None: async for subfolder in FolderDB.find( - FolderDB.dataset_id == PydanticObjectId(dataset_id), - FolderDB.parent_folder == PydanticObjectId(parent_folder_id), + FolderDB.dataset_id == PydanticObjectId(dataset_id), + FolderDB.parent_folder == PydanticObjectId(parent_folder_id), ): async for file in FileDB.find(FileDB.folder_id == subfolder.id): await remove_file_entry(file.id, fs, es) @@ -461,15 +462,15 @@ async def _delete_nested_folders(parent_folder_id): @router.post("/{dataset_id}/files", response_model=FileOut) async def save_file( - dataset_id: str, - folder_id: Optional[str] = None, - user=Depends(get_current_user), - fs: Minio = Depends(dependencies.get_fs), - file: UploadFile = File(...), - admin_mode: bool = Depends(get_admin_mode), - es=Depends(dependencies.get_elasticsearchclient), - rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), - allow: bool = Depends(Authorization("uploader")), + dataset_id: str, + folder_id: Optional[str] = None, + user=Depends(get_current_user), + fs: Minio = Depends(dependencies.get_fs), + file: UploadFile = File(...), + admin_mode: bool = Depends(get_admin_mode), + es=Depends(dependencies.get_elasticsearchclient), + rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), + allow: bool = Depends(Authorization("uploader")), ): if (dataset := await 
DatasetDB.get(PydanticObjectId(dataset_id))) is not None: if user is None: @@ -502,15 +503,15 @@ async def save_file( @router.post("/{dataset_id}/filesMultiple", response_model=List[FileOut]) async def save_files( - dataset_id: str, - files: List[UploadFile], - admin_mode: bool = Depends(get_admin_mode), - folder_id: Optional[str] = None, - user=Depends(get_current_user), - fs: Minio = Depends(dependencies.get_fs), - es=Depends(dependencies.get_elasticsearchclient), - rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), - allow: bool = Depends(Authorization("uploader")), + dataset_id: str, + files: List[UploadFile], + admin_mode: bool = Depends(get_admin_mode), + folder_id: Optional[str] = None, + user=Depends(get_current_user), + fs: Minio = Depends(dependencies.get_fs), + es=Depends(dependencies.get_elasticsearchclient), + rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), + allow: bool = Depends(Authorization("uploader")), ): if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: files_added = [] @@ -525,7 +526,7 @@ async def save_files( if folder_id is not None: if ( - folder := await FolderDB.get(PydanticObjectId(folder_id)) + folder := await FolderDB.get(PydanticObjectId(folder_id)) ) is not None: new_file.folder_id = folder.id else: @@ -551,13 +552,13 @@ async def save_files( @router.post("/{dataset_id}/local_files", response_model=FileOut) async def save_local_file( - localfile_in: LocalFileIn, - dataset_id: str, - folder_id: Optional[str] = None, - user=Depends(get_current_user), - es=Depends(dependencies.get_elasticsearchclient), - rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), - allow: bool = Depends(Authorization("uploader")), + localfile_in: LocalFileIn, + dataset_id: str, + folder_id: Optional[str] = None, + user=Depends(get_current_user), + es=Depends(dependencies.get_elasticsearchclient), + rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), + 
allow: bool = Depends(Authorization("uploader")), ): if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: if user is None: @@ -606,12 +607,12 @@ async def save_local_file( @router.post("/createFromZip", response_model=DatasetOut) async def create_dataset_from_zip( - user=Depends(get_current_user), - fs: Minio = Depends(dependencies.get_fs), - file: UploadFile = File(...), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), - rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), - token: str = Depends(get_token), + user=Depends(get_current_user), + fs: Minio = Depends(dependencies.get_fs), + file: UploadFile = File(...), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), + token: str = Depends(get_token), ): if file.filename.endswith(".zip") == False: raise HTTPException(status_code=404, detail=f"File is not a zip file") @@ -679,11 +680,11 @@ async def create_dataset_from_zip( @router.get("/{dataset_id}/download", response_model=DatasetOut) async def download_dataset( - dataset_id: str, - admin_mode: bool = Depends(get_admin_mode), - user=Depends(get_current_user), - fs: Minio = Depends(dependencies.get_fs), - allow: bool = Depends(Authorization("viewer")), + dataset_id: str, + admin_mode: bool = Depends(get_admin_mode), + user=Depends(get_current_user), + fs: Minio = Depends(dependencies.get_fs), + allow: bool = Depends(Authorization("viewer")), ): if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: current_temp_dir = tempfile.mkdtemp(prefix="rocratedownload") @@ -838,15 +839,15 @@ async def download_dataset( # can handle parameeters pass in as key/values in info @router.post("/{dataset_id}/extract") async def get_dataset_extract( - dataset_id: str, - extractorName: str, - request: Request, - admin_mode: bool = Depends(get_admin_mode), - # parameters don't have a fixed model shape - 
parameters: dict = None, - user=Depends(get_current_user), - rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), - allow: bool = Depends(Authorization("uploader")), + dataset_id: str, + extractorName: str, + request: Request, + admin_mode: bool = Depends(get_admin_mode), + # parameters don't have a fixed model shape + parameters: dict = None, + user=Depends(get_current_user), + rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), + allow: bool = Depends(Authorization("uploader")), ): if extractorName is None: raise HTTPException(status_code=400, detail=f"No extractorName specified") @@ -866,10 +867,10 @@ async def get_dataset_extract( @router.get("/{dataset_id}/thumbnail") async def download_dataset_thumbnail( - dataset_id: str, - admin_mode: bool = Depends(get_admin_mode), - fs: Minio = Depends(dependencies.get_fs), - allow: bool = Depends(Authorization("viewer")), + dataset_id: str, + admin_mode: bool = Depends(get_admin_mode), + fs: Minio = Depends(dependencies.get_fs), + allow: bool = Depends(Authorization("viewer")), ): # If dataset exists in MongoDB, download from Minio if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: @@ -894,14 +895,14 @@ async def download_dataset_thumbnail( @router.patch("/{dataset_id}/thumbnail/{thumbnail_id}", response_model=DatasetOut) async def add_dataset_thumbnail( - dataset_id: str, - thumbnail_id: str, - admin_mode: bool = Depends(get_admin_mode), - allow: bool = Depends(Authorization("editor")), + dataset_id: str, + thumbnail_id: str, + admin_mode: bool = Depends(get_admin_mode), + allow: bool = Depends(Authorization("editor")), ): if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: if ( - thumbnail := await ThumbnailDB.get(PydanticObjectId(thumbnail_id)) + thumbnail := await ThumbnailDB.get(PydanticObjectId(thumbnail_id)) ) is not None: # TODO: Should we garbage collect existing thumbnail if nothing else points to it? 
dataset.thumbnail_id = thumbnail_id diff --git a/backend/app/routers/elasticsearch.py b/backend/app/routers/elasticsearch.py index 1b38b7a10..e1ae5d28f 100644 --- a/backend/app/routers/elasticsearch.py +++ b/backend/app/routers/elasticsearch.py @@ -12,9 +12,10 @@ def _add_permissions_clause( - query, username: str, - admin_mode: bool = Depends(get_admin_mode), - admin: bool = Depends(get_admin) + query, + username: str, + admin_mode: bool = Depends(get_admin_mode), + admin: bool = Depends(get_admin), ): """Append filter to Elasticsearch object that restricts permissions based on the requesting user.""" # TODO: Add public filter once added @@ -54,8 +55,8 @@ async def search(index_name: str, query: str, username=Depends(get_current_usern @router.post("/all/_msearch") async def msearch( - request: Request, - username=Depends(get_current_username), + request: Request, + username=Depends(get_current_username), ): es = await connect_elasticsearch() query = await request.body() diff --git a/backend/app/routers/files.py b/backend/app/routers/files.py index 5f3b11053..4e967ed87 100644 --- a/backend/app/routers/files.py +++ b/backend/app/routers/files.py @@ -44,10 +44,10 @@ async def _resubmit_file_extractors( - file: FileOut, - rabbitmq_client: BlockingChannel, - user: UserOut, - credentials: HTTPAuthorizationCredentials = Security(security), + file: FileOut, + rabbitmq_client: BlockingChannel, + user: UserOut, + credentials: HTTPAuthorizationCredentials = Security(security), ): """This helper method will check metadata. We get the extractors run from metadata from extractors. Then they are resubmitted. At present parameters are not stored. 
This will change once Jobs are @@ -61,8 +61,8 @@ async def _resubmit_file_extractors( """ resubmitted_jobs = [] async for job in EventListenerJobDB.find( - EventListenerJobDB.resource_ref.resource_id == ObjectId(file.id), - EventListenerJobDB.resource_ref.version == file.version_num - 1, + EventListenerJobDB.resource_ref.resource_id == ObjectId(file.id), + EventListenerJobDB.resource_ref.version == file.version_num - 1, ): resubmitted_job = {"listener_id": job.listener_id, "parameters": job.parameters} try: @@ -86,13 +86,13 @@ async def _resubmit_file_extractors( # TODO: Move this to MongoDB middle layer async def add_file_entry( - new_file: FileDB, - user: UserOut, - fs: Minio, - es: Elasticsearch, - rabbitmq_client: BlockingChannel, - file: Optional[io.BytesIO] = None, - content_type: Optional[str] = None, + new_file: FileDB, + user: UserOut, + fs: Minio, + es: Elasticsearch, + rabbitmq_client: BlockingChannel, + file: Optional[io.BytesIO] = None, + content_type: Optional[str] = None, ): """Insert FileDB object into MongoDB (makes Clowder ID), then Minio (makes version ID), then update MongoDB with the version ID from Minio. @@ -151,11 +151,11 @@ async def add_file_entry( async def add_local_file_entry( - new_file: FileDB, - user: UserOut, - es: Elasticsearch, - rabbitmq_client: BlockingChannel, - content_type: Optional[str] = None, + new_file: FileDB, + user: UserOut, + es: Elasticsearch, + rabbitmq_client: BlockingChannel, + content_type: Optional[str] = None, ): """Insert FileDB object into MongoDB (makes Clowder ID). 
Bytes are not stored in DB and versioning not supported for local files.""" @@ -181,7 +181,7 @@ async def add_local_file_entry( # TODO: Move this to MongoDB middle layer async def remove_file_entry( - file_id: Union[str, ObjectId], fs: Minio, es: Elasticsearch + file_id: Union[str, ObjectId], fs: Minio, es: Elasticsearch ): """Remove FileDB object into MongoDB, Minio, and associated metadata and version information.""" # TODO: Deleting individual versions will require updating version_id in mongo, or deleting entire document @@ -208,16 +208,16 @@ async def remove_local_file_entry(file_id: Union[str, ObjectId], es: Elasticsear @router.put("/{file_id}", response_model=FileOut) async def update_file( - file_id: str, - admin_mode: bool = Depends(get_admin_mode), - token=Depends(get_token), - user=Depends(get_current_user), - fs: Minio = Depends(dependencies.get_fs), - file: UploadFile = File(...), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), - credentials: HTTPAuthorizationCredentials = Security(security), - rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), - allow: bool = Depends(FileAuthorization("uploader")), + file_id: str, + admin_mode: bool = Depends(get_admin_mode), + token=Depends(get_token), + user=Depends(get_current_user), + fs: Minio = Depends(dependencies.get_fs), + file: UploadFile = File(...), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + credentials: HTTPAuthorizationCredentials = Security(security), + rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), + allow: bool = Depends(FileAuthorization("uploader")), ): # Check all connection and abort if any one of them is not available if fs is None or es is None: @@ -226,8 +226,8 @@ async def update_file( if (updated_file := await FileDB.get(PydanticObjectId(file_id))) is not None: if ( - file.filename != updated_file.name - or file.content_type != updated_file.content_type.content_type + file.filename != 
updated_file.name + or file.content_type != updated_file.content_type.content_type ): raise HTTPException( status_code=400, @@ -299,12 +299,12 @@ async def update_file( @router.get("/{file_id}") async def download_file( - file_id: str, - admin_mode: bool = Depends(get_admin_mode), - version: Optional[int] = None, - increment: Optional[bool] = True, - fs: Minio = Depends(dependencies.get_fs), - allow: bool = Depends(FileAuthorization("viewer")), + file_id: str, + admin_mode: bool = Depends(get_admin_mode), + version: Optional[int] = None, + increment: Optional[bool] = True, + fs: Minio = Depends(dependencies.get_fs), + allow: bool = Depends(FileAuthorization("viewer")), ): # If file exists in MongoDB, download from Minio if (file := await FileDB.get(PydanticObjectId(file_id))) is not None: @@ -334,7 +334,7 @@ async def download_file( content.stream(settings.MINIO_UPLOAD_CHUNK_SIZE) ) response.headers["Content-Disposition"] = ( - "attachment; filename=%s" % file.name + "attachment; filename=%s" % file.name ) elif file.storage_type == StorageType.LOCAL: @@ -361,12 +361,12 @@ async def download_file( @router.get("/{file_id}/url/") async def download_file_url( - file_id: str, - admin_mode: bool = Depends(get_admin_mode), - version: Optional[int] = None, - expires_in_seconds: Optional[int] = 3600, - external_fs: Minio = Depends(dependencies.get_external_fs), - allow: bool = Depends(FileAuthorization("viewer")), + file_id: str, + admin_mode: bool = Depends(get_admin_mode), + version: Optional[int] = None, + expires_in_seconds: Optional[int] = 3600, + external_fs: Minio = Depends(dependencies.get_external_fs), + allow: bool = Depends(FileAuthorization("viewer")), ): # If file exists in MongoDB, download from Minio if (file := await FileDB.get(PydanticObjectId(file_id))) is not None: @@ -414,11 +414,11 @@ async def download_file_url( @router.delete("/{file_id}") async def delete_file( - file_id: str, - admin_mode: bool = Depends(get_admin_mode), - fs: Minio = 
Depends(dependencies.get_fs), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), - allow: bool = Depends(FileAuthorization("editor")), + file_id: str, + admin_mode: bool = Depends(get_admin_mode), + fs: Minio = Depends(dependencies.get_fs), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + allow: bool = Depends(FileAuthorization("editor")), ): if (file := await FileDB.get(PydanticObjectId(file_id))) is not None: if file.storage_type == StorageType.LOCAL: @@ -432,9 +432,9 @@ async def delete_file( @router.get("/{file_id}/summary", response_model=FileOut) async def get_file_summary( - file_id: str, - admin_mode: bool = Depends(get_admin_mode), - allow: bool = Depends(FileAuthorization("viewer")), + file_id: str, + admin_mode: bool = Depends(get_admin_mode), + allow: bool = Depends(FileAuthorization("viewer")), ): if (file := await FileDB.get(PydanticObjectId(file_id))) is not None: # TODO: Incrementing too often (3x per page view) @@ -447,10 +447,10 @@ async def get_file_summary( @router.get("/{file_id}/version_details", response_model=FileOut) async def get_file_version_details( - file_id: str, - admin_mode: bool = Depends(get_admin_mode), - version_num: Optional[int] = 0, - allow: bool = Depends(FileAuthorization("viewer")), + file_id: str, + admin_mode: bool = Depends(get_admin_mode), + version_num: Optional[int] = 0, + allow: bool = Depends(FileAuthorization("viewer")), ): if (file := await FileDB.get(PydanticObjectId(file_id))) is not None: # TODO: Incrementing too often (3x per page view) @@ -470,17 +470,17 @@ async def get_file_version_details( @router.get("/{file_id}/versions", response_model=List[FileVersion]) async def get_file_versions( - file_id: str, - admin_mode: bool = Depends(get_admin_mode), - skip: int = 0, - limit: int = 20, - allow: bool = Depends(FileAuthorization("viewer")), + file_id: str, + admin_mode: bool = Depends(get_admin_mode), + skip: int = 0, + limit: int = 20, + allow: bool = 
Depends(FileAuthorization("viewer")), ): if (file := await FileDB.get(PydanticObjectId(file_id))) is not None: mongo_versions = [] if file.storage_type == StorageType.MINIO: async for ver in FileVersionDB.find( - FileVersionDB.file_id == ObjectId(file_id) + FileVersionDB.file_id == ObjectId(file_id) ).sort(-FileVersionDB.created).skip(skip).limit(limit): mongo_versions.append(FileVersion(**ver.dict())) return mongo_versions @@ -491,15 +491,15 @@ async def get_file_versions( # submits file to extractor @router.post("/{file_id}/extract") async def post_file_extract( - file_id: str, - extractorName: str, - admin_mode: bool = Depends(get_admin_mode), - # parameters don't have a fixed model shape - parameters: dict = None, - user=Depends(get_current_user), - credentials: HTTPAuthorizationCredentials = Security(security), - rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), - allow: bool = Depends(FileAuthorization("uploader")), + file_id: str, + extractorName: str, + admin_mode: bool = Depends(get_admin_mode), + # parameters don't have a fixed model shape + parameters: dict = None, + user=Depends(get_current_user), + credentials: HTTPAuthorizationCredentials = Security(security), + rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), + allow: bool = Depends(FileAuthorization("uploader")), ): if extractorName is None: raise HTTPException(status_code=400, detail=f"No extractorName specified") @@ -524,12 +524,12 @@ async def post_file_extract( @router.post("/{file_id}/resubmit_extract") async def resubmit_file_extractions( - file_id: str, - admin_mode: bool = Depends(get_admin_mode), - user=Depends(get_current_user), - credentials: HTTPAuthorizationCredentials = Security(security), - rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), - allow: bool = Depends(FileAuthorization("editor")), + file_id: str, + admin_mode: bool = Depends(get_admin_mode), + user=Depends(get_current_user), + credentials: 
HTTPAuthorizationCredentials = Security(security), + rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), + allow: bool = Depends(FileAuthorization("editor")), ): """This route will check metadata. We get the extractors run from metadata from extractors. Then they are resubmitted. At present parameters are not stored. This will change once Jobs are @@ -551,10 +551,10 @@ async def resubmit_file_extractions( @router.get("/{file_id}/thumbnail") async def download_file_thumbnail( - file_id: str, - admin_mode: bool = Depends(get_admin_mode), - fs: Minio = Depends(dependencies.get_fs), - allow: bool = Depends(FileAuthorization("viewer")), + file_id: str, + admin_mode: bool = Depends(get_admin_mode), + fs: Minio = Depends(dependencies.get_fs), + allow: bool = Depends(FileAuthorization("viewer")), ): # If file exists in MongoDB, download from Minio if (file := await FileDB.get(PydanticObjectId(file_id))) is not None: @@ -576,15 +576,15 @@ async def download_file_thumbnail( @router.patch("/{file_id}/thumbnail/{thumbnail_id}", response_model=FileOut) async def add_file_thumbnail( - file_id: str, - thumbnail_id: str, - admin_mode: bool = Depends(get_admin_mode), - allow: bool = Depends(FileAuthorization("editor")), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + file_id: str, + thumbnail_id: str, + admin_mode: bool = Depends(get_admin_mode), + allow: bool = Depends(FileAuthorization("editor")), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), ): if (file := await FileDB.get(PydanticObjectId(file_id))) is not None: if ( - thumbnail := await ThumbnailDB.get(PydanticObjectId(thumbnail_id)) + thumbnail := await ThumbnailDB.get(PydanticObjectId(thumbnail_id)) ) is not None: # TODO: Should we garbage collect existing thumbnail if nothing else points to it? 
file.thumbnail_id = thumbnail_id diff --git a/backend/app/routers/groups.py b/backend/app/routers/groups.py index 0032e915b..60e407ffa 100644 --- a/backend/app/routers/groups.py +++ b/backend/app/routers/groups.py @@ -17,8 +17,8 @@ @router.post("", response_model=GroupOut) async def save_group( - group_in: GroupIn, - user=Depends(get_current_user), + group_in: GroupIn, + user=Depends(get_current_user), ): group_db = GroupDB(**group_in.dict(), creator=user.email) user_member = Member(user=user, editor=True) @@ -30,9 +30,9 @@ async def save_group( @router.get("", response_model=List[GroupOut]) async def get_groups( - user_id=Depends(get_user), - skip: int = 0, - limit: int = 10, + user_id=Depends(get_user), + skip: int = 0, + limit: int = 10, ): """Get a list of all Groups in the db the user is a member/owner of. @@ -56,10 +56,10 @@ async def get_groups( @router.get("/search/{search_term}", response_model=List[GroupOut]) async def search_group( - search_term: str, - user_id=Depends(get_user), - skip: int = 0, - limit: int = 10, + search_term: str, + user_id=Depends(get_user), + skip: int = 0, + limit: int = 10, ): """Search all groups in the db based on text. 
@@ -85,9 +85,9 @@ async def search_group( @router.get("/{group_id}", response_model=GroupOut) async def get_group( - group_id: str, - admin_mode: bool = Depends(get_admin_mode), - allow: bool = Depends(GroupAuthorization("viewer")), + group_id: str, + admin_mode: bool = Depends(get_admin_mode), + allow: bool = Depends(GroupAuthorization("viewer")), ): if (group := await GroupDB.get(PydanticObjectId(group_id))) is not None: return group.dict() @@ -96,11 +96,11 @@ async def get_group( @router.put("/{group_id}", response_model=GroupOut) async def edit_group( - group_id: str, - group_info: GroupBase, - admin_mode: bool = Depends(get_admin_mode), - user_id=Depends(get_user), - allow: bool = Depends(GroupAuthorization("editor")), + group_id: str, + group_info: GroupBase, + admin_mode: bool = Depends(get_admin_mode), + user_id=Depends(get_user), + allow: bool = Depends(GroupAuthorization("editor")), ): if (group := await GroupDB.get(PydanticObjectId(group_id))) is not None: group_dict = dict(group_info) if group_info is not None else {} @@ -123,7 +123,7 @@ async def edit_group( if original_user not in groups_users: # remove them from auth async for auth in AuthorizationDB.find( - {"group_ids": ObjectId(group_id)} + {"group_ids": ObjectId(group_id)} ): auth.user_ids.remove(original_user.user.email) await auth.replace() @@ -167,9 +167,9 @@ async def edit_group( @router.delete("/{group_id}", response_model=GroupOut) async def delete_group( - group_id: str, - admin_mode: bool = Depends(get_admin_mode), - allow: bool = Depends(GroupAuthorization("owner")), + group_id: str, + admin_mode: bool = Depends(get_admin_mode), + allow: bool = Depends(GroupAuthorization("owner")), ): if (group := await GroupDB.get(PydanticObjectId(group_id))) is not None: await group.delete() @@ -180,11 +180,11 @@ async def delete_group( @router.post("/{group_id}/add/{username}", response_model=GroupOut) async def add_member( - group_id: str, - username: str, - admin_mode: bool = 
Depends(get_admin_mode), - role: Optional[str] = None, - allow: bool = Depends(GroupAuthorization("editor")), + group_id: str, + username: str, + admin_mode: bool = Depends(get_admin_mode), + role: Optional[str] = None, + allow: bool = Depends(GroupAuthorization("editor")), ): """Add a new user to a group.""" if (user := await UserDB.find_one(UserDB.email == username)) is not None: @@ -218,10 +218,10 @@ async def add_member( @router.post("/{group_id}/remove/{username}", response_model=GroupOut) async def remove_member( - group_id: str, - username: str, - admin_mode: bool = Depends(get_admin_mode), - allow: bool = Depends(GroupAuthorization("editor")), + group_id: str, + username: str, + admin_mode: bool = Depends(get_admin_mode), + allow: bool = Depends(GroupAuthorization("editor")), ): """Remove a user from a group.""" @@ -251,11 +251,11 @@ async def remove_member( @router.put("/{group_id}/update/{username}", response_model=GroupOut) async def update_member( - group_id: str, - username: str, - role: str, - admin_mode: bool = Depends(get_admin_mode), - allow: bool = Depends(GroupAuthorization("editor")), + group_id: str, + username: str, + role: str, + admin_mode: bool = Depends(get_admin_mode), + allow: bool = Depends(GroupAuthorization("editor")), ): """Update user role.""" if (user := await UserDB.find_one({"email": username})) is not None: diff --git a/backend/app/routers/metadata.py b/backend/app/routers/metadata.py index a0d5c8958..377c72136 100644 --- a/backend/app/routers/metadata.py +++ b/backend/app/routers/metadata.py @@ -29,8 +29,8 @@ @router.post("/definition", response_model=MetadataDefinitionOut) async def save_metadata_definition( - definition_in: MetadataDefinitionIn, - user=Depends(get_current_user), + definition_in: MetadataDefinitionIn, + user=Depends(get_current_user), ): existing = await MetadataDefinitionDB.find_one( MetadataDefinitionDB.name == definition_in.name @@ -48,10 +48,10 @@ async def save_metadata_definition( 
@router.get("/definition", response_model=List[MetadataDefinitionOut]) async def get_metadata_definition_list( - name: Optional[str] = None, - user=Depends(get_current_user), - skip: int = 0, - limit: int = 2, + name: Optional[str] = None, + user=Depends(get_current_user), + skip: int = 0, + limit: int = 2, ): if name is None: defs = await MetadataDefinitionDB.find( @@ -71,11 +71,11 @@ async def get_metadata_definition_list( "/definition/{metadata_definition_id}", response_model=MetadataDefinitionOut ) async def get_metadata_definition( - metadata_definition_id: str, - user=Depends(get_current_user), + metadata_definition_id: str, + user=Depends(get_current_user), ): if ( - mdd := await MetadataDefinitionDB.get(PydanticObjectId(metadata_definition_id)) + mdd := await MetadataDefinitionDB.get(PydanticObjectId(metadata_definition_id)) ) is not None: return mdd.dict() raise HTTPException( @@ -88,8 +88,8 @@ async def get_metadata_definition( "/definition/{metadata_definition_id}", response_model=MetadataDefinitionOut ) async def delete_metadata_definition( - metadata_definition_id: str, - user=Depends(get_current_user), + metadata_definition_id: str, + user=Depends(get_current_user), ): """Delete metadata definition by specific ID.""" mdd = await MetadataDefinitionDB.find_one( @@ -105,7 +105,7 @@ async def delete_metadata_definition( raise HTTPException( status_code=400, detail=f"Metadata definition: {mdd.name} ({metadata_definition_id}) in use. " - f"You cannot delete it until all metadata records using it are deleted.", + f"You cannot delete it until all metadata records using it are deleted.", ) # TODO: Refactor this with permissions checks etc. 
@@ -122,10 +122,10 @@ async def delete_metadata_definition( "/definition/search/{search_term}", response_model=List[MetadataDefinitionOut] ) async def search_metadata_definition( - search_term: str, - skip: int = 0, - limit: int = 10, - user=Depends(get_current_user), + search_term: str, + skip: int = 0, + limit: int = 10, + user=Depends(get_current_user), ): """Search all metadata definition in the db based on text. @@ -151,12 +151,12 @@ async def search_metadata_definition( @router.patch("/{metadata_id}", response_model=MetadataOut) async def update_metadata( - metadata_in: MetadataPatch, - metadata_id: str, - admin_mode: bool = Depends(get_admin_mode), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), - user=Depends(get_current_user), - allow: bool = Depends(MetadataAuthorization("editor")), + metadata_in: MetadataPatch, + metadata_id: str, + admin_mode: bool = Depends(get_admin_mode), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + user=Depends(get_current_user), + allow: bool = Depends(MetadataAuthorization("editor")), ): """Update metadata. Any fields provided in the contents JSON will be added or updated in the metadata. If context or agent should be changed, use PUT. 
@@ -174,10 +174,10 @@ async def update_metadata( @router.delete("/{metadata_id}") async def delete_metadata( - metadata_id: str, - admin_mode: bool = Depends(get_admin_mode), - user=Depends(get_current_user), - allow: bool = Depends(MetadataAuthorization("editor")), + metadata_id: str, + admin_mode: bool = Depends(get_admin_mode), + user=Depends(get_current_user), + allow: bool = Depends(MetadataAuthorization("editor")), ): """Delete metadata by specific ID.""" md = await MetadataDB.find_one(MetadataDB.id == PyObjectId(metadata_id)) diff --git a/backend/app/routers/metadata_datasets.py b/backend/app/routers/metadata_datasets.py index 8987c7bc4..ad4693143 100644 --- a/backend/app/routers/metadata_datasets.py +++ b/backend/app/routers/metadata_datasets.py @@ -34,10 +34,10 @@ async def _build_metadata_db_obj( - metadata_in: MetadataIn, - dataset: DatasetOut, - user: UserOut, - agent: MetadataAgent = None, + metadata_in: MetadataIn, + dataset: DatasetOut, + user: UserOut, + agent: MetadataAgent = None, ): """Convenience function for converting MetadataIn to MetadataDB object.""" content = await validate_context( @@ -69,12 +69,12 @@ async def _build_metadata_db_obj( @router.post("/{dataset_id}/metadata", response_model=MetadataOut) async def add_dataset_metadata( - metadata_in: MetadataIn, - dataset_id: str, - user=Depends(get_current_user), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), - admin_mode: bool = Depends(get_admin_mode), - allow: bool = Depends(Authorization("uploader")), + metadata_in: MetadataIn, + dataset_id: str, + user=Depends(get_current_user), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + admin_mode: bool = Depends(get_admin_mode), + allow: bool = Depends(Authorization("uploader")), ): """Attach new metadata to a dataset. The body must include a contents field with the JSON metadata, and either a context JSON-LD object, context_url, or definition (name of a metadata definition) to be valid. 
@@ -119,12 +119,12 @@ async def add_dataset_metadata( @router.put("/{dataset_id}/metadata", response_model=MetadataOut) async def replace_dataset_metadata( - metadata_in: MetadataIn, - dataset_id: str, - user=Depends(get_current_user), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), - admin_mode: bool = Depends(get_admin_mode), - allow: bool = Depends(Authorization("editor")), + metadata_in: MetadataIn, + dataset_id: str, + user=Depends(get_current_user), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + admin_mode: bool = Depends(get_admin_mode), + allow: bool = Depends(Authorization("editor")), ): """Update metadata. Any fields provided in the contents JSON will be added or updated in the metadata. If context or agent should be changed, use PUT. @@ -174,12 +174,12 @@ async def replace_dataset_metadata( @router.patch("/{dataset_id}/metadata", response_model=MetadataOut) async def update_dataset_metadata( - metadata_in: MetadataPatch, - dataset_id: str, - user=Depends(get_current_user), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), - admin_mode: bool = Depends(get_admin_mode), - allow: bool = Depends(Authorization("editor")), + metadata_in: MetadataPatch, + dataset_id: str, + user=Depends(get_current_user), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + admin_mode: bool = Depends(get_admin_mode), + allow: bool = Depends(Authorization("editor")), ): """Update metadata. Any fields provided in the contents JSON will be added or updated in the metadata. If context or agent should be changed, use PUT. 
@@ -194,9 +194,9 @@ async def update_dataset_metadata( if metadata_in.metadata_id is not None: # If a specific metadata_id is provided, validate the patch against existing context if ( - existing := await MetadataDB.get( - PydanticObjectId(metadata_in.metadata_id) - ) + existing := await MetadataDB.get( + PydanticObjectId(metadata_in.metadata_id) + ) ) is not None: content = await validate_context( metadata_in.content, @@ -244,12 +244,12 @@ async def update_dataset_metadata( @router.get("/{dataset_id}/metadata", response_model=List[MetadataOut]) async def get_dataset_metadata( - dataset_id: str, - listener_name: Optional[str] = Form(None), - listener_version: Optional[float] = Form(None), - user=Depends(get_current_user), - admin_mode: bool = Depends(get_admin_mode), - allow: bool = Depends(Authorization("viewer")), + dataset_id: str, + listener_name: Optional[str] = Form(None), + listener_version: Optional[float] = Form(None), + user=Depends(get_current_user), + admin_mode: bool = Depends(get_admin_mode), + allow: bool = Depends(Authorization("viewer")), ): if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: query = [MetadataDB.resource.resource_id == ObjectId(dataset_id)] @@ -263,9 +263,9 @@ async def get_dataset_metadata( async for md in MetadataDB.find(*query): if md.definition is not None: if ( - md_def := await MetadataDefinitionDB.find_one( - MetadataDefinitionDB.name == md.definition - ) + md_def := await MetadataDefinitionDB.find_one( + MetadataDefinitionDB.name == md.definition + ) ) is not None: md.description = md_def.description metadata.append(md) @@ -276,12 +276,12 @@ async def get_dataset_metadata( @router.delete("/{dataset_id}/metadata", response_model=MetadataOut) async def delete_dataset_metadata( - metadata_in: MetadataDelete, - dataset_id: str, - user=Depends(get_current_user), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), - admin_mode: bool = Depends(get_admin_mode), - allow: bool = 
Depends(Authorization("editor")), + metadata_in: MetadataDelete, + dataset_id: str, + user=Depends(get_current_user), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + admin_mode: bool = Depends(get_admin_mode), + allow: bool = Depends(Authorization("editor")), ): if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: # filter by metadata_id or definition @@ -289,9 +289,9 @@ async def delete_dataset_metadata( if metadata_in.metadata_id is not None: # If a specific metadata_id is provided, delete the matching entry if ( - existing_md := await MetadataDB.find_one( - MetadataDB.metadata_id == ObjectId(metadata_in.metadata_id) - ) + existing_md := await MetadataDB.find_one( + MetadataDB.metadata_id == ObjectId(metadata_in.metadata_id) + ) ) is not None: query.append(MetadataDB.metadata_id == metadata_in.metadata_id) else: diff --git a/backend/app/routers/metadata_files.py b/backend/app/routers/metadata_files.py index 21cd1af4d..02d9c3278 100644 --- a/backend/app/routers/metadata_files.py +++ b/backend/app/routers/metadata_files.py @@ -36,11 +36,11 @@ async def _build_metadata_db_obj( - metadata_in: MetadataIn, - file: FileOut, - user: UserOut, - agent: MetadataAgent = None, - version: int = None, + metadata_in: MetadataIn, + file: FileOut, + user: UserOut, + agent: MetadataAgent = None, + version: int = None, ): """Convenience function for building a MetadataDB object from incoming metadata plus a file. 
Agent and file version will be determined based on inputs if they are not provided directly.""" @@ -56,10 +56,10 @@ async def _build_metadata_db_obj( file_version = metadata_in.file_version if file_version is not None and file_version > 0: if ( - await FileVersionDB.find_one( - FileVersionDB.file_id == file.id, - FileVersionDB.version_num == file_version, - ) + await FileVersionDB.find_one( + FileVersionDB.file_id == file.id, + FileVersionDB.version_num == file_version, + ) ) is None: raise HTTPException( status_code=404, @@ -103,12 +103,12 @@ async def _build_metadata_db_obj( @router.post("/{file_id}/metadata", response_model=MetadataOut) async def add_file_metadata( - metadata_in: MetadataIn, - file_id: str, - admin_mode: bool = Depends(get_admin_mode), - user=Depends(get_current_user), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), - allow: bool = Depends(FileAuthorization("uploader")), + metadata_in: MetadataIn, + file_id: str, + admin_mode: bool = Depends(get_admin_mode), + user=Depends(get_current_user), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + allow: bool = Depends(FileAuthorization("uploader")), ): """Attach new metadata to a file. The body must include a contents field with the JSON metadata, and either a context JSON-LD object, context_url, or definition (name of a metadata definition) to be valid. 
@@ -156,12 +156,12 @@ async def add_file_metadata( @router.put("/{file_id}/metadata", response_model=MetadataOut) async def replace_file_metadata( - metadata_in: MetadataPatch, - file_id: str, - admin_mode: bool = Depends(get_admin_mode), - user=Depends(get_current_user), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), - allow: bool = Depends(FileAuthorization("editor")), + metadata_in: MetadataPatch, + file_id: str, + admin_mode: bool = Depends(get_admin_mode), + user=Depends(get_current_user), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + allow: bool = Depends(FileAuthorization("editor")), ): """Replace metadata, including agent and context. If only metadata contents should be updated, use PATCH instead. @@ -175,10 +175,10 @@ async def replace_file_metadata( version = metadata_in.file_version if version is not None: if ( - await FileVersionDB.find_one( - FileVersionDB.file_id == ObjectId(file_id), - FileVersionDB.version_num == version, - ) + await FileVersionDB.find_one( + FileVersionDB.file_id == ObjectId(file_id), + FileVersionDB.version_num == version, + ) ) is None: raise HTTPException( status_code=404, @@ -230,12 +230,12 @@ async def replace_file_metadata( @router.patch("/{file_id}/metadata", response_model=MetadataOut) async def update_file_metadata( - metadata_in: MetadataPatch, - file_id: str, - admin_mode: bool = Depends(get_admin_mode), - user=Depends(get_current_user), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), - allow: bool = Depends(FileAuthorization("editor")), + metadata_in: MetadataPatch, + file_id: str, + admin_mode: bool = Depends(get_admin_mode), + user=Depends(get_current_user), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + allow: bool = Depends(FileAuthorization("editor")), ): """Update metadata. Any fields provided in the contents JSON will be added or updated in the metadata. If context or agent should be changed, use PUT. 
@@ -246,10 +246,10 @@ async def update_file_metadata( # check if metadata with file version exists, replace metadata if none exists if ( - await MetadataDB.find_one( - MetadataDB.resource.resource_id == ObjectId(file_id), - MetadataDB.resource.version == metadata_in.file_version, - ) + await MetadataDB.find_one( + MetadataDB.resource.resource_id == ObjectId(file_id), + MetadataDB.resource.version == metadata_in.file_version, + ) ) is None: result = await replace_file_metadata(metadata_in, file_id, user, es) return result @@ -261,9 +261,9 @@ async def update_file_metadata( if metadata_in.metadata_id is not None: # If a specific metadata_id is provided, validate the patch against existing context if ( - existing_md := await MetadataDB.find_one( - MetadataDB.id == ObjectId(metadata_in.metadata_id) - ) + existing_md := await MetadataDB.find_one( + MetadataDB.id == ObjectId(metadata_in.metadata_id) + ) ) is not None: content = await validate_context( metadata_in.content, @@ -281,10 +281,10 @@ async def update_file_metadata( if metadata_in.file_version is not None: if ( - await FileVersionDB.find_one( - FileVersionDB.file_id == ObjectId(file_id), - FileVersionDB.version_num == metadata_in.file_version, - ) + await FileVersionDB.find_one( + FileVersionDB.file_id == ObjectId(file_id), + FileVersionDB.version_num == metadata_in.file_version, + ) ) is None: raise HTTPException( status_code=404, @@ -328,15 +328,15 @@ async def update_file_metadata( @router.get("/{file_id}/metadata", response_model=List[MetadataOut]) async def get_file_metadata( - file_id: str, - admin_mode: bool = Depends(get_admin_mode), - version: Optional[int] = None, - all_versions: Optional[bool] = False, - definition: Optional[str] = Form(None), - listener_name: Optional[str] = Form(None), - listener_version: Optional[float] = Form(None), - user=Depends(get_current_user), - allow: bool = Depends(FileAuthorization("viewer")), + file_id: str, + admin_mode: bool = Depends(get_admin_mode), + version: 
Optional[int] = None, + all_versions: Optional[bool] = False, + definition: Optional[str] = Form(None), + listener_name: Optional[str] = Form(None), + listener_version: Optional[float] = Form(None), + user=Depends(get_current_user), + allow: bool = Depends(FileAuthorization("viewer")), ): """Get file metadata.""" if (file := await FileDB.get(PydanticObjectId(file_id))) is not None: @@ -346,10 +346,10 @@ async def get_file_metadata( if not all_versions: if version is not None and version > 0: if ( - await FileVersionDB.find_one( - FileVersionDB.file_id == ObjectId(file_id), - FileVersionDB.version_num == version, - ) + await FileVersionDB.find_one( + FileVersionDB.file_id == ObjectId(file_id), + FileVersionDB.version_num == version, + ) ) is None: raise HTTPException( status_code=404, @@ -373,9 +373,9 @@ async def get_file_metadata( async for md in MetadataDB.find(*query): if md.definition is not None: if ( - md_def := await MetadataDefinitionDB.find_one( - MetadataDefinitionDB.name == md.definition - ) + md_def := await MetadataDefinitionDB.find_one( + MetadataDefinitionDB.name == md.definition + ) ) is not None: md_def = MetadataDefinitionOut(**md_def.dict()) md.description = md_def.description @@ -387,13 +387,13 @@ async def get_file_metadata( @router.delete("/{file_id}/metadata", response_model=MetadataOut) async def delete_file_metadata( - metadata_in: MetadataDelete, - file_id: str, - admin_mode: bool = Depends(get_admin_mode), - # version: Optional[int] = Form(None), - user=Depends(get_current_user), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), - allow: bool = Depends(FileAuthorization("editor")), + metadata_in: MetadataDelete, + file_id: str, + admin_mode: bool = Depends(get_admin_mode), + # version: Optional[int] = Form(None), + user=Depends(get_current_user), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + allow: bool = Depends(FileAuthorization("editor")), ): if (file := await 
FileDB.get(PydanticObjectId(file_id))) is not None: query = [MetadataDB.resource.resource_id == ObjectId(file_id)] @@ -419,9 +419,9 @@ async def delete_file_metadata( if metadata_in.metadata_id is not None: # If a specific metadata_id is provided, delete the matching entry if ( - await MetadataDB.find_one( - MetadataDB.metadata_id == ObjectId(metadata_in.metadata_id) - ) + await MetadataDB.find_one( + MetadataDB.metadata_id == ObjectId(metadata_in.metadata_id) + ) ) is not None: query.append(MetadataDB.metadata_id == metadata_in.metadata_id) else: From 369454cfab4cd43129774262043451fafc6a60d1 Mon Sep 17 00:00:00 2001 From: Chen Wang Date: Mon, 11 Dec 2023 15:24:58 -0600 Subject: [PATCH 23/43] admin can be a part of the profile; no need for additional endpoint --- backend/app/keycloak_auth.py | 16 ++-- backend/app/models/users.py | 3 +- frontend/src/actions/user.js | 19 +---- frontend/src/components/Layout.tsx | 12 ++- frontend/src/openapi/v2/models/UserOut.ts | 1 + .../v2/services/AuthorizationService.ts | 60 ++++++++++++++ .../openapi/v2/services/DatasetsService.ts | 80 +++++++++++++++++++ .../src/openapi/v2/services/FilesService.ts | 55 +++++++++++++ .../src/openapi/v2/services/GroupsService.ts | 30 +++++++ .../openapi/v2/services/MetadataService.ts | 60 ++++++++++++++ frontend/src/types/data.ts | 1 - 11 files changed, 308 insertions(+), 29 deletions(-) diff --git a/backend/app/keycloak_auth.py b/backend/app/keycloak_auth.py index 24d2e5a54..6f4fe8848 100644 --- a/backend/app/keycloak_auth.py +++ b/backend/app/keycloak_auth.py @@ -3,7 +3,7 @@ import logging from datetime import datetime -from fastapi import Security, HTTPException, Depends +from fastapi import Security, HTTPException, Depends, Header from fastapi.security import OAuth2AuthorizationCodeBearer, APIKeyHeader, APIKeyCookie from itsdangerous.exc import BadSignature from itsdangerous.url_safe import URLSafeSerializer @@ -49,9 +49,6 @@ async def get_idp_public_key(): # Passing in API key via header. 
`auto_error=False` makes it so `get_current_user()` runs even if it doesn't find it api_key_header = APIKeyHeader(name="X-API-KEY", auto_error=False) -# Passing in admin mode via header. -admin_mode_header = APIKeyHeader(name="X-ADMIN-MODE", auto_error=False) - # Passing in JWT token via cookie. `auto_error=False` makes it so `get_current_user()` runs even if it doesn't find it. jwt_header = APIKeyCookie(name="Authorization", auto_error=False) @@ -227,8 +224,17 @@ async def get_current_user( ) +async def admin_mode_header( + x_admin_mode: str = Header(default=None, convert_underscores=False) +) -> bool: + """Dependency to read X-ADMIN-MODE header.""" + if x_admin_mode is not None: + return x_admin_mode.lower() == "true" + return False + + async def get_admin_mode( - admin_mode: bool = Security(admin_mode_header), + admin_mode: bool = Depends(admin_mode_header), ) -> bool: """Get Admin mode from Header.""" return admin_mode diff --git a/backend/app/models/users.py b/backend/app/models/users.py index beee19b36..fd168d69f 100644 --- a/backend/app/models/users.py +++ b/backend/app/models/users.py @@ -24,6 +24,8 @@ class UserLogin(BaseModel): class UserDoc(Document, UserBase): + admin: bool + class Settings: name = "users" @@ -31,7 +33,6 @@ class Settings: class UserDB(UserDoc): hashed_password: str = Field() keycloak_id: Optional[str] = None - admin: bool def verify_password(self, password): return pwd_context.verify(password, self.hashed_password) diff --git a/frontend/src/actions/user.js b/frontend/src/actions/user.js index b13a05082..d41d67897 100644 --- a/frontend/src/actions/user.js +++ b/frontend/src/actions/user.js @@ -2,8 +2,7 @@ import { V2 } from "../openapi"; import Cookies from "universal-cookie"; import config from "../app.config"; import { handleErrors } from "./common"; -import {fetchAdmin} from "./authorization"; -import {fetchDatasets} from "./dataset"; +import { fetchDatasets } from "./dataset"; const cookies = new Cookies(); @@ -55,8 +54,8 @@ export 
const SET_USER = "SET_USER"; export const REGISTER_USER = "REGISTER_USER"; export const REGISTER_ERROR = "REGISTER_ERROR"; export const LOGOUT = "LOGOUT"; -export const ADMIN = "ADMIN" -export const ADMIN_MODE = "ADMIN_MODE" +export const ADMIN = "ADMIN"; +export const ADMIN_MODE = "ADMIN_MODE"; export function _legacy_login(email, password) { return async (dispatch) => { @@ -131,19 +130,9 @@ export function fetchAllUsers(skip = 0, limit = 101) { }; } -export const setAdmin = () => { - return async (dispatch) => { - try { - dispatch({type: ADMIN, admin: await V2.LoginService.getAdminApiV2AdminGet()}); - } catch (error) { - dispatch({type: ADMIN, admin: false}); - } - }; -}; - export function toggleAdminMode(currentAdminMode) { return (dispatch) => { - dispatch({type: ADMIN_MODE, adminMode: !currentAdminMode}); + dispatch({ type: ADMIN_MODE, adminMode: !currentAdminMode }); dispatch(fetchDatasets(0, 21, false, !currentAdminMode)); }; } diff --git a/frontend/src/components/Layout.tsx b/frontend/src/components/Layout.tsx index 7f301e3f0..6baf370f4 100644 --- a/frontend/src/components/Layout.tsx +++ b/frontend/src/components/Layout.tsx @@ -30,7 +30,7 @@ import { getCurrEmail } from "../utils/common"; import VpnKeyIcon from "@mui/icons-material/VpnKey"; import LogoutIcon from "@mui/icons-material/Logout"; import { EmbeddedSearch } from "./search/EmbeddedSearch"; -import { setAdmin, toggleAdminMode } from "../actions/user"; +import { toggleAdminMode } from "../actions/user"; import { AdminPanelSettings } from "@mui/icons-material"; const drawerWidth = 240; @@ -106,10 +106,8 @@ export default function PersistentDrawerLeft(props) { const [embeddedSearchHidden, setEmbeddedSearchHidden] = React.useState(false); const [anchorEl, setAnchorEl] = React.useState(null); const isMenuOpen = Boolean(anchorEl); - const admin = useSelector((state: RootState) => state.user.admin); - useEffect(() => { - dispatch(setAdmin()); - }, [dispatch]); + const currUserProfile = useSelector((state: 
RootState) => state.user.profile); + const adminMode = useSelector((state: RootState) => state.user.adminMode); const handleAdminMode = () => { dispatch(toggleAdminMode(adminMode)); @@ -227,7 +225,7 @@ export default function PersistentDrawerLeft(props) { User Profile - {admin && !adminMode ? ( + {currUserProfile.admin && !adminMode ? (
@@ -240,7 +238,7 @@ export default function PersistentDrawerLeft(props) { ) : ( <> )} - {admin && adminMode ? ( + {currUserProfile.admin && adminMode ? (
diff --git a/frontend/src/openapi/v2/models/UserOut.ts b/frontend/src/openapi/v2/models/UserOut.ts index 42c7657d5..f624b5122 100644 --- a/frontend/src/openapi/v2/models/UserOut.ts +++ b/frontend/src/openapi/v2/models/UserOut.ts @@ -20,4 +20,5 @@ export type UserOut = { first_name: string; last_name: string; id?: string; + admin: boolean; } diff --git a/frontend/src/openapi/v2/services/AuthorizationService.ts b/frontend/src/openapi/v2/services/AuthorizationService.ts index 22e2fe267..ac554993f 100644 --- a/frontend/src/openapi/v2/services/AuthorizationService.ts +++ b/frontend/src/openapi/v2/services/AuthorizationService.ts @@ -16,16 +16,21 @@ export class AuthorizationService { * Save authorization info in Mongo. This is a triple of dataset_id/user_id/role/group_id. * @param datasetId * @param requestBody + * @param xAdminMode * @returns AuthorizationOut Successful Response * @throws ApiError */ public static saveAuthorizationApiV2AuthorizationsDatasetsDatasetIdPost( datasetId: string, requestBody: AuthorizationBase, + xAdminMode?: string, ): CancelablePromise { return __request({ method: 'POST', path: `/api/v2/authorizations/datasets/${datasetId}`, + headers: { + 'x_admin_mode': xAdminMode, + }, body: requestBody, mediaType: 'application/json', errors: { @@ -38,15 +43,20 @@ export class AuthorizationService { * Get Dataset Role * Retrieve role of user for a specific dataset. * @param datasetId + * @param xAdminMode * @returns AuthorizationOut Successful Response * @throws ApiError */ public static getDatasetRoleApiV2AuthorizationsDatasetsDatasetIdRoleGet( datasetId: string, + xAdminMode?: string, ): CancelablePromise { return __request({ method: 'GET', path: `/api/v2/authorizations/datasets/${datasetId}/role`, + headers: { + 'x_admin_mode': xAdminMode, + }, errors: { 422: `Validation Error`, }, @@ -58,15 +68,20 @@ export class AuthorizationService { * Used for testing only. 
Returns true if user has viewer permission on dataset, otherwise throws a 403 Forbidden HTTP exception. * See `routers/authorization.py` for more info. * @param datasetId + * @param xAdminMode * @returns any Successful Response * @throws ApiError */ public static getDatasetRoleViewerApiV2AuthorizationsDatasetsDatasetIdRoleViewerGet( datasetId: string, + xAdminMode?: string, ): CancelablePromise { return __request({ method: 'GET', path: `/api/v2/authorizations/datasets/${datasetId}/role/viewer}`, + headers: { + 'x_admin_mode': xAdminMode, + }, errors: { 422: `Validation Error`, }, @@ -78,15 +93,20 @@ export class AuthorizationService { * Used for testing only. Returns true if user has owner permission on dataset, otherwise throws a 403 Forbidden HTTP exception. * See `routers/authorization.py` for more info. * @param datasetId + * @param xAdminMode * @returns any Successful Response * @throws ApiError */ public static getDatasetRoleOwnerApiV2AuthorizationsDatasetsDatasetIdRoleOwnerGet( datasetId: string, + xAdminMode?: string, ): CancelablePromise { return __request({ method: 'GET', path: `/api/v2/authorizations/datasets/${datasetId}/role/owner}`, + headers: { + 'x_admin_mode': xAdminMode, + }, errors: { 422: `Validation Error`, }, @@ -97,16 +117,21 @@ export class AuthorizationService { * Get File Role * @param fileId * @param datasetId + * @param xAdminMode * @returns RoleType Successful Response * @throws ApiError */ public static getFileRoleApiV2AuthorizationsFilesFileIdRoleGet( fileId: string, datasetId?: string, + xAdminMode?: string, ): CancelablePromise { return __request({ method: 'GET', path: `/api/v2/authorizations/files/${fileId}/role}`, + headers: { + 'x_admin_mode': xAdminMode, + }, query: { 'dataset_id': datasetId, }, @@ -120,16 +145,21 @@ export class AuthorizationService { * Get Metadata Role * @param metadataId * @param datasetId + * @param xAdminMode * @returns AuthorizationMetadata Successful Response * @throws ApiError */ public static 
getMetadataRoleApiV2AuthorizationsMetadataMetadataIdRoleGet( metadataId: string, datasetId?: string, + xAdminMode?: string, ): CancelablePromise { return __request({ method: 'GET', path: `/api/v2/authorizations/metadata/${metadataId}/role}`, + headers: { + 'x_admin_mode': xAdminMode, + }, query: { 'dataset_id': datasetId, }, @@ -143,16 +173,21 @@ export class AuthorizationService { * Get Group Role * @param groupId * @param datasetId + * @param xAdminMode * @returns RoleType Successful Response * @throws ApiError */ public static getGroupRoleApiV2AuthorizationsGroupsGroupIdRoleGet( groupId: string, datasetId?: string, + xAdminMode?: string, ): CancelablePromise { return __request({ method: 'GET', path: `/api/v2/authorizations/groups/${groupId}/role}`, + headers: { + 'x_admin_mode': xAdminMode, + }, query: { 'dataset_id': datasetId, }, @@ -168,6 +203,7 @@ export class AuthorizationService { * @param datasetId * @param groupId * @param role + * @param xAdminMode * @returns AuthorizationOut Successful Response * @throws ApiError */ @@ -175,10 +211,14 @@ export class AuthorizationService { datasetId: string, groupId: string, role: RoleType, + xAdminMode?: string, ): CancelablePromise { return __request({ method: 'POST', path: `/api/v2/authorizations/datasets/${datasetId}/group_role/${groupId}/${role}`, + headers: { + 'x_admin_mode': xAdminMode, + }, errors: { 422: `Validation Error`, }, @@ -191,6 +231,7 @@ export class AuthorizationService { * @param datasetId * @param username * @param role + * @param xAdminMode * @returns AuthorizationOut Successful Response * @throws ApiError */ @@ -198,10 +239,14 @@ export class AuthorizationService { datasetId: string, username: string, role: RoleType, + xAdminMode?: string, ): CancelablePromise { return __request({ method: 'POST', path: `/api/v2/authorizations/datasets/${datasetId}/user_role/${username}/${role}`, + headers: { + 'x_admin_mode': xAdminMode, + }, errors: { 422: `Validation Error`, }, @@ -213,16 +258,21 @@ export 
class AuthorizationService { * Remove any role the group has with a specific dataset. * @param datasetId * @param groupId + * @param xAdminMode * @returns AuthorizationOut Successful Response * @throws ApiError */ public static removeDatasetGroupRoleApiV2AuthorizationsDatasetsDatasetIdGroupRoleGroupIdDelete( datasetId: string, groupId: string, + xAdminMode?: string, ): CancelablePromise { return __request({ method: 'DELETE', path: `/api/v2/authorizations/datasets/${datasetId}/group_role/${groupId}`, + headers: { + 'x_admin_mode': xAdminMode, + }, errors: { 422: `Validation Error`, }, @@ -234,16 +284,21 @@ export class AuthorizationService { * Remove any role the user has with a specific dataset. * @param datasetId * @param username + * @param xAdminMode * @returns AuthorizationOut Successful Response * @throws ApiError */ public static removeDatasetUserRoleApiV2AuthorizationsDatasetsDatasetIdUserRoleUsernameDelete( datasetId: string, username: string, + xAdminMode?: string, ): CancelablePromise { return __request({ method: 'DELETE', path: `/api/v2/authorizations/datasets/${datasetId}/user_role/${username}`, + headers: { + 'x_admin_mode': xAdminMode, + }, errors: { 422: `Validation Error`, }, @@ -254,15 +309,20 @@ export class AuthorizationService { * Get Dataset Roles * Get a list of all users and groups that have assigned roles on this dataset. 
* @param datasetId + * @param xAdminMode * @returns DatasetRoles Successful Response * @throws ApiError */ public static getDatasetRolesApiV2AuthorizationsDatasetsDatasetIdRolesGet( datasetId: string, + xAdminMode?: string, ): CancelablePromise { return __request({ method: 'GET', path: `/api/v2/authorizations/datasets/${datasetId}/roles}`, + headers: { + 'x_admin_mode': xAdminMode, + }, errors: { 422: `Validation Error`, }, diff --git a/frontend/src/openapi/v2/services/DatasetsService.ts b/frontend/src/openapi/v2/services/DatasetsService.ts index e4c725d43..3b14fac45 100644 --- a/frontend/src/openapi/v2/services/DatasetsService.ts +++ b/frontend/src/openapi/v2/services/DatasetsService.ts @@ -23,6 +23,7 @@ export class DatasetsService { * @param limit * @param mine * @param datasetId + * @param xAdminMode * @returns DatasetOut Successful Response * @throws ApiError */ @@ -31,10 +32,14 @@ export class DatasetsService { limit: number = 10, mine: boolean = false, datasetId?: string, + xAdminMode?: string, ): CancelablePromise> { return __request({ method: 'GET', path: `/api/v2/datasets`, + headers: { + 'x_admin_mode': xAdminMode, + }, query: { 'skip': skip, 'limit': limit, @@ -70,15 +75,20 @@ export class DatasetsService { /** * Get Dataset * @param datasetId + * @param xAdminMode * @returns DatasetOut Successful Response * @throws ApiError */ public static getDatasetApiV2DatasetsDatasetIdGet( datasetId: string, + xAdminMode?: string, ): CancelablePromise { return __request({ method: 'GET', path: `/api/v2/datasets/${datasetId}`, + headers: { + 'x_admin_mode': xAdminMode, + }, errors: { 422: `Validation Error`, }, @@ -89,16 +99,21 @@ export class DatasetsService { * Edit Dataset * @param datasetId * @param requestBody + * @param xAdminMode * @returns DatasetOut Successful Response * @throws ApiError */ public static editDatasetApiV2DatasetsDatasetIdPut( datasetId: string, requestBody: DatasetBase, + xAdminMode?: string, ): CancelablePromise { return __request({ method: 
'PUT', path: `/api/v2/datasets/${datasetId}`, + headers: { + 'x_admin_mode': xAdminMode, + }, body: requestBody, mediaType: 'application/json', errors: { @@ -110,15 +125,20 @@ export class DatasetsService { /** * Delete Dataset * @param datasetId + * @param xAdminMode * @returns any Successful Response * @throws ApiError */ public static deleteDatasetApiV2DatasetsDatasetIdDelete( datasetId: string, + xAdminMode?: string, ): CancelablePromise { return __request({ method: 'DELETE', path: `/api/v2/datasets/${datasetId}`, + headers: { + 'x_admin_mode': xAdminMode, + }, errors: { 422: `Validation Error`, }, @@ -129,16 +149,21 @@ export class DatasetsService { * Patch Dataset * @param datasetId * @param requestBody + * @param xAdminMode * @returns DatasetOut Successful Response * @throws ApiError */ public static patchDatasetApiV2DatasetsDatasetIdPatch( datasetId: string, requestBody: DatasetPatch, + xAdminMode?: string, ): CancelablePromise { return __request({ method: 'PATCH', path: `/api/v2/datasets/${datasetId}`, + headers: { + 'x_admin_mode': xAdminMode, + }, body: requestBody, mediaType: 'application/json', errors: { @@ -153,6 +178,7 @@ export class DatasetsService { * @param folderId * @param skip * @param limit + * @param xAdminMode * @returns FileOut Successful Response * @throws ApiError */ @@ -161,10 +187,14 @@ export class DatasetsService { folderId?: string, skip?: number, limit: number = 10, + xAdminMode?: string, ): CancelablePromise> { return __request({ method: 'GET', path: `/api/v2/datasets/${datasetId}/files`, + headers: { + 'x_admin_mode': xAdminMode, + }, query: { 'folder_id': folderId, 'skip': skip, @@ -181,6 +211,7 @@ export class DatasetsService { * @param datasetId * @param formData * @param folderId + * @param xAdminMode * @returns FileOut Successful Response * @throws ApiError */ @@ -188,10 +219,14 @@ export class DatasetsService { datasetId: string, formData: Body_save_file_api_v2_datasets__dataset_id__files_post, folderId?: string, + 
xAdminMode?: string, ): CancelablePromise { return __request({ method: 'POST', path: `/api/v2/datasets/${datasetId}/files`, + headers: { + 'x_admin_mode': xAdminMode, + }, query: { 'folder_id': folderId, }, @@ -209,6 +244,7 @@ export class DatasetsService { * @param parentFolder * @param skip * @param limit + * @param xAdminMode * @returns FolderOut Successful Response * @throws ApiError */ @@ -217,10 +253,14 @@ export class DatasetsService { parentFolder?: string, skip?: number, limit: number = 10, + xAdminMode?: string, ): CancelablePromise> { return __request({ method: 'GET', path: `/api/v2/datasets/${datasetId}/folders`, + headers: { + 'x_admin_mode': xAdminMode, + }, query: { 'parent_folder': parentFolder, 'skip': skip, @@ -236,16 +276,21 @@ export class DatasetsService { * Add Folder * @param datasetId * @param requestBody + * @param xAdminMode * @returns FolderOut Successful Response * @throws ApiError */ public static addFolderApiV2DatasetsDatasetIdFoldersPost( datasetId: string, requestBody: FolderIn, + xAdminMode?: string, ): CancelablePromise { return __request({ method: 'POST', path: `/api/v2/datasets/${datasetId}/folders`, + headers: { + 'x_admin_mode': xAdminMode, + }, body: requestBody, mediaType: 'application/json', errors: { @@ -258,16 +303,21 @@ export class DatasetsService { * Delete Folder * @param datasetId * @param folderId + * @param xAdminMode * @returns any Successful Response * @throws ApiError */ public static deleteFolderApiV2DatasetsDatasetIdFoldersFolderIdDelete( datasetId: string, folderId: string, + xAdminMode?: string, ): CancelablePromise { return __request({ method: 'DELETE', path: `/api/v2/datasets/${datasetId}/folders/${folderId}`, + headers: { + 'x_admin_mode': xAdminMode, + }, errors: { 422: `Validation Error`, }, @@ -279,6 +329,7 @@ export class DatasetsService { * @param datasetId * @param formData * @param folderId + * @param xAdminMode * @returns FileOut Successful Response * @throws ApiError */ @@ -286,10 +337,14 @@ 
export class DatasetsService { datasetId: string, formData: Body_save_files_api_v2_datasets__dataset_id__filesMultiple_post, folderId?: string, + xAdminMode?: string, ): CancelablePromise> { return __request({ method: 'POST', path: `/api/v2/datasets/${datasetId}/filesMultiple`, + headers: { + 'x_admin_mode': xAdminMode, + }, query: { 'folder_id': folderId, }, @@ -306,6 +361,7 @@ export class DatasetsService { * @param datasetId * @param requestBody * @param folderId + * @param xAdminMode * @returns FileOut Successful Response * @throws ApiError */ @@ -313,10 +369,14 @@ export class DatasetsService { datasetId: string, requestBody: LocalFileIn, folderId?: string, + xAdminMode?: string, ): CancelablePromise { return __request({ method: 'POST', path: `/api/v2/datasets/${datasetId}/local_files`, + headers: { + 'x_admin_mode': xAdminMode, + }, query: { 'folder_id': folderId, }, @@ -351,15 +411,20 @@ export class DatasetsService { /** * Download Dataset * @param datasetId + * @param xAdminMode * @returns DatasetOut Successful Response * @throws ApiError */ public static downloadDatasetApiV2DatasetsDatasetIdDownloadGet( datasetId: string, + xAdminMode?: string, ): CancelablePromise { return __request({ method: 'GET', path: `/api/v2/datasets/${datasetId}/download`, + headers: { + 'x_admin_mode': xAdminMode, + }, errors: { 422: `Validation Error`, }, @@ -370,6 +435,7 @@ export class DatasetsService { * Get Dataset Extract * @param datasetId * @param extractorName + * @param xAdminMode * @param requestBody * @returns any Successful Response * @throws ApiError @@ -377,11 +443,15 @@ export class DatasetsService { public static getDatasetExtractApiV2DatasetsDatasetIdExtractPost( datasetId: string, extractorName: string, + xAdminMode?: string, requestBody?: any, ): CancelablePromise { return __request({ method: 'POST', path: `/api/v2/datasets/${datasetId}/extract`, + headers: { + 'x_admin_mode': xAdminMode, + }, query: { 'extractorName': extractorName, }, @@ -396,15 +466,20 @@ 
export class DatasetsService { /** * Download Dataset Thumbnail * @param datasetId + * @param xAdminMode * @returns any Successful Response * @throws ApiError */ public static downloadDatasetThumbnailApiV2DatasetsDatasetIdThumbnailGet( datasetId: string, + xAdminMode?: string, ): CancelablePromise { return __request({ method: 'GET', path: `/api/v2/datasets/${datasetId}/thumbnail`, + headers: { + 'x_admin_mode': xAdminMode, + }, errors: { 422: `Validation Error`, }, @@ -415,16 +490,21 @@ export class DatasetsService { * Add Dataset Thumbnail * @param datasetId * @param thumbnailId + * @param xAdminMode * @returns DatasetOut Successful Response * @throws ApiError */ public static addDatasetThumbnailApiV2DatasetsDatasetIdThumbnailThumbnailIdPatch( datasetId: string, thumbnailId: string, + xAdminMode?: string, ): CancelablePromise { return __request({ method: 'PATCH', path: `/api/v2/datasets/${datasetId}/thumbnail/${thumbnailId}`, + headers: { + 'x_admin_mode': xAdminMode, + }, errors: { 422: `Validation Error`, }, diff --git a/frontend/src/openapi/v2/services/FilesService.ts b/frontend/src/openapi/v2/services/FilesService.ts index 6a48df5f9..b4334e08f 100644 --- a/frontend/src/openapi/v2/services/FilesService.ts +++ b/frontend/src/openapi/v2/services/FilesService.ts @@ -15,6 +15,7 @@ export class FilesService { * @param version * @param increment * @param datasetId + * @param xAdminMode * @returns any Successful Response * @throws ApiError */ @@ -23,10 +24,14 @@ export class FilesService { version?: number, increment: boolean = true, datasetId?: string, + xAdminMode?: string, ): CancelablePromise { return __request({ method: 'GET', path: `/api/v2/files/${fileId}`, + headers: { + 'x_admin_mode': xAdminMode, + }, query: { 'version': version, 'increment': increment, @@ -43,6 +48,7 @@ export class FilesService { * @param fileId * @param formData * @param datasetId + * @param xAdminMode * @returns FileOut Successful Response * @throws ApiError */ @@ -50,10 +56,14 @@ export 
class FilesService { fileId: string, formData: Body_update_file_api_v2_files__file_id__put, datasetId?: string, + xAdminMode?: string, ): CancelablePromise { return __request({ method: 'PUT', path: `/api/v2/files/${fileId}`, + headers: { + 'x_admin_mode': xAdminMode, + }, query: { 'dataset_id': datasetId, }, @@ -69,16 +79,21 @@ export class FilesService { * Delete File * @param fileId * @param datasetId + * @param xAdminMode * @returns any Successful Response * @throws ApiError */ public static deleteFileApiV2FilesFileIdDelete( fileId: string, datasetId?: string, + xAdminMode?: string, ): CancelablePromise { return __request({ method: 'DELETE', path: `/api/v2/files/${fileId}`, + headers: { + 'x_admin_mode': xAdminMode, + }, query: { 'dataset_id': datasetId, }, @@ -94,6 +109,7 @@ export class FilesService { * @param version * @param expiresInSeconds * @param datasetId + * @param xAdminMode * @returns any Successful Response * @throws ApiError */ @@ -102,10 +118,14 @@ export class FilesService { version?: number, expiresInSeconds: number = 3600, datasetId?: string, + xAdminMode?: string, ): CancelablePromise { return __request({ method: 'GET', path: `/api/v2/files/${fileId}/url/`, + headers: { + 'x_admin_mode': xAdminMode, + }, query: { 'version': version, 'expires_in_seconds': expiresInSeconds, @@ -121,16 +141,21 @@ export class FilesService { * Get File Summary * @param fileId * @param datasetId + * @param xAdminMode * @returns FileOut Successful Response * @throws ApiError */ public static getFileSummaryApiV2FilesFileIdSummaryGet( fileId: string, datasetId?: string, + xAdminMode?: string, ): CancelablePromise { return __request({ method: 'GET', path: `/api/v2/files/${fileId}/summary`, + headers: { + 'x_admin_mode': xAdminMode, + }, query: { 'dataset_id': datasetId, }, @@ -145,6 +170,7 @@ export class FilesService { * @param fileId * @param versionNum * @param datasetId + * @param xAdminMode * @returns FileOut Successful Response * @throws ApiError */ @@ -152,10 
+178,14 @@ export class FilesService { fileId: string, versionNum?: number, datasetId?: string, + xAdminMode?: string, ): CancelablePromise { return __request({ method: 'GET', path: `/api/v2/files/${fileId}/version_details`, + headers: { + 'x_admin_mode': xAdminMode, + }, query: { 'version_num': versionNum, 'dataset_id': datasetId, @@ -172,6 +202,7 @@ export class FilesService { * @param skip * @param limit * @param datasetId + * @param xAdminMode * @returns FileVersion Successful Response * @throws ApiError */ @@ -180,10 +211,14 @@ export class FilesService { skip?: number, limit: number = 20, datasetId?: string, + xAdminMode?: string, ): CancelablePromise> { return __request({ method: 'GET', path: `/api/v2/files/${fileId}/versions`, + headers: { + 'x_admin_mode': xAdminMode, + }, query: { 'skip': skip, 'limit': limit, @@ -200,6 +235,7 @@ export class FilesService { * @param fileId * @param extractorName * @param datasetId + * @param xAdminMode * @param requestBody * @returns any Successful Response * @throws ApiError @@ -208,11 +244,15 @@ export class FilesService { fileId: string, extractorName: string, datasetId?: string, + xAdminMode?: string, requestBody?: any, ): CancelablePromise { return __request({ method: 'POST', path: `/api/v2/files/${fileId}/extract`, + headers: { + 'x_admin_mode': xAdminMode, + }, query: { 'extractorName': extractorName, 'dataset_id': datasetId, @@ -237,16 +277,21 @@ export class FilesService { * rabbitmq_client: Rabbitmq Client * @param fileId * @param datasetId + * @param xAdminMode * @returns any Successful Response * @throws ApiError */ public static resubmitFileExtractionsApiV2FilesFileIdResubmitExtractPost( fileId: string, datasetId?: string, + xAdminMode?: string, ): CancelablePromise { return __request({ method: 'POST', path: `/api/v2/files/${fileId}/resubmit_extract`, + headers: { + 'x_admin_mode': xAdminMode, + }, query: { 'dataset_id': datasetId, }, @@ -260,16 +305,21 @@ export class FilesService { * Download File Thumbnail 
* @param fileId * @param datasetId + * @param xAdminMode * @returns any Successful Response * @throws ApiError */ public static downloadFileThumbnailApiV2FilesFileIdThumbnailGet( fileId: string, datasetId?: string, + xAdminMode?: string, ): CancelablePromise { return __request({ method: 'GET', path: `/api/v2/files/${fileId}/thumbnail`, + headers: { + 'x_admin_mode': xAdminMode, + }, query: { 'dataset_id': datasetId, }, @@ -284,6 +334,7 @@ export class FilesService { * @param fileId * @param thumbnailId * @param datasetId + * @param xAdminMode * @returns FileOut Successful Response * @throws ApiError */ @@ -291,10 +342,14 @@ export class FilesService { fileId: string, thumbnailId: string, datasetId?: string, + xAdminMode?: string, ): CancelablePromise { return __request({ method: 'PATCH', path: `/api/v2/files/${fileId}/thumbnail/${thumbnailId}`, + headers: { + 'x_admin_mode': xAdminMode, + }, query: { 'dataset_id': datasetId, }, diff --git a/frontend/src/openapi/v2/services/GroupsService.ts b/frontend/src/openapi/v2/services/GroupsService.ts index 6c20074ce..9d2dd379b 100644 --- a/frontend/src/openapi/v2/services/GroupsService.ts +++ b/frontend/src/openapi/v2/services/GroupsService.ts @@ -94,16 +94,21 @@ export class GroupsService { * Get Group * @param groupId * @param datasetId + * @param xAdminMode * @returns GroupOut Successful Response * @throws ApiError */ public static getGroupApiV2GroupsGroupIdGet( groupId: string, datasetId?: string, + xAdminMode?: string, ): CancelablePromise { return __request({ method: 'GET', path: `/api/v2/groups/${groupId}`, + headers: { + 'x_admin_mode': xAdminMode, + }, query: { 'dataset_id': datasetId, }, @@ -118,6 +123,7 @@ export class GroupsService { * @param groupId * @param requestBody * @param datasetId + * @param xAdminMode * @returns GroupOut Successful Response * @throws ApiError */ @@ -125,10 +131,14 @@ export class GroupsService { groupId: string, requestBody: GroupBase, datasetId?: string, + xAdminMode?: string, ): 
CancelablePromise { return __request({ method: 'PUT', path: `/api/v2/groups/${groupId}`, + headers: { + 'x_admin_mode': xAdminMode, + }, query: { 'dataset_id': datasetId, }, @@ -144,16 +154,21 @@ export class GroupsService { * Delete Group * @param groupId * @param datasetId + * @param xAdminMode * @returns GroupOut Successful Response * @throws ApiError */ public static deleteGroupApiV2GroupsGroupIdDelete( groupId: string, datasetId?: string, + xAdminMode?: string, ): CancelablePromise { return __request({ method: 'DELETE', path: `/api/v2/groups/${groupId}`, + headers: { + 'x_admin_mode': xAdminMode, + }, query: { 'dataset_id': datasetId, }, @@ -170,6 +185,7 @@ export class GroupsService { * @param username * @param role * @param datasetId + * @param xAdminMode * @returns GroupOut Successful Response * @throws ApiError */ @@ -178,10 +194,14 @@ export class GroupsService { username: string, role?: string, datasetId?: string, + xAdminMode?: string, ): CancelablePromise { return __request({ method: 'POST', path: `/api/v2/groups/${groupId}/add/${username}`, + headers: { + 'x_admin_mode': xAdminMode, + }, query: { 'role': role, 'dataset_id': datasetId, @@ -198,6 +218,7 @@ export class GroupsService { * @param groupId * @param username * @param datasetId + * @param xAdminMode * @returns GroupOut Successful Response * @throws ApiError */ @@ -205,10 +226,14 @@ export class GroupsService { groupId: string, username: string, datasetId?: string, + xAdminMode?: string, ): CancelablePromise { return __request({ method: 'POST', path: `/api/v2/groups/${groupId}/remove/${username}`, + headers: { + 'x_admin_mode': xAdminMode, + }, query: { 'dataset_id': datasetId, }, @@ -225,6 +250,7 @@ export class GroupsService { * @param username * @param role * @param datasetId + * @param xAdminMode * @returns GroupOut Successful Response * @throws ApiError */ @@ -233,10 +259,14 @@ export class GroupsService { username: string, role: string, datasetId?: string, + xAdminMode?: string, ): 
CancelablePromise { return __request({ method: 'PUT', path: `/api/v2/groups/${groupId}/update/${username}`, + headers: { + 'x_admin_mode': xAdminMode, + }, query: { 'role': role, 'dataset_id': datasetId, diff --git a/frontend/src/openapi/v2/services/MetadataService.ts b/frontend/src/openapi/v2/services/MetadataService.ts index 3090fc9c6..a4a512bc4 100644 --- a/frontend/src/openapi/v2/services/MetadataService.ts +++ b/frontend/src/openapi/v2/services/MetadataService.ts @@ -135,16 +135,21 @@ export class MetadataService { * Delete metadata by specific ID. * @param metadataId * @param datasetId + * @param xAdminMode * @returns any Successful Response * @throws ApiError */ public static deleteMetadataApiV2MetadataMetadataIdDelete( metadataId: string, datasetId?: string, + xAdminMode?: string, ): CancelablePromise { return __request({ method: 'DELETE', path: `/api/v2/metadata/${metadataId}`, + headers: { + 'x_admin_mode': xAdminMode, + }, query: { 'dataset_id': datasetId, }, @@ -164,6 +169,7 @@ export class MetadataService { * @param metadataId * @param requestBody * @param datasetId + * @param xAdminMode * @returns MetadataOut Successful Response * @throws ApiError */ @@ -171,10 +177,14 @@ export class MetadataService { metadataId: string, requestBody: MetadataPatch, datasetId?: string, + xAdminMode?: string, ): CancelablePromise { return __request({ method: 'PATCH', path: `/api/v2/metadata/${metadataId}`, + headers: { + 'x_admin_mode': xAdminMode, + }, query: { 'dataset_id': datasetId, }, @@ -193,6 +203,7 @@ export class MetadataService { * @param version * @param allVersions * @param datasetId + * @param xAdminMode * @param formData * @returns MetadataOut Successful Response * @throws ApiError @@ -202,11 +213,15 @@ export class MetadataService { version?: number, allVersions: boolean = false, datasetId?: string, + xAdminMode?: string, formData?: Body_get_file_metadata_api_v2_files__file_id__metadata_get, ): CancelablePromise> { return __request({ method: 'GET', path: 
`/api/v2/files/${fileId}/metadata`, + headers: { + 'x_admin_mode': xAdminMode, + }, query: { 'version': version, 'all_versions': allVersions, @@ -229,6 +244,7 @@ export class MetadataService { * @param fileId * @param requestBody * @param datasetId + * @param xAdminMode * @returns MetadataOut Successful Response * @throws ApiError */ @@ -236,10 +252,14 @@ export class MetadataService { fileId: string, requestBody: MetadataPatch, datasetId?: string, + xAdminMode?: string, ): CancelablePromise { return __request({ method: 'PUT', path: `/api/v2/files/${fileId}/metadata`, + headers: { + 'x_admin_mode': xAdminMode, + }, query: { 'dataset_id': datasetId, }, @@ -261,6 +281,7 @@ export class MetadataService { * @param fileId * @param requestBody * @param datasetId + * @param xAdminMode * @returns MetadataOut Successful Response * @throws ApiError */ @@ -268,10 +289,14 @@ export class MetadataService { fileId: string, requestBody: MetadataIn, datasetId?: string, + xAdminMode?: string, ): CancelablePromise { return __request({ method: 'POST', path: `/api/v2/files/${fileId}/metadata`, + headers: { + 'x_admin_mode': xAdminMode, + }, query: { 'dataset_id': datasetId, }, @@ -288,6 +313,7 @@ export class MetadataService { * @param fileId * @param requestBody * @param datasetId + * @param xAdminMode * @returns MetadataOut Successful Response * @throws ApiError */ @@ -295,10 +321,14 @@ export class MetadataService { fileId: string, requestBody: MetadataDelete, datasetId?: string, + xAdminMode?: string, ): CancelablePromise { return __request({ method: 'DELETE', path: `/api/v2/files/${fileId}/metadata`, + headers: { + 'x_admin_mode': xAdminMode, + }, query: { 'dataset_id': datasetId, }, @@ -320,6 +350,7 @@ export class MetadataService { * @param fileId * @param requestBody * @param datasetId + * @param xAdminMode * @returns MetadataOut Successful Response * @throws ApiError */ @@ -327,10 +358,14 @@ export class MetadataService { fileId: string, requestBody: MetadataPatch, 
datasetId?: string, + xAdminMode?: string, ): CancelablePromise { return __request({ method: 'PATCH', path: `/api/v2/files/${fileId}/metadata`, + headers: { + 'x_admin_mode': xAdminMode, + }, query: { 'dataset_id': datasetId, }, @@ -345,17 +380,22 @@ export class MetadataService { /** * Get Dataset Metadata * @param datasetId + * @param xAdminMode * @param formData * @returns MetadataOut Successful Response * @throws ApiError */ public static getDatasetMetadataApiV2DatasetsDatasetIdMetadataGet( datasetId: string, + xAdminMode?: string, formData?: Body_get_dataset_metadata_api_v2_datasets__dataset_id__metadata_get, ): CancelablePromise> { return __request({ method: 'GET', path: `/api/v2/datasets/${datasetId}/metadata`, + headers: { + 'x_admin_mode': xAdminMode, + }, formData: formData, mediaType: 'application/x-www-form-urlencoded', errors: { @@ -373,16 +413,21 @@ export class MetadataService { * Metadata document that was updated * @param datasetId * @param requestBody + * @param xAdminMode * @returns MetadataOut Successful Response * @throws ApiError */ public static replaceDatasetMetadataApiV2DatasetsDatasetIdMetadataPut( datasetId: string, requestBody: MetadataIn, + xAdminMode?: string, ): CancelablePromise { return __request({ method: 'PUT', path: `/api/v2/datasets/${datasetId}/metadata`, + headers: { + 'x_admin_mode': xAdminMode, + }, body: requestBody, mediaType: 'application/json', errors: { @@ -400,16 +445,21 @@ export class MetadataService { * Metadata document that was added to database * @param datasetId * @param requestBody + * @param xAdminMode * @returns MetadataOut Successful Response * @throws ApiError */ public static addDatasetMetadataApiV2DatasetsDatasetIdMetadataPost( datasetId: string, requestBody: MetadataIn, + xAdminMode?: string, ): CancelablePromise { return __request({ method: 'POST', path: `/api/v2/datasets/${datasetId}/metadata`, + headers: { + 'x_admin_mode': xAdminMode, + }, body: requestBody, mediaType: 'application/json', errors: { 
@@ -422,16 +472,21 @@ export class MetadataService { * Delete Dataset Metadata * @param datasetId * @param requestBody + * @param xAdminMode * @returns MetadataOut Successful Response * @throws ApiError */ public static deleteDatasetMetadataApiV2DatasetsDatasetIdMetadataDelete( datasetId: string, requestBody: MetadataDelete, + xAdminMode?: string, ): CancelablePromise { return __request({ method: 'DELETE', path: `/api/v2/datasets/${datasetId}/metadata`, + headers: { + 'x_admin_mode': xAdminMode, + }, body: requestBody, mediaType: 'application/json', errors: { @@ -449,16 +504,21 @@ export class MetadataService { * Metadata document that was updated * @param datasetId * @param requestBody + * @param xAdminMode * @returns MetadataOut Successful Response * @throws ApiError */ public static updateDatasetMetadataApiV2DatasetsDatasetIdMetadataPatch( datasetId: string, requestBody: MetadataPatch, + xAdminMode?: string, ): CancelablePromise { return __request({ method: 'PATCH', path: `/api/v2/datasets/${datasetId}/metadata`, + headers: { + 'x_admin_mode': xAdminMode, + }, body: requestBody, mediaType: 'application/json', errors: { diff --git a/frontend/src/types/data.ts b/frontend/src/types/data.ts index dd3558187..07b81956c 100644 --- a/frontend/src/types/data.ts +++ b/frontend/src/types/data.ts @@ -199,7 +199,6 @@ export interface UserState { hashedKey: string; apiKeys: UserAPIKeyOut[]; profile: UserOut; - admin: boolean; adminMode: boolean; } From 67645a047896a13b48d4a5293f08343d84dd1966 Mon Sep 17 00:00:00 2001 From: Chen Wang Date: Mon, 11 Dec 2023 15:43:39 -0600 Subject: [PATCH 24/43] fix drop/enable admin toggle --- frontend/src/actions/user.js | 1 - frontend/src/components/Layout.tsx | 50 +++++++++++++---------- frontend/src/components/users/Profile.tsx | 9 ++-- frontend/src/reducers/user.ts | 12 ++---- frontend/src/types/action.ts | 6 --- 5 files changed, 38 insertions(+), 40 deletions(-) diff --git a/frontend/src/actions/user.js b/frontend/src/actions/user.js 
index d41d67897..1b584c743 100644 --- a/frontend/src/actions/user.js +++ b/frontend/src/actions/user.js @@ -54,7 +54,6 @@ export const SET_USER = "SET_USER"; export const REGISTER_USER = "REGISTER_USER"; export const REGISTER_ERROR = "REGISTER_ERROR"; export const LOGOUT = "LOGOUT"; -export const ADMIN = "ADMIN"; export const ADMIN_MODE = "ADMIN_MODE"; export function _legacy_login(email, password) { diff --git a/frontend/src/components/Layout.tsx b/frontend/src/components/Layout.tsx index 6baf370f4..ff9cc1def 100644 --- a/frontend/src/components/Layout.tsx +++ b/frontend/src/components/Layout.tsx @@ -30,7 +30,7 @@ import { getCurrEmail } from "../utils/common"; import VpnKeyIcon from "@mui/icons-material/VpnKey"; import LogoutIcon from "@mui/icons-material/Logout"; import { EmbeddedSearch } from "./search/EmbeddedSearch"; -import { toggleAdminMode } from "../actions/user"; +import { fetchUserProfile, toggleAdminMode } from "../actions/user"; import { AdminPanelSettings } from "@mui/icons-material"; const drawerWidth = 240; @@ -109,6 +109,12 @@ export default function PersistentDrawerLeft(props) { const currUserProfile = useSelector((state: RootState) => state.user.profile); const adminMode = useSelector((state: RootState) => state.user.adminMode); + const fetchCurrUserProfile = () => dispatch(fetchUserProfile()); + + useEffect(() => { + fetchCurrUserProfile(); + }, []); + const handleAdminMode = () => { dispatch(toggleAdminMode(adminMode)); }; @@ -225,33 +231,33 @@ export default function PersistentDrawerLeft(props) { User Profile - {currUserProfile.admin && !adminMode ? ( -
- + + {currUserProfile.admin ? ( + <> - - - - Admin Mode + {adminMode ? ( + <> + + + + Enable Admin Mode + + ) : ( + <> + + + + Drop Admin Mode + + )} -
- ) : ( - <> - )} - {currUserProfile.admin && adminMode ? ( -
+ - - - - - Drop Admin Mode - -
+ ) : ( <> )} - + diff --git a/frontend/src/components/users/Profile.tsx b/frontend/src/components/users/Profile.tsx index b77738c73..6341f5f19 100644 --- a/frontend/src/components/users/Profile.tsx +++ b/frontend/src/components/users/Profile.tsx @@ -13,8 +13,7 @@ import { fetchUserProfile } from "../../actions/user"; export const Profile = (): JSX.Element => { const dispatch = useDispatch(); - const user = useSelector((state: RootState) => state.user); - const profile = user["profile"]; + const profile = useSelector((state: RootState) => state.user.profile); const fetchProfile = () => dispatch(fetchUserProfile()); // component did mount useEffect(() => { @@ -42,7 +41,11 @@ export const Profile = (): JSX.Element => { {profile.first_name} {profile.last_name} {profile.email} - {user.admin? Admin: Not admin} + {profile.admin ? ( + Admin + ) : ( + Not admin + )} diff --git a/frontend/src/reducers/user.ts b/frontend/src/reducers/user.ts index 5af98bb04..bf349baba 100644 --- a/frontend/src/reducers/user.ts +++ b/frontend/src/reducers/user.ts @@ -1,5 +1,5 @@ import { - ADMIN, ADMIN_MODE, + ADMIN_MODE, DELETE_API_KEY, GENERATE_API_KEY, LIST_API_KEYS, @@ -12,28 +12,24 @@ import { } from "../actions/user"; import { UserState } from "../types/data"; import { DataAction } from "../types/action"; +import { UserOut } from "../openapi/v2"; const defaultState: UserState = { Authorization: null, loginError: false, - admin: false, adminMode: false, registerSucceeded: false, errorMsg: "", hashedKey: "", apiKeys: [], - profile: null, + profile: {}, }; const user = (state = defaultState, action: DataAction) => { switch (action.type) { case ADMIN_MODE: return Object.assign({}, state, { - adminMode: action.adminMode - }); - case ADMIN: - return Object.assign({}, state, { - admin: action.admin + adminMode: action.adminMode, }); case SET_USER: return Object.assign({}, state, { diff --git a/frontend/src/types/action.ts b/frontend/src/types/action.ts index ed63ca940..febf481ed 100644 --- 
a/frontend/src/types/action.ts +++ b/frontend/src/types/action.ts @@ -101,11 +101,6 @@ interface SET_USER { Authorization: string; } -interface ADMIN { - type: "ADMIN"; - admin: boolean; -} - interface ADMIN_MODE { type: "ADMIN_MODE"; adminMode: boolean; @@ -466,7 +461,6 @@ interface RESET_VIS_DATA_PRESIGNED_URL { } export type DataAction = - | ADMIN | ADMIN_MODE | RECEIVE_FILES_IN_DATASET | RECEIVE_FOLDERS_IN_DATASET From 957e9aa9ef6fa119af6268e9510d9460477c96ac Mon Sep 17 00:00:00 2001 From: Chen Wang Date: Mon, 11 Dec 2023 15:54:05 -0600 Subject: [PATCH 25/43] simplify admin logic --- frontend/src/actions/user.js | 8 +++----- frontend/src/components/Layout.tsx | 16 ++++++++-------- frontend/src/reducers/user.ts | 6 +++--- frontend/src/types/action.ts | 8 ++++---- 4 files changed, 18 insertions(+), 20 deletions(-) diff --git a/frontend/src/actions/user.js b/frontend/src/actions/user.js index 1b584c743..2ebdc40d7 100644 --- a/frontend/src/actions/user.js +++ b/frontend/src/actions/user.js @@ -2,7 +2,6 @@ import { V2 } from "../openapi"; import Cookies from "universal-cookie"; import config from "../app.config"; import { handleErrors } from "./common"; -import { fetchDatasets } from "./dataset"; const cookies = new Cookies(); @@ -54,7 +53,7 @@ export const SET_USER = "SET_USER"; export const REGISTER_USER = "REGISTER_USER"; export const REGISTER_ERROR = "REGISTER_ERROR"; export const LOGOUT = "LOGOUT"; -export const ADMIN_MODE = "ADMIN_MODE"; +export const TOGGLE_ADMIN_MODE = "TOGGLE_ADMIN_MODE"; export function _legacy_login(email, password) { return async (dispatch) => { @@ -129,10 +128,9 @@ export function fetchAllUsers(skip = 0, limit = 101) { }; } -export function toggleAdminMode(currentAdminMode) { +export function toggleAdminMode() { return (dispatch) => { - dispatch({ type: ADMIN_MODE, adminMode: !currentAdminMode }); - dispatch(fetchDatasets(0, 21, false, !currentAdminMode)); + dispatch({ type: TOGGLE_ADMIN_MODE, receivedAt: Date.now() }); }; } diff --git 
a/frontend/src/components/Layout.tsx b/frontend/src/components/Layout.tsx index ff9cc1def..be0aff0a8 100644 --- a/frontend/src/components/Layout.tsx +++ b/frontend/src/components/Layout.tsx @@ -30,7 +30,10 @@ import { getCurrEmail } from "../utils/common"; import VpnKeyIcon from "@mui/icons-material/VpnKey"; import LogoutIcon from "@mui/icons-material/Logout"; import { EmbeddedSearch } from "./search/EmbeddedSearch"; -import { fetchUserProfile, toggleAdminMode } from "../actions/user"; +import { + fetchUserProfile, + toggleAdminMode as toggleAdminModeAction, +} from "../actions/user"; import { AdminPanelSettings } from "@mui/icons-material"; const drawerWidth = 240; @@ -110,15 +113,12 @@ export default function PersistentDrawerLeft(props) { const adminMode = useSelector((state: RootState) => state.user.adminMode); const fetchCurrUserProfile = () => dispatch(fetchUserProfile()); + const toggleAdminMode = () => dispatch(toggleAdminModeAction()); useEffect(() => { fetchCurrUserProfile(); }, []); - const handleAdminMode = () => { - dispatch(toggleAdminMode(adminMode)); - }; - const handleDrawerOpen = () => { setOpen(true); }; @@ -234,20 +234,20 @@ export default function PersistentDrawerLeft(props) { {currUserProfile.admin ? ( <> - + {adminMode ? 
( <> - Enable Admin Mode + Drop Admin Mode ) : ( <> - Drop Admin Mode + Enable Admin Mode )} diff --git a/frontend/src/reducers/user.ts b/frontend/src/reducers/user.ts index bf349baba..567e5404c 100644 --- a/frontend/src/reducers/user.ts +++ b/frontend/src/reducers/user.ts @@ -1,5 +1,4 @@ import { - ADMIN_MODE, DELETE_API_KEY, GENERATE_API_KEY, LIST_API_KEYS, @@ -9,6 +8,7 @@ import { REGISTER_USER, RESET_API_KEY, SET_USER, + TOGGLE_ADMIN_MODE, } from "../actions/user"; import { UserState } from "../types/data"; import { DataAction } from "../types/action"; @@ -27,9 +27,9 @@ const defaultState: UserState = { const user = (state = defaultState, action: DataAction) => { switch (action.type) { - case ADMIN_MODE: + case TOGGLE_ADMIN_MODE: return Object.assign({}, state, { - adminMode: action.adminMode, + adminMode: !state.adminMode, }); case SET_USER: return Object.assign({}, state, { diff --git a/frontend/src/types/action.ts b/frontend/src/types/action.ts index febf481ed..f8c9e789e 100644 --- a/frontend/src/types/action.ts +++ b/frontend/src/types/action.ts @@ -26,6 +26,7 @@ import { LIST_USERS, PREFIX_SEARCH_USERS, RECEIVE_USER_PROFILE, + TOGGLE_ADMIN_MODE, } from "../actions/user"; import { CREATE_GROUP, DELETE_GROUP } from "../actions/group"; import { RECEIVE_FILE_PRESIGNED_URL } from "../actions/file"; @@ -101,9 +102,8 @@ interface SET_USER { Authorization: string; } -interface ADMIN_MODE { - type: "ADMIN_MODE"; - adminMode: boolean; +interface TOGGLE_ADMIN_MODE { + type: "TOGGLE_ADMIN_MODE"; } interface LOGIN_ERROR { @@ -461,7 +461,7 @@ interface RESET_VIS_DATA_PRESIGNED_URL { } export type DataAction = - | ADMIN_MODE + | TOGGLE_ADMIN_MODE | RECEIVE_FILES_IN_DATASET | RECEIVE_FOLDERS_IN_DATASET | DELETE_FILE From e95a34f8ddf87046e6f1852a024950b344dc273c Mon Sep 17 00:00:00 2001 From: Chen Wang Date: Tue, 12 Dec 2023 09:51:19 -0600 Subject: [PATCH 26/43] rewrite backend --- backend/app/deps/authorization_deps.py | 87 ++++---- backend/app/keycloak_auth.py | 82 
+++----- backend/app/models/users.py | 1 + backend/app/routers/authentication.py | 31 ++- backend/app/routers/authorization.py | 154 +++++++------- backend/app/routers/datasets.py | 256 +++++++++++------------ backend/app/routers/elasticsearch.py | 15 +- backend/app/routers/files.py | 177 ++++++++-------- backend/app/routers/groups.py | 73 +++---- backend/app/routers/metadata.py | 55 ++--- backend/app/routers/metadata_datasets.py | 89 ++++---- backend/app/routers/metadata_files.py | 139 ++++++------ 12 files changed, 584 insertions(+), 575 deletions(-) diff --git a/backend/app/deps/authorization_deps.py b/backend/app/deps/authorization_deps.py index 501e03c24..81cb942d0 100644 --- a/backend/app/deps/authorization_deps.py +++ b/backend/app/deps/authorization_deps.py @@ -2,7 +2,7 @@ from beanie.operators import Or from fastapi import Depends, HTTPException -from app.keycloak_auth import get_current_username, get_admin_mode +from app.keycloak_auth import get_current_username from app.models.authorization import RoleType, AuthorizationDB from app.models.datasets import DatasetDB, DatasetStatus from app.models.files import FileDB @@ -10,11 +10,12 @@ from app.models.metadata import MetadataDB from app.models.pyobjectid import PyObjectId from app.routers.authentication import get_admin +from app.routers.authentication import get_admin_mode async def get_role( - dataset_id: str, - current_user=Depends(get_current_username), + dataset_id: str, + current_user=Depends(get_current_username), ) -> RoleType: """Returns the role a specific user has on a dataset. 
If the user is a creator (owner), they are not listed in the user_ids list.""" @@ -29,8 +30,8 @@ async def get_role( async def get_role_by_file( - file_id: str, - current_user=Depends(get_current_username), + file_id: str, + current_user=Depends(get_current_username), ) -> RoleType: if (file := await FileDB.get(PydanticObjectId(file_id))) is not None: authorization = await AuthorizationDB.find_one( @@ -42,7 +43,7 @@ async def get_role_by_file( ) if authorization is None: if ( - dataset := await DatasetDB.get(PydanticObjectId(file.dataset_id)) + dataset := await DatasetDB.get(PydanticObjectId(file.dataset_id)) ) is not None: if dataset.status == DatasetStatus.AUTHENTICATED.name: auth_dict = { @@ -63,8 +64,8 @@ async def get_role_by_file( async def get_role_by_metadata( - metadata_id: str, - current_user=Depends(get_current_username), + metadata_id: str, + current_user=Depends(get_current_username), ) -> RoleType: if (md_out := await MetadataDB.get(PydanticObjectId(metadata_id))) is not None: resource_type = md_out.resource.collection @@ -81,7 +82,7 @@ async def get_role_by_metadata( return authorization.role elif resource_type == "datasets": if ( - dataset := await DatasetDB.get(PydanticObjectId(resource_id)) + dataset := await DatasetDB.get(PydanticObjectId(resource_id)) ) is not None: authorization = await AuthorizationDB.find_one( AuthorizationDB.dataset_id == dataset.id, @@ -94,8 +95,8 @@ async def get_role_by_metadata( async def get_role_by_group( - group_id: str, - current_user=Depends(get_current_username), + group_id: str, + current_user=Depends(get_current_username), ) -> RoleType: if (group := await GroupDB.get(group_id)) is not None: if group.creator == current_user: @@ -115,7 +116,7 @@ async def get_role_by_group( async def is_public_dataset( - dataset_id: str, + dataset_id: str, ) -> bool: """Checks if a dataset is public.""" if (dataset_out := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: @@ -126,7 +127,7 @@ async def 
is_public_dataset( async def is_authenticated_dataset( - dataset_id: str, + dataset_id: str, ) -> bool: """Checks if a dataset is authenticated.""" if (dataset_out := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: @@ -144,11 +145,11 @@ def __init__(self, role: str): self.role = role async def __call__( - self, - dataset_id: str, - admin_mode: bool = Depends(get_admin_mode), - current_user: str = Depends(get_current_username), - admin: bool = Depends(get_admin), + self, + dataset_id: str, + current_user: str = Depends(get_current_username), + admin_mode: bool = Depends(get_admin_mode), + admin: bool = Depends(get_admin), ): # TODO: Make sure we enforce only one role per user per dataset, or find_one could yield wrong answer here. @@ -174,11 +175,11 @@ async def __call__( ) else: if ( - current_dataset := await DatasetDB.get(PydanticObjectId(dataset_id)) + current_dataset := await DatasetDB.get(PydanticObjectId(dataset_id)) ) is not None: if ( - current_dataset.status == DatasetStatus.AUTHENTICATED.name - and self.role == "viewer" + current_dataset.status == DatasetStatus.AUTHENTICATED.name + and self.role == "viewer" ): return True else: @@ -201,11 +202,11 @@ def __init__(self, role: str): self.role = role async def __call__( - self, - file_id: str, - admin_mode: bool = Depends(get_admin_mode), - current_user: str = Depends(get_current_username), - admin: bool = Depends(get_admin), + self, + file_id: str, + admin_mode: bool = Depends(get_admin_mode), + current_user: str = Depends(get_current_username), + admin: bool = Depends(get_admin), ): # If the current user is admin and has turned on admin_mode, user has access irrespective of any role assigned if admin and admin_mode: @@ -239,11 +240,11 @@ def __init__(self, role: str): self.role = role async def __call__( - self, - metadata_id: str, - admin_mode: bool = Depends(get_admin_mode), - current_user: str = Depends(get_current_username), - admin: bool = Depends(get_admin), + self, + metadata_id: str, 
+ admin_mode: bool = Depends(get_admin_mode), + current_user: str = Depends(get_current_username), + admin: bool = Depends(get_admin), ): # If the current user is admin and has turned on admin_mode, user has access irrespective of any role assigned if admin and admin_mode: @@ -255,7 +256,7 @@ async def __call__( resource_id = md_out.resource.resource_id if resource_type == "files": if ( - file := await FileDB.get(PydanticObjectId(resource_id)) + file := await FileDB.get(PydanticObjectId(resource_id)) ) is not None: authorization = await AuthorizationDB.find_one( AuthorizationDB.dataset_id == file.dataset_id, @@ -277,7 +278,7 @@ async def __call__( ) elif resource_type == "datasets": if ( - dataset := await DatasetDB.get(PydanticObjectId(resource_id)) + dataset := await DatasetDB.get(PydanticObjectId(resource_id)) ) is not None: authorization = await AuthorizationDB.find_one( AuthorizationDB.dataset_id == dataset.id, @@ -306,11 +307,11 @@ def __init__(self, role: str): self.role = role async def __call__( - self, - group_id: str, - admin_mode: bool = Depends(get_admin_mode), - current_user: str = Depends(get_current_username), - admin: bool = Depends(get_admin), + self, + group_id: str, + admin_mode: bool = Depends(get_admin_mode), + current_user: str = Depends(get_current_username), + admin: bool = Depends(get_admin), ): # If the current user is admin and has turned on admin_mode, user has access irrespective of any role assigned if admin and admin_mode: @@ -342,8 +343,8 @@ def __init__(self, status: str): self.status = status async def __call__( - self, - dataset_id: str, + self, + dataset_id: str, ): if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: if dataset.status == self.status: @@ -362,13 +363,13 @@ def __init__(self, status: str): self.status = status async def __call__( - self, - file_id: str, + self, + file_id: str, ): if (file_out := await FileDB.get(PydanticObjectId(file_id))) is not None: dataset_id = file_out.dataset_id if 
( - dataset := await DatasetDB.get(PydanticObjectId(dataset_id)) + dataset := await DatasetDB.get(PydanticObjectId(dataset_id)) ) is not None: if dataset.status == self.status: return True diff --git a/backend/app/keycloak_auth.py b/backend/app/keycloak_auth.py index 6f4fe8848..0b0684e0b 100644 --- a/backend/app/keycloak_auth.py +++ b/backend/app/keycloak_auth.py @@ -3,7 +3,7 @@ import logging from datetime import datetime -from fastapi import Security, HTTPException, Depends, Header +from fastapi import Security, HTTPException, Depends from fastapi.security import OAuth2AuthorizationCodeBearer, APIKeyHeader, APIKeyCookie from itsdangerous.exc import BadSignature from itsdangerous.url_safe import URLSafeSerializer @@ -54,8 +54,8 @@ async def get_idp_public_key(): async def get_token( - token: str = Security(oauth2_scheme), - api_key: str = Security(api_key_header), + token: str = Security(oauth2_scheme), + api_key: str = Security(api_key_header), ) -> Json: """Decode token. Use to secure endpoints.""" if token: @@ -91,17 +91,17 @@ async def get_token( payload = serializer.loads(api_key) # Key is valid, check expiration date in database if ( - await ListenerAPIKeyDB.find_one( - ListenerAPIKeyDB.user == payload["user"], - ListenerAPIKeyDB.key == payload["key"], - ) + await ListenerAPIKeyDB.find_one( + ListenerAPIKeyDB.user == payload["user"], + ListenerAPIKeyDB.key == payload["key"], + ) ) is not None: return {"preferred_username": payload["user"]} elif ( - key := await UserAPIKeyDB.find_one( - UserAPIKeyDB.user == payload["user"], - UserAPIKeyDB.key == payload["key"], - ) + key := await UserAPIKeyDB.find_one( + UserAPIKeyDB.user == payload["user"], + UserAPIKeyDB.key == payload["key"], + ) ) is not None: current_time = datetime.utcnow() if key.expires is not None and current_time >= key.expires: @@ -140,9 +140,9 @@ async def get_user(identity: Json = Depends(get_token)): async def get_current_user( - token: str = Security(oauth2_scheme), - api_key: str = 
Security(api_key_header), - token_cookie: str = Security(jwt_header), + token: str = Security(oauth2_scheme), + api_key: str = Security(api_key_header), + token_cookie: str = Security(jwt_header), ) -> UserOut: """Retrieve the user object from Mongo by first getting user id from JWT and then querying Mongo. Potentially expensive. Use `get_current_username` if all you need is user name. @@ -178,18 +178,18 @@ async def get_current_user( payload = serializer.loads(api_key) # Key is valid, check expiration date in database if ( - key := await ListenerAPIKeyDB.find_one( - ListenerAPIKeyDB.user == payload["user"], - ListenerAPIKeyDB.key == payload["key"], - ) + key := await ListenerAPIKeyDB.find_one( + ListenerAPIKeyDB.user == payload["user"], + ListenerAPIKeyDB.key == payload["key"], + ) ) is not None: user = await UserDB.find_one(UserDB.email == key.user) return UserOut(**user.dict()) elif ( - key := await UserAPIKeyDB.find_one( - UserAPIKeyDB.user == payload["user"], - UserAPIKeyDB.key == payload["key"], - ) + key := await UserAPIKeyDB.find_one( + UserAPIKeyDB.user == payload["user"], + UserAPIKeyDB.key == payload["key"], + ) ) is not None: current_time = datetime.utcnow() @@ -224,26 +224,10 @@ async def get_current_user( ) -async def admin_mode_header( - x_admin_mode: str = Header(default=None, convert_underscores=False) -) -> bool: - """Dependency to read X-ADMIN-MODE header.""" - if x_admin_mode is not None: - return x_admin_mode.lower() == "true" - return False - - -async def get_admin_mode( - admin_mode: bool = Depends(admin_mode_header), -) -> bool: - """Get Admin mode from Header.""" - return admin_mode - - async def get_current_username( - token: str = Security(oauth2_scheme), - api_key: str = Security(api_key_header), - token_cookie: str = Security(jwt_header), + token: str = Security(oauth2_scheme), + api_key: str = Security(api_key_header), + token_cookie: str = Security(jwt_header), ) -> str: """Retrieve the user id from the JWT token. 
Does not query MongoDB.""" if token: @@ -276,18 +260,18 @@ async def get_current_username( payload = serializer.loads(api_key) # Key is valid, check expiration date in database if ( - key := await ListenerAPIKeyDB.find_one( - ListenerAPIKeyDB.user == payload["user"], - ListenerAPIKeyDB.key == payload["key"], - ) + key := await ListenerAPIKeyDB.find_one( + ListenerAPIKeyDB.user == payload["user"], + ListenerAPIKeyDB.key == payload["key"], + ) ) is not None: # Key is coming from a listener job return key.user elif ( - key := await UserAPIKeyDB.find_one( - UserAPIKeyDB.user == payload["user"], - UserAPIKeyDB.key == payload["key"], - ) + key := await UserAPIKeyDB.find_one( + UserAPIKeyDB.user == payload["user"], + UserAPIKeyDB.key == payload["key"], + ) ) is not None: # Key is coming from a user request current_time = datetime.utcnow() diff --git a/backend/app/models/users.py b/backend/app/models/users.py index fd168d69f..23a5e570b 100644 --- a/backend/app/models/users.py +++ b/backend/app/models/users.py @@ -25,6 +25,7 @@ class UserLogin(BaseModel): class UserDoc(Document, UserBase): admin: bool + admin_mode: bool = False class Settings: name = "users" diff --git a/backend/app/routers/authentication.py b/backend/app/routers/authentication.py index b663244f7..3cf917af1 100644 --- a/backend/app/routers/authentication.py +++ b/backend/app/routers/authentication.py @@ -1,5 +1,6 @@ import json +from beanie import PydanticObjectId from fastapi import APIRouter, HTTPException, Depends from keycloak.exceptions import ( KeycloakAuthenticationError, @@ -8,8 +9,6 @@ ) from passlib.hash import bcrypt -from beanie import PydanticObjectId - from app.keycloak_auth import create_user, get_current_user from app.keycloak_auth import keycloak_openid from app.models.datasets import DatasetDB @@ -96,23 +95,41 @@ async def authenticate_user(email: str, password: str): @router.get("/admin") async def get_admin(dataset_id: str = None, current_username=Depends(get_current_user)): if ( - 
current_user := await UserDB.find_one(UserDB.email == current_username.email) + current_user := await UserDB.find_one(UserDB.email == current_username.email) ) is not None: if current_user.admin: return current_user.admin elif ( - dataset_id - and (dataset_db := await DatasetDB.get(PydanticObjectId(dataset_id))) - is not None + dataset_id + and (dataset_db := await DatasetDB.get(PydanticObjectId(dataset_id))) + is not None ): return dataset_db.creator.email == current_username.email else: return False +@router.get("/admin_mode") +async def get_admin_mode(current_username=Depends(get_current_user)) -> bool: + """Get Admin mode from User Object.""" + if ( + current_user := await UserDB.find_one(UserDB.email == current_username.email) + ) is not None: + if current_user.admin_mode is not None: + return current_user.admin_mode + else: + return False + else: + raise HTTPException( + status_code=404, + detail="User doesn't exist.", + headers={"WWW-Authenticate": "Bearer"}, + ) + + @router.post("/users/set_admin/{useremail}", response_model=UserOut) async def set_admin( - useremail: str, current_username=Depends(get_current_user), admin=Depends(get_admin) + useremail: str, current_username=Depends(get_current_user), admin=Depends(get_admin) ): if admin: if (user := await UserDB.find_one(UserDB.email == useremail)) is not None: diff --git a/backend/app/routers/authorization.py b/backend/app/routers/authorization.py index e30e23178..0e8e6b99f 100644 --- a/backend/app/routers/authorization.py +++ b/backend/app/routers/authorization.py @@ -11,7 +11,7 @@ get_role_by_metadata, get_role_by_group, ) -from app.keycloak_auth import get_current_username, get_user, get_admin_mode +from app.keycloak_auth import get_current_username, get_user from app.models.authorization import ( AuthorizationBase, AuthorizationMetadata, @@ -30,7 +30,7 @@ from app.models.groups import GroupDB from app.models.pyobjectid import PyObjectId from app.models.users import UserDB -from 
app.routers.authentication import get_admin +from app.routers.authentication import get_admin, get_admin_mode from app.search.index import index_dataset router = APIRouter() @@ -38,11 +38,11 @@ @router.post("/datasets/{dataset_id}", response_model=AuthorizationOut) async def save_authorization( - dataset_id: str, - authorization_in: AuthorizationBase, - admin_mode: bool = Depends(get_admin_mode), - user=Depends(get_current_username), - allow: bool = Depends(Authorization("editor")), + dataset_id: str, + authorization_in: AuthorizationBase, + admin_mode: bool = Depends(get_admin_mode), + user=Depends(get_current_username), + allow: bool = Depends(Authorization("editor")), ): """Save authorization info in Mongo. This is a triple of dataset_id/user_id/role/group_id.""" @@ -69,10 +69,10 @@ async def save_authorization( @router.get("/datasets/{dataset_id}/role", response_model=AuthorizationOut) async def get_dataset_role( - dataset_id: str, - admin_mode: bool = Depends(get_admin_mode), - current_user=Depends(get_current_username), - admin=Depends(get_admin), + dataset_id: str, + admin_mode: bool = Depends(get_admin_mode), + current_user=Depends(get_current_username), + admin=Depends(get_admin), ): """Retrieve role of user for a specific dataset.""" # Get group id and the associated users from authorization @@ -90,7 +90,7 @@ async def get_dataset_role( ) if auth_db is None: if ( - current_dataset := await DatasetDB.get(PydanticObjectId(dataset_id)) + current_dataset := await DatasetDB.get(PydanticObjectId(dataset_id)) ) is not None: if current_dataset.status == DatasetStatus.AUTHENTICATED.name: public_authorization_in = { @@ -112,9 +112,9 @@ async def get_dataset_role( @router.get("/datasets/{dataset_id}/role/viewer}") async def get_dataset_role_viewer( - dataset_id: str, - admin_mode: bool = Depends(get_admin_mode), - allow: bool = Depends(Authorization("viewer")), + dataset_id: str, + admin_mode: bool = Depends(get_admin_mode), + allow: bool = 
Depends(Authorization("viewer")), ): """Used for testing only. Returns true if user has viewer permission on dataset, otherwise throws a 403 Forbidden HTTP exception. See `routers/authorization.py` for more info.""" @@ -123,9 +123,9 @@ async def get_dataset_role_viewer( @router.get("/datasets/{dataset_id}/role/owner}") async def get_dataset_role_owner( - dataset_id: str, - admin_mode: bool = Depends(get_admin_mode), - allow: bool = Depends(Authorization("owner")), + dataset_id: str, + admin_mode: bool = Depends(get_admin_mode), + allow: bool = Depends(Authorization("owner")), ): """Used for testing only. Returns true if user has owner permission on dataset, otherwise throws a 403 Forbidden HTTP exception. See `routers/authorization.py` for more info.""" @@ -134,11 +134,11 @@ async def get_dataset_role_owner( @router.get("/files/{file_id}/role}", response_model=RoleType) async def get_file_role( - file_id: str, - admin_mode: bool = Depends(get_admin_mode), - current_user=Depends(get_current_username), - role: RoleType = Depends(get_role_by_file), - admin=Depends(get_admin), + file_id: str, + admin_mode: bool = Depends(get_admin_mode), + current_user=Depends(get_current_username), + role: RoleType = Depends(get_role_by_file), + admin=Depends(get_admin), ): # admin is a superuser and has all the privileges, only show if the user has turned on the admin mode if admin and admin_mode: @@ -149,11 +149,11 @@ async def get_file_role( @router.get("/metadata/{metadata_id}/role}", response_model=AuthorizationMetadata) async def get_metadata_role( - metadata_id: str, - admin_mode: bool = Depends(get_admin_mode), - current_user=Depends(get_current_username), - role: RoleType = Depends(get_role_by_metadata), - admin=Depends(get_admin), + metadata_id: str, + admin_mode: bool = Depends(get_admin_mode), + current_user=Depends(get_current_username), + role: RoleType = Depends(get_role_by_metadata), + admin=Depends(get_admin), ): # admin is a superuser and has all the privileges, only 
show if the user has turned on the admin mode if admin and admin_mode: @@ -164,11 +164,11 @@ async def get_metadata_role( @router.get("/groups/{group_id}/role}", response_model=RoleType) async def get_group_role( - group_id: str, - admin_mode: bool = Depends(get_admin_mode), - current_user=Depends(get_current_username), - role: RoleType = Depends(get_role_by_group), - admin=Depends(get_admin), + group_id: str, + admin_mode: bool = Depends(get_admin_mode), + current_user=Depends(get_current_username), + role: RoleType = Depends(get_role_by_group), + admin=Depends(get_admin), ): # admin is a superuser and has all the privileges, only show if the user has turned on the admin mode if admin and admin_mode: @@ -182,13 +182,13 @@ async def get_group_role( response_model=AuthorizationOut, ) async def set_dataset_group_role( - dataset_id: PydanticObjectId, - group_id: PydanticObjectId, - role: RoleType, - admin_mode: bool = Depends(get_admin_mode), - es=Depends(get_elasticsearchclient), - user_id=Depends(get_user), - allow: bool = Depends(Authorization("editor")), + dataset_id: PydanticObjectId, + group_id: PydanticObjectId, + role: RoleType, + admin_mode: bool = Depends(get_admin_mode), + es=Depends(get_elasticsearchclient), + user_id=Depends(get_user), + allow: bool = Depends(Authorization("editor")), ): """Assign an entire group a specific role for a dataset.""" if (dataset := await DatasetDB.get(dataset_id)) is not None: @@ -198,10 +198,10 @@ async def set_dataset_group_role( dataset_id, group_id, admin_mode, es, user_id, allow ) if ( - auth_db := await AuthorizationDB.find_one( - AuthorizationDB.dataset_id == PyObjectId(dataset_id), - AuthorizationDB.role == role, - ) + auth_db := await AuthorizationDB.find_one( + AuthorizationDB.dataset_id == PyObjectId(dataset_id), + AuthorizationDB.role == role, + ) ) is not None: if group_id not in auth_db.group_ids: auth_db.group_ids.append(group_id) @@ -236,13 +236,13 @@ async def set_dataset_group_role( 
response_model=AuthorizationOut, ) async def set_dataset_user_role( - dataset_id: str, - username: str, - role: RoleType, - admin_mode: bool = Depends(get_admin_mode), - es=Depends(get_elasticsearchclient), - user_id=Depends(get_user), - allow: bool = Depends(Authorization("editor")), + dataset_id: str, + username: str, + role: RoleType, + admin_mode: bool = Depends(get_admin_mode), + es=Depends(get_elasticsearchclient), + user_id=Depends(get_user), + allow: bool = Depends(Authorization("editor")), ): """Assign a single user a specific role for a dataset.""" @@ -296,22 +296,22 @@ async def set_dataset_user_role( response_model=AuthorizationOut, ) async def remove_dataset_group_role( - dataset_id: PydanticObjectId, - group_id: PydanticObjectId, - admin_mode: bool = Depends(get_admin_mode), - es=Depends(get_elasticsearchclient), - user_id=Depends(get_user), - allow: bool = Depends(Authorization("editor")), + dataset_id: PydanticObjectId, + group_id: PydanticObjectId, + admin_mode: bool = Depends(get_admin_mode), + es=Depends(get_elasticsearchclient), + user_id=Depends(get_user), + allow: bool = Depends(Authorization("editor")), ): """Remove any role the group has with a specific dataset.""" if (dataset := await DatasetDB.get(dataset_id)) is not None: if (group := await GroupDB.get(group_id)) is not None: if ( - auth_db := await AuthorizationDB.find_one( - AuthorizationDB.dataset_id == dataset_id, - AuthorizationDB.group_ids == group_id, - ) + auth_db := await AuthorizationDB.find_one( + AuthorizationDB.dataset_id == dataset_id, + AuthorizationDB.group_ids == group_id, + ) ) is not None: auth_db.group_ids.remove(PyObjectId(group_id)) for u in group.users: @@ -332,22 +332,22 @@ async def remove_dataset_group_role( response_model=AuthorizationOut, ) async def remove_dataset_user_role( - dataset_id: str, - username: str, - admin_mode: bool = Depends(get_admin_mode), - es=Depends(get_elasticsearchclient), - user_id=Depends(get_user), - allow: bool = 
Depends(Authorization("editor")), + dataset_id: str, + username: str, + admin_mode: bool = Depends(get_admin_mode), + es=Depends(get_elasticsearchclient), + user_id=Depends(get_user), + allow: bool = Depends(Authorization("editor")), ): """Remove any role the user has with a specific dataset.""" if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: if (await UserDB.find_one(UserDB.email == username)) is not None: if ( - auth_db := await AuthorizationDB.find_one( - AuthorizationDB.dataset_id == PyObjectId(dataset_id), - AuthorizationDB.user_ids == username, - ) + auth_db := await AuthorizationDB.find_one( + AuthorizationDB.dataset_id == PyObjectId(dataset_id), + AuthorizationDB.user_ids == username, + ) ) is not None: auth_db.user_ids.remove(username) await auth_db.save() @@ -362,16 +362,16 @@ async def remove_dataset_user_role( @router.get("/datasets/{dataset_id}/roles}", response_model=DatasetRoles) async def get_dataset_roles( - dataset_id: str, - admin_mode: bool = Depends(get_admin_mode), - allow: bool = Depends(Authorization("editor")), + dataset_id: str, + admin_mode: bool = Depends(get_admin_mode), + allow: bool = Depends(Authorization("editor")), ): """Get a list of all users and groups that have assigned roles on this dataset.""" if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: roles = DatasetRoles(dataset_id=str(dataset.id)) async for auth in AuthorizationDB.find( - AuthorizationDB.dataset_id == ObjectId(dataset_id) + AuthorizationDB.dataset_id == ObjectId(dataset_id) ): # First, fetch all groups that have a role on the dataset group_user_counts = {} @@ -389,8 +389,8 @@ async def get_dataset_roles( # Next, get all users but omit those that are included in a group above async for user in UserDB.find(In(UserDB.email, auth.user_ids)): if ( - user.email in group_user_counts - and auth.user_ids.count(user.email) == group_user_counts[user.email] + user.email in group_user_counts + and 
auth.user_ids.count(user.email) == group_user_counts[user.email] ): continue # TODO: Why is this necessary here but not on root-level ObjectIDs? diff --git a/backend/app/routers/datasets.py b/backend/app/routers/datasets.py index a623bd916..357e9752d 100644 --- a/backend/app/routers/datasets.py +++ b/backend/app/routers/datasets.py @@ -36,7 +36,6 @@ get_token, get_user, get_current_user, - get_admin_mode, ) from app.models.authorization import AuthorizationDB, RoleType from app.models.datasets import ( @@ -56,6 +55,7 @@ from app.models.users import UserOut from app.rabbitmq.listeners import submit_dataset_job from app.routers.authentication import get_admin +from app.routers.authentication import get_admin_mode from app.routers.files import add_file_entry, remove_file_entry from app.search.connect import ( delete_document_by_id, @@ -134,12 +134,12 @@ def nested_update(target_dict, update_dict): async def _create_folder_structure( - dataset_id: str, - contents: dict, - folder_path: str, - folder_lookup: dict, - user: UserOut, - parent_folder_id: Optional[str] = None, + dataset_id: str, + contents: dict, + folder_path: str, + folder_lookup: dict, + user: UserOut, + parent_folder_id: Optional[str] = None, ): """Recursively create folders encountered in folder_path until the target folder is created. 
Arguments: @@ -174,8 +174,8 @@ async def _create_folder_structure( async def _get_folder_hierarchy( - folder_id: str, - hierarchy: str, + folder_id: str, + hierarchy: str, ): """Generate a string of nested path to folder for use in zip file creation.""" folder = await FolderDB.get(PydanticObjectId(folder_id)) @@ -187,9 +187,9 @@ async def _get_folder_hierarchy( @router.post("", response_model=DatasetOut) async def save_dataset( - dataset_in: DatasetIn, - user=Depends(get_current_user), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + dataset_in: DatasetIn, + user=Depends(get_current_user), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), ): dataset = DatasetDB(**dataset_in.dict(), creator=user) await dataset.insert() @@ -208,12 +208,12 @@ async def save_dataset( @router.get("", response_model=List[DatasetOut]) async def get_datasets( - admin_mode: bool = Depends(get_admin_mode), - user_id=Depends(get_user), - skip: int = 0, - limit: int = 10, - mine: bool = False, - admin=Depends(get_admin), + admin_mode: bool = Depends(get_admin_mode), + user_id=Depends(get_user), + skip: int = 0, + limit: int = 10, + mine: bool = False, + admin=Depends(get_admin), ): if admin_mode and admin: datasets = await DatasetDBViewList.find( @@ -245,10 +245,10 @@ async def get_datasets( @router.get("/{dataset_id}", response_model=DatasetOut) async def get_dataset( - dataset_id: str, - authenticated: bool = Depends(CheckStatus("AUTHENTICATED")), - admin_mode: bool = Depends(get_admin_mode), - allow: bool = Depends(Authorization("viewer")), + dataset_id: str, + authenticated: bool = Depends(CheckStatus("AUTHENTICATED")), + admin_mode: bool = Depends(get_admin_mode), + allow: bool = Depends(Authorization("viewer")), ): if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: return dataset.dict() @@ -257,14 +257,14 @@ async def get_dataset( @router.get("/{dataset_id}/files", response_model=List[FileOut]) async def 
get_dataset_files( - dataset_id: str, - admin_mode: bool = Depends(get_admin_mode), - folder_id: Optional[str] = None, - authenticated: bool = Depends(CheckStatus("AUTHENTICATED")), - user_id=Depends(get_user), - skip: int = 0, - limit: int = 10, - allow: bool = Depends(Authorization("viewer")), + dataset_id: str, + admin_mode: bool = Depends(get_admin_mode), + folder_id: Optional[str] = None, + authenticated: bool = Depends(CheckStatus("AUTHENTICATED")), + user_id=Depends(get_user), + skip: int = 0, + limit: int = 10, + allow: bool = Depends(Authorization("viewer")), ): if authenticated: query = [ @@ -286,12 +286,12 @@ async def get_dataset_files( @router.put("/{dataset_id}", response_model=DatasetOut) async def edit_dataset( - dataset_id: str, - dataset_info: DatasetBase, - admin_mode: bool = Depends(get_admin_mode), - user=Depends(get_current_user), - es=Depends(dependencies.get_elasticsearchclient), - allow: bool = Depends(Authorization("editor")), + dataset_id: str, + dataset_info: DatasetBase, + admin_mode: bool = Depends(get_admin_mode), + user=Depends(get_current_user), + es=Depends(dependencies.get_elasticsearchclient), + allow: bool = Depends(Authorization("editor")), ): if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: # TODO: Refactor this with permissions checks etc. 
@@ -307,12 +307,12 @@ async def edit_dataset( @router.patch("/{dataset_id}", response_model=DatasetOut) async def patch_dataset( - dataset_id: str, - dataset_info: DatasetPatch, - admin_mode: bool = Depends(get_admin_mode), - user=Depends(get_current_user), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), - allow: bool = Depends(Authorization("editor")), + dataset_id: str, + dataset_info: DatasetPatch, + admin_mode: bool = Depends(get_admin_mode), + user=Depends(get_current_user), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + allow: bool = Depends(Authorization("editor")), ): if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: # TODO: Update method not working properly @@ -332,11 +332,11 @@ async def patch_dataset( @router.delete("/{dataset_id}") async def delete_dataset( - dataset_id: str, - admin_mode: bool = Depends(get_admin_mode), - fs: Minio = Depends(dependencies.get_fs), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), - allow: bool = Depends(Authorization("editor")), + dataset_id: str, + admin_mode: bool = Depends(get_admin_mode), + fs: Minio = Depends(dependencies.get_fs), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + allow: bool = Depends(Authorization("editor")), ): if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: # delete from elasticsearch @@ -347,7 +347,7 @@ async def delete_dataset( MetadataDB.resource.resource_id == PydanticObjectId(dataset_id) ).delete() async for file in FileDB.find( - FileDB.dataset_id == PydanticObjectId(dataset_id) + FileDB.dataset_id == PydanticObjectId(dataset_id) ): await remove_file_entry(file.id, fs, es) await FolderDB.find( @@ -362,11 +362,11 @@ async def delete_dataset( @router.post("/{dataset_id}/folders", response_model=FolderOut) async def add_folder( - dataset_id: str, - folder_in: FolderIn, - admin_mode: bool = Depends(get_admin_mode), - user=Depends(get_current_user), - 
allow: bool = Depends(Authorization("uploader")), + dataset_id: str, + folder_in: FolderIn, + admin_mode: bool = Depends(get_admin_mode), + user=Depends(get_current_user), + allow: bool = Depends(Authorization("uploader")), ): if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: parent_folder = folder_in.parent_folder @@ -385,14 +385,14 @@ async def add_folder( @router.get("/{dataset_id}/folders", response_model=List[FolderOut]) async def get_dataset_folders( - dataset_id: str, - admin_mode: bool = Depends(get_admin_mode), - parent_folder: Optional[str] = None, - user_id=Depends(get_user), - authenticated: bool = Depends(CheckStatus("authenticated")), - skip: int = 0, - limit: int = 10, - allow: bool = Depends(Authorization("viewer")), + dataset_id: str, + admin_mode: bool = Depends(get_admin_mode), + parent_folder: Optional[str] = None, + user_id=Depends(get_user), + authenticated: bool = Depends(CheckStatus("authenticated")), + skip: int = 0, + limit: int = 10, + allow: bool = Depends(Authorization("viewer")), ): if (await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: if authenticated: @@ -418,12 +418,12 @@ async def get_dataset_folders( @router.delete("/{dataset_id}/folders/{folder_id}") async def delete_folder( - dataset_id: str, - folder_id: str, - admin_mode: bool = Depends(get_admin_mode), - fs: Minio = Depends(dependencies.get_fs), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), - allow: bool = Depends(Authorization("editor")), + dataset_id: str, + folder_id: str, + admin_mode: bool = Depends(get_admin_mode), + fs: Minio = Depends(dependencies.get_fs), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + allow: bool = Depends(Authorization("editor")), ): if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: if (folder := await FolderDB.get(PydanticObjectId(folder_id))) is not None: @@ -434,14 +434,14 @@ async def delete_folder( # recursively delete child 
folder and files async def _delete_nested_folders(parent_folder_id): while ( - await FolderDB.find_one( - FolderDB.dataset_id == ObjectId(dataset_id), - FolderDB.parent_folder == ObjectId(parent_folder_id), - ) + await FolderDB.find_one( + FolderDB.dataset_id == ObjectId(dataset_id), + FolderDB.parent_folder == ObjectId(parent_folder_id), + ) ) is not None: async for subfolder in FolderDB.find( - FolderDB.dataset_id == PydanticObjectId(dataset_id), - FolderDB.parent_folder == PydanticObjectId(parent_folder_id), + FolderDB.dataset_id == PydanticObjectId(dataset_id), + FolderDB.parent_folder == PydanticObjectId(parent_folder_id), ): async for file in FileDB.find(FileDB.folder_id == subfolder.id): await remove_file_entry(file.id, fs, es) @@ -462,15 +462,15 @@ async def _delete_nested_folders(parent_folder_id): @router.post("/{dataset_id}/files", response_model=FileOut) async def save_file( - dataset_id: str, - folder_id: Optional[str] = None, - user=Depends(get_current_user), - fs: Minio = Depends(dependencies.get_fs), - file: UploadFile = File(...), - admin_mode: bool = Depends(get_admin_mode), - es=Depends(dependencies.get_elasticsearchclient), - rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), - allow: bool = Depends(Authorization("uploader")), + dataset_id: str, + folder_id: Optional[str] = None, + user=Depends(get_current_user), + fs: Minio = Depends(dependencies.get_fs), + file: UploadFile = File(...), + admin_mode: bool = Depends(get_admin_mode), + es=Depends(dependencies.get_elasticsearchclient), + rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), + allow: bool = Depends(Authorization("uploader")), ): if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: if user is None: @@ -503,15 +503,15 @@ async def save_file( @router.post("/{dataset_id}/filesMultiple", response_model=List[FileOut]) async def save_files( - dataset_id: str, - files: List[UploadFile], - admin_mode: bool = 
Depends(get_admin_mode), - folder_id: Optional[str] = None, - user=Depends(get_current_user), - fs: Minio = Depends(dependencies.get_fs), - es=Depends(dependencies.get_elasticsearchclient), - rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), - allow: bool = Depends(Authorization("uploader")), + dataset_id: str, + files: List[UploadFile], + admin_mode: bool = Depends(get_admin_mode), + folder_id: Optional[str] = None, + user=Depends(get_current_user), + fs: Minio = Depends(dependencies.get_fs), + es=Depends(dependencies.get_elasticsearchclient), + rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), + allow: bool = Depends(Authorization("uploader")), ): if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: files_added = [] @@ -526,7 +526,7 @@ async def save_files( if folder_id is not None: if ( - folder := await FolderDB.get(PydanticObjectId(folder_id)) + folder := await FolderDB.get(PydanticObjectId(folder_id)) ) is not None: new_file.folder_id = folder.id else: @@ -552,13 +552,13 @@ async def save_files( @router.post("/{dataset_id}/local_files", response_model=FileOut) async def save_local_file( - localfile_in: LocalFileIn, - dataset_id: str, - folder_id: Optional[str] = None, - user=Depends(get_current_user), - es=Depends(dependencies.get_elasticsearchclient), - rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), - allow: bool = Depends(Authorization("uploader")), + localfile_in: LocalFileIn, + dataset_id: str, + folder_id: Optional[str] = None, + user=Depends(get_current_user), + es=Depends(dependencies.get_elasticsearchclient), + rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), + allow: bool = Depends(Authorization("uploader")), ): if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: if user is None: @@ -607,12 +607,12 @@ async def save_local_file( @router.post("/createFromZip", response_model=DatasetOut) async def 
create_dataset_from_zip( - user=Depends(get_current_user), - fs: Minio = Depends(dependencies.get_fs), - file: UploadFile = File(...), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), - rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), - token: str = Depends(get_token), + user=Depends(get_current_user), + fs: Minio = Depends(dependencies.get_fs), + file: UploadFile = File(...), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), + token: str = Depends(get_token), ): if file.filename.endswith(".zip") == False: raise HTTPException(status_code=404, detail=f"File is not a zip file") @@ -680,11 +680,11 @@ async def create_dataset_from_zip( @router.get("/{dataset_id}/download", response_model=DatasetOut) async def download_dataset( - dataset_id: str, - admin_mode: bool = Depends(get_admin_mode), - user=Depends(get_current_user), - fs: Minio = Depends(dependencies.get_fs), - allow: bool = Depends(Authorization("viewer")), + dataset_id: str, + admin_mode: bool = Depends(get_admin_mode), + user=Depends(get_current_user), + fs: Minio = Depends(dependencies.get_fs), + allow: bool = Depends(Authorization("viewer")), ): if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: current_temp_dir = tempfile.mkdtemp(prefix="rocratedownload") @@ -839,15 +839,15 @@ async def download_dataset( # can handle parameeters pass in as key/values in info @router.post("/{dataset_id}/extract") async def get_dataset_extract( - dataset_id: str, - extractorName: str, - request: Request, - admin_mode: bool = Depends(get_admin_mode), - # parameters don't have a fixed model shape - parameters: dict = None, - user=Depends(get_current_user), - rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), - allow: bool = Depends(Authorization("uploader")), + dataset_id: str, + extractorName: str, + request: Request, + admin_mode: bool = 
Depends(get_admin_mode), + # parameters don't have a fixed model shape + parameters: dict = None, + user=Depends(get_current_user), + rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), + allow: bool = Depends(Authorization("uploader")), ): if extractorName is None: raise HTTPException(status_code=400, detail=f"No extractorName specified") @@ -867,10 +867,10 @@ async def get_dataset_extract( @router.get("/{dataset_id}/thumbnail") async def download_dataset_thumbnail( - dataset_id: str, - admin_mode: bool = Depends(get_admin_mode), - fs: Minio = Depends(dependencies.get_fs), - allow: bool = Depends(Authorization("viewer")), + dataset_id: str, + admin_mode: bool = Depends(get_admin_mode), + fs: Minio = Depends(dependencies.get_fs), + allow: bool = Depends(Authorization("viewer")), ): # If dataset exists in MongoDB, download from Minio if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: @@ -895,14 +895,14 @@ async def download_dataset_thumbnail( @router.patch("/{dataset_id}/thumbnail/{thumbnail_id}", response_model=DatasetOut) async def add_dataset_thumbnail( - dataset_id: str, - thumbnail_id: str, - admin_mode: bool = Depends(get_admin_mode), - allow: bool = Depends(Authorization("editor")), + dataset_id: str, + thumbnail_id: str, + admin_mode: bool = Depends(get_admin_mode), + allow: bool = Depends(Authorization("editor")), ): if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: if ( - thumbnail := await ThumbnailDB.get(PydanticObjectId(thumbnail_id)) + thumbnail := await ThumbnailDB.get(PydanticObjectId(thumbnail_id)) ) is not None: # TODO: Should we garbage collect existing thumbnail if nothing else points to it? 
dataset.thumbnail_id = thumbnail_id diff --git a/backend/app/routers/elasticsearch.py b/backend/app/routers/elasticsearch.py index e1ae5d28f..0e9d4926f 100644 --- a/backend/app/routers/elasticsearch.py +++ b/backend/app/routers/elasticsearch.py @@ -4,18 +4,19 @@ from fastapi.routing import APIRouter, Request from app.config import settings -from app.keycloak_auth import get_current_username, get_admin_mode +from app.keycloak_auth import get_current_username from app.routers.authentication import get_admin +from app.routers.authentication import get_admin_mode from app.search.connect import connect_elasticsearch, search_index router = APIRouter() def _add_permissions_clause( - query, - username: str, - admin_mode: bool = Depends(get_admin_mode), - admin: bool = Depends(get_admin), + query, + username: str, + admin_mode: bool = Depends(get_admin_mode), + admin: bool = Depends(get_admin), ): """Append filter to Elasticsearch object that restricts permissions based on the requesting user.""" # TODO: Add public filter once added @@ -55,8 +56,8 @@ async def search(index_name: str, query: str, username=Depends(get_current_usern @router.post("/all/_msearch") async def msearch( - request: Request, - username=Depends(get_current_username), + request: Request, + username=Depends(get_current_username), ): es = await connect_elasticsearch() query = await request.body() diff --git a/backend/app/routers/files.py b/backend/app/routers/files.py index 4e967ed87..400299303 100644 --- a/backend/app/routers/files.py +++ b/backend/app/routers/files.py @@ -24,12 +24,13 @@ from app import dependencies from app.config import settings from app.deps.authorization_deps import FileAuthorization -from app.keycloak_auth import get_current_user, get_token, get_admin_mode +from app.keycloak_auth import get_current_user, get_token from app.models.files import FileOut, FileVersion, FileDB, FileVersionDB, StorageType from app.models.metadata import MetadataDB from app.models.thumbnails import 
ThumbnailDB from app.models.users import UserOut from app.rabbitmq.listeners import submit_file_job, EventListenerJobDB +from app.routers.authentication import get_admin_mode from app.routers.feeds import check_feed_listeners from app.routers.utils import get_content_type from app.search.connect import ( @@ -44,10 +45,10 @@ async def _resubmit_file_extractors( - file: FileOut, - rabbitmq_client: BlockingChannel, - user: UserOut, - credentials: HTTPAuthorizationCredentials = Security(security), + file: FileOut, + rabbitmq_client: BlockingChannel, + user: UserOut, + credentials: HTTPAuthorizationCredentials = Security(security), ): """This helper method will check metadata. We get the extractors run from metadata from extractors. Then they are resubmitted. At present parameters are not stored. This will change once Jobs are @@ -61,8 +62,8 @@ async def _resubmit_file_extractors( """ resubmitted_jobs = [] async for job in EventListenerJobDB.find( - EventListenerJobDB.resource_ref.resource_id == ObjectId(file.id), - EventListenerJobDB.resource_ref.version == file.version_num - 1, + EventListenerJobDB.resource_ref.resource_id == ObjectId(file.id), + EventListenerJobDB.resource_ref.version == file.version_num - 1, ): resubmitted_job = {"listener_id": job.listener_id, "parameters": job.parameters} try: @@ -86,13 +87,13 @@ async def _resubmit_file_extractors( # TODO: Move this to MongoDB middle layer async def add_file_entry( - new_file: FileDB, - user: UserOut, - fs: Minio, - es: Elasticsearch, - rabbitmq_client: BlockingChannel, - file: Optional[io.BytesIO] = None, - content_type: Optional[str] = None, + new_file: FileDB, + user: UserOut, + fs: Minio, + es: Elasticsearch, + rabbitmq_client: BlockingChannel, + file: Optional[io.BytesIO] = None, + content_type: Optional[str] = None, ): """Insert FileDB object into MongoDB (makes Clowder ID), then Minio (makes version ID), then update MongoDB with the version ID from Minio. 
@@ -151,11 +152,11 @@ async def add_file_entry( async def add_local_file_entry( - new_file: FileDB, - user: UserOut, - es: Elasticsearch, - rabbitmq_client: BlockingChannel, - content_type: Optional[str] = None, + new_file: FileDB, + user: UserOut, + es: Elasticsearch, + rabbitmq_client: BlockingChannel, + content_type: Optional[str] = None, ): """Insert FileDB object into MongoDB (makes Clowder ID). Bytes are not stored in DB and versioning not supported for local files.""" @@ -181,7 +182,7 @@ async def add_local_file_entry( # TODO: Move this to MongoDB middle layer async def remove_file_entry( - file_id: Union[str, ObjectId], fs: Minio, es: Elasticsearch + file_id: Union[str, ObjectId], fs: Minio, es: Elasticsearch ): """Remove FileDB object into MongoDB, Minio, and associated metadata and version information.""" # TODO: Deleting individual versions will require updating version_id in mongo, or deleting entire document @@ -208,16 +209,16 @@ async def remove_local_file_entry(file_id: Union[str, ObjectId], es: Elasticsear @router.put("/{file_id}", response_model=FileOut) async def update_file( - file_id: str, - admin_mode: bool = Depends(get_admin_mode), - token=Depends(get_token), - user=Depends(get_current_user), - fs: Minio = Depends(dependencies.get_fs), - file: UploadFile = File(...), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), - credentials: HTTPAuthorizationCredentials = Security(security), - rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), - allow: bool = Depends(FileAuthorization("uploader")), + file_id: str, + admin_mode: bool = Depends(get_admin_mode), + token=Depends(get_token), + user=Depends(get_current_user), + fs: Minio = Depends(dependencies.get_fs), + file: UploadFile = File(...), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + credentials: HTTPAuthorizationCredentials = Security(security), + rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), + allow: bool = 
Depends(FileAuthorization("uploader")), ): # Check all connection and abort if any one of them is not available if fs is None or es is None: @@ -226,8 +227,8 @@ async def update_file( if (updated_file := await FileDB.get(PydanticObjectId(file_id))) is not None: if ( - file.filename != updated_file.name - or file.content_type != updated_file.content_type.content_type + file.filename != updated_file.name + or file.content_type != updated_file.content_type.content_type ): raise HTTPException( status_code=400, @@ -299,12 +300,12 @@ async def update_file( @router.get("/{file_id}") async def download_file( - file_id: str, - admin_mode: bool = Depends(get_admin_mode), - version: Optional[int] = None, - increment: Optional[bool] = True, - fs: Minio = Depends(dependencies.get_fs), - allow: bool = Depends(FileAuthorization("viewer")), + file_id: str, + admin_mode: bool = Depends(get_admin_mode), + version: Optional[int] = None, + increment: Optional[bool] = True, + fs: Minio = Depends(dependencies.get_fs), + allow: bool = Depends(FileAuthorization("viewer")), ): # If file exists in MongoDB, download from Minio if (file := await FileDB.get(PydanticObjectId(file_id))) is not None: @@ -334,7 +335,7 @@ async def download_file( content.stream(settings.MINIO_UPLOAD_CHUNK_SIZE) ) response.headers["Content-Disposition"] = ( - "attachment; filename=%s" % file.name + "attachment; filename=%s" % file.name ) elif file.storage_type == StorageType.LOCAL: @@ -361,12 +362,12 @@ async def download_file( @router.get("/{file_id}/url/") async def download_file_url( - file_id: str, - admin_mode: bool = Depends(get_admin_mode), - version: Optional[int] = None, - expires_in_seconds: Optional[int] = 3600, - external_fs: Minio = Depends(dependencies.get_external_fs), - allow: bool = Depends(FileAuthorization("viewer")), + file_id: str, + admin_mode: bool = Depends(get_admin_mode), + version: Optional[int] = None, + expires_in_seconds: Optional[int] = 3600, + external_fs: Minio = 
Depends(dependencies.get_external_fs), + allow: bool = Depends(FileAuthorization("viewer")), ): # If file exists in MongoDB, download from Minio if (file := await FileDB.get(PydanticObjectId(file_id))) is not None: @@ -414,11 +415,11 @@ async def download_file_url( @router.delete("/{file_id}") async def delete_file( - file_id: str, - admin_mode: bool = Depends(get_admin_mode), - fs: Minio = Depends(dependencies.get_fs), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), - allow: bool = Depends(FileAuthorization("editor")), + file_id: str, + admin_mode: bool = Depends(get_admin_mode), + fs: Minio = Depends(dependencies.get_fs), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + allow: bool = Depends(FileAuthorization("editor")), ): if (file := await FileDB.get(PydanticObjectId(file_id))) is not None: if file.storage_type == StorageType.LOCAL: @@ -432,9 +433,9 @@ async def delete_file( @router.get("/{file_id}/summary", response_model=FileOut) async def get_file_summary( - file_id: str, - admin_mode: bool = Depends(get_admin_mode), - allow: bool = Depends(FileAuthorization("viewer")), + file_id: str, + admin_mode: bool = Depends(get_admin_mode), + allow: bool = Depends(FileAuthorization("viewer")), ): if (file := await FileDB.get(PydanticObjectId(file_id))) is not None: # TODO: Incrementing too often (3x per page view) @@ -447,10 +448,10 @@ async def get_file_summary( @router.get("/{file_id}/version_details", response_model=FileOut) async def get_file_version_details( - file_id: str, - admin_mode: bool = Depends(get_admin_mode), - version_num: Optional[int] = 0, - allow: bool = Depends(FileAuthorization("viewer")), + file_id: str, + admin_mode: bool = Depends(get_admin_mode), + version_num: Optional[int] = 0, + allow: bool = Depends(FileAuthorization("viewer")), ): if (file := await FileDB.get(PydanticObjectId(file_id))) is not None: # TODO: Incrementing too often (3x per page view) @@ -470,17 +471,17 @@ async def 
get_file_version_details( @router.get("/{file_id}/versions", response_model=List[FileVersion]) async def get_file_versions( - file_id: str, - admin_mode: bool = Depends(get_admin_mode), - skip: int = 0, - limit: int = 20, - allow: bool = Depends(FileAuthorization("viewer")), + file_id: str, + admin_mode: bool = Depends(get_admin_mode), + skip: int = 0, + limit: int = 20, + allow: bool = Depends(FileAuthorization("viewer")), ): if (file := await FileDB.get(PydanticObjectId(file_id))) is not None: mongo_versions = [] if file.storage_type == StorageType.MINIO: async for ver in FileVersionDB.find( - FileVersionDB.file_id == ObjectId(file_id) + FileVersionDB.file_id == ObjectId(file_id) ).sort(-FileVersionDB.created).skip(skip).limit(limit): mongo_versions.append(FileVersion(**ver.dict())) return mongo_versions @@ -491,15 +492,15 @@ async def get_file_versions( # submits file to extractor @router.post("/{file_id}/extract") async def post_file_extract( - file_id: str, - extractorName: str, - admin_mode: bool = Depends(get_admin_mode), - # parameters don't have a fixed model shape - parameters: dict = None, - user=Depends(get_current_user), - credentials: HTTPAuthorizationCredentials = Security(security), - rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), - allow: bool = Depends(FileAuthorization("uploader")), + file_id: str, + extractorName: str, + admin_mode: bool = Depends(get_admin_mode), + # parameters don't have a fixed model shape + parameters: dict = None, + user=Depends(get_current_user), + credentials: HTTPAuthorizationCredentials = Security(security), + rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), + allow: bool = Depends(FileAuthorization("uploader")), ): if extractorName is None: raise HTTPException(status_code=400, detail=f"No extractorName specified") @@ -524,12 +525,12 @@ async def post_file_extract( @router.post("/{file_id}/resubmit_extract") async def resubmit_file_extractions( - file_id: str, - admin_mode: 
bool = Depends(get_admin_mode), - user=Depends(get_current_user), - credentials: HTTPAuthorizationCredentials = Security(security), - rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), - allow: bool = Depends(FileAuthorization("editor")), + file_id: str, + admin_mode: bool = Depends(get_admin_mode), + user=Depends(get_current_user), + credentials: HTTPAuthorizationCredentials = Security(security), + rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), + allow: bool = Depends(FileAuthorization("editor")), ): """This route will check metadata. We get the extractors run from metadata from extractors. Then they are resubmitted. At present parameters are not stored. This will change once Jobs are @@ -551,10 +552,10 @@ async def resubmit_file_extractions( @router.get("/{file_id}/thumbnail") async def download_file_thumbnail( - file_id: str, - admin_mode: bool = Depends(get_admin_mode), - fs: Minio = Depends(dependencies.get_fs), - allow: bool = Depends(FileAuthorization("viewer")), + file_id: str, + admin_mode: bool = Depends(get_admin_mode), + fs: Minio = Depends(dependencies.get_fs), + allow: bool = Depends(FileAuthorization("viewer")), ): # If file exists in MongoDB, download from Minio if (file := await FileDB.get(PydanticObjectId(file_id))) is not None: @@ -576,15 +577,15 @@ async def download_file_thumbnail( @router.patch("/{file_id}/thumbnail/{thumbnail_id}", response_model=FileOut) async def add_file_thumbnail( - file_id: str, - thumbnail_id: str, - admin_mode: bool = Depends(get_admin_mode), - allow: bool = Depends(FileAuthorization("editor")), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + file_id: str, + thumbnail_id: str, + admin_mode: bool = Depends(get_admin_mode), + allow: bool = Depends(FileAuthorization("editor")), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), ): if (file := await FileDB.get(PydanticObjectId(file_id))) is not None: if ( - thumbnail := await 
ThumbnailDB.get(PydanticObjectId(thumbnail_id)) + thumbnail := await ThumbnailDB.get(PydanticObjectId(thumbnail_id)) ) is not None: # TODO: Should we garbage collect existing thumbnail if nothing else points to it? file.thumbnail_id = thumbnail_id diff --git a/backend/app/routers/groups.py b/backend/app/routers/groups.py index 60e407ffa..c04d43d2c 100644 --- a/backend/app/routers/groups.py +++ b/backend/app/routers/groups.py @@ -7,18 +7,19 @@ from fastapi import HTTPException, Depends, APIRouter from app.deps.authorization_deps import AuthorizationDB, GroupAuthorization -from app.keycloak_auth import get_current_user, get_user, get_admin_mode +from app.keycloak_auth import get_current_user, get_user from app.models.authorization import RoleType from app.models.groups import GroupOut, GroupIn, GroupDB, GroupBase, Member from app.models.users import UserOut, UserDB +from app.routers.authentication import get_admin_mode router = APIRouter() @router.post("", response_model=GroupOut) async def save_group( - group_in: GroupIn, - user=Depends(get_current_user), + group_in: GroupIn, + user=Depends(get_current_user), ): group_db = GroupDB(**group_in.dict(), creator=user.email) user_member = Member(user=user, editor=True) @@ -30,9 +31,9 @@ async def save_group( @router.get("", response_model=List[GroupOut]) async def get_groups( - user_id=Depends(get_user), - skip: int = 0, - limit: int = 10, + user_id=Depends(get_user), + skip: int = 0, + limit: int = 10, ): """Get a list of all Groups in the db the user is a member/owner of. @@ -56,10 +57,10 @@ async def get_groups( @router.get("/search/{search_term}", response_model=List[GroupOut]) async def search_group( - search_term: str, - user_id=Depends(get_user), - skip: int = 0, - limit: int = 10, + search_term: str, + user_id=Depends(get_user), + skip: int = 0, + limit: int = 10, ): """Search all groups in the db based on text. 
@@ -85,9 +86,9 @@ async def search_group( @router.get("/{group_id}", response_model=GroupOut) async def get_group( - group_id: str, - admin_mode: bool = Depends(get_admin_mode), - allow: bool = Depends(GroupAuthorization("viewer")), + group_id: str, + admin_mode: bool = Depends(get_admin_mode), + allow: bool = Depends(GroupAuthorization("viewer")), ): if (group := await GroupDB.get(PydanticObjectId(group_id))) is not None: return group.dict() @@ -96,11 +97,11 @@ async def get_group( @router.put("/{group_id}", response_model=GroupOut) async def edit_group( - group_id: str, - group_info: GroupBase, - admin_mode: bool = Depends(get_admin_mode), - user_id=Depends(get_user), - allow: bool = Depends(GroupAuthorization("editor")), + group_id: str, + group_info: GroupBase, + admin_mode: bool = Depends(get_admin_mode), + user_id=Depends(get_user), + allow: bool = Depends(GroupAuthorization("editor")), ): if (group := await GroupDB.get(PydanticObjectId(group_id))) is not None: group_dict = dict(group_info) if group_info is not None else {} @@ -123,7 +124,7 @@ async def edit_group( if original_user not in groups_users: # remove them from auth async for auth in AuthorizationDB.find( - {"group_ids": ObjectId(group_id)} + {"group_ids": ObjectId(group_id)} ): auth.user_ids.remove(original_user.user.email) await auth.replace() @@ -167,9 +168,9 @@ async def edit_group( @router.delete("/{group_id}", response_model=GroupOut) async def delete_group( - group_id: str, - admin_mode: bool = Depends(get_admin_mode), - allow: bool = Depends(GroupAuthorization("owner")), + group_id: str, + admin_mode: bool = Depends(get_admin_mode), + allow: bool = Depends(GroupAuthorization("owner")), ): if (group := await GroupDB.get(PydanticObjectId(group_id))) is not None: await group.delete() @@ -180,11 +181,11 @@ async def delete_group( @router.post("/{group_id}/add/{username}", response_model=GroupOut) async def add_member( - group_id: str, - username: str, - admin_mode: bool = 
Depends(get_admin_mode), - role: Optional[str] = None, - allow: bool = Depends(GroupAuthorization("editor")), + group_id: str, + username: str, + admin_mode: bool = Depends(get_admin_mode), + role: Optional[str] = None, + allow: bool = Depends(GroupAuthorization("editor")), ): """Add a new user to a group.""" if (user := await UserDB.find_one(UserDB.email == username)) is not None: @@ -218,10 +219,10 @@ async def add_member( @router.post("/{group_id}/remove/{username}", response_model=GroupOut) async def remove_member( - group_id: str, - username: str, - admin_mode: bool = Depends(get_admin_mode), - allow: bool = Depends(GroupAuthorization("editor")), + group_id: str, + username: str, + admin_mode: bool = Depends(get_admin_mode), + allow: bool = Depends(GroupAuthorization("editor")), ): """Remove a user from a group.""" @@ -251,11 +252,11 @@ async def remove_member( @router.put("/{group_id}/update/{username}", response_model=GroupOut) async def update_member( - group_id: str, - username: str, - role: str, - admin_mode: bool = Depends(get_admin_mode), - allow: bool = Depends(GroupAuthorization("editor")), + group_id: str, + username: str, + role: str, + admin_mode: bool = Depends(get_admin_mode), + allow: bool = Depends(GroupAuthorization("editor")), ): """Update user role.""" if (user := await UserDB.find_one({"email": username})) is not None: diff --git a/backend/app/routers/metadata.py b/backend/app/routers/metadata.py index 377c72136..ad73f5e79 100644 --- a/backend/app/routers/metadata.py +++ b/backend/app/routers/metadata.py @@ -12,7 +12,7 @@ from app import dependencies from app.deps.authorization_deps import MetadataAuthorization -from app.keycloak_auth import get_current_user, get_admin_mode +from app.keycloak_auth import get_current_user from app.models.metadata import ( MetadataDefinitionIn, MetadataDefinitionDB, @@ -23,14 +23,15 @@ MetadataDB, ) from app.models.pyobjectid import PyObjectId +from app.routers.authentication import get_admin_mode router = 
APIRouter() @router.post("/definition", response_model=MetadataDefinitionOut) async def save_metadata_definition( - definition_in: MetadataDefinitionIn, - user=Depends(get_current_user), + definition_in: MetadataDefinitionIn, + user=Depends(get_current_user), ): existing = await MetadataDefinitionDB.find_one( MetadataDefinitionDB.name == definition_in.name @@ -48,10 +49,10 @@ async def save_metadata_definition( @router.get("/definition", response_model=List[MetadataDefinitionOut]) async def get_metadata_definition_list( - name: Optional[str] = None, - user=Depends(get_current_user), - skip: int = 0, - limit: int = 2, + name: Optional[str] = None, + user=Depends(get_current_user), + skip: int = 0, + limit: int = 2, ): if name is None: defs = await MetadataDefinitionDB.find( @@ -71,11 +72,11 @@ async def get_metadata_definition_list( "/definition/{metadata_definition_id}", response_model=MetadataDefinitionOut ) async def get_metadata_definition( - metadata_definition_id: str, - user=Depends(get_current_user), + metadata_definition_id: str, + user=Depends(get_current_user), ): if ( - mdd := await MetadataDefinitionDB.get(PydanticObjectId(metadata_definition_id)) + mdd := await MetadataDefinitionDB.get(PydanticObjectId(metadata_definition_id)) ) is not None: return mdd.dict() raise HTTPException( @@ -88,8 +89,8 @@ async def get_metadata_definition( "/definition/{metadata_definition_id}", response_model=MetadataDefinitionOut ) async def delete_metadata_definition( - metadata_definition_id: str, - user=Depends(get_current_user), + metadata_definition_id: str, + user=Depends(get_current_user), ): """Delete metadata definition by specific ID.""" mdd = await MetadataDefinitionDB.find_one( @@ -105,7 +106,7 @@ async def delete_metadata_definition( raise HTTPException( status_code=400, detail=f"Metadata definition: {mdd.name} ({metadata_definition_id}) in use. 
" - f"You cannot delete it until all metadata records using it are deleted.", + f"You cannot delete it until all metadata records using it are deleted.", ) # TODO: Refactor this with permissions checks etc. @@ -122,10 +123,10 @@ async def delete_metadata_definition( "/definition/search/{search_term}", response_model=List[MetadataDefinitionOut] ) async def search_metadata_definition( - search_term: str, - skip: int = 0, - limit: int = 10, - user=Depends(get_current_user), + search_term: str, + skip: int = 0, + limit: int = 10, + user=Depends(get_current_user), ): """Search all metadata definition in the db based on text. @@ -151,12 +152,12 @@ async def search_metadata_definition( @router.patch("/{metadata_id}", response_model=MetadataOut) async def update_metadata( - metadata_in: MetadataPatch, - metadata_id: str, - admin_mode: bool = Depends(get_admin_mode), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), - user=Depends(get_current_user), - allow: bool = Depends(MetadataAuthorization("editor")), + metadata_in: MetadataPatch, + metadata_id: str, + admin_mode: bool = Depends(get_admin_mode), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + user=Depends(get_current_user), + allow: bool = Depends(MetadataAuthorization("editor")), ): """Update metadata. Any fields provided in the contents JSON will be added or updated in the metadata. If context or agent should be changed, use PUT. 
@@ -174,10 +175,10 @@ async def update_metadata( @router.delete("/{metadata_id}") async def delete_metadata( - metadata_id: str, - admin_mode: bool = Depends(get_admin_mode), - user=Depends(get_current_user), - allow: bool = Depends(MetadataAuthorization("editor")), + metadata_id: str, + admin_mode: bool = Depends(get_admin_mode), + user=Depends(get_current_user), + allow: bool = Depends(MetadataAuthorization("editor")), ): """Delete metadata by specific ID.""" md = await MetadataDB.find_one(MetadataDB.id == PyObjectId(metadata_id)) diff --git a/backend/app/routers/metadata_datasets.py b/backend/app/routers/metadata_datasets.py index ad4693143..3597ebbdf 100644 --- a/backend/app/routers/metadata_datasets.py +++ b/backend/app/routers/metadata_datasets.py @@ -10,7 +10,7 @@ from app import dependencies from app.config import settings from app.deps.authorization_deps import Authorization -from app.keycloak_auth import get_current_user, UserOut, get_admin_mode +from app.keycloak_auth import get_current_user, UserOut from app.models.datasets import DatasetOut, DatasetDB from app.models.listeners import EventListenerDB from app.models.metadata import ( @@ -25,6 +25,7 @@ MetadataDelete, MetadataDefinitionDB, ) +from app.routers.authentication import get_admin_mode from app.search.connect import delete_document_by_id from app.search.index import index_dataset @@ -34,10 +35,10 @@ async def _build_metadata_db_obj( - metadata_in: MetadataIn, - dataset: DatasetOut, - user: UserOut, - agent: MetadataAgent = None, + metadata_in: MetadataIn, + dataset: DatasetOut, + user: UserOut, + agent: MetadataAgent = None, ): """Convenience function for converting MetadataIn to MetadataDB object.""" content = await validate_context( @@ -69,12 +70,12 @@ async def _build_metadata_db_obj( @router.post("/{dataset_id}/metadata", response_model=MetadataOut) async def add_dataset_metadata( - metadata_in: MetadataIn, - dataset_id: str, - user=Depends(get_current_user), - es: Elasticsearch = 
Depends(dependencies.get_elasticsearchclient), - admin_mode: bool = Depends(get_admin_mode), - allow: bool = Depends(Authorization("uploader")), + metadata_in: MetadataIn, + dataset_id: str, + user=Depends(get_current_user), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + admin_mode: bool = Depends(get_admin_mode), + allow: bool = Depends(Authorization("uploader")), ): """Attach new metadata to a dataset. The body must include a contents field with the JSON metadata, and either a context JSON-LD object, context_url, or definition (name of a metadata definition) to be valid. @@ -119,12 +120,12 @@ async def add_dataset_metadata( @router.put("/{dataset_id}/metadata", response_model=MetadataOut) async def replace_dataset_metadata( - metadata_in: MetadataIn, - dataset_id: str, - user=Depends(get_current_user), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), - admin_mode: bool = Depends(get_admin_mode), - allow: bool = Depends(Authorization("editor")), + metadata_in: MetadataIn, + dataset_id: str, + user=Depends(get_current_user), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + admin_mode: bool = Depends(get_admin_mode), + allow: bool = Depends(Authorization("editor")), ): """Update metadata. Any fields provided in the contents JSON will be added or updated in the metadata. If context or agent should be changed, use PUT. 
@@ -174,12 +175,12 @@ async def replace_dataset_metadata( @router.patch("/{dataset_id}/metadata", response_model=MetadataOut) async def update_dataset_metadata( - metadata_in: MetadataPatch, - dataset_id: str, - user=Depends(get_current_user), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), - admin_mode: bool = Depends(get_admin_mode), - allow: bool = Depends(Authorization("editor")), + metadata_in: MetadataPatch, + dataset_id: str, + user=Depends(get_current_user), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + admin_mode: bool = Depends(get_admin_mode), + allow: bool = Depends(Authorization("editor")), ): """Update metadata. Any fields provided in the contents JSON will be added or updated in the metadata. If context or agent should be changed, use PUT. @@ -194,9 +195,9 @@ async def update_dataset_metadata( if metadata_in.metadata_id is not None: # If a specific metadata_id is provided, validate the patch against existing context if ( - existing := await MetadataDB.get( - PydanticObjectId(metadata_in.metadata_id) - ) + existing := await MetadataDB.get( + PydanticObjectId(metadata_in.metadata_id) + ) ) is not None: content = await validate_context( metadata_in.content, @@ -244,12 +245,12 @@ async def update_dataset_metadata( @router.get("/{dataset_id}/metadata", response_model=List[MetadataOut]) async def get_dataset_metadata( - dataset_id: str, - listener_name: Optional[str] = Form(None), - listener_version: Optional[float] = Form(None), - user=Depends(get_current_user), - admin_mode: bool = Depends(get_admin_mode), - allow: bool = Depends(Authorization("viewer")), + dataset_id: str, + listener_name: Optional[str] = Form(None), + listener_version: Optional[float] = Form(None), + user=Depends(get_current_user), + admin_mode: bool = Depends(get_admin_mode), + allow: bool = Depends(Authorization("viewer")), ): if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: query = 
[MetadataDB.resource.resource_id == ObjectId(dataset_id)] @@ -263,9 +264,9 @@ async def get_dataset_metadata( async for md in MetadataDB.find(*query): if md.definition is not None: if ( - md_def := await MetadataDefinitionDB.find_one( - MetadataDefinitionDB.name == md.definition - ) + md_def := await MetadataDefinitionDB.find_one( + MetadataDefinitionDB.name == md.definition + ) ) is not None: md.description = md_def.description metadata.append(md) @@ -276,12 +277,12 @@ async def get_dataset_metadata( @router.delete("/{dataset_id}/metadata", response_model=MetadataOut) async def delete_dataset_metadata( - metadata_in: MetadataDelete, - dataset_id: str, - user=Depends(get_current_user), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), - admin_mode: bool = Depends(get_admin_mode), - allow: bool = Depends(Authorization("editor")), + metadata_in: MetadataDelete, + dataset_id: str, + user=Depends(get_current_user), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + admin_mode: bool = Depends(get_admin_mode), + allow: bool = Depends(Authorization("editor")), ): if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: # filter by metadata_id or definition @@ -289,9 +290,9 @@ async def delete_dataset_metadata( if metadata_in.metadata_id is not None: # If a specific metadata_id is provided, delete the matching entry if ( - existing_md := await MetadataDB.find_one( - MetadataDB.metadata_id == ObjectId(metadata_in.metadata_id) - ) + existing_md := await MetadataDB.find_one( + MetadataDB.metadata_id == ObjectId(metadata_in.metadata_id) + ) ) is not None: query.append(MetadataDB.metadata_id == metadata_in.metadata_id) else: diff --git a/backend/app/routers/metadata_files.py b/backend/app/routers/metadata_files.py index 02d9c3278..54e1ab716 100644 --- a/backend/app/routers/metadata_files.py +++ b/backend/app/routers/metadata_files.py @@ -13,7 +13,7 @@ from app import dependencies from app.config import settings from 
app.deps.authorization_deps import FileAuthorization -from app.keycloak_auth import get_current_user, UserOut, get_admin_mode +from app.keycloak_auth import get_current_user, UserOut from app.models.files import FileOut, FileDB, FileVersionDB from app.models.listeners import EventListenerDB from app.models.metadata import ( @@ -29,6 +29,7 @@ MetadataDelete, MetadataDefinitionDB, ) +from app.routers.authentication import get_admin_mode from app.search.connect import delete_document_by_id from app.search.index import index_file @@ -36,11 +37,11 @@ async def _build_metadata_db_obj( - metadata_in: MetadataIn, - file: FileOut, - user: UserOut, - agent: MetadataAgent = None, - version: int = None, + metadata_in: MetadataIn, + file: FileOut, + user: UserOut, + agent: MetadataAgent = None, + version: int = None, ): """Convenience function for building a MetadataDB object from incoming metadata plus a file. Agent and file version will be determined based on inputs if they are not provided directly.""" @@ -56,10 +57,10 @@ async def _build_metadata_db_obj( file_version = metadata_in.file_version if file_version is not None and file_version > 0: if ( - await FileVersionDB.find_one( - FileVersionDB.file_id == file.id, - FileVersionDB.version_num == file_version, - ) + await FileVersionDB.find_one( + FileVersionDB.file_id == file.id, + FileVersionDB.version_num == file_version, + ) ) is None: raise HTTPException( status_code=404, @@ -103,12 +104,12 @@ async def _build_metadata_db_obj( @router.post("/{file_id}/metadata", response_model=MetadataOut) async def add_file_metadata( - metadata_in: MetadataIn, - file_id: str, - admin_mode: bool = Depends(get_admin_mode), - user=Depends(get_current_user), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), - allow: bool = Depends(FileAuthorization("uploader")), + metadata_in: MetadataIn, + file_id: str, + admin_mode: bool = Depends(get_admin_mode), + user=Depends(get_current_user), + es: Elasticsearch = 
Depends(dependencies.get_elasticsearchclient), + allow: bool = Depends(FileAuthorization("uploader")), ): """Attach new metadata to a file. The body must include a contents field with the JSON metadata, and either a context JSON-LD object, context_url, or definition (name of a metadata definition) to be valid. @@ -156,12 +157,12 @@ async def add_file_metadata( @router.put("/{file_id}/metadata", response_model=MetadataOut) async def replace_file_metadata( - metadata_in: MetadataPatch, - file_id: str, - admin_mode: bool = Depends(get_admin_mode), - user=Depends(get_current_user), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), - allow: bool = Depends(FileAuthorization("editor")), + metadata_in: MetadataPatch, + file_id: str, + admin_mode: bool = Depends(get_admin_mode), + user=Depends(get_current_user), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + allow: bool = Depends(FileAuthorization("editor")), ): """Replace metadata, including agent and context. If only metadata contents should be updated, use PATCH instead. 
@@ -175,10 +176,10 @@ async def replace_file_metadata( version = metadata_in.file_version if version is not None: if ( - await FileVersionDB.find_one( - FileVersionDB.file_id == ObjectId(file_id), - FileVersionDB.version_num == version, - ) + await FileVersionDB.find_one( + FileVersionDB.file_id == ObjectId(file_id), + FileVersionDB.version_num == version, + ) ) is None: raise HTTPException( status_code=404, @@ -230,12 +231,12 @@ async def replace_file_metadata( @router.patch("/{file_id}/metadata", response_model=MetadataOut) async def update_file_metadata( - metadata_in: MetadataPatch, - file_id: str, - admin_mode: bool = Depends(get_admin_mode), - user=Depends(get_current_user), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), - allow: bool = Depends(FileAuthorization("editor")), + metadata_in: MetadataPatch, + file_id: str, + admin_mode: bool = Depends(get_admin_mode), + user=Depends(get_current_user), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + allow: bool = Depends(FileAuthorization("editor")), ): """Update metadata. Any fields provided in the contents JSON will be added or updated in the metadata. If context or agent should be changed, use PUT. 
@@ -246,10 +247,10 @@ async def update_file_metadata( # check if metadata with file version exists, replace metadata if none exists if ( - await MetadataDB.find_one( - MetadataDB.resource.resource_id == ObjectId(file_id), - MetadataDB.resource.version == metadata_in.file_version, - ) + await MetadataDB.find_one( + MetadataDB.resource.resource_id == ObjectId(file_id), + MetadataDB.resource.version == metadata_in.file_version, + ) ) is None: result = await replace_file_metadata(metadata_in, file_id, user, es) return result @@ -261,9 +262,9 @@ async def update_file_metadata( if metadata_in.metadata_id is not None: # If a specific metadata_id is provided, validate the patch against existing context if ( - existing_md := await MetadataDB.find_one( - MetadataDB.id == ObjectId(metadata_in.metadata_id) - ) + existing_md := await MetadataDB.find_one( + MetadataDB.id == ObjectId(metadata_in.metadata_id) + ) ) is not None: content = await validate_context( metadata_in.content, @@ -281,10 +282,10 @@ async def update_file_metadata( if metadata_in.file_version is not None: if ( - await FileVersionDB.find_one( - FileVersionDB.file_id == ObjectId(file_id), - FileVersionDB.version_num == metadata_in.file_version, - ) + await FileVersionDB.find_one( + FileVersionDB.file_id == ObjectId(file_id), + FileVersionDB.version_num == metadata_in.file_version, + ) ) is None: raise HTTPException( status_code=404, @@ -328,15 +329,15 @@ async def update_file_metadata( @router.get("/{file_id}/metadata", response_model=List[MetadataOut]) async def get_file_metadata( - file_id: str, - admin_mode: bool = Depends(get_admin_mode), - version: Optional[int] = None, - all_versions: Optional[bool] = False, - definition: Optional[str] = Form(None), - listener_name: Optional[str] = Form(None), - listener_version: Optional[float] = Form(None), - user=Depends(get_current_user), - allow: bool = Depends(FileAuthorization("viewer")), + file_id: str, + admin_mode: bool = Depends(get_admin_mode), + version: 
Optional[int] = None, + all_versions: Optional[bool] = False, + definition: Optional[str] = Form(None), + listener_name: Optional[str] = Form(None), + listener_version: Optional[float] = Form(None), + user=Depends(get_current_user), + allow: bool = Depends(FileAuthorization("viewer")), ): """Get file metadata.""" if (file := await FileDB.get(PydanticObjectId(file_id))) is not None: @@ -346,10 +347,10 @@ async def get_file_metadata( if not all_versions: if version is not None and version > 0: if ( - await FileVersionDB.find_one( - FileVersionDB.file_id == ObjectId(file_id), - FileVersionDB.version_num == version, - ) + await FileVersionDB.find_one( + FileVersionDB.file_id == ObjectId(file_id), + FileVersionDB.version_num == version, + ) ) is None: raise HTTPException( status_code=404, @@ -373,9 +374,9 @@ async def get_file_metadata( async for md in MetadataDB.find(*query): if md.definition is not None: if ( - md_def := await MetadataDefinitionDB.find_one( - MetadataDefinitionDB.name == md.definition - ) + md_def := await MetadataDefinitionDB.find_one( + MetadataDefinitionDB.name == md.definition + ) ) is not None: md_def = MetadataDefinitionOut(**md_def.dict()) md.description = md_def.description @@ -387,13 +388,13 @@ async def get_file_metadata( @router.delete("/{file_id}/metadata", response_model=MetadataOut) async def delete_file_metadata( - metadata_in: MetadataDelete, - file_id: str, - admin_mode: bool = Depends(get_admin_mode), - # version: Optional[int] = Form(None), - user=Depends(get_current_user), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), - allow: bool = Depends(FileAuthorization("editor")), + metadata_in: MetadataDelete, + file_id: str, + admin_mode: bool = Depends(get_admin_mode), + # version: Optional[int] = Form(None), + user=Depends(get_current_user), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + allow: bool = Depends(FileAuthorization("editor")), ): if (file := await 
FileDB.get(PydanticObjectId(file_id))) is not None: query = [MetadataDB.resource.resource_id == ObjectId(file_id)] @@ -419,9 +420,9 @@ async def delete_file_metadata( if metadata_in.metadata_id is not None: # If a specific metadata_id is provided, delete the matching entry if ( - await MetadataDB.find_one( - MetadataDB.metadata_id == ObjectId(metadata_in.metadata_id) - ) + await MetadataDB.find_one( + MetadataDB.metadata_id == ObjectId(metadata_in.metadata_id) + ) ) is not None: query.append(MetadataDB.metadata_id == metadata_in.metadata_id) else: From d40f4199c5ffecf75e64a55238aac73a920e6b35 Mon Sep 17 00:00:00 2001 From: Chen Wang Date: Tue, 12 Dec 2023 09:51:47 -0600 Subject: [PATCH 27/43] codegen --- frontend/src/openapi/v2/models/UserOut.ts | 1 + .../v2/services/AuthorizationService.ts | 60 -------------- .../openapi/v2/services/DatasetsService.ts | 80 ------------------- .../src/openapi/v2/services/FilesService.ts | 55 ------------- .../src/openapi/v2/services/GroupsService.ts | 30 ------- .../src/openapi/v2/services/LoginService.ts | 13 +++ .../openapi/v2/services/MetadataService.ts | 60 -------------- 7 files changed, 14 insertions(+), 285 deletions(-) diff --git a/frontend/src/openapi/v2/models/UserOut.ts b/frontend/src/openapi/v2/models/UserOut.ts index f624b5122..d10ffda13 100644 --- a/frontend/src/openapi/v2/models/UserOut.ts +++ b/frontend/src/openapi/v2/models/UserOut.ts @@ -21,4 +21,5 @@ export type UserOut = { last_name: string; id?: string; admin: boolean; + admin_mode?: boolean; } diff --git a/frontend/src/openapi/v2/services/AuthorizationService.ts b/frontend/src/openapi/v2/services/AuthorizationService.ts index ac554993f..22e2fe267 100644 --- a/frontend/src/openapi/v2/services/AuthorizationService.ts +++ b/frontend/src/openapi/v2/services/AuthorizationService.ts @@ -16,21 +16,16 @@ export class AuthorizationService { * Save authorization info in Mongo. This is a triple of dataset_id/user_id/role/group_id. 
* @param datasetId * @param requestBody - * @param xAdminMode * @returns AuthorizationOut Successful Response * @throws ApiError */ public static saveAuthorizationApiV2AuthorizationsDatasetsDatasetIdPost( datasetId: string, requestBody: AuthorizationBase, - xAdminMode?: string, ): CancelablePromise { return __request({ method: 'POST', path: `/api/v2/authorizations/datasets/${datasetId}`, - headers: { - 'x_admin_mode': xAdminMode, - }, body: requestBody, mediaType: 'application/json', errors: { @@ -43,20 +38,15 @@ export class AuthorizationService { * Get Dataset Role * Retrieve role of user for a specific dataset. * @param datasetId - * @param xAdminMode * @returns AuthorizationOut Successful Response * @throws ApiError */ public static getDatasetRoleApiV2AuthorizationsDatasetsDatasetIdRoleGet( datasetId: string, - xAdminMode?: string, ): CancelablePromise { return __request({ method: 'GET', path: `/api/v2/authorizations/datasets/${datasetId}/role`, - headers: { - 'x_admin_mode': xAdminMode, - }, errors: { 422: `Validation Error`, }, @@ -68,20 +58,15 @@ export class AuthorizationService { * Used for testing only. Returns true if user has viewer permission on dataset, otherwise throws a 403 Forbidden HTTP exception. * See `routers/authorization.py` for more info. * @param datasetId - * @param xAdminMode * @returns any Successful Response * @throws ApiError */ public static getDatasetRoleViewerApiV2AuthorizationsDatasetsDatasetIdRoleViewerGet( datasetId: string, - xAdminMode?: string, ): CancelablePromise { return __request({ method: 'GET', path: `/api/v2/authorizations/datasets/${datasetId}/role/viewer}`, - headers: { - 'x_admin_mode': xAdminMode, - }, errors: { 422: `Validation Error`, }, @@ -93,20 +78,15 @@ export class AuthorizationService { * Used for testing only. Returns true if user has owner permission on dataset, otherwise throws a 403 Forbidden HTTP exception. * See `routers/authorization.py` for more info. 
* @param datasetId - * @param xAdminMode * @returns any Successful Response * @throws ApiError */ public static getDatasetRoleOwnerApiV2AuthorizationsDatasetsDatasetIdRoleOwnerGet( datasetId: string, - xAdminMode?: string, ): CancelablePromise { return __request({ method: 'GET', path: `/api/v2/authorizations/datasets/${datasetId}/role/owner}`, - headers: { - 'x_admin_mode': xAdminMode, - }, errors: { 422: `Validation Error`, }, @@ -117,21 +97,16 @@ export class AuthorizationService { * Get File Role * @param fileId * @param datasetId - * @param xAdminMode * @returns RoleType Successful Response * @throws ApiError */ public static getFileRoleApiV2AuthorizationsFilesFileIdRoleGet( fileId: string, datasetId?: string, - xAdminMode?: string, ): CancelablePromise { return __request({ method: 'GET', path: `/api/v2/authorizations/files/${fileId}/role}`, - headers: { - 'x_admin_mode': xAdminMode, - }, query: { 'dataset_id': datasetId, }, @@ -145,21 +120,16 @@ export class AuthorizationService { * Get Metadata Role * @param metadataId * @param datasetId - * @param xAdminMode * @returns AuthorizationMetadata Successful Response * @throws ApiError */ public static getMetadataRoleApiV2AuthorizationsMetadataMetadataIdRoleGet( metadataId: string, datasetId?: string, - xAdminMode?: string, ): CancelablePromise { return __request({ method: 'GET', path: `/api/v2/authorizations/metadata/${metadataId}/role}`, - headers: { - 'x_admin_mode': xAdminMode, - }, query: { 'dataset_id': datasetId, }, @@ -173,21 +143,16 @@ export class AuthorizationService { * Get Group Role * @param groupId * @param datasetId - * @param xAdminMode * @returns RoleType Successful Response * @throws ApiError */ public static getGroupRoleApiV2AuthorizationsGroupsGroupIdRoleGet( groupId: string, datasetId?: string, - xAdminMode?: string, ): CancelablePromise { return __request({ method: 'GET', path: `/api/v2/authorizations/groups/${groupId}/role}`, - headers: { - 'x_admin_mode': xAdminMode, - }, query: { 
'dataset_id': datasetId, }, @@ -203,7 +168,6 @@ export class AuthorizationService { * @param datasetId * @param groupId * @param role - * @param xAdminMode * @returns AuthorizationOut Successful Response * @throws ApiError */ @@ -211,14 +175,10 @@ export class AuthorizationService { datasetId: string, groupId: string, role: RoleType, - xAdminMode?: string, ): CancelablePromise { return __request({ method: 'POST', path: `/api/v2/authorizations/datasets/${datasetId}/group_role/${groupId}/${role}`, - headers: { - 'x_admin_mode': xAdminMode, - }, errors: { 422: `Validation Error`, }, @@ -231,7 +191,6 @@ export class AuthorizationService { * @param datasetId * @param username * @param role - * @param xAdminMode * @returns AuthorizationOut Successful Response * @throws ApiError */ @@ -239,14 +198,10 @@ export class AuthorizationService { datasetId: string, username: string, role: RoleType, - xAdminMode?: string, ): CancelablePromise { return __request({ method: 'POST', path: `/api/v2/authorizations/datasets/${datasetId}/user_role/${username}/${role}`, - headers: { - 'x_admin_mode': xAdminMode, - }, errors: { 422: `Validation Error`, }, @@ -258,21 +213,16 @@ export class AuthorizationService { * Remove any role the group has with a specific dataset. * @param datasetId * @param groupId - * @param xAdminMode * @returns AuthorizationOut Successful Response * @throws ApiError */ public static removeDatasetGroupRoleApiV2AuthorizationsDatasetsDatasetIdGroupRoleGroupIdDelete( datasetId: string, groupId: string, - xAdminMode?: string, ): CancelablePromise { return __request({ method: 'DELETE', path: `/api/v2/authorizations/datasets/${datasetId}/group_role/${groupId}`, - headers: { - 'x_admin_mode': xAdminMode, - }, errors: { 422: `Validation Error`, }, @@ -284,21 +234,16 @@ export class AuthorizationService { * Remove any role the user has with a specific dataset. 
* @param datasetId * @param username - * @param xAdminMode * @returns AuthorizationOut Successful Response * @throws ApiError */ public static removeDatasetUserRoleApiV2AuthorizationsDatasetsDatasetIdUserRoleUsernameDelete( datasetId: string, username: string, - xAdminMode?: string, ): CancelablePromise { return __request({ method: 'DELETE', path: `/api/v2/authorizations/datasets/${datasetId}/user_role/${username}`, - headers: { - 'x_admin_mode': xAdminMode, - }, errors: { 422: `Validation Error`, }, @@ -309,20 +254,15 @@ export class AuthorizationService { * Get Dataset Roles * Get a list of all users and groups that have assigned roles on this dataset. * @param datasetId - * @param xAdminMode * @returns DatasetRoles Successful Response * @throws ApiError */ public static getDatasetRolesApiV2AuthorizationsDatasetsDatasetIdRolesGet( datasetId: string, - xAdminMode?: string, ): CancelablePromise { return __request({ method: 'GET', path: `/api/v2/authorizations/datasets/${datasetId}/roles}`, - headers: { - 'x_admin_mode': xAdminMode, - }, errors: { 422: `Validation Error`, }, diff --git a/frontend/src/openapi/v2/services/DatasetsService.ts b/frontend/src/openapi/v2/services/DatasetsService.ts index 3b14fac45..e4c725d43 100644 --- a/frontend/src/openapi/v2/services/DatasetsService.ts +++ b/frontend/src/openapi/v2/services/DatasetsService.ts @@ -23,7 +23,6 @@ export class DatasetsService { * @param limit * @param mine * @param datasetId - * @param xAdminMode * @returns DatasetOut Successful Response * @throws ApiError */ @@ -32,14 +31,10 @@ export class DatasetsService { limit: number = 10, mine: boolean = false, datasetId?: string, - xAdminMode?: string, ): CancelablePromise> { return __request({ method: 'GET', path: `/api/v2/datasets`, - headers: { - 'x_admin_mode': xAdminMode, - }, query: { 'skip': skip, 'limit': limit, @@ -75,20 +70,15 @@ export class DatasetsService { /** * Get Dataset * @param datasetId - * @param xAdminMode * @returns DatasetOut Successful 
Response * @throws ApiError */ public static getDatasetApiV2DatasetsDatasetIdGet( datasetId: string, - xAdminMode?: string, ): CancelablePromise { return __request({ method: 'GET', path: `/api/v2/datasets/${datasetId}`, - headers: { - 'x_admin_mode': xAdminMode, - }, errors: { 422: `Validation Error`, }, @@ -99,21 +89,16 @@ export class DatasetsService { * Edit Dataset * @param datasetId * @param requestBody - * @param xAdminMode * @returns DatasetOut Successful Response * @throws ApiError */ public static editDatasetApiV2DatasetsDatasetIdPut( datasetId: string, requestBody: DatasetBase, - xAdminMode?: string, ): CancelablePromise { return __request({ method: 'PUT', path: `/api/v2/datasets/${datasetId}`, - headers: { - 'x_admin_mode': xAdminMode, - }, body: requestBody, mediaType: 'application/json', errors: { @@ -125,20 +110,15 @@ export class DatasetsService { /** * Delete Dataset * @param datasetId - * @param xAdminMode * @returns any Successful Response * @throws ApiError */ public static deleteDatasetApiV2DatasetsDatasetIdDelete( datasetId: string, - xAdminMode?: string, ): CancelablePromise { return __request({ method: 'DELETE', path: `/api/v2/datasets/${datasetId}`, - headers: { - 'x_admin_mode': xAdminMode, - }, errors: { 422: `Validation Error`, }, @@ -149,21 +129,16 @@ export class DatasetsService { * Patch Dataset * @param datasetId * @param requestBody - * @param xAdminMode * @returns DatasetOut Successful Response * @throws ApiError */ public static patchDatasetApiV2DatasetsDatasetIdPatch( datasetId: string, requestBody: DatasetPatch, - xAdminMode?: string, ): CancelablePromise { return __request({ method: 'PATCH', path: `/api/v2/datasets/${datasetId}`, - headers: { - 'x_admin_mode': xAdminMode, - }, body: requestBody, mediaType: 'application/json', errors: { @@ -178,7 +153,6 @@ export class DatasetsService { * @param folderId * @param skip * @param limit - * @param xAdminMode * @returns FileOut Successful Response * @throws ApiError */ @@ -187,14 
+161,10 @@ export class DatasetsService { folderId?: string, skip?: number, limit: number = 10, - xAdminMode?: string, ): CancelablePromise> { return __request({ method: 'GET', path: `/api/v2/datasets/${datasetId}/files`, - headers: { - 'x_admin_mode': xAdminMode, - }, query: { 'folder_id': folderId, 'skip': skip, @@ -211,7 +181,6 @@ export class DatasetsService { * @param datasetId * @param formData * @param folderId - * @param xAdminMode * @returns FileOut Successful Response * @throws ApiError */ @@ -219,14 +188,10 @@ export class DatasetsService { datasetId: string, formData: Body_save_file_api_v2_datasets__dataset_id__files_post, folderId?: string, - xAdminMode?: string, ): CancelablePromise { return __request({ method: 'POST', path: `/api/v2/datasets/${datasetId}/files`, - headers: { - 'x_admin_mode': xAdminMode, - }, query: { 'folder_id': folderId, }, @@ -244,7 +209,6 @@ export class DatasetsService { * @param parentFolder * @param skip * @param limit - * @param xAdminMode * @returns FolderOut Successful Response * @throws ApiError */ @@ -253,14 +217,10 @@ export class DatasetsService { parentFolder?: string, skip?: number, limit: number = 10, - xAdminMode?: string, ): CancelablePromise> { return __request({ method: 'GET', path: `/api/v2/datasets/${datasetId}/folders`, - headers: { - 'x_admin_mode': xAdminMode, - }, query: { 'parent_folder': parentFolder, 'skip': skip, @@ -276,21 +236,16 @@ export class DatasetsService { * Add Folder * @param datasetId * @param requestBody - * @param xAdminMode * @returns FolderOut Successful Response * @throws ApiError */ public static addFolderApiV2DatasetsDatasetIdFoldersPost( datasetId: string, requestBody: FolderIn, - xAdminMode?: string, ): CancelablePromise { return __request({ method: 'POST', path: `/api/v2/datasets/${datasetId}/folders`, - headers: { - 'x_admin_mode': xAdminMode, - }, body: requestBody, mediaType: 'application/json', errors: { @@ -303,21 +258,16 @@ export class DatasetsService { * Delete Folder * 
@param datasetId * @param folderId - * @param xAdminMode * @returns any Successful Response * @throws ApiError */ public static deleteFolderApiV2DatasetsDatasetIdFoldersFolderIdDelete( datasetId: string, folderId: string, - xAdminMode?: string, ): CancelablePromise { return __request({ method: 'DELETE', path: `/api/v2/datasets/${datasetId}/folders/${folderId}`, - headers: { - 'x_admin_mode': xAdminMode, - }, errors: { 422: `Validation Error`, }, @@ -329,7 +279,6 @@ export class DatasetsService { * @param datasetId * @param formData * @param folderId - * @param xAdminMode * @returns FileOut Successful Response * @throws ApiError */ @@ -337,14 +286,10 @@ export class DatasetsService { datasetId: string, formData: Body_save_files_api_v2_datasets__dataset_id__filesMultiple_post, folderId?: string, - xAdminMode?: string, ): CancelablePromise> { return __request({ method: 'POST', path: `/api/v2/datasets/${datasetId}/filesMultiple`, - headers: { - 'x_admin_mode': xAdminMode, - }, query: { 'folder_id': folderId, }, @@ -361,7 +306,6 @@ export class DatasetsService { * @param datasetId * @param requestBody * @param folderId - * @param xAdminMode * @returns FileOut Successful Response * @throws ApiError */ @@ -369,14 +313,10 @@ export class DatasetsService { datasetId: string, requestBody: LocalFileIn, folderId?: string, - xAdminMode?: string, ): CancelablePromise { return __request({ method: 'POST', path: `/api/v2/datasets/${datasetId}/local_files`, - headers: { - 'x_admin_mode': xAdminMode, - }, query: { 'folder_id': folderId, }, @@ -411,20 +351,15 @@ export class DatasetsService { /** * Download Dataset * @param datasetId - * @param xAdminMode * @returns DatasetOut Successful Response * @throws ApiError */ public static downloadDatasetApiV2DatasetsDatasetIdDownloadGet( datasetId: string, - xAdminMode?: string, ): CancelablePromise { return __request({ method: 'GET', path: `/api/v2/datasets/${datasetId}/download`, - headers: { - 'x_admin_mode': xAdminMode, - }, errors: { 
422: `Validation Error`, }, @@ -435,7 +370,6 @@ export class DatasetsService { * Get Dataset Extract * @param datasetId * @param extractorName - * @param xAdminMode * @param requestBody * @returns any Successful Response * @throws ApiError @@ -443,15 +377,11 @@ export class DatasetsService { public static getDatasetExtractApiV2DatasetsDatasetIdExtractPost( datasetId: string, extractorName: string, - xAdminMode?: string, requestBody?: any, ): CancelablePromise { return __request({ method: 'POST', path: `/api/v2/datasets/${datasetId}/extract`, - headers: { - 'x_admin_mode': xAdminMode, - }, query: { 'extractorName': extractorName, }, @@ -466,20 +396,15 @@ export class DatasetsService { /** * Download Dataset Thumbnail * @param datasetId - * @param xAdminMode * @returns any Successful Response * @throws ApiError */ public static downloadDatasetThumbnailApiV2DatasetsDatasetIdThumbnailGet( datasetId: string, - xAdminMode?: string, ): CancelablePromise { return __request({ method: 'GET', path: `/api/v2/datasets/${datasetId}/thumbnail`, - headers: { - 'x_admin_mode': xAdminMode, - }, errors: { 422: `Validation Error`, }, @@ -490,21 +415,16 @@ export class DatasetsService { * Add Dataset Thumbnail * @param datasetId * @param thumbnailId - * @param xAdminMode * @returns DatasetOut Successful Response * @throws ApiError */ public static addDatasetThumbnailApiV2DatasetsDatasetIdThumbnailThumbnailIdPatch( datasetId: string, thumbnailId: string, - xAdminMode?: string, ): CancelablePromise { return __request({ method: 'PATCH', path: `/api/v2/datasets/${datasetId}/thumbnail/${thumbnailId}`, - headers: { - 'x_admin_mode': xAdminMode, - }, errors: { 422: `Validation Error`, }, diff --git a/frontend/src/openapi/v2/services/FilesService.ts b/frontend/src/openapi/v2/services/FilesService.ts index b4334e08f..6a48df5f9 100644 --- a/frontend/src/openapi/v2/services/FilesService.ts +++ b/frontend/src/openapi/v2/services/FilesService.ts @@ -15,7 +15,6 @@ export class FilesService { * 
@param version * @param increment * @param datasetId - * @param xAdminMode * @returns any Successful Response * @throws ApiError */ @@ -24,14 +23,10 @@ export class FilesService { version?: number, increment: boolean = true, datasetId?: string, - xAdminMode?: string, ): CancelablePromise { return __request({ method: 'GET', path: `/api/v2/files/${fileId}`, - headers: { - 'x_admin_mode': xAdminMode, - }, query: { 'version': version, 'increment': increment, @@ -48,7 +43,6 @@ export class FilesService { * @param fileId * @param formData * @param datasetId - * @param xAdminMode * @returns FileOut Successful Response * @throws ApiError */ @@ -56,14 +50,10 @@ export class FilesService { fileId: string, formData: Body_update_file_api_v2_files__file_id__put, datasetId?: string, - xAdminMode?: string, ): CancelablePromise { return __request({ method: 'PUT', path: `/api/v2/files/${fileId}`, - headers: { - 'x_admin_mode': xAdminMode, - }, query: { 'dataset_id': datasetId, }, @@ -79,21 +69,16 @@ export class FilesService { * Delete File * @param fileId * @param datasetId - * @param xAdminMode * @returns any Successful Response * @throws ApiError */ public static deleteFileApiV2FilesFileIdDelete( fileId: string, datasetId?: string, - xAdminMode?: string, ): CancelablePromise { return __request({ method: 'DELETE', path: `/api/v2/files/${fileId}`, - headers: { - 'x_admin_mode': xAdminMode, - }, query: { 'dataset_id': datasetId, }, @@ -109,7 +94,6 @@ export class FilesService { * @param version * @param expiresInSeconds * @param datasetId - * @param xAdminMode * @returns any Successful Response * @throws ApiError */ @@ -118,14 +102,10 @@ export class FilesService { version?: number, expiresInSeconds: number = 3600, datasetId?: string, - xAdminMode?: string, ): CancelablePromise { return __request({ method: 'GET', path: `/api/v2/files/${fileId}/url/`, - headers: { - 'x_admin_mode': xAdminMode, - }, query: { 'version': version, 'expires_in_seconds': expiresInSeconds, @@ -141,21 
+121,16 @@ export class FilesService { * Get File Summary * @param fileId * @param datasetId - * @param xAdminMode * @returns FileOut Successful Response * @throws ApiError */ public static getFileSummaryApiV2FilesFileIdSummaryGet( fileId: string, datasetId?: string, - xAdminMode?: string, ): CancelablePromise { return __request({ method: 'GET', path: `/api/v2/files/${fileId}/summary`, - headers: { - 'x_admin_mode': xAdminMode, - }, query: { 'dataset_id': datasetId, }, @@ -170,7 +145,6 @@ export class FilesService { * @param fileId * @param versionNum * @param datasetId - * @param xAdminMode * @returns FileOut Successful Response * @throws ApiError */ @@ -178,14 +152,10 @@ export class FilesService { fileId: string, versionNum?: number, datasetId?: string, - xAdminMode?: string, ): CancelablePromise { return __request({ method: 'GET', path: `/api/v2/files/${fileId}/version_details`, - headers: { - 'x_admin_mode': xAdminMode, - }, query: { 'version_num': versionNum, 'dataset_id': datasetId, @@ -202,7 +172,6 @@ export class FilesService { * @param skip * @param limit * @param datasetId - * @param xAdminMode * @returns FileVersion Successful Response * @throws ApiError */ @@ -211,14 +180,10 @@ export class FilesService { skip?: number, limit: number = 20, datasetId?: string, - xAdminMode?: string, ): CancelablePromise> { return __request({ method: 'GET', path: `/api/v2/files/${fileId}/versions`, - headers: { - 'x_admin_mode': xAdminMode, - }, query: { 'skip': skip, 'limit': limit, @@ -235,7 +200,6 @@ export class FilesService { * @param fileId * @param extractorName * @param datasetId - * @param xAdminMode * @param requestBody * @returns any Successful Response * @throws ApiError @@ -244,15 +208,11 @@ export class FilesService { fileId: string, extractorName: string, datasetId?: string, - xAdminMode?: string, requestBody?: any, ): CancelablePromise { return __request({ method: 'POST', path: `/api/v2/files/${fileId}/extract`, - headers: { - 'x_admin_mode': xAdminMode, 
- }, query: { 'extractorName': extractorName, 'dataset_id': datasetId, @@ -277,21 +237,16 @@ export class FilesService { * rabbitmq_client: Rabbitmq Client * @param fileId * @param datasetId - * @param xAdminMode * @returns any Successful Response * @throws ApiError */ public static resubmitFileExtractionsApiV2FilesFileIdResubmitExtractPost( fileId: string, datasetId?: string, - xAdminMode?: string, ): CancelablePromise { return __request({ method: 'POST', path: `/api/v2/files/${fileId}/resubmit_extract`, - headers: { - 'x_admin_mode': xAdminMode, - }, query: { 'dataset_id': datasetId, }, @@ -305,21 +260,16 @@ export class FilesService { * Download File Thumbnail * @param fileId * @param datasetId - * @param xAdminMode * @returns any Successful Response * @throws ApiError */ public static downloadFileThumbnailApiV2FilesFileIdThumbnailGet( fileId: string, datasetId?: string, - xAdminMode?: string, ): CancelablePromise { return __request({ method: 'GET', path: `/api/v2/files/${fileId}/thumbnail`, - headers: { - 'x_admin_mode': xAdminMode, - }, query: { 'dataset_id': datasetId, }, @@ -334,7 +284,6 @@ export class FilesService { * @param fileId * @param thumbnailId * @param datasetId - * @param xAdminMode * @returns FileOut Successful Response * @throws ApiError */ @@ -342,14 +291,10 @@ export class FilesService { fileId: string, thumbnailId: string, datasetId?: string, - xAdminMode?: string, ): CancelablePromise { return __request({ method: 'PATCH', path: `/api/v2/files/${fileId}/thumbnail/${thumbnailId}`, - headers: { - 'x_admin_mode': xAdminMode, - }, query: { 'dataset_id': datasetId, }, diff --git a/frontend/src/openapi/v2/services/GroupsService.ts b/frontend/src/openapi/v2/services/GroupsService.ts index 9d2dd379b..6c20074ce 100644 --- a/frontend/src/openapi/v2/services/GroupsService.ts +++ b/frontend/src/openapi/v2/services/GroupsService.ts @@ -94,21 +94,16 @@ export class GroupsService { * Get Group * @param groupId * @param datasetId - * @param xAdminMode * 
@returns GroupOut Successful Response * @throws ApiError */ public static getGroupApiV2GroupsGroupIdGet( groupId: string, datasetId?: string, - xAdminMode?: string, ): CancelablePromise { return __request({ method: 'GET', path: `/api/v2/groups/${groupId}`, - headers: { - 'x_admin_mode': xAdminMode, - }, query: { 'dataset_id': datasetId, }, @@ -123,7 +118,6 @@ export class GroupsService { * @param groupId * @param requestBody * @param datasetId - * @param xAdminMode * @returns GroupOut Successful Response * @throws ApiError */ @@ -131,14 +125,10 @@ export class GroupsService { groupId: string, requestBody: GroupBase, datasetId?: string, - xAdminMode?: string, ): CancelablePromise { return __request({ method: 'PUT', path: `/api/v2/groups/${groupId}`, - headers: { - 'x_admin_mode': xAdminMode, - }, query: { 'dataset_id': datasetId, }, @@ -154,21 +144,16 @@ export class GroupsService { * Delete Group * @param groupId * @param datasetId - * @param xAdminMode * @returns GroupOut Successful Response * @throws ApiError */ public static deleteGroupApiV2GroupsGroupIdDelete( groupId: string, datasetId?: string, - xAdminMode?: string, ): CancelablePromise { return __request({ method: 'DELETE', path: `/api/v2/groups/${groupId}`, - headers: { - 'x_admin_mode': xAdminMode, - }, query: { 'dataset_id': datasetId, }, @@ -185,7 +170,6 @@ export class GroupsService { * @param username * @param role * @param datasetId - * @param xAdminMode * @returns GroupOut Successful Response * @throws ApiError */ @@ -194,14 +178,10 @@ export class GroupsService { username: string, role?: string, datasetId?: string, - xAdminMode?: string, ): CancelablePromise { return __request({ method: 'POST', path: `/api/v2/groups/${groupId}/add/${username}`, - headers: { - 'x_admin_mode': xAdminMode, - }, query: { 'role': role, 'dataset_id': datasetId, @@ -218,7 +198,6 @@ export class GroupsService { * @param groupId * @param username * @param datasetId - * @param xAdminMode * @returns GroupOut Successful 
Response * @throws ApiError */ @@ -226,14 +205,10 @@ export class GroupsService { groupId: string, username: string, datasetId?: string, - xAdminMode?: string, ): CancelablePromise { return __request({ method: 'POST', path: `/api/v2/groups/${groupId}/remove/${username}`, - headers: { - 'x_admin_mode': xAdminMode, - }, query: { 'dataset_id': datasetId, }, @@ -250,7 +225,6 @@ export class GroupsService { * @param username * @param role * @param datasetId - * @param xAdminMode * @returns GroupOut Successful Response * @throws ApiError */ @@ -259,14 +233,10 @@ export class GroupsService { username: string, role: string, datasetId?: string, - xAdminMode?: string, ): CancelablePromise { return __request({ method: 'PUT', path: `/api/v2/groups/${groupId}/update/${username}`, - headers: { - 'x_admin_mode': xAdminMode, - }, query: { 'role': role, 'dataset_id': datasetId, diff --git a/frontend/src/openapi/v2/services/LoginService.ts b/frontend/src/openapi/v2/services/LoginService.ts index 2a1bdd772..34d40b6e1 100644 --- a/frontend/src/openapi/v2/services/LoginService.ts +++ b/frontend/src/openapi/v2/services/LoginService.ts @@ -70,6 +70,19 @@ export class LoginService { }); } + /** + * Get Admin Mode + * Get Admin mode from User Object. + * @returns boolean Successful Response + * @throws ApiError + */ + public static getAdminModeApiV2AdminModeGet(): CancelablePromise { + return __request({ + method: 'GET', + path: `/api/v2/admin_mode`, + }); + } + /** * Set Admin * @param useremail diff --git a/frontend/src/openapi/v2/services/MetadataService.ts b/frontend/src/openapi/v2/services/MetadataService.ts index a4a512bc4..3090fc9c6 100644 --- a/frontend/src/openapi/v2/services/MetadataService.ts +++ b/frontend/src/openapi/v2/services/MetadataService.ts @@ -135,21 +135,16 @@ export class MetadataService { * Delete metadata by specific ID. 
* @param metadataId * @param datasetId - * @param xAdminMode * @returns any Successful Response * @throws ApiError */ public static deleteMetadataApiV2MetadataMetadataIdDelete( metadataId: string, datasetId?: string, - xAdminMode?: string, ): CancelablePromise { return __request({ method: 'DELETE', path: `/api/v2/metadata/${metadataId}`, - headers: { - 'x_admin_mode': xAdminMode, - }, query: { 'dataset_id': datasetId, }, @@ -169,7 +164,6 @@ export class MetadataService { * @param metadataId * @param requestBody * @param datasetId - * @param xAdminMode * @returns MetadataOut Successful Response * @throws ApiError */ @@ -177,14 +171,10 @@ export class MetadataService { metadataId: string, requestBody: MetadataPatch, datasetId?: string, - xAdminMode?: string, ): CancelablePromise { return __request({ method: 'PATCH', path: `/api/v2/metadata/${metadataId}`, - headers: { - 'x_admin_mode': xAdminMode, - }, query: { 'dataset_id': datasetId, }, @@ -203,7 +193,6 @@ export class MetadataService { * @param version * @param allVersions * @param datasetId - * @param xAdminMode * @param formData * @returns MetadataOut Successful Response * @throws ApiError @@ -213,15 +202,11 @@ export class MetadataService { version?: number, allVersions: boolean = false, datasetId?: string, - xAdminMode?: string, formData?: Body_get_file_metadata_api_v2_files__file_id__metadata_get, ): CancelablePromise> { return __request({ method: 'GET', path: `/api/v2/files/${fileId}/metadata`, - headers: { - 'x_admin_mode': xAdminMode, - }, query: { 'version': version, 'all_versions': allVersions, @@ -244,7 +229,6 @@ export class MetadataService { * @param fileId * @param requestBody * @param datasetId - * @param xAdminMode * @returns MetadataOut Successful Response * @throws ApiError */ @@ -252,14 +236,10 @@ export class MetadataService { fileId: string, requestBody: MetadataPatch, datasetId?: string, - xAdminMode?: string, ): CancelablePromise { return __request({ method: 'PUT', path: 
`/api/v2/files/${fileId}/metadata`, - headers: { - 'x_admin_mode': xAdminMode, - }, query: { 'dataset_id': datasetId, }, @@ -281,7 +261,6 @@ export class MetadataService { * @param fileId * @param requestBody * @param datasetId - * @param xAdminMode * @returns MetadataOut Successful Response * @throws ApiError */ @@ -289,14 +268,10 @@ export class MetadataService { fileId: string, requestBody: MetadataIn, datasetId?: string, - xAdminMode?: string, ): CancelablePromise { return __request({ method: 'POST', path: `/api/v2/files/${fileId}/metadata`, - headers: { - 'x_admin_mode': xAdminMode, - }, query: { 'dataset_id': datasetId, }, @@ -313,7 +288,6 @@ export class MetadataService { * @param fileId * @param requestBody * @param datasetId - * @param xAdminMode * @returns MetadataOut Successful Response * @throws ApiError */ @@ -321,14 +295,10 @@ export class MetadataService { fileId: string, requestBody: MetadataDelete, datasetId?: string, - xAdminMode?: string, ): CancelablePromise { return __request({ method: 'DELETE', path: `/api/v2/files/${fileId}/metadata`, - headers: { - 'x_admin_mode': xAdminMode, - }, query: { 'dataset_id': datasetId, }, @@ -350,7 +320,6 @@ export class MetadataService { * @param fileId * @param requestBody * @param datasetId - * @param xAdminMode * @returns MetadataOut Successful Response * @throws ApiError */ @@ -358,14 +327,10 @@ export class MetadataService { fileId: string, requestBody: MetadataPatch, datasetId?: string, - xAdminMode?: string, ): CancelablePromise { return __request({ method: 'PATCH', path: `/api/v2/files/${fileId}/metadata`, - headers: { - 'x_admin_mode': xAdminMode, - }, query: { 'dataset_id': datasetId, }, @@ -380,22 +345,17 @@ export class MetadataService { /** * Get Dataset Metadata * @param datasetId - * @param xAdminMode * @param formData * @returns MetadataOut Successful Response * @throws ApiError */ public static getDatasetMetadataApiV2DatasetsDatasetIdMetadataGet( datasetId: string, - xAdminMode?: string, 
formData?: Body_get_dataset_metadata_api_v2_datasets__dataset_id__metadata_get, ): CancelablePromise> { return __request({ method: 'GET', path: `/api/v2/datasets/${datasetId}/metadata`, - headers: { - 'x_admin_mode': xAdminMode, - }, formData: formData, mediaType: 'application/x-www-form-urlencoded', errors: { @@ -413,21 +373,16 @@ export class MetadataService { * Metadata document that was updated * @param datasetId * @param requestBody - * @param xAdminMode * @returns MetadataOut Successful Response * @throws ApiError */ public static replaceDatasetMetadataApiV2DatasetsDatasetIdMetadataPut( datasetId: string, requestBody: MetadataIn, - xAdminMode?: string, ): CancelablePromise { return __request({ method: 'PUT', path: `/api/v2/datasets/${datasetId}/metadata`, - headers: { - 'x_admin_mode': xAdminMode, - }, body: requestBody, mediaType: 'application/json', errors: { @@ -445,21 +400,16 @@ export class MetadataService { * Metadata document that was added to database * @param datasetId * @param requestBody - * @param xAdminMode * @returns MetadataOut Successful Response * @throws ApiError */ public static addDatasetMetadataApiV2DatasetsDatasetIdMetadataPost( datasetId: string, requestBody: MetadataIn, - xAdminMode?: string, ): CancelablePromise { return __request({ method: 'POST', path: `/api/v2/datasets/${datasetId}/metadata`, - headers: { - 'x_admin_mode': xAdminMode, - }, body: requestBody, mediaType: 'application/json', errors: { @@ -472,21 +422,16 @@ export class MetadataService { * Delete Dataset Metadata * @param datasetId * @param requestBody - * @param xAdminMode * @returns MetadataOut Successful Response * @throws ApiError */ public static deleteDatasetMetadataApiV2DatasetsDatasetIdMetadataDelete( datasetId: string, requestBody: MetadataDelete, - xAdminMode?: string, ): CancelablePromise { return __request({ method: 'DELETE', path: `/api/v2/datasets/${datasetId}/metadata`, - headers: { - 'x_admin_mode': xAdminMode, - }, body: requestBody, mediaType: 
'application/json', errors: { @@ -504,21 +449,16 @@ export class MetadataService { * Metadata document that was updated * @param datasetId * @param requestBody - * @param xAdminMode * @returns MetadataOut Successful Response * @throws ApiError */ public static updateDatasetMetadataApiV2DatasetsDatasetIdMetadataPatch( datasetId: string, requestBody: MetadataPatch, - xAdminMode?: string, ): CancelablePromise { return __request({ method: 'PATCH', path: `/api/v2/datasets/${datasetId}/metadata`, - headers: { - 'x_admin_mode': xAdminMode, - }, body: requestBody, mediaType: 'application/json', errors: { From e4448ac3d8cceef72c772c2b3a6d746dfcbcf35f Mon Sep 17 00:00:00 2001 From: Chen Wang Date: Tue, 12 Dec 2023 09:59:52 -0600 Subject: [PATCH 28/43] add endpoint to set admin --- backend/app/routers/authentication.py | 28 +++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) diff --git a/backend/app/routers/authentication.py b/backend/app/routers/authentication.py index 3cf917af1..bdb4708ca 100644 --- a/backend/app/routers/authentication.py +++ b/backend/app/routers/authentication.py @@ -127,6 +127,34 @@ async def get_admin_mode(current_username=Depends(get_current_user)) -> bool: ) +@router.post("/admin_mode", response_model=UserOut) +async def set_admin_mode( + admin_mode_on: bool, + admin=Depends(get_admin), + current_username=Depends(get_current_user)): + """Set Admin mode from User Object.""" + if ( + current_user := await UserDB.find_one(UserDB.email == current_username.email) + ) is not None: + # only admin can set admin mode + if admin: + current_user.admin_mode = True + await current_user.replace() + return current_user.dict() + else: + raise HTTPException( + status_code=403, + detail="You are not admin yet. 
Only admin can set admin mode.", + headers={"WWW-Authenticate": "Bearer"}, + ) + else: + raise HTTPException( + status_code=404, + detail="User doesn't exist.", + headers={"WWW-Authenticate": "Bearer"}, + ) + + @router.post("/users/set_admin/{useremail}", response_model=UserOut) async def set_admin( useremail: str, current_username=Depends(get_current_user), admin=Depends(get_admin) From 179ea8a240cfe77d1dd839cdbe10a7e92820c7d6 Mon Sep 17 00:00:00 2001 From: Chen Wang Date: Tue, 12 Dec 2023 10:00:13 -0600 Subject: [PATCH 29/43] fix bug --- backend/app/routers/authentication.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/backend/app/routers/authentication.py b/backend/app/routers/authentication.py index bdb4708ca..86c715448 100644 --- a/backend/app/routers/authentication.py +++ b/backend/app/routers/authentication.py @@ -138,7 +138,7 @@ async def set_admin_mode( ) is not None: # only admin can set admin mode if admin: - current_user.admin_mode = True + current_user.admin_mode = admin_mode_on await current_user.replace() return current_user.dict() else: From 59cfcad7b6efcd1fb04cbc9b756d36cd3c329117 Mon Sep 17 00:00:00 2001 From: Chen Wang Date: Tue, 12 Dec 2023 10:20:52 -0600 Subject: [PATCH 30/43] pytest, black format and codegen --- backend/app/deps/authorization_deps.py | 84 +++--- backend/app/keycloak_auth.py | 64 ++--- backend/app/routers/authentication.py | 34 +-- backend/app/routers/authorization.py | 150 +++++------ backend/app/routers/datasets.py | 254 +++++++++--------- backend/app/routers/elasticsearch.py | 12 +- backend/app/routers/files.py | 174 ++++++------ backend/app/routers/groups.py | 70 ++--- backend/app/routers/metadata.py | 52 ++-- backend/app/routers/metadata_datasets.py | 86 +++--- backend/app/routers/metadata_files.py | 136 +++++----- backend/app/tests/test_authorization.py | 35 ++- .../src/openapi/v2/services/LoginService.ts | 37 ++- 13 files changed, 625 insertions(+), 563 deletions(-) diff --git 
a/backend/app/deps/authorization_deps.py b/backend/app/deps/authorization_deps.py index 81cb942d0..fdf29f9ad 100644 --- a/backend/app/deps/authorization_deps.py +++ b/backend/app/deps/authorization_deps.py @@ -14,8 +14,8 @@ async def get_role( - dataset_id: str, - current_user=Depends(get_current_username), + dataset_id: str, + current_user=Depends(get_current_username), ) -> RoleType: """Returns the role a specific user has on a dataset. If the user is a creator (owner), they are not listed in the user_ids list.""" @@ -30,8 +30,8 @@ async def get_role( async def get_role_by_file( - file_id: str, - current_user=Depends(get_current_username), + file_id: str, + current_user=Depends(get_current_username), ) -> RoleType: if (file := await FileDB.get(PydanticObjectId(file_id))) is not None: authorization = await AuthorizationDB.find_one( @@ -43,7 +43,7 @@ async def get_role_by_file( ) if authorization is None: if ( - dataset := await DatasetDB.get(PydanticObjectId(file.dataset_id)) + dataset := await DatasetDB.get(PydanticObjectId(file.dataset_id)) ) is not None: if dataset.status == DatasetStatus.AUTHENTICATED.name: auth_dict = { @@ -64,8 +64,8 @@ async def get_role_by_file( async def get_role_by_metadata( - metadata_id: str, - current_user=Depends(get_current_username), + metadata_id: str, + current_user=Depends(get_current_username), ) -> RoleType: if (md_out := await MetadataDB.get(PydanticObjectId(metadata_id))) is not None: resource_type = md_out.resource.collection @@ -82,7 +82,7 @@ async def get_role_by_metadata( return authorization.role elif resource_type == "datasets": if ( - dataset := await DatasetDB.get(PydanticObjectId(resource_id)) + dataset := await DatasetDB.get(PydanticObjectId(resource_id)) ) is not None: authorization = await AuthorizationDB.find_one( AuthorizationDB.dataset_id == dataset.id, @@ -95,8 +95,8 @@ async def get_role_by_metadata( async def get_role_by_group( - group_id: str, - current_user=Depends(get_current_username), + group_id: str, 
+ current_user=Depends(get_current_username), ) -> RoleType: if (group := await GroupDB.get(group_id)) is not None: if group.creator == current_user: @@ -116,7 +116,7 @@ async def get_role_by_group( async def is_public_dataset( - dataset_id: str, + dataset_id: str, ) -> bool: """Checks if a dataset is public.""" if (dataset_out := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: @@ -127,7 +127,7 @@ async def is_public_dataset( async def is_authenticated_dataset( - dataset_id: str, + dataset_id: str, ) -> bool: """Checks if a dataset is authenticated.""" if (dataset_out := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: @@ -145,11 +145,11 @@ def __init__(self, role: str): self.role = role async def __call__( - self, - dataset_id: str, - current_user: str = Depends(get_current_username), - admin_mode: bool = Depends(get_admin_mode), - admin: bool = Depends(get_admin), + self, + dataset_id: str, + current_user: str = Depends(get_current_username), + admin_mode: bool = Depends(get_admin_mode), + admin: bool = Depends(get_admin), ): # TODO: Make sure we enforce only one role per user per dataset, or find_one could yield wrong answer here. 
@@ -175,11 +175,11 @@ async def __call__( ) else: if ( - current_dataset := await DatasetDB.get(PydanticObjectId(dataset_id)) + current_dataset := await DatasetDB.get(PydanticObjectId(dataset_id)) ) is not None: if ( - current_dataset.status == DatasetStatus.AUTHENTICATED.name - and self.role == "viewer" + current_dataset.status == DatasetStatus.AUTHENTICATED.name + and self.role == "viewer" ): return True else: @@ -202,11 +202,11 @@ def __init__(self, role: str): self.role = role async def __call__( - self, - file_id: str, - admin_mode: bool = Depends(get_admin_mode), - current_user: str = Depends(get_current_username), - admin: bool = Depends(get_admin), + self, + file_id: str, + admin_mode: bool = Depends(get_admin_mode), + current_user: str = Depends(get_current_username), + admin: bool = Depends(get_admin), ): # If the current user is admin and has turned on admin_mode, user has access irrespective of any role assigned if admin and admin_mode: @@ -240,11 +240,11 @@ def __init__(self, role: str): self.role = role async def __call__( - self, - metadata_id: str, - admin_mode: bool = Depends(get_admin_mode), - current_user: str = Depends(get_current_username), - admin: bool = Depends(get_admin), + self, + metadata_id: str, + admin_mode: bool = Depends(get_admin_mode), + current_user: str = Depends(get_current_username), + admin: bool = Depends(get_admin), ): # If the current user is admin and has turned on admin_mode, user has access irrespective of any role assigned if admin and admin_mode: @@ -256,7 +256,7 @@ async def __call__( resource_id = md_out.resource.resource_id if resource_type == "files": if ( - file := await FileDB.get(PydanticObjectId(resource_id)) + file := await FileDB.get(PydanticObjectId(resource_id)) ) is not None: authorization = await AuthorizationDB.find_one( AuthorizationDB.dataset_id == file.dataset_id, @@ -278,7 +278,7 @@ async def __call__( ) elif resource_type == "datasets": if ( - dataset := await 
DatasetDB.get(PydanticObjectId(resource_id)) + dataset := await DatasetDB.get(PydanticObjectId(resource_id)) ) is not None: authorization = await AuthorizationDB.find_one( AuthorizationDB.dataset_id == dataset.id, @@ -307,11 +307,11 @@ def __init__(self, role: str): self.role = role async def __call__( - self, - group_id: str, - admin_mode: bool = Depends(get_admin_mode), - current_user: str = Depends(get_current_username), - admin: bool = Depends(get_admin), + self, + group_id: str, + admin_mode: bool = Depends(get_admin_mode), + current_user: str = Depends(get_current_username), + admin: bool = Depends(get_admin), ): # If the current user is admin and has turned on admin_mode, user has access irrespective of any role assigned if admin and admin_mode: @@ -343,8 +343,8 @@ def __init__(self, status: str): self.status = status async def __call__( - self, - dataset_id: str, + self, + dataset_id: str, ): if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: if dataset.status == self.status: @@ -363,13 +363,13 @@ def __init__(self, status: str): self.status = status async def __call__( - self, - file_id: str, + self, + file_id: str, ): if (file_out := await FileDB.get(PydanticObjectId(file_id))) is not None: dataset_id = file_out.dataset_id if ( - dataset := await DatasetDB.get(PydanticObjectId(dataset_id)) + dataset := await DatasetDB.get(PydanticObjectId(dataset_id)) ) is not None: if dataset.status == self.status: return True diff --git a/backend/app/keycloak_auth.py b/backend/app/keycloak_auth.py index 0b0684e0b..a19645de7 100644 --- a/backend/app/keycloak_auth.py +++ b/backend/app/keycloak_auth.py @@ -54,8 +54,8 @@ async def get_idp_public_key(): async def get_token( - token: str = Security(oauth2_scheme), - api_key: str = Security(api_key_header), + token: str = Security(oauth2_scheme), + api_key: str = Security(api_key_header), ) -> Json: """Decode token. 
Use to secure endpoints.""" if token: @@ -91,17 +91,17 @@ async def get_token( payload = serializer.loads(api_key) # Key is valid, check expiration date in database if ( - await ListenerAPIKeyDB.find_one( - ListenerAPIKeyDB.user == payload["user"], - ListenerAPIKeyDB.key == payload["key"], - ) + await ListenerAPIKeyDB.find_one( + ListenerAPIKeyDB.user == payload["user"], + ListenerAPIKeyDB.key == payload["key"], + ) ) is not None: return {"preferred_username": payload["user"]} elif ( - key := await UserAPIKeyDB.find_one( - UserAPIKeyDB.user == payload["user"], - UserAPIKeyDB.key == payload["key"], - ) + key := await UserAPIKeyDB.find_one( + UserAPIKeyDB.user == payload["user"], + UserAPIKeyDB.key == payload["key"], + ) ) is not None: current_time = datetime.utcnow() if key.expires is not None and current_time >= key.expires: @@ -140,9 +140,9 @@ async def get_user(identity: Json = Depends(get_token)): async def get_current_user( - token: str = Security(oauth2_scheme), - api_key: str = Security(api_key_header), - token_cookie: str = Security(jwt_header), + token: str = Security(oauth2_scheme), + api_key: str = Security(api_key_header), + token_cookie: str = Security(jwt_header), ) -> UserOut: """Retrieve the user object from Mongo by first getting user id from JWT and then querying Mongo. Potentially expensive. Use `get_current_username` if all you need is user name. 
@@ -178,18 +178,18 @@ async def get_current_user( payload = serializer.loads(api_key) # Key is valid, check expiration date in database if ( - key := await ListenerAPIKeyDB.find_one( - ListenerAPIKeyDB.user == payload["user"], - ListenerAPIKeyDB.key == payload["key"], - ) + key := await ListenerAPIKeyDB.find_one( + ListenerAPIKeyDB.user == payload["user"], + ListenerAPIKeyDB.key == payload["key"], + ) ) is not None: user = await UserDB.find_one(UserDB.email == key.user) return UserOut(**user.dict()) elif ( - key := await UserAPIKeyDB.find_one( - UserAPIKeyDB.user == payload["user"], - UserAPIKeyDB.key == payload["key"], - ) + key := await UserAPIKeyDB.find_one( + UserAPIKeyDB.user == payload["user"], + UserAPIKeyDB.key == payload["key"], + ) ) is not None: current_time = datetime.utcnow() @@ -225,9 +225,9 @@ async def get_current_user( async def get_current_username( - token: str = Security(oauth2_scheme), - api_key: str = Security(api_key_header), - token_cookie: str = Security(jwt_header), + token: str = Security(oauth2_scheme), + api_key: str = Security(api_key_header), + token_cookie: str = Security(jwt_header), ) -> str: """Retrieve the user id from the JWT token. 
Does not query MongoDB.""" if token: @@ -260,18 +260,18 @@ async def get_current_username( payload = serializer.loads(api_key) # Key is valid, check expiration date in database if ( - key := await ListenerAPIKeyDB.find_one( - ListenerAPIKeyDB.user == payload["user"], - ListenerAPIKeyDB.key == payload["key"], - ) + key := await ListenerAPIKeyDB.find_one( + ListenerAPIKeyDB.user == payload["user"], + ListenerAPIKeyDB.key == payload["key"], + ) ) is not None: # Key is coming from a listener job return key.user elif ( - key := await UserAPIKeyDB.find_one( - UserAPIKeyDB.user == payload["user"], - UserAPIKeyDB.key == payload["key"], - ) + key := await UserAPIKeyDB.find_one( + UserAPIKeyDB.user == payload["user"], + UserAPIKeyDB.key == payload["key"], + ) ) is not None: # Key is coming from a user request current_time = datetime.utcnow() diff --git a/backend/app/routers/authentication.py b/backend/app/routers/authentication.py index 86c715448..49876a7f7 100644 --- a/backend/app/routers/authentication.py +++ b/backend/app/routers/authentication.py @@ -92,28 +92,31 @@ async def authenticate_user(email: str, password: str): return user -@router.get("/admin") -async def get_admin(dataset_id: str = None, current_username=Depends(get_current_user)): +@router.get("/users/me/is_admin", response_model=bool) +async def get_admin( + dataset_id: str = None, current_username=Depends(get_current_user) +) -> bool: if ( - current_user := await UserDB.find_one(UserDB.email == current_username.email) + current_user := await UserDB.find_one(UserDB.email == current_username.email) ) is not None: if current_user.admin: return current_user.admin elif ( - dataset_id - and (dataset_db := await DatasetDB.get(PydanticObjectId(dataset_id))) - is not None + dataset_id + and (dataset_db := await DatasetDB.get(PydanticObjectId(dataset_id))) + is not None ): + # TODO: question regarding resource creator is considered as admin of the resource? 
return dataset_db.creator.email == current_username.email else: return False -@router.get("/admin_mode") +@router.get("/users/me/admin_mode") async def get_admin_mode(current_username=Depends(get_current_user)) -> bool: """Get Admin mode from User Object.""" if ( - current_user := await UserDB.find_one(UserDB.email == current_username.email) + current_user := await UserDB.find_one(UserDB.email == current_username.email) ) is not None: if current_user.admin_mode is not None: return current_user.admin_mode @@ -127,20 +130,21 @@ async def get_admin_mode(current_username=Depends(get_current_user)) -> bool: ) -@router.post("/admin_mode", response_model=UserOut) +@router.post("/users/me/admin_mode", response_model=bool) async def set_admin_mode( - admin_mode_on: bool, - admin=Depends(get_admin), - current_username=Depends(get_current_user)): + admin_mode_on: bool, + admin=Depends(get_admin), + current_username=Depends(get_current_user), +) -> bool: """Set Admin mode from User Object.""" if ( - current_user := await UserDB.find_one(UserDB.email == current_username.email) + current_user := await UserDB.find_one(UserDB.email == current_username.email) ) is not None: # only admin can set admin mode if admin: current_user.admin_mode = admin_mode_on await current_user.replace() - return current_user.dict() + return current_user.admin_mode else: raise HTTPException( status_code=403, @@ -157,7 +161,7 @@ async def set_admin_mode( @router.post("/users/set_admin/{useremail}", response_model=UserOut) async def set_admin( - useremail: str, current_username=Depends(get_current_user), admin=Depends(get_admin) + useremail: str, current_username=Depends(get_current_user), admin=Depends(get_admin) ): if admin: if (user := await UserDB.find_one(UserDB.email == useremail)) is not None: diff --git a/backend/app/routers/authorization.py b/backend/app/routers/authorization.py index 0e8e6b99f..b25f23e38 100644 --- a/backend/app/routers/authorization.py +++ b/backend/app/routers/authorization.py 
@@ -38,11 +38,11 @@ @router.post("/datasets/{dataset_id}", response_model=AuthorizationOut) async def save_authorization( - dataset_id: str, - authorization_in: AuthorizationBase, - admin_mode: bool = Depends(get_admin_mode), - user=Depends(get_current_username), - allow: bool = Depends(Authorization("editor")), + dataset_id: str, + authorization_in: AuthorizationBase, + admin_mode: bool = Depends(get_admin_mode), + user=Depends(get_current_username), + allow: bool = Depends(Authorization("editor")), ): """Save authorization info in Mongo. This is a triple of dataset_id/user_id/role/group_id.""" @@ -69,10 +69,10 @@ async def save_authorization( @router.get("/datasets/{dataset_id}/role", response_model=AuthorizationOut) async def get_dataset_role( - dataset_id: str, - admin_mode: bool = Depends(get_admin_mode), - current_user=Depends(get_current_username), - admin=Depends(get_admin), + dataset_id: str, + admin_mode: bool = Depends(get_admin_mode), + current_user=Depends(get_current_username), + admin=Depends(get_admin), ): """Retrieve role of user for a specific dataset.""" # Get group id and the associated users from authorization @@ -90,7 +90,7 @@ async def get_dataset_role( ) if auth_db is None: if ( - current_dataset := await DatasetDB.get(PydanticObjectId(dataset_id)) + current_dataset := await DatasetDB.get(PydanticObjectId(dataset_id)) ) is not None: if current_dataset.status == DatasetStatus.AUTHENTICATED.name: public_authorization_in = { @@ -112,9 +112,9 @@ async def get_dataset_role( @router.get("/datasets/{dataset_id}/role/viewer}") async def get_dataset_role_viewer( - dataset_id: str, - admin_mode: bool = Depends(get_admin_mode), - allow: bool = Depends(Authorization("viewer")), + dataset_id: str, + admin_mode: bool = Depends(get_admin_mode), + allow: bool = Depends(Authorization("viewer")), ): """Used for testing only. Returns true if user has viewer permission on dataset, otherwise throws a 403 Forbidden HTTP exception. 
See `routers/authorization.py` for more info.""" @@ -123,9 +123,9 @@ async def get_dataset_role_viewer( @router.get("/datasets/{dataset_id}/role/owner}") async def get_dataset_role_owner( - dataset_id: str, - admin_mode: bool = Depends(get_admin_mode), - allow: bool = Depends(Authorization("owner")), + dataset_id: str, + admin_mode: bool = Depends(get_admin_mode), + allow: bool = Depends(Authorization("owner")), ): """Used for testing only. Returns true if user has owner permission on dataset, otherwise throws a 403 Forbidden HTTP exception. See `routers/authorization.py` for more info.""" @@ -134,11 +134,11 @@ async def get_dataset_role_owner( @router.get("/files/{file_id}/role}", response_model=RoleType) async def get_file_role( - file_id: str, - admin_mode: bool = Depends(get_admin_mode), - current_user=Depends(get_current_username), - role: RoleType = Depends(get_role_by_file), - admin=Depends(get_admin), + file_id: str, + admin_mode: bool = Depends(get_admin_mode), + current_user=Depends(get_current_username), + role: RoleType = Depends(get_role_by_file), + admin=Depends(get_admin), ): # admin is a superuser and has all the privileges, only show if the user has turned on the admin mode if admin and admin_mode: @@ -149,11 +149,11 @@ async def get_file_role( @router.get("/metadata/{metadata_id}/role}", response_model=AuthorizationMetadata) async def get_metadata_role( - metadata_id: str, - admin_mode: bool = Depends(get_admin_mode), - current_user=Depends(get_current_username), - role: RoleType = Depends(get_role_by_metadata), - admin=Depends(get_admin), + metadata_id: str, + admin_mode: bool = Depends(get_admin_mode), + current_user=Depends(get_current_username), + role: RoleType = Depends(get_role_by_metadata), + admin=Depends(get_admin), ): # admin is a superuser and has all the privileges, only show if the user has turned on the admin mode if admin and admin_mode: @@ -164,11 +164,11 @@ async def get_metadata_role( @router.get("/groups/{group_id}/role}", 
response_model=RoleType) async def get_group_role( - group_id: str, - admin_mode: bool = Depends(get_admin_mode), - current_user=Depends(get_current_username), - role: RoleType = Depends(get_role_by_group), - admin=Depends(get_admin), + group_id: str, + admin_mode: bool = Depends(get_admin_mode), + current_user=Depends(get_current_username), + role: RoleType = Depends(get_role_by_group), + admin=Depends(get_admin), ): # admin is a superuser and has all the privileges, only show if the user has turned on the admin mode if admin and admin_mode: @@ -182,13 +182,13 @@ async def get_group_role( response_model=AuthorizationOut, ) async def set_dataset_group_role( - dataset_id: PydanticObjectId, - group_id: PydanticObjectId, - role: RoleType, - admin_mode: bool = Depends(get_admin_mode), - es=Depends(get_elasticsearchclient), - user_id=Depends(get_user), - allow: bool = Depends(Authorization("editor")), + dataset_id: PydanticObjectId, + group_id: PydanticObjectId, + role: RoleType, + admin_mode: bool = Depends(get_admin_mode), + es=Depends(get_elasticsearchclient), + user_id=Depends(get_user), + allow: bool = Depends(Authorization("editor")), ): """Assign an entire group a specific role for a dataset.""" if (dataset := await DatasetDB.get(dataset_id)) is not None: @@ -198,10 +198,10 @@ async def set_dataset_group_role( dataset_id, group_id, admin_mode, es, user_id, allow ) if ( - auth_db := await AuthorizationDB.find_one( - AuthorizationDB.dataset_id == PyObjectId(dataset_id), - AuthorizationDB.role == role, - ) + auth_db := await AuthorizationDB.find_one( + AuthorizationDB.dataset_id == PyObjectId(dataset_id), + AuthorizationDB.role == role, + ) ) is not None: if group_id not in auth_db.group_ids: auth_db.group_ids.append(group_id) @@ -236,13 +236,13 @@ async def set_dataset_group_role( response_model=AuthorizationOut, ) async def set_dataset_user_role( - dataset_id: str, - username: str, - role: RoleType, - admin_mode: bool = Depends(get_admin_mode), - 
es=Depends(get_elasticsearchclient), - user_id=Depends(get_user), - allow: bool = Depends(Authorization("editor")), + dataset_id: str, + username: str, + role: RoleType, + admin_mode: bool = Depends(get_admin_mode), + es=Depends(get_elasticsearchclient), + user_id=Depends(get_user), + allow: bool = Depends(Authorization("editor")), ): """Assign a single user a specific role for a dataset.""" @@ -296,22 +296,22 @@ async def set_dataset_user_role( response_model=AuthorizationOut, ) async def remove_dataset_group_role( - dataset_id: PydanticObjectId, - group_id: PydanticObjectId, - admin_mode: bool = Depends(get_admin_mode), - es=Depends(get_elasticsearchclient), - user_id=Depends(get_user), - allow: bool = Depends(Authorization("editor")), + dataset_id: PydanticObjectId, + group_id: PydanticObjectId, + admin_mode: bool = Depends(get_admin_mode), + es=Depends(get_elasticsearchclient), + user_id=Depends(get_user), + allow: bool = Depends(Authorization("editor")), ): """Remove any role the group has with a specific dataset.""" if (dataset := await DatasetDB.get(dataset_id)) is not None: if (group := await GroupDB.get(group_id)) is not None: if ( - auth_db := await AuthorizationDB.find_one( - AuthorizationDB.dataset_id == dataset_id, - AuthorizationDB.group_ids == group_id, - ) + auth_db := await AuthorizationDB.find_one( + AuthorizationDB.dataset_id == dataset_id, + AuthorizationDB.group_ids == group_id, + ) ) is not None: auth_db.group_ids.remove(PyObjectId(group_id)) for u in group.users: @@ -332,22 +332,22 @@ async def remove_dataset_group_role( response_model=AuthorizationOut, ) async def remove_dataset_user_role( - dataset_id: str, - username: str, - admin_mode: bool = Depends(get_admin_mode), - es=Depends(get_elasticsearchclient), - user_id=Depends(get_user), - allow: bool = Depends(Authorization("editor")), + dataset_id: str, + username: str, + admin_mode: bool = Depends(get_admin_mode), + es=Depends(get_elasticsearchclient), + user_id=Depends(get_user), + allow: 
bool = Depends(Authorization("editor")), ): """Remove any role the user has with a specific dataset.""" if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: if (await UserDB.find_one(UserDB.email == username)) is not None: if ( - auth_db := await AuthorizationDB.find_one( - AuthorizationDB.dataset_id == PyObjectId(dataset_id), - AuthorizationDB.user_ids == username, - ) + auth_db := await AuthorizationDB.find_one( + AuthorizationDB.dataset_id == PyObjectId(dataset_id), + AuthorizationDB.user_ids == username, + ) ) is not None: auth_db.user_ids.remove(username) await auth_db.save() @@ -362,16 +362,16 @@ async def remove_dataset_user_role( @router.get("/datasets/{dataset_id}/roles}", response_model=DatasetRoles) async def get_dataset_roles( - dataset_id: str, - admin_mode: bool = Depends(get_admin_mode), - allow: bool = Depends(Authorization("editor")), + dataset_id: str, + admin_mode: bool = Depends(get_admin_mode), + allow: bool = Depends(Authorization("editor")), ): """Get a list of all users and groups that have assigned roles on this dataset.""" if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: roles = DatasetRoles(dataset_id=str(dataset.id)) async for auth in AuthorizationDB.find( - AuthorizationDB.dataset_id == ObjectId(dataset_id) + AuthorizationDB.dataset_id == ObjectId(dataset_id) ): # First, fetch all groups that have a role on the dataset group_user_counts = {} @@ -389,8 +389,8 @@ async def get_dataset_roles( # Next, get all users but omit those that are included in a group above async for user in UserDB.find(In(UserDB.email, auth.user_ids)): if ( - user.email in group_user_counts - and auth.user_ids.count(user.email) == group_user_counts[user.email] + user.email in group_user_counts + and auth.user_ids.count(user.email) == group_user_counts[user.email] ): continue # TODO: Why is this necessary here but not on root-level ObjectIDs? 
diff --git a/backend/app/routers/datasets.py b/backend/app/routers/datasets.py index 357e9752d..2f22177c9 100644 --- a/backend/app/routers/datasets.py +++ b/backend/app/routers/datasets.py @@ -134,12 +134,12 @@ def nested_update(target_dict, update_dict): async def _create_folder_structure( - dataset_id: str, - contents: dict, - folder_path: str, - folder_lookup: dict, - user: UserOut, - parent_folder_id: Optional[str] = None, + dataset_id: str, + contents: dict, + folder_path: str, + folder_lookup: dict, + user: UserOut, + parent_folder_id: Optional[str] = None, ): """Recursively create folders encountered in folder_path until the target folder is created. Arguments: @@ -174,8 +174,8 @@ async def _create_folder_structure( async def _get_folder_hierarchy( - folder_id: str, - hierarchy: str, + folder_id: str, + hierarchy: str, ): """Generate a string of nested path to folder for use in zip file creation.""" folder = await FolderDB.get(PydanticObjectId(folder_id)) @@ -187,9 +187,9 @@ async def _get_folder_hierarchy( @router.post("", response_model=DatasetOut) async def save_dataset( - dataset_in: DatasetIn, - user=Depends(get_current_user), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + dataset_in: DatasetIn, + user=Depends(get_current_user), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), ): dataset = DatasetDB(**dataset_in.dict(), creator=user) await dataset.insert() @@ -208,12 +208,12 @@ async def save_dataset( @router.get("", response_model=List[DatasetOut]) async def get_datasets( - admin_mode: bool = Depends(get_admin_mode), - user_id=Depends(get_user), - skip: int = 0, - limit: int = 10, - mine: bool = False, - admin=Depends(get_admin), + admin_mode: bool = Depends(get_admin_mode), + user_id=Depends(get_user), + skip: int = 0, + limit: int = 10, + mine: bool = False, + admin=Depends(get_admin), ): if admin_mode and admin: datasets = await DatasetDBViewList.find( @@ -245,10 +245,10 @@ async def get_datasets( 
@router.get("/{dataset_id}", response_model=DatasetOut) async def get_dataset( - dataset_id: str, - authenticated: bool = Depends(CheckStatus("AUTHENTICATED")), - admin_mode: bool = Depends(get_admin_mode), - allow: bool = Depends(Authorization("viewer")), + dataset_id: str, + authenticated: bool = Depends(CheckStatus("AUTHENTICATED")), + admin_mode: bool = Depends(get_admin_mode), + allow: bool = Depends(Authorization("viewer")), ): if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: return dataset.dict() @@ -257,14 +257,14 @@ async def get_dataset( @router.get("/{dataset_id}/files", response_model=List[FileOut]) async def get_dataset_files( - dataset_id: str, - admin_mode: bool = Depends(get_admin_mode), - folder_id: Optional[str] = None, - authenticated: bool = Depends(CheckStatus("AUTHENTICATED")), - user_id=Depends(get_user), - skip: int = 0, - limit: int = 10, - allow: bool = Depends(Authorization("viewer")), + dataset_id: str, + admin_mode: bool = Depends(get_admin_mode), + folder_id: Optional[str] = None, + authenticated: bool = Depends(CheckStatus("AUTHENTICATED")), + user_id=Depends(get_user), + skip: int = 0, + limit: int = 10, + allow: bool = Depends(Authorization("viewer")), ): if authenticated: query = [ @@ -286,12 +286,12 @@ async def get_dataset_files( @router.put("/{dataset_id}", response_model=DatasetOut) async def edit_dataset( - dataset_id: str, - dataset_info: DatasetBase, - admin_mode: bool = Depends(get_admin_mode), - user=Depends(get_current_user), - es=Depends(dependencies.get_elasticsearchclient), - allow: bool = Depends(Authorization("editor")), + dataset_id: str, + dataset_info: DatasetBase, + admin_mode: bool = Depends(get_admin_mode), + user=Depends(get_current_user), + es=Depends(dependencies.get_elasticsearchclient), + allow: bool = Depends(Authorization("editor")), ): if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: # TODO: Refactor this with permissions checks etc. 
@@ -307,12 +307,12 @@ async def edit_dataset( @router.patch("/{dataset_id}", response_model=DatasetOut) async def patch_dataset( - dataset_id: str, - dataset_info: DatasetPatch, - admin_mode: bool = Depends(get_admin_mode), - user=Depends(get_current_user), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), - allow: bool = Depends(Authorization("editor")), + dataset_id: str, + dataset_info: DatasetPatch, + admin_mode: bool = Depends(get_admin_mode), + user=Depends(get_current_user), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + allow: bool = Depends(Authorization("editor")), ): if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: # TODO: Update method not working properly @@ -332,11 +332,11 @@ async def patch_dataset( @router.delete("/{dataset_id}") async def delete_dataset( - dataset_id: str, - admin_mode: bool = Depends(get_admin_mode), - fs: Minio = Depends(dependencies.get_fs), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), - allow: bool = Depends(Authorization("editor")), + dataset_id: str, + admin_mode: bool = Depends(get_admin_mode), + fs: Minio = Depends(dependencies.get_fs), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + allow: bool = Depends(Authorization("editor")), ): if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: # delete from elasticsearch @@ -347,7 +347,7 @@ async def delete_dataset( MetadataDB.resource.resource_id == PydanticObjectId(dataset_id) ).delete() async for file in FileDB.find( - FileDB.dataset_id == PydanticObjectId(dataset_id) + FileDB.dataset_id == PydanticObjectId(dataset_id) ): await remove_file_entry(file.id, fs, es) await FolderDB.find( @@ -362,11 +362,11 @@ async def delete_dataset( @router.post("/{dataset_id}/folders", response_model=FolderOut) async def add_folder( - dataset_id: str, - folder_in: FolderIn, - admin_mode: bool = Depends(get_admin_mode), - user=Depends(get_current_user), - 
allow: bool = Depends(Authorization("uploader")), + dataset_id: str, + folder_in: FolderIn, + admin_mode: bool = Depends(get_admin_mode), + user=Depends(get_current_user), + allow: bool = Depends(Authorization("uploader")), ): if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: parent_folder = folder_in.parent_folder @@ -385,14 +385,14 @@ async def add_folder( @router.get("/{dataset_id}/folders", response_model=List[FolderOut]) async def get_dataset_folders( - dataset_id: str, - admin_mode: bool = Depends(get_admin_mode), - parent_folder: Optional[str] = None, - user_id=Depends(get_user), - authenticated: bool = Depends(CheckStatus("authenticated")), - skip: int = 0, - limit: int = 10, - allow: bool = Depends(Authorization("viewer")), + dataset_id: str, + admin_mode: bool = Depends(get_admin_mode), + parent_folder: Optional[str] = None, + user_id=Depends(get_user), + authenticated: bool = Depends(CheckStatus("authenticated")), + skip: int = 0, + limit: int = 10, + allow: bool = Depends(Authorization("viewer")), ): if (await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: if authenticated: @@ -418,12 +418,12 @@ async def get_dataset_folders( @router.delete("/{dataset_id}/folders/{folder_id}") async def delete_folder( - dataset_id: str, - folder_id: str, - admin_mode: bool = Depends(get_admin_mode), - fs: Minio = Depends(dependencies.get_fs), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), - allow: bool = Depends(Authorization("editor")), + dataset_id: str, + folder_id: str, + admin_mode: bool = Depends(get_admin_mode), + fs: Minio = Depends(dependencies.get_fs), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + allow: bool = Depends(Authorization("editor")), ): if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: if (folder := await FolderDB.get(PydanticObjectId(folder_id))) is not None: @@ -434,14 +434,14 @@ async def delete_folder( # recursively delete child 
folder and files async def _delete_nested_folders(parent_folder_id): while ( - await FolderDB.find_one( - FolderDB.dataset_id == ObjectId(dataset_id), - FolderDB.parent_folder == ObjectId(parent_folder_id), - ) + await FolderDB.find_one( + FolderDB.dataset_id == ObjectId(dataset_id), + FolderDB.parent_folder == ObjectId(parent_folder_id), + ) ) is not None: async for subfolder in FolderDB.find( - FolderDB.dataset_id == PydanticObjectId(dataset_id), - FolderDB.parent_folder == PydanticObjectId(parent_folder_id), + FolderDB.dataset_id == PydanticObjectId(dataset_id), + FolderDB.parent_folder == PydanticObjectId(parent_folder_id), ): async for file in FileDB.find(FileDB.folder_id == subfolder.id): await remove_file_entry(file.id, fs, es) @@ -462,15 +462,15 @@ async def _delete_nested_folders(parent_folder_id): @router.post("/{dataset_id}/files", response_model=FileOut) async def save_file( - dataset_id: str, - folder_id: Optional[str] = None, - user=Depends(get_current_user), - fs: Minio = Depends(dependencies.get_fs), - file: UploadFile = File(...), - admin_mode: bool = Depends(get_admin_mode), - es=Depends(dependencies.get_elasticsearchclient), - rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), - allow: bool = Depends(Authorization("uploader")), + dataset_id: str, + folder_id: Optional[str] = None, + user=Depends(get_current_user), + fs: Minio = Depends(dependencies.get_fs), + file: UploadFile = File(...), + admin_mode: bool = Depends(get_admin_mode), + es=Depends(dependencies.get_elasticsearchclient), + rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), + allow: bool = Depends(Authorization("uploader")), ): if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: if user is None: @@ -503,15 +503,15 @@ async def save_file( @router.post("/{dataset_id}/filesMultiple", response_model=List[FileOut]) async def save_files( - dataset_id: str, - files: List[UploadFile], - admin_mode: bool = 
Depends(get_admin_mode), - folder_id: Optional[str] = None, - user=Depends(get_current_user), - fs: Minio = Depends(dependencies.get_fs), - es=Depends(dependencies.get_elasticsearchclient), - rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), - allow: bool = Depends(Authorization("uploader")), + dataset_id: str, + files: List[UploadFile], + admin_mode: bool = Depends(get_admin_mode), + folder_id: Optional[str] = None, + user=Depends(get_current_user), + fs: Minio = Depends(dependencies.get_fs), + es=Depends(dependencies.get_elasticsearchclient), + rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), + allow: bool = Depends(Authorization("uploader")), ): if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: files_added = [] @@ -526,7 +526,7 @@ async def save_files( if folder_id is not None: if ( - folder := await FolderDB.get(PydanticObjectId(folder_id)) + folder := await FolderDB.get(PydanticObjectId(folder_id)) ) is not None: new_file.folder_id = folder.id else: @@ -552,13 +552,13 @@ async def save_files( @router.post("/{dataset_id}/local_files", response_model=FileOut) async def save_local_file( - localfile_in: LocalFileIn, - dataset_id: str, - folder_id: Optional[str] = None, - user=Depends(get_current_user), - es=Depends(dependencies.get_elasticsearchclient), - rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), - allow: bool = Depends(Authorization("uploader")), + localfile_in: LocalFileIn, + dataset_id: str, + folder_id: Optional[str] = None, + user=Depends(get_current_user), + es=Depends(dependencies.get_elasticsearchclient), + rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), + allow: bool = Depends(Authorization("uploader")), ): if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: if user is None: @@ -607,12 +607,12 @@ async def save_local_file( @router.post("/createFromZip", response_model=DatasetOut) async def 
create_dataset_from_zip( - user=Depends(get_current_user), - fs: Minio = Depends(dependencies.get_fs), - file: UploadFile = File(...), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), - rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), - token: str = Depends(get_token), + user=Depends(get_current_user), + fs: Minio = Depends(dependencies.get_fs), + file: UploadFile = File(...), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), + token: str = Depends(get_token), ): if file.filename.endswith(".zip") == False: raise HTTPException(status_code=404, detail=f"File is not a zip file") @@ -680,11 +680,11 @@ async def create_dataset_from_zip( @router.get("/{dataset_id}/download", response_model=DatasetOut) async def download_dataset( - dataset_id: str, - admin_mode: bool = Depends(get_admin_mode), - user=Depends(get_current_user), - fs: Minio = Depends(dependencies.get_fs), - allow: bool = Depends(Authorization("viewer")), + dataset_id: str, + admin_mode: bool = Depends(get_admin_mode), + user=Depends(get_current_user), + fs: Minio = Depends(dependencies.get_fs), + allow: bool = Depends(Authorization("viewer")), ): if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: current_temp_dir = tempfile.mkdtemp(prefix="rocratedownload") @@ -839,15 +839,15 @@ async def download_dataset( # can handle parameeters pass in as key/values in info @router.post("/{dataset_id}/extract") async def get_dataset_extract( - dataset_id: str, - extractorName: str, - request: Request, - admin_mode: bool = Depends(get_admin_mode), - # parameters don't have a fixed model shape - parameters: dict = None, - user=Depends(get_current_user), - rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), - allow: bool = Depends(Authorization("uploader")), + dataset_id: str, + extractorName: str, + request: Request, + admin_mode: bool = 
Depends(get_admin_mode), + # parameters don't have a fixed model shape + parameters: dict = None, + user=Depends(get_current_user), + rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), + allow: bool = Depends(Authorization("uploader")), ): if extractorName is None: raise HTTPException(status_code=400, detail=f"No extractorName specified") @@ -867,10 +867,10 @@ async def get_dataset_extract( @router.get("/{dataset_id}/thumbnail") async def download_dataset_thumbnail( - dataset_id: str, - admin_mode: bool = Depends(get_admin_mode), - fs: Minio = Depends(dependencies.get_fs), - allow: bool = Depends(Authorization("viewer")), + dataset_id: str, + admin_mode: bool = Depends(get_admin_mode), + fs: Minio = Depends(dependencies.get_fs), + allow: bool = Depends(Authorization("viewer")), ): # If dataset exists in MongoDB, download from Minio if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: @@ -895,14 +895,14 @@ async def download_dataset_thumbnail( @router.patch("/{dataset_id}/thumbnail/{thumbnail_id}", response_model=DatasetOut) async def add_dataset_thumbnail( - dataset_id: str, - thumbnail_id: str, - admin_mode: bool = Depends(get_admin_mode), - allow: bool = Depends(Authorization("editor")), + dataset_id: str, + thumbnail_id: str, + admin_mode: bool = Depends(get_admin_mode), + allow: bool = Depends(Authorization("editor")), ): if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: if ( - thumbnail := await ThumbnailDB.get(PydanticObjectId(thumbnail_id)) + thumbnail := await ThumbnailDB.get(PydanticObjectId(thumbnail_id)) ) is not None: # TODO: Should we garbage collect existing thumbnail if nothing else points to it? 
dataset.thumbnail_id = thumbnail_id diff --git a/backend/app/routers/elasticsearch.py b/backend/app/routers/elasticsearch.py index 0e9d4926f..4f93941af 100644 --- a/backend/app/routers/elasticsearch.py +++ b/backend/app/routers/elasticsearch.py @@ -13,10 +13,10 @@ def _add_permissions_clause( - query, - username: str, - admin_mode: bool = Depends(get_admin_mode), - admin: bool = Depends(get_admin), + query, + username: str, + admin_mode: bool = Depends(get_admin_mode), + admin: bool = Depends(get_admin), ): """Append filter to Elasticsearch object that restricts permissions based on the requesting user.""" # TODO: Add public filter once added @@ -56,8 +56,8 @@ async def search(index_name: str, query: str, username=Depends(get_current_usern @router.post("/all/_msearch") async def msearch( - request: Request, - username=Depends(get_current_username), + request: Request, + username=Depends(get_current_username), ): es = await connect_elasticsearch() query = await request.body() diff --git a/backend/app/routers/files.py b/backend/app/routers/files.py index 400299303..81e231583 100644 --- a/backend/app/routers/files.py +++ b/backend/app/routers/files.py @@ -45,10 +45,10 @@ async def _resubmit_file_extractors( - file: FileOut, - rabbitmq_client: BlockingChannel, - user: UserOut, - credentials: HTTPAuthorizationCredentials = Security(security), + file: FileOut, + rabbitmq_client: BlockingChannel, + user: UserOut, + credentials: HTTPAuthorizationCredentials = Security(security), ): """This helper method will check metadata. We get the extractors run from metadata from extractors. Then they are resubmitted. At present parameters are not stored. 
This will change once Jobs are @@ -62,8 +62,8 @@ async def _resubmit_file_extractors( """ resubmitted_jobs = [] async for job in EventListenerJobDB.find( - EventListenerJobDB.resource_ref.resource_id == ObjectId(file.id), - EventListenerJobDB.resource_ref.version == file.version_num - 1, + EventListenerJobDB.resource_ref.resource_id == ObjectId(file.id), + EventListenerJobDB.resource_ref.version == file.version_num - 1, ): resubmitted_job = {"listener_id": job.listener_id, "parameters": job.parameters} try: @@ -87,13 +87,13 @@ async def _resubmit_file_extractors( # TODO: Move this to MongoDB middle layer async def add_file_entry( - new_file: FileDB, - user: UserOut, - fs: Minio, - es: Elasticsearch, - rabbitmq_client: BlockingChannel, - file: Optional[io.BytesIO] = None, - content_type: Optional[str] = None, + new_file: FileDB, + user: UserOut, + fs: Minio, + es: Elasticsearch, + rabbitmq_client: BlockingChannel, + file: Optional[io.BytesIO] = None, + content_type: Optional[str] = None, ): """Insert FileDB object into MongoDB (makes Clowder ID), then Minio (makes version ID), then update MongoDB with the version ID from Minio. @@ -152,11 +152,11 @@ async def add_file_entry( async def add_local_file_entry( - new_file: FileDB, - user: UserOut, - es: Elasticsearch, - rabbitmq_client: BlockingChannel, - content_type: Optional[str] = None, + new_file: FileDB, + user: UserOut, + es: Elasticsearch, + rabbitmq_client: BlockingChannel, + content_type: Optional[str] = None, ): """Insert FileDB object into MongoDB (makes Clowder ID). 
Bytes are not stored in DB and versioning not supported for local files.""" @@ -182,7 +182,7 @@ async def add_local_file_entry( # TODO: Move this to MongoDB middle layer async def remove_file_entry( - file_id: Union[str, ObjectId], fs: Minio, es: Elasticsearch + file_id: Union[str, ObjectId], fs: Minio, es: Elasticsearch ): """Remove FileDB object into MongoDB, Minio, and associated metadata and version information.""" # TODO: Deleting individual versions will require updating version_id in mongo, or deleting entire document @@ -209,16 +209,16 @@ async def remove_local_file_entry(file_id: Union[str, ObjectId], es: Elasticsear @router.put("/{file_id}", response_model=FileOut) async def update_file( - file_id: str, - admin_mode: bool = Depends(get_admin_mode), - token=Depends(get_token), - user=Depends(get_current_user), - fs: Minio = Depends(dependencies.get_fs), - file: UploadFile = File(...), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), - credentials: HTTPAuthorizationCredentials = Security(security), - rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), - allow: bool = Depends(FileAuthorization("uploader")), + file_id: str, + admin_mode: bool = Depends(get_admin_mode), + token=Depends(get_token), + user=Depends(get_current_user), + fs: Minio = Depends(dependencies.get_fs), + file: UploadFile = File(...), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + credentials: HTTPAuthorizationCredentials = Security(security), + rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), + allow: bool = Depends(FileAuthorization("uploader")), ): # Check all connection and abort if any one of them is not available if fs is None or es is None: @@ -227,8 +227,8 @@ async def update_file( if (updated_file := await FileDB.get(PydanticObjectId(file_id))) is not None: if ( - file.filename != updated_file.name - or file.content_type != updated_file.content_type.content_type + file.filename != 
updated_file.name + or file.content_type != updated_file.content_type.content_type ): raise HTTPException( status_code=400, @@ -300,12 +300,12 @@ async def update_file( @router.get("/{file_id}") async def download_file( - file_id: str, - admin_mode: bool = Depends(get_admin_mode), - version: Optional[int] = None, - increment: Optional[bool] = True, - fs: Minio = Depends(dependencies.get_fs), - allow: bool = Depends(FileAuthorization("viewer")), + file_id: str, + admin_mode: bool = Depends(get_admin_mode), + version: Optional[int] = None, + increment: Optional[bool] = True, + fs: Minio = Depends(dependencies.get_fs), + allow: bool = Depends(FileAuthorization("viewer")), ): # If file exists in MongoDB, download from Minio if (file := await FileDB.get(PydanticObjectId(file_id))) is not None: @@ -335,7 +335,7 @@ async def download_file( content.stream(settings.MINIO_UPLOAD_CHUNK_SIZE) ) response.headers["Content-Disposition"] = ( - "attachment; filename=%s" % file.name + "attachment; filename=%s" % file.name ) elif file.storage_type == StorageType.LOCAL: @@ -362,12 +362,12 @@ async def download_file( @router.get("/{file_id}/url/") async def download_file_url( - file_id: str, - admin_mode: bool = Depends(get_admin_mode), - version: Optional[int] = None, - expires_in_seconds: Optional[int] = 3600, - external_fs: Minio = Depends(dependencies.get_external_fs), - allow: bool = Depends(FileAuthorization("viewer")), + file_id: str, + admin_mode: bool = Depends(get_admin_mode), + version: Optional[int] = None, + expires_in_seconds: Optional[int] = 3600, + external_fs: Minio = Depends(dependencies.get_external_fs), + allow: bool = Depends(FileAuthorization("viewer")), ): # If file exists in MongoDB, download from Minio if (file := await FileDB.get(PydanticObjectId(file_id))) is not None: @@ -415,11 +415,11 @@ async def download_file_url( @router.delete("/{file_id}") async def delete_file( - file_id: str, - admin_mode: bool = Depends(get_admin_mode), - fs: Minio = 
Depends(dependencies.get_fs), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), - allow: bool = Depends(FileAuthorization("editor")), + file_id: str, + admin_mode: bool = Depends(get_admin_mode), + fs: Minio = Depends(dependencies.get_fs), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + allow: bool = Depends(FileAuthorization("editor")), ): if (file := await FileDB.get(PydanticObjectId(file_id))) is not None: if file.storage_type == StorageType.LOCAL: @@ -433,9 +433,9 @@ async def delete_file( @router.get("/{file_id}/summary", response_model=FileOut) async def get_file_summary( - file_id: str, - admin_mode: bool = Depends(get_admin_mode), - allow: bool = Depends(FileAuthorization("viewer")), + file_id: str, + admin_mode: bool = Depends(get_admin_mode), + allow: bool = Depends(FileAuthorization("viewer")), ): if (file := await FileDB.get(PydanticObjectId(file_id))) is not None: # TODO: Incrementing too often (3x per page view) @@ -448,10 +448,10 @@ async def get_file_summary( @router.get("/{file_id}/version_details", response_model=FileOut) async def get_file_version_details( - file_id: str, - admin_mode: bool = Depends(get_admin_mode), - version_num: Optional[int] = 0, - allow: bool = Depends(FileAuthorization("viewer")), + file_id: str, + admin_mode: bool = Depends(get_admin_mode), + version_num: Optional[int] = 0, + allow: bool = Depends(FileAuthorization("viewer")), ): if (file := await FileDB.get(PydanticObjectId(file_id))) is not None: # TODO: Incrementing too often (3x per page view) @@ -471,17 +471,17 @@ async def get_file_version_details( @router.get("/{file_id}/versions", response_model=List[FileVersion]) async def get_file_versions( - file_id: str, - admin_mode: bool = Depends(get_admin_mode), - skip: int = 0, - limit: int = 20, - allow: bool = Depends(FileAuthorization("viewer")), + file_id: str, + admin_mode: bool = Depends(get_admin_mode), + skip: int = 0, + limit: int = 20, + allow: bool = 
Depends(FileAuthorization("viewer")), ): if (file := await FileDB.get(PydanticObjectId(file_id))) is not None: mongo_versions = [] if file.storage_type == StorageType.MINIO: async for ver in FileVersionDB.find( - FileVersionDB.file_id == ObjectId(file_id) + FileVersionDB.file_id == ObjectId(file_id) ).sort(-FileVersionDB.created).skip(skip).limit(limit): mongo_versions.append(FileVersion(**ver.dict())) return mongo_versions @@ -492,15 +492,15 @@ async def get_file_versions( # submits file to extractor @router.post("/{file_id}/extract") async def post_file_extract( - file_id: str, - extractorName: str, - admin_mode: bool = Depends(get_admin_mode), - # parameters don't have a fixed model shape - parameters: dict = None, - user=Depends(get_current_user), - credentials: HTTPAuthorizationCredentials = Security(security), - rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), - allow: bool = Depends(FileAuthorization("uploader")), + file_id: str, + extractorName: str, + admin_mode: bool = Depends(get_admin_mode), + # parameters don't have a fixed model shape + parameters: dict = None, + user=Depends(get_current_user), + credentials: HTTPAuthorizationCredentials = Security(security), + rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), + allow: bool = Depends(FileAuthorization("uploader")), ): if extractorName is None: raise HTTPException(status_code=400, detail=f"No extractorName specified") @@ -525,12 +525,12 @@ async def post_file_extract( @router.post("/{file_id}/resubmit_extract") async def resubmit_file_extractions( - file_id: str, - admin_mode: bool = Depends(get_admin_mode), - user=Depends(get_current_user), - credentials: HTTPAuthorizationCredentials = Security(security), - rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), - allow: bool = Depends(FileAuthorization("editor")), + file_id: str, + admin_mode: bool = Depends(get_admin_mode), + user=Depends(get_current_user), + credentials: 
HTTPAuthorizationCredentials = Security(security), + rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), + allow: bool = Depends(FileAuthorization("editor")), ): """This route will check metadata. We get the extractors run from metadata from extractors. Then they are resubmitted. At present parameters are not stored. This will change once Jobs are @@ -552,10 +552,10 @@ async def resubmit_file_extractions( @router.get("/{file_id}/thumbnail") async def download_file_thumbnail( - file_id: str, - admin_mode: bool = Depends(get_admin_mode), - fs: Minio = Depends(dependencies.get_fs), - allow: bool = Depends(FileAuthorization("viewer")), + file_id: str, + admin_mode: bool = Depends(get_admin_mode), + fs: Minio = Depends(dependencies.get_fs), + allow: bool = Depends(FileAuthorization("viewer")), ): # If file exists in MongoDB, download from Minio if (file := await FileDB.get(PydanticObjectId(file_id))) is not None: @@ -577,15 +577,15 @@ async def download_file_thumbnail( @router.patch("/{file_id}/thumbnail/{thumbnail_id}", response_model=FileOut) async def add_file_thumbnail( - file_id: str, - thumbnail_id: str, - admin_mode: bool = Depends(get_admin_mode), - allow: bool = Depends(FileAuthorization("editor")), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + file_id: str, + thumbnail_id: str, + admin_mode: bool = Depends(get_admin_mode), + allow: bool = Depends(FileAuthorization("editor")), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), ): if (file := await FileDB.get(PydanticObjectId(file_id))) is not None: if ( - thumbnail := await ThumbnailDB.get(PydanticObjectId(thumbnail_id)) + thumbnail := await ThumbnailDB.get(PydanticObjectId(thumbnail_id)) ) is not None: # TODO: Should we garbage collect existing thumbnail if nothing else points to it? 
file.thumbnail_id = thumbnail_id diff --git a/backend/app/routers/groups.py b/backend/app/routers/groups.py index c04d43d2c..91cb55309 100644 --- a/backend/app/routers/groups.py +++ b/backend/app/routers/groups.py @@ -18,8 +18,8 @@ @router.post("", response_model=GroupOut) async def save_group( - group_in: GroupIn, - user=Depends(get_current_user), + group_in: GroupIn, + user=Depends(get_current_user), ): group_db = GroupDB(**group_in.dict(), creator=user.email) user_member = Member(user=user, editor=True) @@ -31,9 +31,9 @@ async def save_group( @router.get("", response_model=List[GroupOut]) async def get_groups( - user_id=Depends(get_user), - skip: int = 0, - limit: int = 10, + user_id=Depends(get_user), + skip: int = 0, + limit: int = 10, ): """Get a list of all Groups in the db the user is a member/owner of. @@ -57,10 +57,10 @@ async def get_groups( @router.get("/search/{search_term}", response_model=List[GroupOut]) async def search_group( - search_term: str, - user_id=Depends(get_user), - skip: int = 0, - limit: int = 10, + search_term: str, + user_id=Depends(get_user), + skip: int = 0, + limit: int = 10, ): """Search all groups in the db based on text. 
@@ -86,9 +86,9 @@ async def search_group( @router.get("/{group_id}", response_model=GroupOut) async def get_group( - group_id: str, - admin_mode: bool = Depends(get_admin_mode), - allow: bool = Depends(GroupAuthorization("viewer")), + group_id: str, + admin_mode: bool = Depends(get_admin_mode), + allow: bool = Depends(GroupAuthorization("viewer")), ): if (group := await GroupDB.get(PydanticObjectId(group_id))) is not None: return group.dict() @@ -97,11 +97,11 @@ async def get_group( @router.put("/{group_id}", response_model=GroupOut) async def edit_group( - group_id: str, - group_info: GroupBase, - admin_mode: bool = Depends(get_admin_mode), - user_id=Depends(get_user), - allow: bool = Depends(GroupAuthorization("editor")), + group_id: str, + group_info: GroupBase, + admin_mode: bool = Depends(get_admin_mode), + user_id=Depends(get_user), + allow: bool = Depends(GroupAuthorization("editor")), ): if (group := await GroupDB.get(PydanticObjectId(group_id))) is not None: group_dict = dict(group_info) if group_info is not None else {} @@ -124,7 +124,7 @@ async def edit_group( if original_user not in groups_users: # remove them from auth async for auth in AuthorizationDB.find( - {"group_ids": ObjectId(group_id)} + {"group_ids": ObjectId(group_id)} ): auth.user_ids.remove(original_user.user.email) await auth.replace() @@ -168,9 +168,9 @@ async def edit_group( @router.delete("/{group_id}", response_model=GroupOut) async def delete_group( - group_id: str, - admin_mode: bool = Depends(get_admin_mode), - allow: bool = Depends(GroupAuthorization("owner")), + group_id: str, + admin_mode: bool = Depends(get_admin_mode), + allow: bool = Depends(GroupAuthorization("owner")), ): if (group := await GroupDB.get(PydanticObjectId(group_id))) is not None: await group.delete() @@ -181,11 +181,11 @@ async def delete_group( @router.post("/{group_id}/add/{username}", response_model=GroupOut) async def add_member( - group_id: str, - username: str, - admin_mode: bool = 
Depends(get_admin_mode), - role: Optional[str] = None, - allow: bool = Depends(GroupAuthorization("editor")), + group_id: str, + username: str, + admin_mode: bool = Depends(get_admin_mode), + role: Optional[str] = None, + allow: bool = Depends(GroupAuthorization("editor")), ): """Add a new user to a group.""" if (user := await UserDB.find_one(UserDB.email == username)) is not None: @@ -219,10 +219,10 @@ async def add_member( @router.post("/{group_id}/remove/{username}", response_model=GroupOut) async def remove_member( - group_id: str, - username: str, - admin_mode: bool = Depends(get_admin_mode), - allow: bool = Depends(GroupAuthorization("editor")), + group_id: str, + username: str, + admin_mode: bool = Depends(get_admin_mode), + allow: bool = Depends(GroupAuthorization("editor")), ): """Remove a user from a group.""" @@ -252,11 +252,11 @@ async def remove_member( @router.put("/{group_id}/update/{username}", response_model=GroupOut) async def update_member( - group_id: str, - username: str, - role: str, - admin_mode: bool = Depends(get_admin_mode), - allow: bool = Depends(GroupAuthorization("editor")), + group_id: str, + username: str, + role: str, + admin_mode: bool = Depends(get_admin_mode), + allow: bool = Depends(GroupAuthorization("editor")), ): """Update user role.""" if (user := await UserDB.find_one({"email": username})) is not None: diff --git a/backend/app/routers/metadata.py b/backend/app/routers/metadata.py index ad73f5e79..d9aa51242 100644 --- a/backend/app/routers/metadata.py +++ b/backend/app/routers/metadata.py @@ -30,8 +30,8 @@ @router.post("/definition", response_model=MetadataDefinitionOut) async def save_metadata_definition( - definition_in: MetadataDefinitionIn, - user=Depends(get_current_user), + definition_in: MetadataDefinitionIn, + user=Depends(get_current_user), ): existing = await MetadataDefinitionDB.find_one( MetadataDefinitionDB.name == definition_in.name @@ -49,10 +49,10 @@ async def save_metadata_definition( 
@router.get("/definition", response_model=List[MetadataDefinitionOut]) async def get_metadata_definition_list( - name: Optional[str] = None, - user=Depends(get_current_user), - skip: int = 0, - limit: int = 2, + name: Optional[str] = None, + user=Depends(get_current_user), + skip: int = 0, + limit: int = 2, ): if name is None: defs = await MetadataDefinitionDB.find( @@ -72,11 +72,11 @@ async def get_metadata_definition_list( "/definition/{metadata_definition_id}", response_model=MetadataDefinitionOut ) async def get_metadata_definition( - metadata_definition_id: str, - user=Depends(get_current_user), + metadata_definition_id: str, + user=Depends(get_current_user), ): if ( - mdd := await MetadataDefinitionDB.get(PydanticObjectId(metadata_definition_id)) + mdd := await MetadataDefinitionDB.get(PydanticObjectId(metadata_definition_id)) ) is not None: return mdd.dict() raise HTTPException( @@ -89,8 +89,8 @@ async def get_metadata_definition( "/definition/{metadata_definition_id}", response_model=MetadataDefinitionOut ) async def delete_metadata_definition( - metadata_definition_id: str, - user=Depends(get_current_user), + metadata_definition_id: str, + user=Depends(get_current_user), ): """Delete metadata definition by specific ID.""" mdd = await MetadataDefinitionDB.find_one( @@ -106,7 +106,7 @@ async def delete_metadata_definition( raise HTTPException( status_code=400, detail=f"Metadata definition: {mdd.name} ({metadata_definition_id}) in use. " - f"You cannot delete it until all metadata records using it are deleted.", + f"You cannot delete it until all metadata records using it are deleted.", ) # TODO: Refactor this with permissions checks etc. 
@@ -123,10 +123,10 @@ async def delete_metadata_definition( "/definition/search/{search_term}", response_model=List[MetadataDefinitionOut] ) async def search_metadata_definition( - search_term: str, - skip: int = 0, - limit: int = 10, - user=Depends(get_current_user), + search_term: str, + skip: int = 0, + limit: int = 10, + user=Depends(get_current_user), ): """Search all metadata definition in the db based on text. @@ -152,12 +152,12 @@ async def search_metadata_definition( @router.patch("/{metadata_id}", response_model=MetadataOut) async def update_metadata( - metadata_in: MetadataPatch, - metadata_id: str, - admin_mode: bool = Depends(get_admin_mode), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), - user=Depends(get_current_user), - allow: bool = Depends(MetadataAuthorization("editor")), + metadata_in: MetadataPatch, + metadata_id: str, + admin_mode: bool = Depends(get_admin_mode), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + user=Depends(get_current_user), + allow: bool = Depends(MetadataAuthorization("editor")), ): """Update metadata. Any fields provided in the contents JSON will be added or updated in the metadata. If context or agent should be changed, use PUT. 
@@ -175,10 +175,10 @@ async def update_metadata( @router.delete("/{metadata_id}") async def delete_metadata( - metadata_id: str, - admin_mode: bool = Depends(get_admin_mode), - user=Depends(get_current_user), - allow: bool = Depends(MetadataAuthorization("editor")), + metadata_id: str, + admin_mode: bool = Depends(get_admin_mode), + user=Depends(get_current_user), + allow: bool = Depends(MetadataAuthorization("editor")), ): """Delete metadata by specific ID.""" md = await MetadataDB.find_one(MetadataDB.id == PyObjectId(metadata_id)) diff --git a/backend/app/routers/metadata_datasets.py b/backend/app/routers/metadata_datasets.py index 3597ebbdf..10701009a 100644 --- a/backend/app/routers/metadata_datasets.py +++ b/backend/app/routers/metadata_datasets.py @@ -35,10 +35,10 @@ async def _build_metadata_db_obj( - metadata_in: MetadataIn, - dataset: DatasetOut, - user: UserOut, - agent: MetadataAgent = None, + metadata_in: MetadataIn, + dataset: DatasetOut, + user: UserOut, + agent: MetadataAgent = None, ): """Convenience function for converting MetadataIn to MetadataDB object.""" content = await validate_context( @@ -70,12 +70,12 @@ async def _build_metadata_db_obj( @router.post("/{dataset_id}/metadata", response_model=MetadataOut) async def add_dataset_metadata( - metadata_in: MetadataIn, - dataset_id: str, - user=Depends(get_current_user), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), - admin_mode: bool = Depends(get_admin_mode), - allow: bool = Depends(Authorization("uploader")), + metadata_in: MetadataIn, + dataset_id: str, + user=Depends(get_current_user), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + admin_mode: bool = Depends(get_admin_mode), + allow: bool = Depends(Authorization("uploader")), ): """Attach new metadata to a dataset. The body must include a contents field with the JSON metadata, and either a context JSON-LD object, context_url, or definition (name of a metadata definition) to be valid. 
@@ -120,12 +120,12 @@ async def add_dataset_metadata( @router.put("/{dataset_id}/metadata", response_model=MetadataOut) async def replace_dataset_metadata( - metadata_in: MetadataIn, - dataset_id: str, - user=Depends(get_current_user), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), - admin_mode: bool = Depends(get_admin_mode), - allow: bool = Depends(Authorization("editor")), + metadata_in: MetadataIn, + dataset_id: str, + user=Depends(get_current_user), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + admin_mode: bool = Depends(get_admin_mode), + allow: bool = Depends(Authorization("editor")), ): """Update metadata. Any fields provided in the contents JSON will be added or updated in the metadata. If context or agent should be changed, use PUT. @@ -175,12 +175,12 @@ async def replace_dataset_metadata( @router.patch("/{dataset_id}/metadata", response_model=MetadataOut) async def update_dataset_metadata( - metadata_in: MetadataPatch, - dataset_id: str, - user=Depends(get_current_user), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), - admin_mode: bool = Depends(get_admin_mode), - allow: bool = Depends(Authorization("editor")), + metadata_in: MetadataPatch, + dataset_id: str, + user=Depends(get_current_user), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + admin_mode: bool = Depends(get_admin_mode), + allow: bool = Depends(Authorization("editor")), ): """Update metadata. Any fields provided in the contents JSON will be added or updated in the metadata. If context or agent should be changed, use PUT. 
@@ -195,9 +195,9 @@ async def update_dataset_metadata( if metadata_in.metadata_id is not None: # If a specific metadata_id is provided, validate the patch against existing context if ( - existing := await MetadataDB.get( - PydanticObjectId(metadata_in.metadata_id) - ) + existing := await MetadataDB.get( + PydanticObjectId(metadata_in.metadata_id) + ) ) is not None: content = await validate_context( metadata_in.content, @@ -245,12 +245,12 @@ async def update_dataset_metadata( @router.get("/{dataset_id}/metadata", response_model=List[MetadataOut]) async def get_dataset_metadata( - dataset_id: str, - listener_name: Optional[str] = Form(None), - listener_version: Optional[float] = Form(None), - user=Depends(get_current_user), - admin_mode: bool = Depends(get_admin_mode), - allow: bool = Depends(Authorization("viewer")), + dataset_id: str, + listener_name: Optional[str] = Form(None), + listener_version: Optional[float] = Form(None), + user=Depends(get_current_user), + admin_mode: bool = Depends(get_admin_mode), + allow: bool = Depends(Authorization("viewer")), ): if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: query = [MetadataDB.resource.resource_id == ObjectId(dataset_id)] @@ -264,9 +264,9 @@ async def get_dataset_metadata( async for md in MetadataDB.find(*query): if md.definition is not None: if ( - md_def := await MetadataDefinitionDB.find_one( - MetadataDefinitionDB.name == md.definition - ) + md_def := await MetadataDefinitionDB.find_one( + MetadataDefinitionDB.name == md.definition + ) ) is not None: md.description = md_def.description metadata.append(md) @@ -277,12 +277,12 @@ async def get_dataset_metadata( @router.delete("/{dataset_id}/metadata", response_model=MetadataOut) async def delete_dataset_metadata( - metadata_in: MetadataDelete, - dataset_id: str, - user=Depends(get_current_user), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), - admin_mode: bool = Depends(get_admin_mode), - allow: bool = 
Depends(Authorization("editor")), + metadata_in: MetadataDelete, + dataset_id: str, + user=Depends(get_current_user), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + admin_mode: bool = Depends(get_admin_mode), + allow: bool = Depends(Authorization("editor")), ): if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: # filter by metadata_id or definition @@ -290,9 +290,9 @@ async def delete_dataset_metadata( if metadata_in.metadata_id is not None: # If a specific metadata_id is provided, delete the matching entry if ( - existing_md := await MetadataDB.find_one( - MetadataDB.metadata_id == ObjectId(metadata_in.metadata_id) - ) + existing_md := await MetadataDB.find_one( + MetadataDB.metadata_id == ObjectId(metadata_in.metadata_id) + ) ) is not None: query.append(MetadataDB.metadata_id == metadata_in.metadata_id) else: diff --git a/backend/app/routers/metadata_files.py b/backend/app/routers/metadata_files.py index 54e1ab716..6cc156212 100644 --- a/backend/app/routers/metadata_files.py +++ b/backend/app/routers/metadata_files.py @@ -37,11 +37,11 @@ async def _build_metadata_db_obj( - metadata_in: MetadataIn, - file: FileOut, - user: UserOut, - agent: MetadataAgent = None, - version: int = None, + metadata_in: MetadataIn, + file: FileOut, + user: UserOut, + agent: MetadataAgent = None, + version: int = None, ): """Convenience function for building a MetadataDB object from incoming metadata plus a file. 
Agent and file version will be determined based on inputs if they are not provided directly.""" @@ -57,10 +57,10 @@ async def _build_metadata_db_obj( file_version = metadata_in.file_version if file_version is not None and file_version > 0: if ( - await FileVersionDB.find_one( - FileVersionDB.file_id == file.id, - FileVersionDB.version_num == file_version, - ) + await FileVersionDB.find_one( + FileVersionDB.file_id == file.id, + FileVersionDB.version_num == file_version, + ) ) is None: raise HTTPException( status_code=404, @@ -104,12 +104,12 @@ async def _build_metadata_db_obj( @router.post("/{file_id}/metadata", response_model=MetadataOut) async def add_file_metadata( - metadata_in: MetadataIn, - file_id: str, - admin_mode: bool = Depends(get_admin_mode), - user=Depends(get_current_user), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), - allow: bool = Depends(FileAuthorization("uploader")), + metadata_in: MetadataIn, + file_id: str, + admin_mode: bool = Depends(get_admin_mode), + user=Depends(get_current_user), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + allow: bool = Depends(FileAuthorization("uploader")), ): """Attach new metadata to a file. The body must include a contents field with the JSON metadata, and either a context JSON-LD object, context_url, or definition (name of a metadata definition) to be valid. 
@@ -157,12 +157,12 @@ async def add_file_metadata( @router.put("/{file_id}/metadata", response_model=MetadataOut) async def replace_file_metadata( - metadata_in: MetadataPatch, - file_id: str, - admin_mode: bool = Depends(get_admin_mode), - user=Depends(get_current_user), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), - allow: bool = Depends(FileAuthorization("editor")), + metadata_in: MetadataPatch, + file_id: str, + admin_mode: bool = Depends(get_admin_mode), + user=Depends(get_current_user), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + allow: bool = Depends(FileAuthorization("editor")), ): """Replace metadata, including agent and context. If only metadata contents should be updated, use PATCH instead. @@ -176,10 +176,10 @@ async def replace_file_metadata( version = metadata_in.file_version if version is not None: if ( - await FileVersionDB.find_one( - FileVersionDB.file_id == ObjectId(file_id), - FileVersionDB.version_num == version, - ) + await FileVersionDB.find_one( + FileVersionDB.file_id == ObjectId(file_id), + FileVersionDB.version_num == version, + ) ) is None: raise HTTPException( status_code=404, @@ -231,12 +231,12 @@ async def replace_file_metadata( @router.patch("/{file_id}/metadata", response_model=MetadataOut) async def update_file_metadata( - metadata_in: MetadataPatch, - file_id: str, - admin_mode: bool = Depends(get_admin_mode), - user=Depends(get_current_user), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), - allow: bool = Depends(FileAuthorization("editor")), + metadata_in: MetadataPatch, + file_id: str, + admin_mode: bool = Depends(get_admin_mode), + user=Depends(get_current_user), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + allow: bool = Depends(FileAuthorization("editor")), ): """Update metadata. Any fields provided in the contents JSON will be added or updated in the metadata. If context or agent should be changed, use PUT. 
@@ -247,10 +247,10 @@ async def update_file_metadata( # check if metadata with file version exists, replace metadata if none exists if ( - await MetadataDB.find_one( - MetadataDB.resource.resource_id == ObjectId(file_id), - MetadataDB.resource.version == metadata_in.file_version, - ) + await MetadataDB.find_one( + MetadataDB.resource.resource_id == ObjectId(file_id), + MetadataDB.resource.version == metadata_in.file_version, + ) ) is None: result = await replace_file_metadata(metadata_in, file_id, user, es) return result @@ -262,9 +262,9 @@ async def update_file_metadata( if metadata_in.metadata_id is not None: # If a specific metadata_id is provided, validate the patch against existing context if ( - existing_md := await MetadataDB.find_one( - MetadataDB.id == ObjectId(metadata_in.metadata_id) - ) + existing_md := await MetadataDB.find_one( + MetadataDB.id == ObjectId(metadata_in.metadata_id) + ) ) is not None: content = await validate_context( metadata_in.content, @@ -282,10 +282,10 @@ async def update_file_metadata( if metadata_in.file_version is not None: if ( - await FileVersionDB.find_one( - FileVersionDB.file_id == ObjectId(file_id), - FileVersionDB.version_num == metadata_in.file_version, - ) + await FileVersionDB.find_one( + FileVersionDB.file_id == ObjectId(file_id), + FileVersionDB.version_num == metadata_in.file_version, + ) ) is None: raise HTTPException( status_code=404, @@ -329,15 +329,15 @@ async def update_file_metadata( @router.get("/{file_id}/metadata", response_model=List[MetadataOut]) async def get_file_metadata( - file_id: str, - admin_mode: bool = Depends(get_admin_mode), - version: Optional[int] = None, - all_versions: Optional[bool] = False, - definition: Optional[str] = Form(None), - listener_name: Optional[str] = Form(None), - listener_version: Optional[float] = Form(None), - user=Depends(get_current_user), - allow: bool = Depends(FileAuthorization("viewer")), + file_id: str, + admin_mode: bool = Depends(get_admin_mode), + version: 
Optional[int] = None, + all_versions: Optional[bool] = False, + definition: Optional[str] = Form(None), + listener_name: Optional[str] = Form(None), + listener_version: Optional[float] = Form(None), + user=Depends(get_current_user), + allow: bool = Depends(FileAuthorization("viewer")), ): """Get file metadata.""" if (file := await FileDB.get(PydanticObjectId(file_id))) is not None: @@ -347,10 +347,10 @@ async def get_file_metadata( if not all_versions: if version is not None and version > 0: if ( - await FileVersionDB.find_one( - FileVersionDB.file_id == ObjectId(file_id), - FileVersionDB.version_num == version, - ) + await FileVersionDB.find_one( + FileVersionDB.file_id == ObjectId(file_id), + FileVersionDB.version_num == version, + ) ) is None: raise HTTPException( status_code=404, @@ -374,9 +374,9 @@ async def get_file_metadata( async for md in MetadataDB.find(*query): if md.definition is not None: if ( - md_def := await MetadataDefinitionDB.find_one( - MetadataDefinitionDB.name == md.definition - ) + md_def := await MetadataDefinitionDB.find_one( + MetadataDefinitionDB.name == md.definition + ) ) is not None: md_def = MetadataDefinitionOut(**md_def.dict()) md.description = md_def.description @@ -388,13 +388,13 @@ async def get_file_metadata( @router.delete("/{file_id}/metadata", response_model=MetadataOut) async def delete_file_metadata( - metadata_in: MetadataDelete, - file_id: str, - admin_mode: bool = Depends(get_admin_mode), - # version: Optional[int] = Form(None), - user=Depends(get_current_user), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), - allow: bool = Depends(FileAuthorization("editor")), + metadata_in: MetadataDelete, + file_id: str, + admin_mode: bool = Depends(get_admin_mode), + # version: Optional[int] = Form(None), + user=Depends(get_current_user), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + allow: bool = Depends(FileAuthorization("editor")), ): if (file := await 
FileDB.get(PydanticObjectId(file_id))) is not None: query = [MetadataDB.resource.resource_id == ObjectId(file_id)] @@ -420,9 +420,9 @@ async def delete_file_metadata( if metadata_in.metadata_id is not None: # If a specific metadata_id is provided, delete the matching entry if ( - await MetadataDB.find_one( - MetadataDB.metadata_id == ObjectId(metadata_in.metadata_id) - ) + await MetadataDB.find_one( + MetadataDB.metadata_id == ObjectId(metadata_in.metadata_id) + ) ) is not None: query.append(MetadataDB.metadata_id == metadata_in.metadata_id) else: diff --git a/backend/app/tests/test_authorization.py b/backend/app/tests/test_authorization.py index d2501e706..f6e473c71 100644 --- a/backend/app/tests/test_authorization.py +++ b/backend/app/tests/test_authorization.py @@ -1,6 +1,7 @@ from fastapi.testclient import TestClient + from app.config import settings -from app.tests.utils import create_dataset, create_group +from app.tests.utils import create_dataset def test_create(client: TestClient, headers: dict): @@ -12,3 +13,35 @@ def test_create(client: TestClient, headers: dict): headers=headers, ) assert response.status_code == 200 + + +def test_get_admin_info(client: TestClient, headers: dict): + response = client.get( + f"{settings.API_V2_STR}/users/me/is_admin", + headers=headers, + ) + assert response.status_code == 200 + assert response.json() == True + + response = client.get( + f"{settings.API_V2_STR}/users/me/admin_mode", + headers=headers, + ) + assert response.status_code == 200 + assert response.json() == False + + +def test_set_admin_mode(client: TestClient, headers: dict): + response = client.post( + f"{settings.API_V2_STR}/users/me/admin_mode?admin_mode_on=True", + headers=headers, + ) + assert response.status_code == 200 + assert response.json() == True + + response = client.post( + f"{settings.API_V2_STR}/users/me/admin_mode?admin_mode_on=False", + headers=headers, + ) + assert response.status_code == 200 + assert response.json() == False diff --git 
a/frontend/src/openapi/v2/services/LoginService.ts b/frontend/src/openapi/v2/services/LoginService.ts index 34d40b6e1..667454956 100644 --- a/frontend/src/openapi/v2/services/LoginService.ts +++ b/frontend/src/openapi/v2/services/LoginService.ts @@ -52,15 +52,15 @@ export class LoginService { /** * Get Admin * @param datasetId - * @returns any Successful Response + * @returns boolean Successful Response * @throws ApiError */ - public static getAdminApiV2AdminGet( + public static getAdminApiV2UsersMeIsAdminGet( datasetId?: string, - ): CancelablePromise { + ): CancelablePromise { return __request({ method: 'GET', - path: `/api/v2/admin`, + path: `/api/v2/users/me/is_admin`, query: { 'dataset_id': datasetId, }, @@ -76,10 +76,35 @@ export class LoginService { * @returns boolean Successful Response * @throws ApiError */ - public static getAdminModeApiV2AdminModeGet(): CancelablePromise { + public static getAdminModeApiV2UsersMeAdminModeGet(): CancelablePromise { return __request({ method: 'GET', - path: `/api/v2/admin_mode`, + path: `/api/v2/users/me/admin_mode`, + }); + } + + /** + * Set Admin Mode + * Set Admin mode from User Object. 
+ * @param adminModeOn + * @param datasetId + * @returns boolean Successful Response + * @throws ApiError + */ + public static setAdminModeApiV2UsersMeAdminModePost( + adminModeOn: boolean, + datasetId?: string, + ): CancelablePromise { + return __request({ + method: 'POST', + path: `/api/v2/users/me/admin_mode`, + query: { + 'admin_mode_on': adminModeOn, + 'dataset_id': datasetId, + }, + errors: { + 422: `Validation Error`, + }, }); } From 0002c0704d205f828f7e8c5138ffb57cc8c81041 Mon Sep 17 00:00:00 2001 From: Chen Wang Date: Tue, 12 Dec 2023 10:40:41 -0600 Subject: [PATCH 31/43] wire in the frontend toggle action --- frontend/src/actions/user.js | 17 ++++++++++++++--- frontend/src/components/Layout.tsx | 5 +++-- frontend/src/reducers/user.ts | 2 +- frontend/src/types/action.ts | 22 ++++++---------------- 4 files changed, 24 insertions(+), 22 deletions(-) diff --git a/frontend/src/actions/user.js b/frontend/src/actions/user.js index 2ebdc40d7..c56c285fd 100644 --- a/frontend/src/actions/user.js +++ b/frontend/src/actions/user.js @@ -53,7 +53,6 @@ export const SET_USER = "SET_USER"; export const REGISTER_USER = "REGISTER_USER"; export const REGISTER_ERROR = "REGISTER_ERROR"; export const LOGOUT = "LOGOUT"; -export const TOGGLE_ADMIN_MODE = "TOGGLE_ADMIN_MODE"; export function _legacy_login(email, password) { return async (dispatch) => { @@ -128,9 +127,21 @@ export function fetchAllUsers(skip = 0, limit = 101) { }; } -export function toggleAdminMode() { +export const TOGGLE_ADMIN_MODE = "TOGGLE_ADMIN_MODE"; + +export function toggleAdminMode(adminModeOn) { return (dispatch) => { - dispatch({ type: TOGGLE_ADMIN_MODE, receivedAt: Date.now() }); + return V2.LoginService.setAdminModeApiV2UsersMeAdminModePost(adminModeOn) + .then((json) => { + dispatch({ + type: TOGGLE_ADMIN_MODE, + adminMode: json, + receivedAt: Date.now(), + }); + }) + .catch((reason) => { + dispatch(toggleAdminMode(adminModeOn)); + }); }; } diff --git a/frontend/src/components/Layout.tsx 
b/frontend/src/components/Layout.tsx index be0aff0a8..336b33a04 100644 --- a/frontend/src/components/Layout.tsx +++ b/frontend/src/components/Layout.tsx @@ -113,7 +113,8 @@ export default function PersistentDrawerLeft(props) { const adminMode = useSelector((state: RootState) => state.user.adminMode); const fetchCurrUserProfile = () => dispatch(fetchUserProfile()); - const toggleAdminMode = () => dispatch(toggleAdminModeAction()); + const toggleAdminMode = (adminModeOn: boolean) => + dispatch(toggleAdminModeAction(adminModeOn)); useEffect(() => { fetchCurrUserProfile(); @@ -234,7 +235,7 @@ export default function PersistentDrawerLeft(props) { {currUserProfile.admin ? ( <> - + toggleAdminMode(!adminMode)}> {adminMode ? ( <> diff --git a/frontend/src/reducers/user.ts b/frontend/src/reducers/user.ts index 567e5404c..fae978b09 100644 --- a/frontend/src/reducers/user.ts +++ b/frontend/src/reducers/user.ts @@ -29,7 +29,7 @@ const user = (state = defaultState, action: DataAction) => { switch (action.type) { case TOGGLE_ADMIN_MODE: return Object.assign({}, state, { - adminMode: !state.adminMode, + adminMode: action.adminMode, }); case SET_USER: return Object.assign({}, state, { diff --git a/frontend/src/types/action.ts b/frontend/src/types/action.ts index f8c9e789e..295b79cbe 100644 --- a/frontend/src/types/action.ts +++ b/frontend/src/types/action.ts @@ -1,10 +1,4 @@ -import { - ExtractedMetadata, - FilePreview, - Folder, - MetadataJsonld, - Profile, -} from "./data"; +import {ExtractedMetadata, FilePreview, Folder, MetadataJsonld, Profile,} from "./data"; import { AuthorizationBase, DatasetOut as Dataset, @@ -22,15 +16,10 @@ import { VisualizationConfigOut, VisualizationDataOut, } from "../openapi/v2"; -import { - LIST_USERS, - PREFIX_SEARCH_USERS, - RECEIVE_USER_PROFILE, - TOGGLE_ADMIN_MODE, -} from "../actions/user"; -import { CREATE_GROUP, DELETE_GROUP } from "../actions/group"; -import { RECEIVE_FILE_PRESIGNED_URL } from "../actions/file"; -import { 
GET_VIS_DATA_PRESIGNED_URL } from "../actions/visualization"; +import {LIST_USERS, PREFIX_SEARCH_USERS, RECEIVE_USER_PROFILE, TOGGLE_ADMIN_MODE,} from "../actions/user"; +import {CREATE_GROUP, DELETE_GROUP} from "../actions/group"; +import {RECEIVE_FILE_PRESIGNED_URL} from "../actions/file"; +import {GET_VIS_DATA_PRESIGNED_URL} from "../actions/visualization"; interface RECEIVE_FILES_IN_DATASET { type: "RECEIVE_FILES_IN_DATASET"; @@ -103,6 +92,7 @@ interface SET_USER { } interface TOGGLE_ADMIN_MODE { + adminMode: boolean; type: "TOGGLE_ADMIN_MODE"; } From 8d87f45e8445786bfec3033a6964c9952555a0f8 Mon Sep 17 00:00:00 2001 From: Chen Wang Date: Tue, 12 Dec 2023 10:42:12 -0600 Subject: [PATCH 32/43] setting is working but other bugs --- frontend/src/components/Layout.tsx | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/frontend/src/components/Layout.tsx b/frontend/src/components/Layout.tsx index 336b33a04..25507fa3c 100644 --- a/frontend/src/components/Layout.tsx +++ b/frontend/src/components/Layout.tsx @@ -235,7 +235,7 @@ export default function PersistentDrawerLeft(props) { {currUserProfile.admin ? ( <> - toggleAdminMode(!adminMode)}> + toggleAdminMode(!adminMode)}> {adminMode ? 
( <> From a7c6ff0de34701705ab474b018e019c4c94e002e Mon Sep 17 00:00:00 2001 From: Chen Wang Date: Tue, 12 Dec 2023 15:06:40 -0600 Subject: [PATCH 33/43] fix admin toggle --- frontend/src/actions/user.js | 18 ++++++++++++++++++ frontend/src/components/Explore.tsx | 8 +++++++- frontend/src/components/Layout.tsx | 3 +++ frontend/src/reducers/user.ts | 5 +++++ frontend/src/types/action.ts | 18 +++++++++++++----- 5 files changed, 46 insertions(+), 6 deletions(-) diff --git a/frontend/src/actions/user.js b/frontend/src/actions/user.js index c56c285fd..266a5cf0a 100644 --- a/frontend/src/actions/user.js +++ b/frontend/src/actions/user.js @@ -145,6 +145,24 @@ export function toggleAdminMode(adminModeOn) { }; } +export const GET_ADMIN_MODE_STATUS = "GET_ADMIN_MODE_STATUS"; + +export function getAdminModeStatus() { + return (dispatch) => { + return V2.LoginService.getAdminModeApiV2UsersMeAdminModeGet() + .then((json) => { + dispatch({ + type: GET_ADMIN_MODE_STATUS, + adminMode: json, + receivedAt: Date.now(), + }); + }) + .catch((reason) => { + dispatch(getAdminModeStatus()); + }); + }; +} + export const PREFIX_SEARCH_USERS = "PREFIX_SEARCH_USERS"; export function prefixSearchAllUsers(text = "", skip = 0, limit = 101) { diff --git a/frontend/src/components/Explore.tsx b/frontend/src/components/Explore.tsx index d28b67021..ebdc458e5 100644 --- a/frontend/src/components/Explore.tsx +++ b/frontend/src/components/Explore.tsx @@ -28,6 +28,8 @@ export const Explore = (): JSX.Element => { mine: boolean | undefined ) => dispatch(fetchDatasets(skip, limit, mine)); const datasets = useSelector((state: RootState) => state.dataset.datasets); + // const adminMode = useSelector((state: RootState) => state.user.adminMode); + const adminMode = useSelector((state: RootState) => state.user.adminMode); // TODO add option to determine limit number; default show 5 datasets each time const [currPageNum, setCurrPageNum] = useState(0); @@ -45,6 +47,11 @@ export const Explore = (): JSX.Element => { 
listDatasets(0, limit, mine); }, []); + // Admin mode will fetch all datasets + useEffect(() => { + listDatasets(0, limit, mine); + }, [adminMode]); + // fetch thumbnails from each individual dataset/id calls useEffect(() => { // disable flipping if reaches the last page @@ -85,7 +92,6 @@ export const Explore = (): JSX.Element => { {/*Error Message dialogue*/} - diff --git a/frontend/src/components/Layout.tsx b/frontend/src/components/Layout.tsx index 25507fa3c..4c7667bee 100644 --- a/frontend/src/components/Layout.tsx +++ b/frontend/src/components/Layout.tsx @@ -32,6 +32,7 @@ import LogoutIcon from "@mui/icons-material/Logout"; import { EmbeddedSearch } from "./search/EmbeddedSearch"; import { fetchUserProfile, + getAdminModeStatus as getAdminModeStatusAction, toggleAdminMode as toggleAdminModeAction, } from "../actions/user"; import { AdminPanelSettings } from "@mui/icons-material"; @@ -115,9 +116,11 @@ export default function PersistentDrawerLeft(props) { const fetchCurrUserProfile = () => dispatch(fetchUserProfile()); const toggleAdminMode = (adminModeOn: boolean) => dispatch(toggleAdminModeAction(adminModeOn)); + const getAdminModeStatus = () => dispatch(getAdminModeStatusAction()); useEffect(() => { fetchCurrUserProfile(); + getAdminModeStatus(); }, []); const handleDrawerOpen = () => { diff --git a/frontend/src/reducers/user.ts b/frontend/src/reducers/user.ts index fae978b09..5d81b9583 100644 --- a/frontend/src/reducers/user.ts +++ b/frontend/src/reducers/user.ts @@ -1,6 +1,7 @@ import { DELETE_API_KEY, GENERATE_API_KEY, + GET_ADMIN_MODE_STATUS, LIST_API_KEYS, LOGIN_ERROR, RECEIVE_USER_PROFILE, @@ -31,6 +32,10 @@ const user = (state = defaultState, action: DataAction) => { return Object.assign({}, state, { adminMode: action.adminMode, }); + case GET_ADMIN_MODE_STATUS: + return Object.assign({}, state, { + adminMode: action.adminMode, + }); case SET_USER: return Object.assign({}, state, { Authorization: action.Authorization, diff --git 
a/frontend/src/types/action.ts b/frontend/src/types/action.ts index 295b79cbe..912aeccc8 100644 --- a/frontend/src/types/action.ts +++ b/frontend/src/types/action.ts @@ -1,4 +1,10 @@ -import {ExtractedMetadata, FilePreview, Folder, MetadataJsonld, Profile,} from "./data"; +import { + ExtractedMetadata, + FilePreview, + Folder, + MetadataJsonld, + Profile, +} from "./data"; import { AuthorizationBase, DatasetOut as Dataset, @@ -16,10 +22,6 @@ import { VisualizationConfigOut, VisualizationDataOut, } from "../openapi/v2"; -import {LIST_USERS, PREFIX_SEARCH_USERS, RECEIVE_USER_PROFILE, TOGGLE_ADMIN_MODE,} from "../actions/user"; -import {CREATE_GROUP, DELETE_GROUP} from "../actions/group"; -import {RECEIVE_FILE_PRESIGNED_URL} from "../actions/file"; -import {GET_VIS_DATA_PRESIGNED_URL} from "../actions/visualization"; interface RECEIVE_FILES_IN_DATASET { type: "RECEIVE_FILES_IN_DATASET"; @@ -96,6 +98,11 @@ interface TOGGLE_ADMIN_MODE { type: "TOGGLE_ADMIN_MODE"; } +interface GET_ADMIN_MODE_STATUS { + adminMode: boolean; + type: "GET_ADMIN_MODE_STATUS"; +} + interface LOGIN_ERROR { errorMsg: string; type: "LOGIN_ERROR"; @@ -451,6 +458,7 @@ interface RESET_VIS_DATA_PRESIGNED_URL { } export type DataAction = + | GET_ADMIN_MODE_STATUS | TOGGLE_ADMIN_MODE | RECEIVE_FILES_IN_DATASET | RECEIVE_FOLDERS_IN_DATASET From 0e625b154d26f6b9282fb996486d316783c692c2 Mon Sep 17 00:00:00 2001 From: Chen Wang Date: Tue, 12 Dec 2023 15:23:48 -0600 Subject: [PATCH 34/43] add admin mode toggle to every listing page --- frontend/src/components/Explore.tsx | 1 - frontend/src/components/groups/Groups.tsx | 5 +++++ .../src/components/listeners/ExtractionHistory.tsx | 6 ++++++ .../src/components/metadata/MetadataDefinitions.tsx | 10 ++++++++-- 4 files changed, 19 insertions(+), 3 deletions(-) diff --git a/frontend/src/components/Explore.tsx b/frontend/src/components/Explore.tsx index ebdc458e5..0cfb6fea5 100644 --- a/frontend/src/components/Explore.tsx +++ b/frontend/src/components/Explore.tsx 
@@ -28,7 +28,6 @@ export const Explore = (): JSX.Element => { mine: boolean | undefined ) => dispatch(fetchDatasets(skip, limit, mine)); const datasets = useSelector((state: RootState) => state.dataset.datasets); - // const adminMode = useSelector((state: RootState) => state.user.adminMode); const adminMode = useSelector((state: RootState) => state.user.adminMode); // TODO add option to determine limit number; default show 5 datasets each time diff --git a/frontend/src/components/groups/Groups.tsx b/frontend/src/components/groups/Groups.tsx index 5ed93bec5..4d1c33c1b 100644 --- a/frontend/src/components/groups/Groups.tsx +++ b/frontend/src/components/groups/Groups.tsx @@ -42,6 +42,7 @@ export function Groups() { ) => dispatch(searchGroupsAction(searchTerm, skip, limit)); const groups = useSelector((state: RootState) => state.group.groups); + const adminMode = useSelector((state: RootState) => state.user.adminMode); // TODO add option to determine limit number; default show 5 groups each time const [currPageNum, setCurrPageNum] = useState(0); @@ -57,6 +58,10 @@ export function Groups() { listGroups(skip, limit); }, []); + useEffect(() => { + listGroups(skip, limit); + }, [adminMode]); + useEffect(() => { // disable flipping if reaches the last page if (groups.length < limit) setNextDisabled(true); diff --git a/frontend/src/components/listeners/ExtractionHistory.tsx b/frontend/src/components/listeners/ExtractionHistory.tsx index ee579b88c..f1d3c6801 100644 --- a/frontend/src/components/listeners/ExtractionHistory.tsx +++ b/frontend/src/components/listeners/ExtractionHistory.tsx @@ -115,6 +115,7 @@ export const ExtractionHistory = (): JSX.Element => { const listeners = useSelector((state: RootState) => state.listener.listeners); const jobs = useSelector((state: RootState) => state.listener.jobs); + const adminMode = useSelector((state: RootState) => state.user.adminMode); const [errorOpen, setErrorOpen] = useState(false); const [currPageNum, setCurrPageNum] = 
useState(0); @@ -132,6 +133,11 @@ export const ExtractionHistory = (): JSX.Element => { listListenerJobs(null, null, null, null, null, null, 0, 100); }, []); + useEffect(() => { + listListeners(skip, limit, 0, null, null); + listListenerJobs(null, null, null, null, null, null, 0, 100); + }, [adminMode]); + useEffect(() => { if (selectedExtractor) { listListenerJobs( diff --git a/frontend/src/components/metadata/MetadataDefinitions.tsx b/frontend/src/components/metadata/MetadataDefinitions.tsx index 0ba28a7a4..38cf612e0 100644 --- a/frontend/src/components/metadata/MetadataDefinitions.tsx +++ b/frontend/src/components/metadata/MetadataDefinitions.tsx @@ -8,7 +8,7 @@ import { DialogTitle, Grid, IconButton, - InputBase, Snackbar, + Snackbar, } from "@mui/material"; import { RootState } from "../../types/data"; import { useDispatch, useSelector } from "react-redux"; @@ -49,6 +49,7 @@ export function MetadataDefinitions() { const metadataDefinitions = useSelector( (state: RootState) => state.metadata.metadataDefinitionList ); + const adminMode = useSelector((state: RootState) => state.user.adminMode); // TODO add option to determine limit number; default show 5 metadata definitions each time const [currPageNum, setCurrPageNum] = useState(0); @@ -66,7 +67,7 @@ export function MetadataDefinitions() { const [selectedMetadataDefinition, setSelectedMetadataDefinition] = useState(); - // snack bar + // snack bar const [snackBarOpen, setSnackBarOpen] = useState(false); const [snackBarMessage, setSnackBarMessage] = useState(""); @@ -83,6 +84,11 @@ export function MetadataDefinitions() { listMetadataDefinitions(null, skip, limit); }, []); + // Admin mode will fetch all metadata definitions + useEffect(() => { + listMetadataDefinitions(null, skip, limit); + }, [adminMode]); + useEffect(() => { // disable flipping if reaches the last page if (metadataDefinitions.length < limit) setNextDisabled(true); From 9375cc2a9731c5d1389926c7dfecaa9dfa0a3552 Mon Sep 17 00:00:00 2001 From: Chen Wang Date: 
Tue, 12 Dec 2023 15:43:44 -0600 Subject: [PATCH 35/43] fix typo and bug in groups --- backend/app/routers/authorization.py | 152 +++++++++--------- backend/app/routers/groups.py | 94 ++++++----- .../v2/services/AuthorizationService.ts | 2 +- .../src/openapi/v2/services/GroupsService.ts | 6 + 4 files changed, 133 insertions(+), 121 deletions(-) diff --git a/backend/app/routers/authorization.py b/backend/app/routers/authorization.py index b25f23e38..bc238c92f 100644 --- a/backend/app/routers/authorization.py +++ b/backend/app/routers/authorization.py @@ -38,11 +38,11 @@ @router.post("/datasets/{dataset_id}", response_model=AuthorizationOut) async def save_authorization( - dataset_id: str, - authorization_in: AuthorizationBase, - admin_mode: bool = Depends(get_admin_mode), - user=Depends(get_current_username), - allow: bool = Depends(Authorization("editor")), + dataset_id: str, + authorization_in: AuthorizationBase, + admin_mode: bool = Depends(get_admin_mode), + user=Depends(get_current_username), + allow: bool = Depends(Authorization("editor")), ): """Save authorization info in Mongo. 
This is a triple of dataset_id/user_id/role/group_id.""" @@ -69,10 +69,10 @@ async def save_authorization( @router.get("/datasets/{dataset_id}/role", response_model=AuthorizationOut) async def get_dataset_role( - dataset_id: str, - admin_mode: bool = Depends(get_admin_mode), - current_user=Depends(get_current_username), - admin=Depends(get_admin), + dataset_id: str, + admin_mode: bool = Depends(get_admin_mode), + current_user=Depends(get_current_username), + admin=Depends(get_admin), ): """Retrieve role of user for a specific dataset.""" # Get group id and the associated users from authorization @@ -90,7 +90,7 @@ async def get_dataset_role( ) if auth_db is None: if ( - current_dataset := await DatasetDB.get(PydanticObjectId(dataset_id)) + current_dataset := await DatasetDB.get(PydanticObjectId(dataset_id)) ) is not None: if current_dataset.status == DatasetStatus.AUTHENTICATED.name: public_authorization_in = { @@ -112,9 +112,9 @@ async def get_dataset_role( @router.get("/datasets/{dataset_id}/role/viewer}") async def get_dataset_role_viewer( - dataset_id: str, - admin_mode: bool = Depends(get_admin_mode), - allow: bool = Depends(Authorization("viewer")), + dataset_id: str, + admin_mode: bool = Depends(get_admin_mode), + allow: bool = Depends(Authorization("viewer")), ): """Used for testing only. Returns true if user has viewer permission on dataset, otherwise throws a 403 Forbidden HTTP exception. See `routers/authorization.py` for more info.""" @@ -123,9 +123,9 @@ async def get_dataset_role_viewer( @router.get("/datasets/{dataset_id}/role/owner}") async def get_dataset_role_owner( - dataset_id: str, - admin_mode: bool = Depends(get_admin_mode), - allow: bool = Depends(Authorization("owner")), + dataset_id: str, + admin_mode: bool = Depends(get_admin_mode), + allow: bool = Depends(Authorization("owner")), ): """Used for testing only. Returns true if user has owner permission on dataset, otherwise throws a 403 Forbidden HTTP exception. 
See `routers/authorization.py` for more info.""" @@ -134,11 +134,11 @@ async def get_dataset_role_owner( @router.get("/files/{file_id}/role}", response_model=RoleType) async def get_file_role( - file_id: str, - admin_mode: bool = Depends(get_admin_mode), - current_user=Depends(get_current_username), - role: RoleType = Depends(get_role_by_file), - admin=Depends(get_admin), + file_id: str, + admin_mode: bool = Depends(get_admin_mode), + current_user=Depends(get_current_username), + role: RoleType = Depends(get_role_by_file), + admin=Depends(get_admin), ): # admin is a superuser and has all the privileges, only show if the user has turned on the admin mode if admin and admin_mode: @@ -149,11 +149,11 @@ async def get_file_role( @router.get("/metadata/{metadata_id}/role}", response_model=AuthorizationMetadata) async def get_metadata_role( - metadata_id: str, - admin_mode: bool = Depends(get_admin_mode), - current_user=Depends(get_current_username), - role: RoleType = Depends(get_role_by_metadata), - admin=Depends(get_admin), + metadata_id: str, + admin_mode: bool = Depends(get_admin_mode), + current_user=Depends(get_current_username), + role: RoleType = Depends(get_role_by_metadata), + admin=Depends(get_admin), ): # admin is a superuser and has all the privileges, only show if the user has turned on the admin mode if admin and admin_mode: @@ -162,13 +162,13 @@ async def get_metadata_role( return role -@router.get("/groups/{group_id}/role}", response_model=RoleType) +@router.get("/groups/{group_id}/role", response_model=RoleType) async def get_group_role( - group_id: str, - admin_mode: bool = Depends(get_admin_mode), - current_user=Depends(get_current_username), - role: RoleType = Depends(get_role_by_group), - admin=Depends(get_admin), + group_id: str, + admin_mode: bool = Depends(get_admin_mode), + current_user=Depends(get_current_username), + role: RoleType = Depends(get_role_by_group), + admin=Depends(get_admin), ): # admin is a superuser and has all the privileges, 
only show if the user has turned on the admin mode if admin and admin_mode: @@ -182,13 +182,13 @@ async def get_group_role( response_model=AuthorizationOut, ) async def set_dataset_group_role( - dataset_id: PydanticObjectId, - group_id: PydanticObjectId, - role: RoleType, - admin_mode: bool = Depends(get_admin_mode), - es=Depends(get_elasticsearchclient), - user_id=Depends(get_user), - allow: bool = Depends(Authorization("editor")), + dataset_id: PydanticObjectId, + group_id: PydanticObjectId, + role: RoleType, + admin_mode: bool = Depends(get_admin_mode), + es=Depends(get_elasticsearchclient), + user_id=Depends(get_user), + allow: bool = Depends(Authorization("editor")), ): """Assign an entire group a specific role for a dataset.""" if (dataset := await DatasetDB.get(dataset_id)) is not None: @@ -198,10 +198,10 @@ async def set_dataset_group_role( dataset_id, group_id, admin_mode, es, user_id, allow ) if ( - auth_db := await AuthorizationDB.find_one( - AuthorizationDB.dataset_id == PyObjectId(dataset_id), - AuthorizationDB.role == role, - ) + auth_db := await AuthorizationDB.find_one( + AuthorizationDB.dataset_id == PyObjectId(dataset_id), + AuthorizationDB.role == role, + ) ) is not None: if group_id not in auth_db.group_ids: auth_db.group_ids.append(group_id) @@ -236,13 +236,13 @@ async def set_dataset_group_role( response_model=AuthorizationOut, ) async def set_dataset_user_role( - dataset_id: str, - username: str, - role: RoleType, - admin_mode: bool = Depends(get_admin_mode), - es=Depends(get_elasticsearchclient), - user_id=Depends(get_user), - allow: bool = Depends(Authorization("editor")), + dataset_id: str, + username: str, + role: RoleType, + admin_mode: bool = Depends(get_admin_mode), + es=Depends(get_elasticsearchclient), + user_id=Depends(get_user), + allow: bool = Depends(Authorization("editor")), ): """Assign a single user a specific role for a dataset.""" @@ -296,22 +296,22 @@ async def set_dataset_user_role( response_model=AuthorizationOut, ) async 
def remove_dataset_group_role( - dataset_id: PydanticObjectId, - group_id: PydanticObjectId, - admin_mode: bool = Depends(get_admin_mode), - es=Depends(get_elasticsearchclient), - user_id=Depends(get_user), - allow: bool = Depends(Authorization("editor")), + dataset_id: PydanticObjectId, + group_id: PydanticObjectId, + admin_mode: bool = Depends(get_admin_mode), + es=Depends(get_elasticsearchclient), + user_id=Depends(get_user), + allow: bool = Depends(Authorization("editor")), ): """Remove any role the group has with a specific dataset.""" if (dataset := await DatasetDB.get(dataset_id)) is not None: if (group := await GroupDB.get(group_id)) is not None: if ( - auth_db := await AuthorizationDB.find_one( - AuthorizationDB.dataset_id == dataset_id, - AuthorizationDB.group_ids == group_id, - ) + auth_db := await AuthorizationDB.find_one( + AuthorizationDB.dataset_id == dataset_id, + AuthorizationDB.group_ids == group_id, + ) ) is not None: auth_db.group_ids.remove(PyObjectId(group_id)) for u in group.users: @@ -332,22 +332,22 @@ async def remove_dataset_group_role( response_model=AuthorizationOut, ) async def remove_dataset_user_role( - dataset_id: str, - username: str, - admin_mode: bool = Depends(get_admin_mode), - es=Depends(get_elasticsearchclient), - user_id=Depends(get_user), - allow: bool = Depends(Authorization("editor")), + dataset_id: str, + username: str, + admin_mode: bool = Depends(get_admin_mode), + es=Depends(get_elasticsearchclient), + user_id=Depends(get_user), + allow: bool = Depends(Authorization("editor")), ): """Remove any role the user has with a specific dataset.""" if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: if (await UserDB.find_one(UserDB.email == username)) is not None: if ( - auth_db := await AuthorizationDB.find_one( - AuthorizationDB.dataset_id == PyObjectId(dataset_id), - AuthorizationDB.user_ids == username, - ) + auth_db := await AuthorizationDB.find_one( + AuthorizationDB.dataset_id == 
PyObjectId(dataset_id), + AuthorizationDB.user_ids == username, + ) ) is not None: auth_db.user_ids.remove(username) await auth_db.save() @@ -362,16 +362,16 @@ async def remove_dataset_user_role( @router.get("/datasets/{dataset_id}/roles}", response_model=DatasetRoles) async def get_dataset_roles( - dataset_id: str, - admin_mode: bool = Depends(get_admin_mode), - allow: bool = Depends(Authorization("editor")), + dataset_id: str, + admin_mode: bool = Depends(get_admin_mode), + allow: bool = Depends(Authorization("editor")), ): """Get a list of all users and groups that have assigned roles on this dataset.""" if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: roles = DatasetRoles(dataset_id=str(dataset.id)) async for auth in AuthorizationDB.find( - AuthorizationDB.dataset_id == ObjectId(dataset_id) + AuthorizationDB.dataset_id == ObjectId(dataset_id) ): # First, fetch all groups that have a role on the dataset group_user_counts = {} @@ -389,8 +389,8 @@ async def get_dataset_roles( # Next, get all users but omit those that are included in a group above async for user in UserDB.find(In(UserDB.email, auth.user_ids)): if ( - user.email in group_user_counts - and auth.user_ids.count(user.email) == group_user_counts[user.email] + user.email in group_user_counts + and auth.user_ids.count(user.email) == group_user_counts[user.email] ): continue # TODO: Why is this necessary here but not on root-level ObjectIDs? 
diff --git a/backend/app/routers/groups.py b/backend/app/routers/groups.py index 91cb55309..6102bd7db 100644 --- a/backend/app/routers/groups.py +++ b/backend/app/routers/groups.py @@ -11,15 +11,15 @@ from app.models.authorization import RoleType from app.models.groups import GroupOut, GroupIn, GroupDB, GroupBase, Member from app.models.users import UserOut, UserDB -from app.routers.authentication import get_admin_mode +from app.routers.authentication import get_admin_mode, get_admin router = APIRouter() @router.post("", response_model=GroupOut) async def save_group( - group_in: GroupIn, - user=Depends(get_current_user), + group_in: GroupIn, + user=Depends(get_current_user), ): group_db = GroupDB(**group_in.dict(), creator=user.email) user_member = Member(user=user, editor=True) @@ -31,9 +31,11 @@ async def save_group( @router.get("", response_model=List[GroupOut]) async def get_groups( - user_id=Depends(get_user), - skip: int = 0, - limit: int = 10, + user_id=Depends(get_user), + skip: int = 0, + limit: int = 10, + admin_mode: bool = Depends(get_admin_mode), + admin=Depends(get_admin), ): """Get a list of all Groups in the db the user is a member/owner of. @@ -43,11 +45,15 @@ async def get_groups( """ - groups = await GroupDB.find( - Or( + criteria_list = [] + if not admin or not admin_mode: + criteria_list.append(Or( GroupDB.creator == user_id, GroupDB.users.user.email == user_id, - ), + )) + + groups = await GroupDB.find( + *criteria_list, sort=(-GroupDB.created), skip=skip, limit=limit, @@ -57,10 +63,12 @@ async def get_groups( @router.get("/search/{search_term}", response_model=List[GroupOut]) async def search_group( - search_term: str, - user_id=Depends(get_user), - skip: int = 0, - limit: int = 10, + search_term: str, + user_id=Depends(get_user), + skip: int = 0, + limit: int = 10, + admin_mode: bool = Depends(get_admin_mode), + admin=Depends(get_admin), ): """Search all groups in the db based on text. 
@@ -70,13 +78,17 @@ async def search_group( limit -- restrict number of records to be returned (i.e. for pagination) """ + criteria_list = [ + Or(RegEx(field=GroupDB.name, pattern=search_term), + RegEx(field=GroupDB.description, pattern=search_term)), ] + if not admin or not admin_mode: + criteria_list.append( + Or(GroupDB.creator == user_id, GroupDB.users.user.email == user_id) + ) + # user has to be the creator or member first; then apply search groups = await GroupDB.find( - Or(GroupDB.creator == user_id, GroupDB.users.user.email == user_id), - Or( - RegEx(field=GroupDB.name, pattern=search_term), - RegEx(field=GroupDB.description, pattern=search_term), - ), + *criteria_list, skip=skip, limit=limit, ).to_list() @@ -86,9 +98,8 @@ async def search_group( @router.get("/{group_id}", response_model=GroupOut) async def get_group( - group_id: str, - admin_mode: bool = Depends(get_admin_mode), - allow: bool = Depends(GroupAuthorization("viewer")), + group_id: str, + allow: bool = Depends(GroupAuthorization("viewer")), ): if (group := await GroupDB.get(PydanticObjectId(group_id))) is not None: return group.dict() @@ -97,11 +108,10 @@ async def get_group( @router.put("/{group_id}", response_model=GroupOut) async def edit_group( - group_id: str, - group_info: GroupBase, - admin_mode: bool = Depends(get_admin_mode), - user_id=Depends(get_user), - allow: bool = Depends(GroupAuthorization("editor")), + group_id: str, + group_info: GroupBase, + user_id=Depends(get_user), + allow: bool = Depends(GroupAuthorization("editor")), ): if (group := await GroupDB.get(PydanticObjectId(group_id))) is not None: group_dict = dict(group_info) if group_info is not None else {} @@ -124,7 +134,7 @@ async def edit_group( if original_user not in groups_users: # remove them from auth async for auth in AuthorizationDB.find( - {"group_ids": ObjectId(group_id)} + {"group_ids": ObjectId(group_id)} ): auth.user_ids.remove(original_user.user.email) await auth.replace() @@ -168,9 +178,8 @@ async def 
edit_group( @router.delete("/{group_id}", response_model=GroupOut) async def delete_group( - group_id: str, - admin_mode: bool = Depends(get_admin_mode), - allow: bool = Depends(GroupAuthorization("owner")), + group_id: str, + allow: bool = Depends(GroupAuthorization("owner")), ): if (group := await GroupDB.get(PydanticObjectId(group_id))) is not None: await group.delete() @@ -181,11 +190,10 @@ async def delete_group( @router.post("/{group_id}/add/{username}", response_model=GroupOut) async def add_member( - group_id: str, - username: str, - admin_mode: bool = Depends(get_admin_mode), - role: Optional[str] = None, - allow: bool = Depends(GroupAuthorization("editor")), + group_id: str, + username: str, + role: Optional[str] = None, + allow: bool = Depends(GroupAuthorization("editor")), ): """Add a new user to a group.""" if (user := await UserDB.find_one(UserDB.email == username)) is not None: @@ -219,10 +227,9 @@ async def add_member( @router.post("/{group_id}/remove/{username}", response_model=GroupOut) async def remove_member( - group_id: str, - username: str, - admin_mode: bool = Depends(get_admin_mode), - allow: bool = Depends(GroupAuthorization("editor")), + group_id: str, + username: str, + allow: bool = Depends(GroupAuthorization("editor")), ): """Remove a user from a group.""" @@ -252,11 +259,10 @@ async def remove_member( @router.put("/{group_id}/update/{username}", response_model=GroupOut) async def update_member( - group_id: str, - username: str, - role: str, - admin_mode: bool = Depends(get_admin_mode), - allow: bool = Depends(GroupAuthorization("editor")), + group_id: str, + username: str, + role: str, + allow: bool = Depends(GroupAuthorization("editor")), ): """Update user role.""" if (user := await UserDB.find_one({"email": username})) is not None: diff --git a/frontend/src/openapi/v2/services/AuthorizationService.ts b/frontend/src/openapi/v2/services/AuthorizationService.ts index 22e2fe267..fc16149d9 100644 --- 
a/frontend/src/openapi/v2/services/AuthorizationService.ts +++ b/frontend/src/openapi/v2/services/AuthorizationService.ts @@ -152,7 +152,7 @@ export class AuthorizationService { ): CancelablePromise { return __request({ method: 'GET', - path: `/api/v2/authorizations/groups/${groupId}/role}`, + path: `/api/v2/authorizations/groups/${groupId}/role`, query: { 'dataset_id': datasetId, }, diff --git a/frontend/src/openapi/v2/services/GroupsService.ts b/frontend/src/openapi/v2/services/GroupsService.ts index 6c20074ce..404a84348 100644 --- a/frontend/src/openapi/v2/services/GroupsService.ts +++ b/frontend/src/openapi/v2/services/GroupsService.ts @@ -18,12 +18,14 @@ export class GroupsService { * limit -- restrict number of records to be returned (i.e. for pagination) * @param skip * @param limit + * @param datasetId * @returns GroupOut Successful Response * @throws ApiError */ public static getGroupsApiV2GroupsGet( skip?: number, limit: number = 10, + datasetId?: string, ): CancelablePromise> { return __request({ method: 'GET', @@ -31,6 +33,7 @@ export class GroupsService { query: { 'skip': skip, 'limit': limit, + 'dataset_id': datasetId, }, errors: { 422: `Validation Error`, @@ -69,6 +72,7 @@ export class GroupsService { * @param searchTerm * @param skip * @param limit + * @param datasetId * @returns GroupOut Successful Response * @throws ApiError */ @@ -76,6 +80,7 @@ export class GroupsService { searchTerm: string, skip?: number, limit: number = 10, + datasetId?: string, ): CancelablePromise> { return __request({ method: 'GET', @@ -83,6 +88,7 @@ export class GroupsService { query: { 'skip': skip, 'limit': limit, + 'dataset_id': datasetId, }, errors: { 422: `Validation Error`, From 78cc77dbe0c368f5371544cd1a8b392c0f6d4921 Mon Sep 17 00:00:00 2001 From: Chen Wang Date: Wed, 13 Dec 2023 09:29:56 -0600 Subject: [PATCH 36/43] only check admin in authorization dependency --- backend/app/deps/authorization_deps.py | 114 +++++++++++++++---------- 
backend/app/routers/authorization.py | 52 +++-------- 2 files changed, 83 insertions(+), 83 deletions(-) diff --git a/backend/app/deps/authorization_deps.py b/backend/app/deps/authorization_deps.py index fdf29f9ad..92a57728d 100644 --- a/backend/app/deps/authorization_deps.py +++ b/backend/app/deps/authorization_deps.py @@ -14,11 +14,16 @@ async def get_role( - dataset_id: str, - current_user=Depends(get_current_username), + dataset_id: str, + current_user=Depends(get_current_username), + admin_mode: bool = Depends(get_admin_mode), + admin=Depends(get_admin), ) -> RoleType: """Returns the role a specific user has on a dataset. If the user is a creator (owner), they are not listed in the user_ids list.""" + if admin and admin_mode: + return RoleType.OWNER + authorization = await AuthorizationDB.find_one( AuthorizationDB.dataset_id == PyObjectId(dataset_id), Or( @@ -30,9 +35,14 @@ async def get_role( async def get_role_by_file( - file_id: str, - current_user=Depends(get_current_username), + file_id: str, + current_user=Depends(get_current_username), + admin_mode: bool = Depends(get_admin_mode), + admin=Depends(get_admin), ) -> RoleType: + if admin and admin_mode: + return RoleType.OWNER + if (file := await FileDB.get(PydanticObjectId(file_id))) is not None: authorization = await AuthorizationDB.find_one( AuthorizationDB.dataset_id == file.dataset_id, @@ -43,7 +53,7 @@ async def get_role_by_file( ) if authorization is None: if ( - dataset := await DatasetDB.get(PydanticObjectId(file.dataset_id)) + dataset := await DatasetDB.get(PydanticObjectId(file.dataset_id)) ) is not None: if dataset.status == DatasetStatus.AUTHENTICATED.name: auth_dict = { @@ -64,9 +74,14 @@ async def get_role_by_file( async def get_role_by_metadata( - metadata_id: str, - current_user=Depends(get_current_username), + metadata_id: str, + current_user=Depends(get_current_username), + admin_mode: bool = Depends(get_admin_mode), + admin=Depends(get_admin), ) -> RoleType: + if admin and admin_mode: + 
return RoleType.OWNER + if (md_out := await MetadataDB.get(PydanticObjectId(metadata_id))) is not None: resource_type = md_out.resource.collection resource_id = md_out.resource.resource_id @@ -82,7 +97,7 @@ async def get_role_by_metadata( return authorization.role elif resource_type == "datasets": if ( - dataset := await DatasetDB.get(PydanticObjectId(resource_id)) + dataset := await DatasetDB.get(PydanticObjectId(resource_id)) ) is not None: authorization = await AuthorizationDB.find_one( AuthorizationDB.dataset_id == dataset.id, @@ -95,9 +110,14 @@ async def get_role_by_metadata( async def get_role_by_group( - group_id: str, - current_user=Depends(get_current_username), + group_id: str, + current_user=Depends(get_current_username), + admin_mode: bool = Depends(get_admin_mode), + admin=Depends(get_admin), ) -> RoleType: + if admin and admin_mode: + return RoleType.OWNER + if (group := await GroupDB.get(group_id)) is not None: if group.creator == current_user: # Creator can do everything @@ -116,7 +136,7 @@ async def get_role_by_group( async def is_public_dataset( - dataset_id: str, + dataset_id: str, ) -> bool: """Checks if a dataset is public.""" if (dataset_out := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: @@ -127,7 +147,7 @@ async def is_public_dataset( async def is_authenticated_dataset( - dataset_id: str, + dataset_id: str, ) -> bool: """Checks if a dataset is authenticated.""" if (dataset_out := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: @@ -145,11 +165,11 @@ def __init__(self, role: str): self.role = role async def __call__( - self, - dataset_id: str, - current_user: str = Depends(get_current_username), - admin_mode: bool = Depends(get_admin_mode), - admin: bool = Depends(get_admin), + self, + dataset_id: str, + current_user: str = Depends(get_current_username), + admin_mode: bool = Depends(get_admin_mode), + admin: bool = Depends(get_admin), ): # TODO: Make sure we enforce only one role per user per dataset, or 
find_one could yield wrong answer here. @@ -175,11 +195,11 @@ async def __call__( ) else: if ( - current_dataset := await DatasetDB.get(PydanticObjectId(dataset_id)) + current_dataset := await DatasetDB.get(PydanticObjectId(dataset_id)) ) is not None: if ( - current_dataset.status == DatasetStatus.AUTHENTICATED.name - and self.role == "viewer" + current_dataset.status == DatasetStatus.AUTHENTICATED.name + and self.role == "viewer" ): return True else: @@ -202,11 +222,11 @@ def __init__(self, role: str): self.role = role async def __call__( - self, - file_id: str, - admin_mode: bool = Depends(get_admin_mode), - current_user: str = Depends(get_current_username), - admin: bool = Depends(get_admin), + self, + file_id: str, + current_user: str = Depends(get_current_username), + admin_mode: bool = Depends(get_admin_mode), + admin: bool = Depends(get_admin), ): # If the current user is admin and has turned on admin_mode, user has access irrespective of any role assigned if admin and admin_mode: @@ -240,11 +260,11 @@ def __init__(self, role: str): self.role = role async def __call__( - self, - metadata_id: str, - admin_mode: bool = Depends(get_admin_mode), - current_user: str = Depends(get_current_username), - admin: bool = Depends(get_admin), + self, + metadata_id: str, + current_user: str = Depends(get_current_username), + admin_mode: bool = Depends(get_admin_mode), + admin: bool = Depends(get_admin), ): # If the current user is admin and has turned on admin_mode, user has access irrespective of any role assigned if admin and admin_mode: @@ -256,7 +276,7 @@ async def __call__( resource_id = md_out.resource.resource_id if resource_type == "files": if ( - file := await FileDB.get(PydanticObjectId(resource_id)) + file := await FileDB.get(PydanticObjectId(resource_id)) ) is not None: authorization = await AuthorizationDB.find_one( AuthorizationDB.dataset_id == file.dataset_id, @@ -278,7 +298,7 @@ async def __call__( ) elif resource_type == "datasets": if ( - dataset := await 
DatasetDB.get(PydanticObjectId(resource_id)) + dataset := await DatasetDB.get(PydanticObjectId(resource_id)) ) is not None: authorization = await AuthorizationDB.find_one( AuthorizationDB.dataset_id == dataset.id, @@ -307,11 +327,11 @@ def __init__(self, role: str): self.role = role async def __call__( - self, - group_id: str, - admin_mode: bool = Depends(get_admin_mode), - current_user: str = Depends(get_current_username), - admin: bool = Depends(get_admin), + self, + group_id: str, + current_user: str = Depends(get_current_username), + admin_mode: bool = Depends(get_admin_mode), + admin: bool = Depends(get_admin), ): # If the current user is admin and has turned on admin_mode, user has access irrespective of any role assigned if admin and admin_mode: @@ -343,8 +363,8 @@ def __init__(self, status: str): self.status = status async def __call__( - self, - dataset_id: str, + self, + dataset_id: str, ): if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: if dataset.status == self.status: @@ -363,13 +383,13 @@ def __init__(self, status: str): self.status = status async def __call__( - self, - file_id: str, + self, + file_id: str, ): if (file_out := await FileDB.get(PydanticObjectId(file_id))) is not None: dataset_id = file_out.dataset_id if ( - dataset := await DatasetDB.get(PydanticObjectId(dataset_id)) + dataset := await DatasetDB.get(PydanticObjectId(dataset_id)) ) is not None: if dataset.status == self.status: return True @@ -381,9 +401,13 @@ async def __call__( return False -def access(user_role: RoleType, role_required: RoleType) -> bool: - """Enforce implied role hierarchy OWNER > EDITOR > UPLOADER > VIEWER""" - if user_role == RoleType.OWNER: +def access(user_role: RoleType, + role_required: RoleType, + admin_mode: bool = Depends(get_admin_mode), + admin: bool = Depends(get_admin), + ) -> bool: + """Enforce implied role hierarchy ADMIN = OWNER > EDITOR > UPLOADER > VIEWER""" + if user_role == RoleType.OWNER or (admin and admin_mode): 
return True elif user_role == RoleType.EDITOR and role_required in [ RoleType.EDITOR, diff --git a/backend/app/routers/authorization.py b/backend/app/routers/authorization.py index bc238c92f..3e3bf0147 100644 --- a/backend/app/routers/authorization.py +++ b/backend/app/routers/authorization.py @@ -40,7 +40,6 @@ async def save_authorization( dataset_id: str, authorization_in: AuthorizationBase, - admin_mode: bool = Depends(get_admin_mode), user=Depends(get_current_username), allow: bool = Depends(Authorization("editor")), ): @@ -70,24 +69,23 @@ async def save_authorization( @router.get("/datasets/{dataset_id}/role", response_model=AuthorizationOut) async def get_dataset_role( dataset_id: str, - admin_mode: bool = Depends(get_admin_mode), current_user=Depends(get_current_username), + admin_mode: bool = Depends(get_admin_mode), admin=Depends(get_admin), ): """Retrieve role of user for a specific dataset.""" # Get group id and the associated users from authorization - if admin and admin_mode: - auth_db = await AuthorizationDB.find_one( - AuthorizationDB.dataset_id == PyObjectId(dataset_id) - ) - else: - auth_db = await AuthorizationDB.find_one( - AuthorizationDB.dataset_id == PyObjectId(dataset_id), - Or( - AuthorizationDB.creator == current_user, - AuthorizationDB.user_ids == current_user, - ), - ) + criteria = [] + if not admin or not admin_mode: + criteria.append(Or( + AuthorizationDB.creator == current_user, + AuthorizationDB.user_ids == current_user, + )) + + auth_db = await AuthorizationDB.find_one( + AuthorizationDB.dataset_id == PyObjectId(dataset_id), + *criteria, + ) if auth_db is None: if ( current_dataset := await DatasetDB.get(PydanticObjectId(dataset_id)) @@ -113,7 +111,6 @@ async def get_dataset_role( @router.get("/datasets/{dataset_id}/role/viewer}") async def get_dataset_role_viewer( dataset_id: str, - admin_mode: bool = Depends(get_admin_mode), allow: bool = Depends(Authorization("viewer")), ): """Used for testing only. 
Returns true if user has viewer permission on dataset, otherwise throws a 403 Forbidden HTTP exception. @@ -124,7 +121,6 @@ async def get_dataset_role_viewer( @router.get("/datasets/{dataset_id}/role/owner}") async def get_dataset_role_owner( dataset_id: str, - admin_mode: bool = Depends(get_admin_mode), allow: bool = Depends(Authorization("owner")), ): """Used for testing only. Returns true if user has owner permission on dataset, otherwise throws a 403 Forbidden HTTP exception. @@ -135,14 +131,9 @@ async def get_dataset_role_owner( @router.get("/files/{file_id}/role}", response_model=RoleType) async def get_file_role( file_id: str, - admin_mode: bool = Depends(get_admin_mode), current_user=Depends(get_current_username), role: RoleType = Depends(get_role_by_file), - admin=Depends(get_admin), ): - # admin is a superuser and has all the privileges, only show if the user has turned on the admin mode - if admin and admin_mode: - return RoleType.OWNER """Retrieve role of user for an individual file. Role cannot change between file versions.""" return role @@ -150,14 +141,9 @@ async def get_file_role( @router.get("/metadata/{metadata_id}/role}", response_model=AuthorizationMetadata) async def get_metadata_role( metadata_id: str, - admin_mode: bool = Depends(get_admin_mode), current_user=Depends(get_current_username), role: RoleType = Depends(get_role_by_metadata), - admin=Depends(get_admin), ): - # admin is a superuser and has all the privileges, only show if the user has turned on the admin mode - if admin and admin_mode: - return RoleType.OWNER """Retrieve role of user for group. 
Group roles can be OWNER, EDITOR, or VIEWER (for regular Members).""" return role @@ -165,14 +151,9 @@ async def get_metadata_role( @router.get("/groups/{group_id}/role", response_model=RoleType) async def get_group_role( group_id: str, - admin_mode: bool = Depends(get_admin_mode), current_user=Depends(get_current_username), role: RoleType = Depends(get_role_by_group), - admin=Depends(get_admin), ): - # admin is a superuser and has all the privileges, only show if the user has turned on the admin mode - if admin and admin_mode: - return RoleType.OWNER """Retrieve role of user on a particular group (i.e. whether they can change group memberships).""" return role @@ -185,7 +166,6 @@ async def set_dataset_group_role( dataset_id: PydanticObjectId, group_id: PydanticObjectId, role: RoleType, - admin_mode: bool = Depends(get_admin_mode), es=Depends(get_elasticsearchclient), user_id=Depends(get_user), allow: bool = Depends(Authorization("editor")), @@ -195,7 +175,7 @@ async def set_dataset_group_role( if (group := await GroupDB.get(group_id)) is not None: # First, remove any existing role the group has on the dataset await remove_dataset_group_role( - dataset_id, group_id, admin_mode, es, user_id, allow + dataset_id, group_id, es, user_id, allow ) if ( auth_db := await AuthorizationDB.find_one( @@ -239,7 +219,6 @@ async def set_dataset_user_role( dataset_id: str, username: str, role: RoleType, - admin_mode: bool = Depends(get_admin_mode), es=Depends(get_elasticsearchclient), user_id=Depends(get_user), allow: bool = Depends(Authorization("editor")), @@ -250,7 +229,7 @@ async def set_dataset_user_role( if (await UserDB.find_one(UserDB.email == username)) is not None: # First, remove any existing role the user has on the dataset await remove_dataset_user_role( - dataset_id, username, admin_mode, es, user_id, allow + dataset_id, username, es, user_id, allow ) auth_db = await AuthorizationDB.find_one( AuthorizationDB.dataset_id == PyObjectId(dataset_id), @@ -298,7 +277,6 @@ 
async def set_dataset_user_role( async def remove_dataset_group_role( dataset_id: PydanticObjectId, group_id: PydanticObjectId, - admin_mode: bool = Depends(get_admin_mode), es=Depends(get_elasticsearchclient), user_id=Depends(get_user), allow: bool = Depends(Authorization("editor")), @@ -334,7 +312,6 @@ async def remove_dataset_group_role( async def remove_dataset_user_role( dataset_id: str, username: str, - admin_mode: bool = Depends(get_admin_mode), es=Depends(get_elasticsearchclient), user_id=Depends(get_user), allow: bool = Depends(Authorization("editor")), @@ -363,7 +340,6 @@ async def remove_dataset_user_role( @router.get("/datasets/{dataset_id}/roles}", response_model=DatasetRoles) async def get_dataset_roles( dataset_id: str, - admin_mode: bool = Depends(get_admin_mode), allow: bool = Depends(Authorization("editor")), ): """Get a list of all users and groups that have assigned roles on this dataset.""" From 7d22319134dc5ea7b31e221f7c51491df32a2933 Mon Sep 17 00:00:00 2001 From: Chen Wang Date: Wed, 13 Dec 2023 09:36:18 -0600 Subject: [PATCH 37/43] remove unnecessary admin_mode --- backend/app/routers/datasets.py | 285 +++++++++++++++----------------- 1 file changed, 133 insertions(+), 152 deletions(-) diff --git a/backend/app/routers/datasets.py b/backend/app/routers/datasets.py index 2f22177c9..a482ceb52 100644 --- a/backend/app/routers/datasets.py +++ b/backend/app/routers/datasets.py @@ -56,7 +56,7 @@ from app.rabbitmq.listeners import submit_dataset_job from app.routers.authentication import get_admin from app.routers.authentication import get_admin_mode -from app.routers.files import add_file_entry, remove_file_entry +from app.routers.files import add_file_entry, remove_file_entry, add_local_file_entry from app.search.connect import ( delete_document_by_id, ) @@ -134,12 +134,12 @@ def nested_update(target_dict, update_dict): async def _create_folder_structure( - dataset_id: str, - contents: dict, - folder_path: str, - folder_lookup: dict, - user: 
UserOut, - parent_folder_id: Optional[str] = None, + dataset_id: str, + contents: dict, + folder_path: str, + folder_lookup: dict, + user: UserOut, + parent_folder_id: Optional[str] = None, ): """Recursively create folders encountered in folder_path until the target folder is created. Arguments: @@ -174,8 +174,8 @@ async def _create_folder_structure( async def _get_folder_hierarchy( - folder_id: str, - hierarchy: str, + folder_id: str, + hierarchy: str, ): """Generate a string of nested path to folder for use in zip file creation.""" folder = await FolderDB.get(PydanticObjectId(folder_id)) @@ -187,9 +187,9 @@ async def _get_folder_hierarchy( @router.post("", response_model=DatasetOut) async def save_dataset( - dataset_in: DatasetIn, - user=Depends(get_current_user), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + dataset_in: DatasetIn, + user=Depends(get_current_user), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), ): dataset = DatasetDB(**dataset_in.dict(), creator=user) await dataset.insert() @@ -208,47 +208,41 @@ async def save_dataset( @router.get("", response_model=List[DatasetOut]) async def get_datasets( - admin_mode: bool = Depends(get_admin_mode), - user_id=Depends(get_user), - skip: int = 0, - limit: int = 10, - mine: bool = False, - admin=Depends(get_admin), + user_id=Depends(get_user), + skip: int = 0, + limit: int = 10, + mine: bool = False, + admin=Depends(get_admin), + admin_mode: bool = Depends(get_admin_mode), + ): - if admin_mode and admin: - datasets = await DatasetDBViewList.find( - sort=(-DatasetDBViewList.created), - skip=skip, - limit=limit, - ).to_list() - elif mine: - datasets = await DatasetDBViewList.find( - DatasetDBViewList.creator.email == user_id, - sort=(-DatasetDBViewList.created), - skip=skip, - limit=limit, - ).to_list() - else: - datasets = await DatasetDBViewList.find( - Or( - DatasetDBViewList.creator.email == user_id, - DatasetDBViewList.auth.user_ids == user_id, - 
DatasetDBViewList.status == DatasetStatus.AUTHENTICATED.name, - ), - sort=(-DatasetDBViewList.created), - skip=skip, - limit=limit, - ).to_list() + criteria = [] + if not admin_mode or not admin: + if mine: + criteria.append(Or(DatasetDB.creator.email == user_id)) + else: + criteria.append( + Or( + DatasetDB.auth.user_ids == user_id, + DatasetDB.status == DatasetStatus.AUTHENTICATED.name, + ) + ) + + datasets = await DatasetDBViewList.find( + *criteria, + sort=(-DatasetDBViewList.created), + skip=skip, + limit=limit, + ).to_list() return [dataset.dict() for dataset in datasets] @router.get("/{dataset_id}", response_model=DatasetOut) async def get_dataset( - dataset_id: str, - authenticated: bool = Depends(CheckStatus("AUTHENTICATED")), - admin_mode: bool = Depends(get_admin_mode), - allow: bool = Depends(Authorization("viewer")), + dataset_id: str, + authenticated: bool = Depends(CheckStatus("AUTHENTICATED")), + allow: bool = Depends(Authorization("viewer")), ): if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: return dataset.dict() @@ -257,14 +251,13 @@ async def get_dataset( @router.get("/{dataset_id}/files", response_model=List[FileOut]) async def get_dataset_files( - dataset_id: str, - admin_mode: bool = Depends(get_admin_mode), - folder_id: Optional[str] = None, - authenticated: bool = Depends(CheckStatus("AUTHENTICATED")), - user_id=Depends(get_user), - skip: int = 0, - limit: int = 10, - allow: bool = Depends(Authorization("viewer")), + dataset_id: str, + folder_id: Optional[str] = None, + authenticated: bool = Depends(CheckStatus("AUTHENTICATED")), + user_id=Depends(get_user), + skip: int = 0, + limit: int = 10, + allow: bool = Depends(Authorization("viewer")), ): if authenticated: query = [ @@ -286,12 +279,11 @@ async def get_dataset_files( @router.put("/{dataset_id}", response_model=DatasetOut) async def edit_dataset( - dataset_id: str, - dataset_info: DatasetBase, - admin_mode: bool = Depends(get_admin_mode), - 
user=Depends(get_current_user), - es=Depends(dependencies.get_elasticsearchclient), - allow: bool = Depends(Authorization("editor")), + dataset_id: str, + dataset_info: DatasetBase, + user=Depends(get_current_user), + es=Depends(dependencies.get_elasticsearchclient), + allow: bool = Depends(Authorization("editor")), ): if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: # TODO: Refactor this with permissions checks etc. @@ -307,12 +299,11 @@ async def edit_dataset( @router.patch("/{dataset_id}", response_model=DatasetOut) async def patch_dataset( - dataset_id: str, - dataset_info: DatasetPatch, - admin_mode: bool = Depends(get_admin_mode), - user=Depends(get_current_user), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), - allow: bool = Depends(Authorization("editor")), + dataset_id: str, + dataset_info: DatasetPatch, + user=Depends(get_current_user), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + allow: bool = Depends(Authorization("editor")), ): if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: # TODO: Update method not working properly @@ -332,11 +323,10 @@ async def patch_dataset( @router.delete("/{dataset_id}") async def delete_dataset( - dataset_id: str, - admin_mode: bool = Depends(get_admin_mode), - fs: Minio = Depends(dependencies.get_fs), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), - allow: bool = Depends(Authorization("editor")), + dataset_id: str, + fs: Minio = Depends(dependencies.get_fs), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + allow: bool = Depends(Authorization("editor")), ): if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: # delete from elasticsearch @@ -347,7 +337,7 @@ async def delete_dataset( MetadataDB.resource.resource_id == PydanticObjectId(dataset_id) ).delete() async for file in FileDB.find( - FileDB.dataset_id == PydanticObjectId(dataset_id) + 
FileDB.dataset_id == PydanticObjectId(dataset_id) ): await remove_file_entry(file.id, fs, es) await FolderDB.find( @@ -362,11 +352,10 @@ async def delete_dataset( @router.post("/{dataset_id}/folders", response_model=FolderOut) async def add_folder( - dataset_id: str, - folder_in: FolderIn, - admin_mode: bool = Depends(get_admin_mode), - user=Depends(get_current_user), - allow: bool = Depends(Authorization("uploader")), + dataset_id: str, + folder_in: FolderIn, + user=Depends(get_current_user), + allow: bool = Depends(Authorization("uploader")), ): if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: parent_folder = folder_in.parent_folder @@ -385,14 +374,13 @@ async def add_folder( @router.get("/{dataset_id}/folders", response_model=List[FolderOut]) async def get_dataset_folders( - dataset_id: str, - admin_mode: bool = Depends(get_admin_mode), - parent_folder: Optional[str] = None, - user_id=Depends(get_user), - authenticated: bool = Depends(CheckStatus("authenticated")), - skip: int = 0, - limit: int = 10, - allow: bool = Depends(Authorization("viewer")), + dataset_id: str, + parent_folder: Optional[str] = None, + user_id=Depends(get_user), + authenticated: bool = Depends(CheckStatus("authenticated")), + skip: int = 0, + limit: int = 10, + allow: bool = Depends(Authorization("viewer")), ): if (await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: if authenticated: @@ -418,12 +406,11 @@ async def get_dataset_folders( @router.delete("/{dataset_id}/folders/{folder_id}") async def delete_folder( - dataset_id: str, - folder_id: str, - admin_mode: bool = Depends(get_admin_mode), - fs: Minio = Depends(dependencies.get_fs), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), - allow: bool = Depends(Authorization("editor")), + dataset_id: str, + folder_id: str, + fs: Minio = Depends(dependencies.get_fs), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + allow: bool = Depends(Authorization("editor")), 
): if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: if (folder := await FolderDB.get(PydanticObjectId(folder_id))) is not None: @@ -434,14 +421,14 @@ async def delete_folder( # recursively delete child folder and files async def _delete_nested_folders(parent_folder_id): while ( - await FolderDB.find_one( - FolderDB.dataset_id == ObjectId(dataset_id), - FolderDB.parent_folder == ObjectId(parent_folder_id), - ) + await FolderDB.find_one( + FolderDB.dataset_id == ObjectId(dataset_id), + FolderDB.parent_folder == ObjectId(parent_folder_id), + ) ) is not None: async for subfolder in FolderDB.find( - FolderDB.dataset_id == PydanticObjectId(dataset_id), - FolderDB.parent_folder == PydanticObjectId(parent_folder_id), + FolderDB.dataset_id == PydanticObjectId(dataset_id), + FolderDB.parent_folder == PydanticObjectId(parent_folder_id), ): async for file in FileDB.find(FileDB.folder_id == subfolder.id): await remove_file_entry(file.id, fs, es) @@ -462,15 +449,14 @@ async def _delete_nested_folders(parent_folder_id): @router.post("/{dataset_id}/files", response_model=FileOut) async def save_file( - dataset_id: str, - folder_id: Optional[str] = None, - user=Depends(get_current_user), - fs: Minio = Depends(dependencies.get_fs), - file: UploadFile = File(...), - admin_mode: bool = Depends(get_admin_mode), - es=Depends(dependencies.get_elasticsearchclient), - rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), - allow: bool = Depends(Authorization("uploader")), + dataset_id: str, + folder_id: Optional[str] = None, + user=Depends(get_current_user), + fs: Minio = Depends(dependencies.get_fs), + file: UploadFile = File(...), + es=Depends(dependencies.get_elasticsearchclient), + rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), + allow: bool = Depends(Authorization("uploader")), ): if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: if user is None: @@ -503,15 +489,14 @@ async def 
save_file( @router.post("/{dataset_id}/filesMultiple", response_model=List[FileOut]) async def save_files( - dataset_id: str, - files: List[UploadFile], - admin_mode: bool = Depends(get_admin_mode), - folder_id: Optional[str] = None, - user=Depends(get_current_user), - fs: Minio = Depends(dependencies.get_fs), - es=Depends(dependencies.get_elasticsearchclient), - rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), - allow: bool = Depends(Authorization("uploader")), + dataset_id: str, + files: List[UploadFile], + folder_id: Optional[str] = None, + user=Depends(get_current_user), + fs: Minio = Depends(dependencies.get_fs), + es=Depends(dependencies.get_elasticsearchclient), + rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), + allow: bool = Depends(Authorization("uploader")), ): if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: files_added = [] @@ -526,7 +511,7 @@ async def save_files( if folder_id is not None: if ( - folder := await FolderDB.get(PydanticObjectId(folder_id)) + folder := await FolderDB.get(PydanticObjectId(folder_id)) ) is not None: new_file.folder_id = folder.id else: @@ -552,13 +537,13 @@ async def save_files( @router.post("/{dataset_id}/local_files", response_model=FileOut) async def save_local_file( - localfile_in: LocalFileIn, - dataset_id: str, - folder_id: Optional[str] = None, - user=Depends(get_current_user), - es=Depends(dependencies.get_elasticsearchclient), - rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), - allow: bool = Depends(Authorization("uploader")), + localfile_in: LocalFileIn, + dataset_id: str, + folder_id: Optional[str] = None, + user=Depends(get_current_user), + es=Depends(dependencies.get_elasticsearchclient), + rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), + allow: bool = Depends(Authorization("uploader")), ): if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: if user is None: @@ 
-607,12 +592,12 @@ async def save_local_file( @router.post("/createFromZip", response_model=DatasetOut) async def create_dataset_from_zip( - user=Depends(get_current_user), - fs: Minio = Depends(dependencies.get_fs), - file: UploadFile = File(...), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), - rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), - token: str = Depends(get_token), + user=Depends(get_current_user), + fs: Minio = Depends(dependencies.get_fs), + file: UploadFile = File(...), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), + token: str = Depends(get_token), ): if file.filename.endswith(".zip") == False: raise HTTPException(status_code=404, detail=f"File is not a zip file") @@ -680,11 +665,10 @@ async def create_dataset_from_zip( @router.get("/{dataset_id}/download", response_model=DatasetOut) async def download_dataset( - dataset_id: str, - admin_mode: bool = Depends(get_admin_mode), - user=Depends(get_current_user), - fs: Minio = Depends(dependencies.get_fs), - allow: bool = Depends(Authorization("viewer")), + dataset_id: str, + user=Depends(get_current_user), + fs: Minio = Depends(dependencies.get_fs), + allow: bool = Depends(Authorization("viewer")), ): if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: current_temp_dir = tempfile.mkdtemp(prefix="rocratedownload") @@ -839,15 +823,14 @@ async def download_dataset( # can handle parameeters pass in as key/values in info @router.post("/{dataset_id}/extract") async def get_dataset_extract( - dataset_id: str, - extractorName: str, - request: Request, - admin_mode: bool = Depends(get_admin_mode), - # parameters don't have a fixed model shape - parameters: dict = None, - user=Depends(get_current_user), - rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), - allow: bool = Depends(Authorization("uploader")), + dataset_id: str, + 
extractorName: str, + request: Request, + # parameters don't have a fixed model shape + parameters: dict = None, + user=Depends(get_current_user), + rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), + allow: bool = Depends(Authorization("uploader")), ): if extractorName is None: raise HTTPException(status_code=400, detail=f"No extractorName specified") @@ -867,10 +850,9 @@ async def get_dataset_extract( @router.get("/{dataset_id}/thumbnail") async def download_dataset_thumbnail( - dataset_id: str, - admin_mode: bool = Depends(get_admin_mode), - fs: Minio = Depends(dependencies.get_fs), - allow: bool = Depends(Authorization("viewer")), + dataset_id: str, + fs: Minio = Depends(dependencies.get_fs), + allow: bool = Depends(Authorization("viewer")), ): # If dataset exists in MongoDB, download from Minio if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: @@ -895,14 +877,13 @@ async def download_dataset_thumbnail( @router.patch("/{dataset_id}/thumbnail/{thumbnail_id}", response_model=DatasetOut) async def add_dataset_thumbnail( - dataset_id: str, - thumbnail_id: str, - admin_mode: bool = Depends(get_admin_mode), - allow: bool = Depends(Authorization("editor")), + dataset_id: str, + thumbnail_id: str, + allow: bool = Depends(Authorization("editor")), ): if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: if ( - thumbnail := await ThumbnailDB.get(PydanticObjectId(thumbnail_id)) + thumbnail := await ThumbnailDB.get(PydanticObjectId(thumbnail_id)) ) is not None: # TODO: Should we garbage collect existing thumbnail if nothing else points to it? 
dataset.thumbnail_id = thumbnail_id From 93395cbe2bfcf123e7516e68a4c6bb39f29a668d Mon Sep 17 00:00:00 2001 From: Chen Wang Date: Wed, 13 Dec 2023 09:48:36 -0600 Subject: [PATCH 38/43] codegen/black and remove redandunt admin_mode flag --- backend/app/deps/authorization_deps.py | 111 ++++----- backend/app/routers/authorization.py | 140 ++++++----- backend/app/routers/datasets.py | 227 +++++++++--------- backend/app/routers/files.py | 12 - backend/app/routers/groups.py | 83 ++++--- backend/app/routers/metadata.py | 3 - backend/app/routers/metadata_datasets.py | 6 - backend/app/routers/metadata_files.py | 6 - .../v2/services/AuthorizationService.ts | 3 + 9 files changed, 285 insertions(+), 306 deletions(-) diff --git a/backend/app/deps/authorization_deps.py b/backend/app/deps/authorization_deps.py index 92a57728d..ac5997637 100644 --- a/backend/app/deps/authorization_deps.py +++ b/backend/app/deps/authorization_deps.py @@ -14,10 +14,10 @@ async def get_role( - dataset_id: str, - current_user=Depends(get_current_username), - admin_mode: bool = Depends(get_admin_mode), - admin=Depends(get_admin), + dataset_id: str, + current_user=Depends(get_current_username), + admin_mode: bool = Depends(get_admin_mode), + admin=Depends(get_admin), ) -> RoleType: """Returns the role a specific user has on a dataset. 
If the user is a creator (owner), they are not listed in the user_ids list.""" @@ -35,10 +35,10 @@ async def get_role( async def get_role_by_file( - file_id: str, - current_user=Depends(get_current_username), - admin_mode: bool = Depends(get_admin_mode), - admin=Depends(get_admin), + file_id: str, + current_user=Depends(get_current_username), + admin_mode: bool = Depends(get_admin_mode), + admin=Depends(get_admin), ) -> RoleType: if admin and admin_mode: return RoleType.OWNER @@ -53,7 +53,7 @@ async def get_role_by_file( ) if authorization is None: if ( - dataset := await DatasetDB.get(PydanticObjectId(file.dataset_id)) + dataset := await DatasetDB.get(PydanticObjectId(file.dataset_id)) ) is not None: if dataset.status == DatasetStatus.AUTHENTICATED.name: auth_dict = { @@ -74,10 +74,10 @@ async def get_role_by_file( async def get_role_by_metadata( - metadata_id: str, - current_user=Depends(get_current_username), - admin_mode: bool = Depends(get_admin_mode), - admin=Depends(get_admin), + metadata_id: str, + current_user=Depends(get_current_username), + admin_mode: bool = Depends(get_admin_mode), + admin=Depends(get_admin), ) -> RoleType: if admin and admin_mode: return RoleType.OWNER @@ -97,7 +97,7 @@ async def get_role_by_metadata( return authorization.role elif resource_type == "datasets": if ( - dataset := await DatasetDB.get(PydanticObjectId(resource_id)) + dataset := await DatasetDB.get(PydanticObjectId(resource_id)) ) is not None: authorization = await AuthorizationDB.find_one( AuthorizationDB.dataset_id == dataset.id, @@ -110,10 +110,10 @@ async def get_role_by_metadata( async def get_role_by_group( - group_id: str, - current_user=Depends(get_current_username), - admin_mode: bool = Depends(get_admin_mode), - admin=Depends(get_admin), + group_id: str, + current_user=Depends(get_current_username), + admin_mode: bool = Depends(get_admin_mode), + admin=Depends(get_admin), ) -> RoleType: if admin and admin_mode: return RoleType.OWNER @@ -136,7 +136,7 @@ async def 
get_role_by_group( async def is_public_dataset( - dataset_id: str, + dataset_id: str, ) -> bool: """Checks if a dataset is public.""" if (dataset_out := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: @@ -147,7 +147,7 @@ async def is_public_dataset( async def is_authenticated_dataset( - dataset_id: str, + dataset_id: str, ) -> bool: """Checks if a dataset is authenticated.""" if (dataset_out := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: @@ -165,11 +165,11 @@ def __init__(self, role: str): self.role = role async def __call__( - self, - dataset_id: str, - current_user: str = Depends(get_current_username), - admin_mode: bool = Depends(get_admin_mode), - admin: bool = Depends(get_admin), + self, + dataset_id: str, + current_user: str = Depends(get_current_username), + admin_mode: bool = Depends(get_admin_mode), + admin: bool = Depends(get_admin), ): # TODO: Make sure we enforce only one role per user per dataset, or find_one could yield wrong answer here. 
@@ -195,11 +195,11 @@ async def __call__( ) else: if ( - current_dataset := await DatasetDB.get(PydanticObjectId(dataset_id)) + current_dataset := await DatasetDB.get(PydanticObjectId(dataset_id)) ) is not None: if ( - current_dataset.status == DatasetStatus.AUTHENTICATED.name - and self.role == "viewer" + current_dataset.status == DatasetStatus.AUTHENTICATED.name + and self.role == "viewer" ): return True else: @@ -222,11 +222,11 @@ def __init__(self, role: str): self.role = role async def __call__( - self, - file_id: str, - current_user: str = Depends(get_current_username), - admin_mode: bool = Depends(get_admin_mode), - admin: bool = Depends(get_admin), + self, + file_id: str, + current_user: str = Depends(get_current_username), + admin_mode: bool = Depends(get_admin_mode), + admin: bool = Depends(get_admin), ): # If the current user is admin and has turned on admin_mode, user has access irrespective of any role assigned if admin and admin_mode: @@ -260,11 +260,11 @@ def __init__(self, role: str): self.role = role async def __call__( - self, - metadata_id: str, - current_user: str = Depends(get_current_username), - admin_mode: bool = Depends(get_admin_mode), - admin: bool = Depends(get_admin), + self, + metadata_id: str, + current_user: str = Depends(get_current_username), + admin_mode: bool = Depends(get_admin_mode), + admin: bool = Depends(get_admin), ): # If the current user is admin and has turned on admin_mode, user has access irrespective of any role assigned if admin and admin_mode: @@ -276,7 +276,7 @@ async def __call__( resource_id = md_out.resource.resource_id if resource_type == "files": if ( - file := await FileDB.get(PydanticObjectId(resource_id)) + file := await FileDB.get(PydanticObjectId(resource_id)) ) is not None: authorization = await AuthorizationDB.find_one( AuthorizationDB.dataset_id == file.dataset_id, @@ -298,7 +298,7 @@ async def __call__( ) elif resource_type == "datasets": if ( - dataset := await 
DatasetDB.get(PydanticObjectId(resource_id)) + dataset := await DatasetDB.get(PydanticObjectId(resource_id)) ) is not None: authorization = await AuthorizationDB.find_one( AuthorizationDB.dataset_id == dataset.id, @@ -327,11 +327,11 @@ def __init__(self, role: str): self.role = role async def __call__( - self, - group_id: str, - current_user: str = Depends(get_current_username), - admin_mode: bool = Depends(get_admin_mode), - admin: bool = Depends(get_admin), + self, + group_id: str, + current_user: str = Depends(get_current_username), + admin_mode: bool = Depends(get_admin_mode), + admin: bool = Depends(get_admin), ): # If the current user is admin and has turned on admin_mode, user has access irrespective of any role assigned if admin and admin_mode: @@ -363,8 +363,8 @@ def __init__(self, status: str): self.status = status async def __call__( - self, - dataset_id: str, + self, + dataset_id: str, ): if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: if dataset.status == self.status: @@ -383,13 +383,13 @@ def __init__(self, status: str): self.status = status async def __call__( - self, - file_id: str, + self, + file_id: str, ): if (file_out := await FileDB.get(PydanticObjectId(file_id))) is not None: dataset_id = file_out.dataset_id if ( - dataset := await DatasetDB.get(PydanticObjectId(dataset_id)) + dataset := await DatasetDB.get(PydanticObjectId(dataset_id)) ) is not None: if dataset.status == self.status: return True @@ -401,11 +401,12 @@ async def __call__( return False -def access(user_role: RoleType, - role_required: RoleType, - admin_mode: bool = Depends(get_admin_mode), - admin: bool = Depends(get_admin), - ) -> bool: +def access( + user_role: RoleType, + role_required: RoleType, + admin_mode: bool = Depends(get_admin_mode), + admin: bool = Depends(get_admin), +) -> bool: """Enforce implied role hierarchy ADMIN = OWNER > EDITOR > UPLOADER > VIEWER""" if user_role == RoleType.OWNER or (admin and admin_mode): return True diff 
--git a/backend/app/routers/authorization.py b/backend/app/routers/authorization.py index 3e3bf0147..e21d7dc0d 100644 --- a/backend/app/routers/authorization.py +++ b/backend/app/routers/authorization.py @@ -38,10 +38,10 @@ @router.post("/datasets/{dataset_id}", response_model=AuthorizationOut) async def save_authorization( - dataset_id: str, - authorization_in: AuthorizationBase, - user=Depends(get_current_username), - allow: bool = Depends(Authorization("editor")), + dataset_id: str, + authorization_in: AuthorizationBase, + user=Depends(get_current_username), + allow: bool = Depends(Authorization("editor")), ): """Save authorization info in Mongo. This is a triple of dataset_id/user_id/role/group_id.""" @@ -68,19 +68,21 @@ async def save_authorization( @router.get("/datasets/{dataset_id}/role", response_model=AuthorizationOut) async def get_dataset_role( - dataset_id: str, - current_user=Depends(get_current_username), - admin_mode: bool = Depends(get_admin_mode), - admin=Depends(get_admin), + dataset_id: str, + current_user=Depends(get_current_username), + admin_mode: bool = Depends(get_admin_mode), + admin=Depends(get_admin), ): """Retrieve role of user for a specific dataset.""" # Get group id and the associated users from authorization criteria = [] if not admin or not admin_mode: - criteria.append(Or( - AuthorizationDB.creator == current_user, - AuthorizationDB.user_ids == current_user, - )) + criteria.append( + Or( + AuthorizationDB.creator == current_user, + AuthorizationDB.user_ids == current_user, + ) + ) auth_db = await AuthorizationDB.find_one( AuthorizationDB.dataset_id == PyObjectId(dataset_id), @@ -88,7 +90,7 @@ async def get_dataset_role( ) if auth_db is None: if ( - current_dataset := await DatasetDB.get(PydanticObjectId(dataset_id)) + current_dataset := await DatasetDB.get(PydanticObjectId(dataset_id)) ) is not None: if current_dataset.status == DatasetStatus.AUTHENTICATED.name: public_authorization_in = { @@ -110,8 +112,8 @@ async def 
get_dataset_role( @router.get("/datasets/{dataset_id}/role/viewer}") async def get_dataset_role_viewer( - dataset_id: str, - allow: bool = Depends(Authorization("viewer")), + dataset_id: str, + allow: bool = Depends(Authorization("viewer")), ): """Used for testing only. Returns true if user has viewer permission on dataset, otherwise throws a 403 Forbidden HTTP exception. See `routers/authorization.py` for more info.""" @@ -120,8 +122,8 @@ async def get_dataset_role_viewer( @router.get("/datasets/{dataset_id}/role/owner}") async def get_dataset_role_owner( - dataset_id: str, - allow: bool = Depends(Authorization("owner")), + dataset_id: str, + allow: bool = Depends(Authorization("owner")), ): """Used for testing only. Returns true if user has owner permission on dataset, otherwise throws a 403 Forbidden HTTP exception. See `routers/authorization.py` for more info.""" @@ -130,9 +132,9 @@ async def get_dataset_role_owner( @router.get("/files/{file_id}/role}", response_model=RoleType) async def get_file_role( - file_id: str, - current_user=Depends(get_current_username), - role: RoleType = Depends(get_role_by_file), + file_id: str, + current_user=Depends(get_current_username), + role: RoleType = Depends(get_role_by_file), ): """Retrieve role of user for an individual file. Role cannot change between file versions.""" return role @@ -140,9 +142,9 @@ async def get_file_role( @router.get("/metadata/{metadata_id}/role}", response_model=AuthorizationMetadata) async def get_metadata_role( - metadata_id: str, - current_user=Depends(get_current_username), - role: RoleType = Depends(get_role_by_metadata), + metadata_id: str, + current_user=Depends(get_current_username), + role: RoleType = Depends(get_role_by_metadata), ): """Retrieve role of user for group. 
Group roles can be OWNER, EDITOR, or VIEWER (for regular Members).""" return role @@ -150,9 +152,9 @@ async def get_metadata_role( @router.get("/groups/{group_id}/role", response_model=RoleType) async def get_group_role( - group_id: str, - current_user=Depends(get_current_username), - role: RoleType = Depends(get_role_by_group), + group_id: str, + current_user=Depends(get_current_username), + role: RoleType = Depends(get_role_by_group), ): """Retrieve role of user on a particular group (i.e. whether they can change group memberships).""" return role @@ -163,25 +165,23 @@ async def get_group_role( response_model=AuthorizationOut, ) async def set_dataset_group_role( - dataset_id: PydanticObjectId, - group_id: PydanticObjectId, - role: RoleType, - es=Depends(get_elasticsearchclient), - user_id=Depends(get_user), - allow: bool = Depends(Authorization("editor")), + dataset_id: PydanticObjectId, + group_id: PydanticObjectId, + role: RoleType, + es=Depends(get_elasticsearchclient), + user_id=Depends(get_user), + allow: bool = Depends(Authorization("editor")), ): """Assign an entire group a specific role for a dataset.""" if (dataset := await DatasetDB.get(dataset_id)) is not None: if (group := await GroupDB.get(group_id)) is not None: # First, remove any existing role the group has on the dataset - await remove_dataset_group_role( - dataset_id, group_id, es, user_id, allow - ) + await remove_dataset_group_role(dataset_id, group_id, es, user_id, allow) if ( - auth_db := await AuthorizationDB.find_one( - AuthorizationDB.dataset_id == PyObjectId(dataset_id), - AuthorizationDB.role == role, - ) + auth_db := await AuthorizationDB.find_one( + AuthorizationDB.dataset_id == PyObjectId(dataset_id), + AuthorizationDB.role == role, + ) ) is not None: if group_id not in auth_db.group_ids: auth_db.group_ids.append(group_id) @@ -216,21 +216,19 @@ async def set_dataset_group_role( response_model=AuthorizationOut, ) async def set_dataset_user_role( - dataset_id: str, - username: str, - 
role: RoleType, - es=Depends(get_elasticsearchclient), - user_id=Depends(get_user), - allow: bool = Depends(Authorization("editor")), + dataset_id: str, + username: str, + role: RoleType, + es=Depends(get_elasticsearchclient), + user_id=Depends(get_user), + allow: bool = Depends(Authorization("editor")), ): """Assign a single user a specific role for a dataset.""" if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: if (await UserDB.find_one(UserDB.email == username)) is not None: # First, remove any existing role the user has on the dataset - await remove_dataset_user_role( - dataset_id, username, es, user_id, allow - ) + await remove_dataset_user_role(dataset_id, username, es, user_id, allow) auth_db = await AuthorizationDB.find_one( AuthorizationDB.dataset_id == PyObjectId(dataset_id), AuthorizationDB.role == role, @@ -275,21 +273,21 @@ async def set_dataset_user_role( response_model=AuthorizationOut, ) async def remove_dataset_group_role( - dataset_id: PydanticObjectId, - group_id: PydanticObjectId, - es=Depends(get_elasticsearchclient), - user_id=Depends(get_user), - allow: bool = Depends(Authorization("editor")), + dataset_id: PydanticObjectId, + group_id: PydanticObjectId, + es=Depends(get_elasticsearchclient), + user_id=Depends(get_user), + allow: bool = Depends(Authorization("editor")), ): """Remove any role the group has with a specific dataset.""" if (dataset := await DatasetDB.get(dataset_id)) is not None: if (group := await GroupDB.get(group_id)) is not None: if ( - auth_db := await AuthorizationDB.find_one( - AuthorizationDB.dataset_id == dataset_id, - AuthorizationDB.group_ids == group_id, - ) + auth_db := await AuthorizationDB.find_one( + AuthorizationDB.dataset_id == dataset_id, + AuthorizationDB.group_ids == group_id, + ) ) is not None: auth_db.group_ids.remove(PyObjectId(group_id)) for u in group.users: @@ -310,21 +308,21 @@ async def remove_dataset_group_role( response_model=AuthorizationOut, ) async def 
remove_dataset_user_role( - dataset_id: str, - username: str, - es=Depends(get_elasticsearchclient), - user_id=Depends(get_user), - allow: bool = Depends(Authorization("editor")), + dataset_id: str, + username: str, + es=Depends(get_elasticsearchclient), + user_id=Depends(get_user), + allow: bool = Depends(Authorization("editor")), ): """Remove any role the user has with a specific dataset.""" if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: if (await UserDB.find_one(UserDB.email == username)) is not None: if ( - auth_db := await AuthorizationDB.find_one( - AuthorizationDB.dataset_id == PyObjectId(dataset_id), - AuthorizationDB.user_ids == username, - ) + auth_db := await AuthorizationDB.find_one( + AuthorizationDB.dataset_id == PyObjectId(dataset_id), + AuthorizationDB.user_ids == username, + ) ) is not None: auth_db.user_ids.remove(username) await auth_db.save() @@ -339,15 +337,15 @@ async def remove_dataset_user_role( @router.get("/datasets/{dataset_id}/roles}", response_model=DatasetRoles) async def get_dataset_roles( - dataset_id: str, - allow: bool = Depends(Authorization("editor")), + dataset_id: str, + allow: bool = Depends(Authorization("editor")), ): """Get a list of all users and groups that have assigned roles on this dataset.""" if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: roles = DatasetRoles(dataset_id=str(dataset.id)) async for auth in AuthorizationDB.find( - AuthorizationDB.dataset_id == ObjectId(dataset_id) + AuthorizationDB.dataset_id == ObjectId(dataset_id) ): # First, fetch all groups that have a role on the dataset group_user_counts = {} @@ -365,8 +363,8 @@ async def get_dataset_roles( # Next, get all users but omit those that are included in a group above async for user in UserDB.find(In(UserDB.email, auth.user_ids)): if ( - user.email in group_user_counts - and auth.user_ids.count(user.email) == group_user_counts[user.email] + user.email in group_user_counts + and 
auth.user_ids.count(user.email) == group_user_counts[user.email] ): continue # TODO: Why is this necessary here but not on root-level ObjectIDs? diff --git a/backend/app/routers/datasets.py b/backend/app/routers/datasets.py index a482ceb52..c11c0f1f2 100644 --- a/backend/app/routers/datasets.py +++ b/backend/app/routers/datasets.py @@ -134,12 +134,12 @@ def nested_update(target_dict, update_dict): async def _create_folder_structure( - dataset_id: str, - contents: dict, - folder_path: str, - folder_lookup: dict, - user: UserOut, - parent_folder_id: Optional[str] = None, + dataset_id: str, + contents: dict, + folder_path: str, + folder_lookup: dict, + user: UserOut, + parent_folder_id: Optional[str] = None, ): """Recursively create folders encountered in folder_path until the target folder is created. Arguments: @@ -174,8 +174,8 @@ async def _create_folder_structure( async def _get_folder_hierarchy( - folder_id: str, - hierarchy: str, + folder_id: str, + hierarchy: str, ): """Generate a string of nested path to folder for use in zip file creation.""" folder = await FolderDB.get(PydanticObjectId(folder_id)) @@ -187,9 +187,9 @@ async def _get_folder_hierarchy( @router.post("", response_model=DatasetOut) async def save_dataset( - dataset_in: DatasetIn, - user=Depends(get_current_user), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + dataset_in: DatasetIn, + user=Depends(get_current_user), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), ): dataset = DatasetDB(**dataset_in.dict(), creator=user) await dataset.insert() @@ -208,13 +208,12 @@ async def save_dataset( @router.get("", response_model=List[DatasetOut]) async def get_datasets( - user_id=Depends(get_user), - skip: int = 0, - limit: int = 10, - mine: bool = False, - admin=Depends(get_admin), - admin_mode: bool = Depends(get_admin_mode), - + user_id=Depends(get_user), + skip: int = 0, + limit: int = 10, + mine: bool = False, + admin=Depends(get_admin), + admin_mode: bool = 
Depends(get_admin_mode), ): criteria = [] if not admin_mode or not admin: @@ -240,9 +239,9 @@ async def get_datasets( @router.get("/{dataset_id}", response_model=DatasetOut) async def get_dataset( - dataset_id: str, - authenticated: bool = Depends(CheckStatus("AUTHENTICATED")), - allow: bool = Depends(Authorization("viewer")), + dataset_id: str, + authenticated: bool = Depends(CheckStatus("AUTHENTICATED")), + allow: bool = Depends(Authorization("viewer")), ): if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: return dataset.dict() @@ -251,13 +250,13 @@ async def get_dataset( @router.get("/{dataset_id}/files", response_model=List[FileOut]) async def get_dataset_files( - dataset_id: str, - folder_id: Optional[str] = None, - authenticated: bool = Depends(CheckStatus("AUTHENTICATED")), - user_id=Depends(get_user), - skip: int = 0, - limit: int = 10, - allow: bool = Depends(Authorization("viewer")), + dataset_id: str, + folder_id: Optional[str] = None, + authenticated: bool = Depends(CheckStatus("AUTHENTICATED")), + user_id=Depends(get_user), + skip: int = 0, + limit: int = 10, + allow: bool = Depends(Authorization("viewer")), ): if authenticated: query = [ @@ -279,11 +278,11 @@ async def get_dataset_files( @router.put("/{dataset_id}", response_model=DatasetOut) async def edit_dataset( - dataset_id: str, - dataset_info: DatasetBase, - user=Depends(get_current_user), - es=Depends(dependencies.get_elasticsearchclient), - allow: bool = Depends(Authorization("editor")), + dataset_id: str, + dataset_info: DatasetBase, + user=Depends(get_current_user), + es=Depends(dependencies.get_elasticsearchclient), + allow: bool = Depends(Authorization("editor")), ): if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: # TODO: Refactor this with permissions checks etc. 
@@ -299,11 +298,11 @@ async def edit_dataset( @router.patch("/{dataset_id}", response_model=DatasetOut) async def patch_dataset( - dataset_id: str, - dataset_info: DatasetPatch, - user=Depends(get_current_user), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), - allow: bool = Depends(Authorization("editor")), + dataset_id: str, + dataset_info: DatasetPatch, + user=Depends(get_current_user), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + allow: bool = Depends(Authorization("editor")), ): if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: # TODO: Update method not working properly @@ -323,10 +322,10 @@ async def patch_dataset( @router.delete("/{dataset_id}") async def delete_dataset( - dataset_id: str, - fs: Minio = Depends(dependencies.get_fs), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), - allow: bool = Depends(Authorization("editor")), + dataset_id: str, + fs: Minio = Depends(dependencies.get_fs), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + allow: bool = Depends(Authorization("editor")), ): if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: # delete from elasticsearch @@ -337,7 +336,7 @@ async def delete_dataset( MetadataDB.resource.resource_id == PydanticObjectId(dataset_id) ).delete() async for file in FileDB.find( - FileDB.dataset_id == PydanticObjectId(dataset_id) + FileDB.dataset_id == PydanticObjectId(dataset_id) ): await remove_file_entry(file.id, fs, es) await FolderDB.find( @@ -352,10 +351,10 @@ async def delete_dataset( @router.post("/{dataset_id}/folders", response_model=FolderOut) async def add_folder( - dataset_id: str, - folder_in: FolderIn, - user=Depends(get_current_user), - allow: bool = Depends(Authorization("uploader")), + dataset_id: str, + folder_in: FolderIn, + user=Depends(get_current_user), + allow: bool = Depends(Authorization("uploader")), ): if (dataset := await 
DatasetDB.get(PydanticObjectId(dataset_id))) is not None: parent_folder = folder_in.parent_folder @@ -374,13 +373,13 @@ async def add_folder( @router.get("/{dataset_id}/folders", response_model=List[FolderOut]) async def get_dataset_folders( - dataset_id: str, - parent_folder: Optional[str] = None, - user_id=Depends(get_user), - authenticated: bool = Depends(CheckStatus("authenticated")), - skip: int = 0, - limit: int = 10, - allow: bool = Depends(Authorization("viewer")), + dataset_id: str, + parent_folder: Optional[str] = None, + user_id=Depends(get_user), + authenticated: bool = Depends(CheckStatus("authenticated")), + skip: int = 0, + limit: int = 10, + allow: bool = Depends(Authorization("viewer")), ): if (await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: if authenticated: @@ -406,11 +405,11 @@ async def get_dataset_folders( @router.delete("/{dataset_id}/folders/{folder_id}") async def delete_folder( - dataset_id: str, - folder_id: str, - fs: Minio = Depends(dependencies.get_fs), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), - allow: bool = Depends(Authorization("editor")), + dataset_id: str, + folder_id: str, + fs: Minio = Depends(dependencies.get_fs), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + allow: bool = Depends(Authorization("editor")), ): if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: if (folder := await FolderDB.get(PydanticObjectId(folder_id))) is not None: @@ -421,14 +420,14 @@ async def delete_folder( # recursively delete child folder and files async def _delete_nested_folders(parent_folder_id): while ( - await FolderDB.find_one( - FolderDB.dataset_id == ObjectId(dataset_id), - FolderDB.parent_folder == ObjectId(parent_folder_id), - ) + await FolderDB.find_one( + FolderDB.dataset_id == ObjectId(dataset_id), + FolderDB.parent_folder == ObjectId(parent_folder_id), + ) ) is not None: async for subfolder in FolderDB.find( - FolderDB.dataset_id == 
PydanticObjectId(dataset_id), - FolderDB.parent_folder == PydanticObjectId(parent_folder_id), + FolderDB.dataset_id == PydanticObjectId(dataset_id), + FolderDB.parent_folder == PydanticObjectId(parent_folder_id), ): async for file in FileDB.find(FileDB.folder_id == subfolder.id): await remove_file_entry(file.id, fs, es) @@ -449,14 +448,14 @@ async def _delete_nested_folders(parent_folder_id): @router.post("/{dataset_id}/files", response_model=FileOut) async def save_file( - dataset_id: str, - folder_id: Optional[str] = None, - user=Depends(get_current_user), - fs: Minio = Depends(dependencies.get_fs), - file: UploadFile = File(...), - es=Depends(dependencies.get_elasticsearchclient), - rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), - allow: bool = Depends(Authorization("uploader")), + dataset_id: str, + folder_id: Optional[str] = None, + user=Depends(get_current_user), + fs: Minio = Depends(dependencies.get_fs), + file: UploadFile = File(...), + es=Depends(dependencies.get_elasticsearchclient), + rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), + allow: bool = Depends(Authorization("uploader")), ): if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: if user is None: @@ -489,14 +488,14 @@ async def save_file( @router.post("/{dataset_id}/filesMultiple", response_model=List[FileOut]) async def save_files( - dataset_id: str, - files: List[UploadFile], - folder_id: Optional[str] = None, - user=Depends(get_current_user), - fs: Minio = Depends(dependencies.get_fs), - es=Depends(dependencies.get_elasticsearchclient), - rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), - allow: bool = Depends(Authorization("uploader")), + dataset_id: str, + files: List[UploadFile], + folder_id: Optional[str] = None, + user=Depends(get_current_user), + fs: Minio = Depends(dependencies.get_fs), + es=Depends(dependencies.get_elasticsearchclient), + rabbitmq_client: BlockingChannel = 
Depends(dependencies.get_rabbitmq), + allow: bool = Depends(Authorization("uploader")), ): if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: files_added = [] @@ -511,7 +510,7 @@ async def save_files( if folder_id is not None: if ( - folder := await FolderDB.get(PydanticObjectId(folder_id)) + folder := await FolderDB.get(PydanticObjectId(folder_id)) ) is not None: new_file.folder_id = folder.id else: @@ -537,13 +536,13 @@ async def save_files( @router.post("/{dataset_id}/local_files", response_model=FileOut) async def save_local_file( - localfile_in: LocalFileIn, - dataset_id: str, - folder_id: Optional[str] = None, - user=Depends(get_current_user), - es=Depends(dependencies.get_elasticsearchclient), - rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), - allow: bool = Depends(Authorization("uploader")), + localfile_in: LocalFileIn, + dataset_id: str, + folder_id: Optional[str] = None, + user=Depends(get_current_user), + es=Depends(dependencies.get_elasticsearchclient), + rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), + allow: bool = Depends(Authorization("uploader")), ): if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: if user is None: @@ -592,12 +591,12 @@ async def save_local_file( @router.post("/createFromZip", response_model=DatasetOut) async def create_dataset_from_zip( - user=Depends(get_current_user), - fs: Minio = Depends(dependencies.get_fs), - file: UploadFile = File(...), - es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), - rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), - token: str = Depends(get_token), + user=Depends(get_current_user), + fs: Minio = Depends(dependencies.get_fs), + file: UploadFile = File(...), + es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), + rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), + token: str = Depends(get_token), ): if 
file.filename.endswith(".zip") == False: raise HTTPException(status_code=404, detail=f"File is not a zip file") @@ -665,10 +664,10 @@ async def create_dataset_from_zip( @router.get("/{dataset_id}/download", response_model=DatasetOut) async def download_dataset( - dataset_id: str, - user=Depends(get_current_user), - fs: Minio = Depends(dependencies.get_fs), - allow: bool = Depends(Authorization("viewer")), + dataset_id: str, + user=Depends(get_current_user), + fs: Minio = Depends(dependencies.get_fs), + allow: bool = Depends(Authorization("viewer")), ): if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: current_temp_dir = tempfile.mkdtemp(prefix="rocratedownload") @@ -823,14 +822,14 @@ async def download_dataset( # can handle parameeters pass in as key/values in info @router.post("/{dataset_id}/extract") async def get_dataset_extract( - dataset_id: str, - extractorName: str, - request: Request, - # parameters don't have a fixed model shape - parameters: dict = None, - user=Depends(get_current_user), - rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), - allow: bool = Depends(Authorization("uploader")), + dataset_id: str, + extractorName: str, + request: Request, + # parameters don't have a fixed model shape + parameters: dict = None, + user=Depends(get_current_user), + rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), + allow: bool = Depends(Authorization("uploader")), ): if extractorName is None: raise HTTPException(status_code=400, detail=f"No extractorName specified") @@ -850,9 +849,9 @@ async def get_dataset_extract( @router.get("/{dataset_id}/thumbnail") async def download_dataset_thumbnail( - dataset_id: str, - fs: Minio = Depends(dependencies.get_fs), - allow: bool = Depends(Authorization("viewer")), + dataset_id: str, + fs: Minio = Depends(dependencies.get_fs), + allow: bool = Depends(Authorization("viewer")), ): # If dataset exists in MongoDB, download from Minio if (dataset := await 
DatasetDB.get(PydanticObjectId(dataset_id))) is not None: @@ -877,13 +876,13 @@ async def download_dataset_thumbnail( @router.patch("/{dataset_id}/thumbnail/{thumbnail_id}", response_model=DatasetOut) async def add_dataset_thumbnail( - dataset_id: str, - thumbnail_id: str, - allow: bool = Depends(Authorization("editor")), + dataset_id: str, + thumbnail_id: str, + allow: bool = Depends(Authorization("editor")), ): if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: if ( - thumbnail := await ThumbnailDB.get(PydanticObjectId(thumbnail_id)) + thumbnail := await ThumbnailDB.get(PydanticObjectId(thumbnail_id)) ) is not None: # TODO: Should we garbage collect existing thumbnail if nothing else points to it? dataset.thumbnail_id = thumbnail_id diff --git a/backend/app/routers/files.py b/backend/app/routers/files.py index 81e231583..f71b06377 100644 --- a/backend/app/routers/files.py +++ b/backend/app/routers/files.py @@ -30,7 +30,6 @@ from app.models.thumbnails import ThumbnailDB from app.models.users import UserOut from app.rabbitmq.listeners import submit_file_job, EventListenerJobDB -from app.routers.authentication import get_admin_mode from app.routers.feeds import check_feed_listeners from app.routers.utils import get_content_type from app.search.connect import ( @@ -210,7 +209,6 @@ async def remove_local_file_entry(file_id: Union[str, ObjectId], es: Elasticsear @router.put("/{file_id}", response_model=FileOut) async def update_file( file_id: str, - admin_mode: bool = Depends(get_admin_mode), token=Depends(get_token), user=Depends(get_current_user), fs: Minio = Depends(dependencies.get_fs), @@ -301,7 +299,6 @@ async def update_file( @router.get("/{file_id}") async def download_file( file_id: str, - admin_mode: bool = Depends(get_admin_mode), version: Optional[int] = None, increment: Optional[bool] = True, fs: Minio = Depends(dependencies.get_fs), @@ -363,7 +360,6 @@ async def download_file( @router.get("/{file_id}/url/") async def 
download_file_url( file_id: str, - admin_mode: bool = Depends(get_admin_mode), version: Optional[int] = None, expires_in_seconds: Optional[int] = 3600, external_fs: Minio = Depends(dependencies.get_external_fs), @@ -416,7 +412,6 @@ async def download_file_url( @router.delete("/{file_id}") async def delete_file( file_id: str, - admin_mode: bool = Depends(get_admin_mode), fs: Minio = Depends(dependencies.get_fs), es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), allow: bool = Depends(FileAuthorization("editor")), @@ -434,7 +429,6 @@ async def delete_file( @router.get("/{file_id}/summary", response_model=FileOut) async def get_file_summary( file_id: str, - admin_mode: bool = Depends(get_admin_mode), allow: bool = Depends(FileAuthorization("viewer")), ): if (file := await FileDB.get(PydanticObjectId(file_id))) is not None: @@ -449,7 +443,6 @@ async def get_file_summary( @router.get("/{file_id}/version_details", response_model=FileOut) async def get_file_version_details( file_id: str, - admin_mode: bool = Depends(get_admin_mode), version_num: Optional[int] = 0, allow: bool = Depends(FileAuthorization("viewer")), ): @@ -472,7 +465,6 @@ async def get_file_version_details( @router.get("/{file_id}/versions", response_model=List[FileVersion]) async def get_file_versions( file_id: str, - admin_mode: bool = Depends(get_admin_mode), skip: int = 0, limit: int = 20, allow: bool = Depends(FileAuthorization("viewer")), @@ -494,7 +486,6 @@ async def get_file_versions( async def post_file_extract( file_id: str, extractorName: str, - admin_mode: bool = Depends(get_admin_mode), # parameters don't have a fixed model shape parameters: dict = None, user=Depends(get_current_user), @@ -526,7 +517,6 @@ async def post_file_extract( @router.post("/{file_id}/resubmit_extract") async def resubmit_file_extractions( file_id: str, - admin_mode: bool = Depends(get_admin_mode), user=Depends(get_current_user), credentials: HTTPAuthorizationCredentials = Security(security), 
rabbitmq_client: BlockingChannel = Depends(dependencies.get_rabbitmq), @@ -553,7 +543,6 @@ async def resubmit_file_extractions( @router.get("/{file_id}/thumbnail") async def download_file_thumbnail( file_id: str, - admin_mode: bool = Depends(get_admin_mode), fs: Minio = Depends(dependencies.get_fs), allow: bool = Depends(FileAuthorization("viewer")), ): @@ -579,7 +568,6 @@ async def download_file_thumbnail( async def add_file_thumbnail( file_id: str, thumbnail_id: str, - admin_mode: bool = Depends(get_admin_mode), allow: bool = Depends(FileAuthorization("editor")), es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), ): diff --git a/backend/app/routers/groups.py b/backend/app/routers/groups.py index 6102bd7db..2c2af59d9 100644 --- a/backend/app/routers/groups.py +++ b/backend/app/routers/groups.py @@ -18,8 +18,8 @@ @router.post("", response_model=GroupOut) async def save_group( - group_in: GroupIn, - user=Depends(get_current_user), + group_in: GroupIn, + user=Depends(get_current_user), ): group_db = GroupDB(**group_in.dict(), creator=user.email) user_member = Member(user=user, editor=True) @@ -31,11 +31,11 @@ async def save_group( @router.get("", response_model=List[GroupOut]) async def get_groups( - user_id=Depends(get_user), - skip: int = 0, - limit: int = 10, - admin_mode: bool = Depends(get_admin_mode), - admin=Depends(get_admin), + user_id=Depends(get_user), + skip: int = 0, + limit: int = 10, + admin_mode: bool = Depends(get_admin_mode), + admin=Depends(get_admin), ): """Get a list of all Groups in the db the user is a member/owner of. 
@@ -47,10 +47,12 @@ async def get_groups( """ criteria_list = [] if not admin or not admin_mode: - criteria_list.append(Or( - GroupDB.creator == user_id, - GroupDB.users.user.email == user_id, - )) + criteria_list.append( + Or( + GroupDB.creator == user_id, + GroupDB.users.user.email == user_id, + ) + ) groups = await GroupDB.find( *criteria_list, @@ -63,12 +65,12 @@ async def get_groups( @router.get("/search/{search_term}", response_model=List[GroupOut]) async def search_group( - search_term: str, - user_id=Depends(get_user), - skip: int = 0, - limit: int = 10, - admin_mode: bool = Depends(get_admin_mode), - admin=Depends(get_admin), + search_term: str, + user_id=Depends(get_user), + skip: int = 0, + limit: int = 10, + admin_mode: bool = Depends(get_admin_mode), + admin=Depends(get_admin), ): """Search all groups in the db based on text. @@ -79,8 +81,11 @@ async def search_group( """ criteria_list = [ - Or(RegEx(field=GroupDB.name, pattern=search_term), - RegEx(field=GroupDB.description, pattern=search_term)), ] + Or( + RegEx(field=GroupDB.name, pattern=search_term), + RegEx(field=GroupDB.description, pattern=search_term), + ), + ] if not admin or not admin_mode: criteria_list.append( Or(GroupDB.creator == user_id, GroupDB.users.user.email == user_id) @@ -98,8 +103,8 @@ async def search_group( @router.get("/{group_id}", response_model=GroupOut) async def get_group( - group_id: str, - allow: bool = Depends(GroupAuthorization("viewer")), + group_id: str, + allow: bool = Depends(GroupAuthorization("viewer")), ): if (group := await GroupDB.get(PydanticObjectId(group_id))) is not None: return group.dict() @@ -108,10 +113,10 @@ async def get_group( @router.put("/{group_id}", response_model=GroupOut) async def edit_group( - group_id: str, - group_info: GroupBase, - user_id=Depends(get_user), - allow: bool = Depends(GroupAuthorization("editor")), + group_id: str, + group_info: GroupBase, + user_id=Depends(get_user), + allow: bool = Depends(GroupAuthorization("editor")), 
): if (group := await GroupDB.get(PydanticObjectId(group_id))) is not None: group_dict = dict(group_info) if group_info is not None else {} @@ -134,7 +139,7 @@ async def edit_group( if original_user not in groups_users: # remove them from auth async for auth in AuthorizationDB.find( - {"group_ids": ObjectId(group_id)} + {"group_ids": ObjectId(group_id)} ): auth.user_ids.remove(original_user.user.email) await auth.replace() @@ -178,8 +183,8 @@ async def edit_group( @router.delete("/{group_id}", response_model=GroupOut) async def delete_group( - group_id: str, - allow: bool = Depends(GroupAuthorization("owner")), + group_id: str, + allow: bool = Depends(GroupAuthorization("owner")), ): if (group := await GroupDB.get(PydanticObjectId(group_id))) is not None: await group.delete() @@ -190,10 +195,10 @@ async def delete_group( @router.post("/{group_id}/add/{username}", response_model=GroupOut) async def add_member( - group_id: str, - username: str, - role: Optional[str] = None, - allow: bool = Depends(GroupAuthorization("editor")), + group_id: str, + username: str, + role: Optional[str] = None, + allow: bool = Depends(GroupAuthorization("editor")), ): """Add a new user to a group.""" if (user := await UserDB.find_one(UserDB.email == username)) is not None: @@ -227,9 +232,9 @@ async def add_member( @router.post("/{group_id}/remove/{username}", response_model=GroupOut) async def remove_member( - group_id: str, - username: str, - allow: bool = Depends(GroupAuthorization("editor")), + group_id: str, + username: str, + allow: bool = Depends(GroupAuthorization("editor")), ): """Remove a user from a group.""" @@ -259,10 +264,10 @@ async def remove_member( @router.put("/{group_id}/update/{username}", response_model=GroupOut) async def update_member( - group_id: str, - username: str, - role: str, - allow: bool = Depends(GroupAuthorization("editor")), + group_id: str, + username: str, + role: str, + allow: bool = Depends(GroupAuthorization("editor")), ): """Update user role.""" if 
(user := await UserDB.find_one({"email": username})) is not None: diff --git a/backend/app/routers/metadata.py b/backend/app/routers/metadata.py index d9aa51242..21f829e83 100644 --- a/backend/app/routers/metadata.py +++ b/backend/app/routers/metadata.py @@ -23,7 +23,6 @@ MetadataDB, ) from app.models.pyobjectid import PyObjectId -from app.routers.authentication import get_admin_mode router = APIRouter() @@ -154,7 +153,6 @@ async def search_metadata_definition( async def update_metadata( metadata_in: MetadataPatch, metadata_id: str, - admin_mode: bool = Depends(get_admin_mode), es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), user=Depends(get_current_user), allow: bool = Depends(MetadataAuthorization("editor")), @@ -176,7 +174,6 @@ async def update_metadata( @router.delete("/{metadata_id}") async def delete_metadata( metadata_id: str, - admin_mode: bool = Depends(get_admin_mode), user=Depends(get_current_user), allow: bool = Depends(MetadataAuthorization("editor")), ): diff --git a/backend/app/routers/metadata_datasets.py b/backend/app/routers/metadata_datasets.py index 10701009a..08598498b 100644 --- a/backend/app/routers/metadata_datasets.py +++ b/backend/app/routers/metadata_datasets.py @@ -25,7 +25,6 @@ MetadataDelete, MetadataDefinitionDB, ) -from app.routers.authentication import get_admin_mode from app.search.connect import delete_document_by_id from app.search.index import index_dataset @@ -74,7 +73,6 @@ async def add_dataset_metadata( dataset_id: str, user=Depends(get_current_user), es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), - admin_mode: bool = Depends(get_admin_mode), allow: bool = Depends(Authorization("uploader")), ): """Attach new metadata to a dataset. 
The body must include a contents field with the JSON metadata, and either a @@ -124,7 +122,6 @@ async def replace_dataset_metadata( dataset_id: str, user=Depends(get_current_user), es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), - admin_mode: bool = Depends(get_admin_mode), allow: bool = Depends(Authorization("editor")), ): """Update metadata. Any fields provided in the contents JSON will be added or updated in the metadata. If context or @@ -179,7 +176,6 @@ async def update_dataset_metadata( dataset_id: str, user=Depends(get_current_user), es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), - admin_mode: bool = Depends(get_admin_mode), allow: bool = Depends(Authorization("editor")), ): """Update metadata. Any fields provided in the contents JSON will be added or updated in the metadata. If context or @@ -249,7 +245,6 @@ async def get_dataset_metadata( listener_name: Optional[str] = Form(None), listener_version: Optional[float] = Form(None), user=Depends(get_current_user), - admin_mode: bool = Depends(get_admin_mode), allow: bool = Depends(Authorization("viewer")), ): if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: @@ -281,7 +276,6 @@ async def delete_dataset_metadata( dataset_id: str, user=Depends(get_current_user), es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), - admin_mode: bool = Depends(get_admin_mode), allow: bool = Depends(Authorization("editor")), ): if (dataset := await DatasetDB.get(PydanticObjectId(dataset_id))) is not None: diff --git a/backend/app/routers/metadata_files.py b/backend/app/routers/metadata_files.py index 6cc156212..ad8a53ffa 100644 --- a/backend/app/routers/metadata_files.py +++ b/backend/app/routers/metadata_files.py @@ -29,7 +29,6 @@ MetadataDelete, MetadataDefinitionDB, ) -from app.routers.authentication import get_admin_mode from app.search.connect import delete_document_by_id from app.search.index import index_file @@ -106,7 +105,6 @@ async def 
_build_metadata_db_obj( async def add_file_metadata( metadata_in: MetadataIn, file_id: str, - admin_mode: bool = Depends(get_admin_mode), user=Depends(get_current_user), es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), allow: bool = Depends(FileAuthorization("uploader")), @@ -159,7 +157,6 @@ async def add_file_metadata( async def replace_file_metadata( metadata_in: MetadataPatch, file_id: str, - admin_mode: bool = Depends(get_admin_mode), user=Depends(get_current_user), es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), allow: bool = Depends(FileAuthorization("editor")), @@ -233,7 +230,6 @@ async def replace_file_metadata( async def update_file_metadata( metadata_in: MetadataPatch, file_id: str, - admin_mode: bool = Depends(get_admin_mode), user=Depends(get_current_user), es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), allow: bool = Depends(FileAuthorization("editor")), @@ -330,7 +326,6 @@ async def update_file_metadata( @router.get("/{file_id}/metadata", response_model=List[MetadataOut]) async def get_file_metadata( file_id: str, - admin_mode: bool = Depends(get_admin_mode), version: Optional[int] = None, all_versions: Optional[bool] = False, definition: Optional[str] = Form(None), @@ -390,7 +385,6 @@ async def get_file_metadata( async def delete_file_metadata( metadata_in: MetadataDelete, file_id: str, - admin_mode: bool = Depends(get_admin_mode), # version: Optional[int] = Form(None), user=Depends(get_current_user), es: Elasticsearch = Depends(dependencies.get_elasticsearchclient), diff --git a/frontend/src/openapi/v2/services/AuthorizationService.ts b/frontend/src/openapi/v2/services/AuthorizationService.ts index fc16149d9..3a4fb08da 100644 --- a/frontend/src/openapi/v2/services/AuthorizationService.ts +++ b/frontend/src/openapi/v2/services/AuthorizationService.ts @@ -95,6 +95,7 @@ export class AuthorizationService { /** * Get File Role + * Retrieve role of user for an individual file. 
Role cannot change between file versions. * @param fileId * @param datasetId * @returns RoleType Successful Response @@ -118,6 +119,7 @@ export class AuthorizationService { /** * Get Metadata Role + * Retrieve role of user for group. Group roles can be OWNER, EDITOR, or VIEWER (for regular Members). * @param metadataId * @param datasetId * @returns AuthorizationMetadata Successful Response @@ -141,6 +143,7 @@ export class AuthorizationService { /** * Get Group Role + * Retrieve role of user on a particular group (i.e. whether they can change group memberships). * @param groupId * @param datasetId * @returns RoleType Successful Response From 69f28aa54598f7081652c32d6716c1122d00edbd Mon Sep 17 00:00:00 2001 From: Chen Wang Date: Wed, 13 Dec 2023 10:12:33 -0600 Subject: [PATCH 39/43] fix bug in dataset --- backend/app/routers/datasets.py | 42 +++++++++++++++++++-------------- 1 file changed, 24 insertions(+), 18 deletions(-) diff --git a/backend/app/routers/datasets.py b/backend/app/routers/datasets.py index c11c0f1f2..5caa22537 100644 --- a/backend/app/routers/datasets.py +++ b/backend/app/routers/datasets.py @@ -215,24 +215,30 @@ async def get_datasets( admin=Depends(get_admin), admin_mode: bool = Depends(get_admin_mode), ): - criteria = [] - if not admin_mode or not admin: - if mine: - criteria.append(Or(DatasetDB.creator.email == user_id)) - else: - criteria.append( - Or( - DatasetDB.auth.user_ids == user_id, - DatasetDB.status == DatasetStatus.AUTHENTICATED.name, - ) - ) - - datasets = await DatasetDBViewList.find( - *criteria, - sort=(-DatasetDBViewList.created), - skip=skip, - limit=limit, - ).to_list() + if admin and admin_mode: + datasets = await DatasetDBViewList.find( + sort=(-DatasetDBViewList.created), + skip=skip, + limit=limit, + ).to_list() + elif mine: + datasets = await DatasetDBViewList.find( + DatasetDBViewList.creator.email == user_id, + sort=(-DatasetDBViewList.created), + skip=skip, + limit=limit, + ).to_list() + else: + datasets = await 
DatasetDBViewList.find( + Or( + DatasetDBViewList.creator.email == user_id, + DatasetDBViewList.auth.user_ids == user_id, + DatasetDBViewList.status == DatasetStatus.AUTHENTICATED.name, + ), + sort=(-DatasetDBViewList.created), + skip=skip, + limit=limit, + ).to_list() return [dataset.dict() for dataset in datasets] From 48ae259cf473a8277d53913554a709910f655df3 Mon Sep 17 00:00:00 2001 From: Chen Wang Date: Wed, 13 Dec 2023 10:49:13 -0600 Subject: [PATCH 40/43] fix search clause --- backend/app/routers/elasticsearch.py | 19 +++++++++++++------ .../v2/services/ElasticsearchService.ts | 14 +++++++++++++- 2 files changed, 26 insertions(+), 7 deletions(-) diff --git a/backend/app/routers/elasticsearch.py b/backend/app/routers/elasticsearch.py index 4f93941af..6c463b8b0 100644 --- a/backend/app/routers/elasticsearch.py +++ b/backend/app/routers/elasticsearch.py @@ -15,12 +15,11 @@ def _add_permissions_clause( query, username: str, - admin_mode: bool = Depends(get_admin_mode), - admin: bool = Depends(get_admin), + admin_mode: bool, + admin: bool, ): """Append filter to Elasticsearch object that restricts permissions based on the requesting user.""" # TODO: Add public filter once added - user_clause = { "bool": { "should": [ @@ -48,9 +47,15 @@ def _add_permissions_clause( @router.put("/search", response_model=str) -async def search(index_name: str, query: str, username=Depends(get_current_username)): +async def search( + index_name: str, + query: str, + username=Depends(get_current_username), + admin=Depends(get_admin), + admin_mode: bool = Depends(get_admin_mode), +): es = await connect_elasticsearch() - query = _add_permissions_clause(query, username) + query = _add_permissions_clause(query, username, admin, admin_mode) return search_index(es, index_name, query) @@ -58,9 +63,11 @@ async def search(index_name: str, query: str, username=Depends(get_current_usern async def msearch( request: Request, username=Depends(get_current_username), + admin=Depends(get_admin), + 
admin_mode: bool = Depends(get_admin_mode), ): es = await connect_elasticsearch() query = await request.body() - query = _add_permissions_clause(query, username) + query = _add_permissions_clause(query, username, admin, admin_mode) r = search_index(es, [settings.elasticsearch_index], query) return r diff --git a/frontend/src/openapi/v2/services/ElasticsearchService.ts b/frontend/src/openapi/v2/services/ElasticsearchService.ts index be50e0964..b0342e284 100644 --- a/frontend/src/openapi/v2/services/ElasticsearchService.ts +++ b/frontend/src/openapi/v2/services/ElasticsearchService.ts @@ -10,12 +10,14 @@ export class ElasticsearchService { * Search * @param indexName * @param query + * @param datasetId * @returns string Successful Response * @throws ApiError */ public static searchApiV2ElasticsearchSearchPut( indexName: string, query: string, + datasetId?: string, ): CancelablePromise { return __request({ method: 'PUT', @@ -23,6 +25,7 @@ export class ElasticsearchService { query: { 'index_name': indexName, 'query': query, + 'dataset_id': datasetId, }, errors: { 422: `Validation Error`, @@ -32,13 +35,22 @@ export class ElasticsearchService { /** * Msearch + * @param datasetId * @returns any Successful Response * @throws ApiError */ - public static msearchApiV2ElasticsearchAllMsearchPost(): CancelablePromise { + public static msearchApiV2ElasticsearchAllMsearchPost( + datasetId?: string, + ): CancelablePromise { return __request({ method: 'POST', path: `/api/v2/elasticsearch/all/_msearch`, + query: { + 'dataset_id': datasetId, + }, + errors: { + 422: `Validation Error`, + }, }); } From e050586212888ac0339fd5e1a766b1cfcfd9f288 Mon Sep 17 00:00:00 2001 From: Chen Wang Date: Wed, 13 Dec 2023 13:16:00 -0600 Subject: [PATCH 41/43] add more trigger on adminMode --- frontend/src/components/datasets/Dataset.tsx | 17 ++++++++++------- frontend/src/components/files/File.tsx | 15 +++++++++++++++ frontend/src/components/groups/Group.tsx | 9 +++++++-- 
frontend/src/components/search/Search.tsx | 6 ++++++ 4 files changed, 38 insertions(+), 9 deletions(-) diff --git a/frontend/src/components/datasets/Dataset.tsx b/frontend/src/components/datasets/Dataset.tsx index 8c252da7a..e6389026d 100644 --- a/frontend/src/components/datasets/Dataset.tsx +++ b/frontend/src/components/datasets/Dataset.tsx @@ -110,6 +110,7 @@ export const Dataset = (): JSX.Element => { (state: RootState) => state.dataset.datasetRole ); const folderPath = useSelector((state: RootState) => state.folder.folderPath); + const adminMode = useSelector((state: RootState) => state.user.adminMode); // state const [selectedTabIndex, setSelectedTabIndex] = useState(0); @@ -117,7 +118,6 @@ export const Dataset = (): JSX.Element => { React.useState(false); const [metadataRequestForms, setMetadataRequestForms] = useState({}); - const [allowSubmit, setAllowSubmit] = React.useState(false); // Error msg dialog const [errorOpen, setErrorOpen] = useState(false); const [showForbiddenPage, setShowForbiddenPage] = useState(false); @@ -136,11 +136,11 @@ export const Dataset = (): JSX.Element => { (state: RootState) => state.folder.folders ); - const metadataDefinitionList = useSelector( - (state: RootState) => state.metadata.metadataDefinitionList - ); - // component did mount list all files in dataset + useEffect(() => { + getMetadatDefinitions(null, 0, 100); + }, []); + useEffect(() => { listFilesInDataset(datasetId, folderId, skip, limit); listFoldersInDataset(datasetId, folderId, skip, limit); @@ -149,8 +149,11 @@ export const Dataset = (): JSX.Element => { }, [searchParams]); useEffect(() => { - getMetadatDefinitions(null, 0, 100); - }, []); + listFilesInDataset(datasetId, folderId, skip, limit); + listFoldersInDataset(datasetId, folderId, skip, limit); + listDatasetAbout(datasetId); + getFolderPath(folderId); + }, [adminMode]); useEffect(() => { // disable flipping if reaches the last page diff --git a/frontend/src/components/files/File.tsx 
b/frontend/src/components/files/File.tsx index cf99861ee..3ac438382 100644 --- a/frontend/src/components/files/File.tsx +++ b/frontend/src/components/files/File.tsx @@ -98,6 +98,7 @@ export const File = (): JSX.Element => { const storageType = useSelector( (state: RootState) => state.file.fileSummary.storage_type ); + const adminMode = useSelector((state: RootState) => state.user.adminMode); const [selectedTabIndex, setSelectedTabIndex] = useState(0); const [previews, setPreviews] = useState([]); @@ -129,6 +130,20 @@ export const File = (): JSX.Element => { } }, []); + // component did mount + useEffect(() => { + // load file information + listFileSummary(fileId); + listFileVersions(fileId); + // FIXME replace checks for null with logic to load this info from redux instead of the page parameters + if (datasetId != "null" && datasetId != "undefined") { + listDatasetAbout(datasetId); // get dataset name + } + if (folderId != "null" && folderId != "undefined") { + getFolderPath(folderId); // get folder path + } + }, [adminMode]); + // for breadcrumb useEffect(() => { const tmpPaths = [ diff --git a/frontend/src/components/groups/Group.tsx b/frontend/src/components/groups/Group.tsx index 9d80dcf2c..ad46c6027 100644 --- a/frontend/src/components/groups/Group.tsx +++ b/frontend/src/components/groups/Group.tsx @@ -30,12 +30,12 @@ export function Group() { dispatch(fetchGroupRole(groupId)); const groupAbout = useSelector((state: RootState) => state.group.about); - const role = useSelector((state: RootState) => state.group.role); - const groupCreatorEmail = useSelector( (state: RootState) => state.group.about.creator ); + const adminMode = useSelector((state: RootState) => state.user.adminMode); + const groupCreatorEmailLink = `mailto:${groupCreatorEmail}`; const [addMemberModalOpen, setAddMemberModalOpen] = useState(false); const [deleteGroupConfirmOpen, setDeleteGroupConfirmOpen] = useState(false); @@ -46,6 +46,11 @@ export function Group() { 
fetchCurrentGroupRole(groupId); }, []); + useEffect(() => { + fetchGroupInfo(groupId); + fetchCurrentGroupRole(groupId); + }, [adminMode]); + // Error msg dialog const [errorOpen, setErrorOpen] = useState(false); diff --git a/frontend/src/components/search/Search.tsx b/frontend/src/components/search/Search.tsx index 91383354c..f24ce91d3 100644 --- a/frontend/src/components/search/Search.tsx +++ b/frontend/src/components/search/Search.tsx @@ -13,9 +13,12 @@ import { SearchResult } from "./SearchResult"; import { searchTheme, theme } from "../../theme"; import config from "../../app.config"; import Cookies from "universal-cookie"; +import { useSelector } from "react-redux"; +import { RootState } from "../../types/data"; export function Search() { const [luceneOn, setLuceneOn] = useState(false); + const cookies = new Cookies(); const [authorizationHeader, setAuthorizationHeader] = useState({ Authorization: cookies.get("Authorization"), @@ -34,6 +37,9 @@ export function Search() { return () => clearInterval(intervalId); }, []); + // toggle super admin + const adminMode = useSelector((state: RootState) => state.user.adminMode); + // @ts-ignore return ( From f6546ef13fbb39f9cbba482d91677797ecf86747 Mon Sep 17 00:00:00 2001 From: Chen Wang Date: Wed, 13 Dec 2023 13:23:03 -0600 Subject: [PATCH 42/43] fix two bugs --- backend/app/routers/authorization.py | 2 +- frontend/src/components/files/UploadFile.tsx | 2 +- frontend/src/openapi/v2/services/AuthorizationService.ts | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/backend/app/routers/authorization.py b/backend/app/routers/authorization.py index e21d7dc0d..ef9ddf930 100644 --- a/backend/app/routers/authorization.py +++ b/backend/app/routers/authorization.py @@ -130,7 +130,7 @@ async def get_dataset_role_owner( return {"dataset_id": dataset_id, "allow": allow} -@router.get("/files/{file_id}/role}", response_model=RoleType) +@router.get("/files/{file_id}/role", response_model=RoleType) async def 
get_file_role( file_id: str, current_user=Depends(get_current_username), diff --git a/frontend/src/components/files/UploadFile.tsx b/frontend/src/components/files/UploadFile.tsx index 44caf288a..bb7ace434 100644 --- a/frontend/src/components/files/UploadFile.tsx +++ b/frontend/src/components/files/UploadFile.tsx @@ -58,7 +58,7 @@ export const UploadFile: React.FC = ( selectedFile: File ) => dispatch( - createFileAction(selectedDatasetId, selectedFile, selectedFolderId) + createFileAction(selectedDatasetId, selectedFolderId, selectedFile) ); const newFile = useSelector((state: RootState) => state.dataset.newFile); const metadataDefinitionList = useSelector( diff --git a/frontend/src/openapi/v2/services/AuthorizationService.ts b/frontend/src/openapi/v2/services/AuthorizationService.ts index 3a4fb08da..d38af4eb7 100644 --- a/frontend/src/openapi/v2/services/AuthorizationService.ts +++ b/frontend/src/openapi/v2/services/AuthorizationService.ts @@ -107,7 +107,7 @@ export class AuthorizationService { ): CancelablePromise { return __request({ method: 'GET', - path: `/api/v2/authorizations/files/${fileId}/role}`, + path: `/api/v2/authorizations/files/${fileId}/role`, query: { 'dataset_id': datasetId, }, From 2b2a238f4a330c842fc16bd5f1c42fff77b65589 Mon Sep 17 00:00:00 2001 From: Chen Wang Date: Wed, 13 Dec 2023 14:08:50 -0600 Subject: [PATCH 43/43] fix bug on error --- frontend/src/routes.tsx | 15 +++++++++++++-- 1 file changed, 13 insertions(+), 2 deletions(-) diff --git a/frontend/src/routes.tsx b/frontend/src/routes.tsx index 2cdd433ef..17922d031 100644 --- a/frontend/src/routes.tsx +++ b/frontend/src/routes.tsx @@ -21,7 +21,12 @@ import { Search } from "./components/search/Search"; import { isAuthorized } from "./utils/common"; import { useDispatch, useSelector } from "react-redux"; import { RootState } from "./types/data"; -import { refreshToken, resetLogout } from "./actions/common"; +import { + refreshToken, + resetFailedReason, + resetFailedReasonInline, + 
resetLogout, } from "./actions/common"; import { Explore } from "./components/Explore"; import { ExtractionHistory } from "./components/listeners/ExtractionHistory"; import { fetchDatasetRole, fetchFileRole } from "./actions/authorization"; @@ -73,11 +78,17 @@ const PrivateRoute = (props): JSX.Element => { } }, [loggedOut]); - // not found or unauthorized + // not found or unauthorized redirect useEffect(() => { if (reason == "Forbidden") { + // if redirect to new page, reset error so the error modal/message doesn't get stuck in "Forbidden" state + dispatch(resetFailedReason()); + dispatch(resetFailedReasonInline()); history("/forbidden"); } else if (reason == "Not Found") { + // if redirect to new page, reset error so the error modal/message doesn't get stuck in "Not Found" state + dispatch(resetFailedReason()); + dispatch(resetFailedReasonInline()); history("/not-found"); } }, [reason]);