From 9854962f7e0292505aa57e8aec0193ca207d4980 Mon Sep 17 00:00:00 2001 From: Seongjin Yoon Date: Fri, 26 Sep 2025 00:20:08 -0700 Subject: [PATCH 01/20] implementation --- .../workflow/WorkflowExecutionsResource.scala | 158 ++++++++- .../result-exportation.component.html | 248 +++++++------- .../result-exportation.component.ts | 35 +- .../workflow-result-export.service.ts | 307 ++++++++++++++++-- 4 files changed, 593 insertions(+), 155 deletions(-) diff --git a/core/amber/src/main/scala/edu/uci/ics/texera/web/resource/dashboard/user/workflow/WorkflowExecutionsResource.scala b/core/amber/src/main/scala/edu/uci/ics/texera/web/resource/dashboard/user/workflow/WorkflowExecutionsResource.scala index c3b63a2df81..4ee80cbc690 100644 --- a/core/amber/src/main/scala/edu/uci/ics/texera/web/resource/dashboard/user/workflow/WorkflowExecutionsResource.scala +++ b/core/amber/src/main/scala/edu/uci/ics/texera/web/resource/dashboard/user/workflow/WorkflowExecutionsResource.scala @@ -28,10 +28,11 @@ import edu.uci.ics.amber.engine.architecture.logreplay.{ReplayDestination, Repla import edu.uci.ics.amber.engine.common.Utils.{maptoStatusCode, stringToAggregatedState} import edu.uci.ics.amber.engine.common.storage.SequentialRecordStorage import edu.uci.ics.amber.util.serde.GlobalPortIdentitySerde.SerdeOps +import edu.uci.ics.amber.util.JSONUtils.objectMapper import edu.uci.ics.texera.dao.SqlServer import edu.uci.ics.texera.dao.jooq.generated.Tables._ import edu.uci.ics.texera.dao.jooq.generated.tables.daos.WorkflowExecutionsDao -import edu.uci.ics.texera.dao.jooq.generated.tables.pojos.WorkflowExecutions +import edu.uci.ics.texera.dao.jooq.generated.tables.pojos.{User => UserPojo, WorkflowExecutions} import edu.uci.ics.texera.auth.SessionUser import edu.uci.ics.texera.config.UserSystemConfig import edu.uci.ics.texera.dao.SqlServer.withTransaction @@ -102,6 +103,140 @@ object WorkflowExecutionsResource { } } + private case class RestrictedDataset(ownerEmail: String, datasetName: String) { + def cacheKey: (String, String) = (ownerEmail.toLowerCase, datasetName.toLowerCase) + def label: String = s"$datasetName ($ownerEmail)" + } + + private def parseDatasetPath(path: String): Option[RestrictedDataset] = { + if (path == null) { + return None + } + val trimmed = path.trim + if (!trimmed.startsWith("/")) { + return None + } + val segments = trimmed.split("/").filter(_.nonEmpty) + if (segments.length < 4) { + return None + } + val ownerEmail = segments(0) + val datasetName = segments(1) + Some(RestrictedDataset(ownerEmail, datasetName)) + } + + private def lookupDatasetDownloadable( + dataset: RestrictedDataset, + cache: mutable.Map[(String, String), Option[Boolean]] + ): Option[Boolean] = { + cache.getOrElseUpdate( + dataset.cacheKey, { + val record = context + .select(DATASET.IS_DOWNLOADABLE) + .from(DATASET) + .join(USER) + .on(DATASET.OWNER_UID.eq(USER.UID)) + .where( + USER.EMAIL + .equalIgnoreCase(dataset.ownerEmail) + .and(DATASET.NAME.equalIgnoreCase(dataset.datasetName)) + ) + .fetchOne() + if (record == null) { + None + } else { + Option(record.value1()) + } + } + ) + } + + private def computeDatasetRestrictionMap( + wid: Int, + currentUser: UserPojo + ): Map[String, Set[RestrictedDataset]] = { + val workflowRecord = context + .select(WORKFLOW.CONTENT) + .from(WORKFLOW) + .where(WORKFLOW.WID.eq(wid)) + .fetchOne() + + if (workflowRecord == null) { + return Map.empty + } + + val content = workflowRecord.value1() + if (content == null || content.isEmpty) { + return Map.empty + } + + val rootNode = + try { + 
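// A worked example of the path convention handled by parseDatasetPath above; the path and
// names are hypothetical and only illustrate the parsing rules, not real datasets:
//   parseDatasetPath("/alice@example.com/traffic-data/v1/records.csv")
//     // segments = [alice@example.com, traffic-data, v1, records.csv] (4 parts, passes the length check)
//     // => Some(RestrictedDataset("alice@example.com", "traffic-data"))
//   parseDatasetPath("local/records.csv")               // no leading "/"        => None
//   parseDatasetPath("/alice@example.com/traffic-data") // fewer than 4 segments => None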
objectMapper.readTree(content) + } catch { + case _: Exception => return Map.empty + } + + val operatorsNode = rootNode.path("operators") + val linksNode = rootNode.path("links") + + val datasetStatusCache = mutable.Map.empty[(String, String), Option[Boolean]] + val restrictedSourceMap = mutable.Map.empty[String, RestrictedDataset] + val adjacency = mutable.Map.empty[String, mutable.ListBuffer[String]] + + operatorsNode.elements().asScala.foreach { operatorNode => + val operatorId = operatorNode.path("operatorID").asText("") + if (operatorId.nonEmpty) { + val fileNameNode = operatorNode.path("operatorProperties").path("fileName") + if (fileNameNode.isTextual) { + parseDatasetPath(fileNameNode.asText()).foreach { dataset => + val isOwner = + Option(currentUser.getEmail) + .exists(_.equalsIgnoreCase(dataset.ownerEmail)) + if (!isOwner) { + lookupDatasetDownloadable(dataset, datasetStatusCache) match { + case Some(value) if !value => + restrictedSourceMap.update(operatorId, dataset) + case _ => + } + } + } + } + } + } + + linksNode.elements().asScala.foreach { linkNode => + val sourceId = linkNode.path("source").path("operatorID").asText("") + val targetId = linkNode.path("target").path("operatorID").asText("") + if (sourceId.nonEmpty && targetId.nonEmpty) { + val targets = adjacency.getOrElseUpdate(sourceId, mutable.ListBuffer.empty[String]) + targets += targetId + } + } + + val restrictionMap = mutable.Map.empty[String, Set[RestrictedDataset]] + val queue = mutable.Queue.empty[(String, Set[RestrictedDataset])] + + restrictedSourceMap.foreach { + case (operatorId, dataset) => + queue.enqueue(operatorId -> Set(dataset)) + } + + while (queue.nonEmpty) { + val (currentOperatorId, datasetSet) = queue.dequeue() + val existing = restrictionMap.getOrElse(currentOperatorId, Set.empty) + val merged = existing ++ datasetSet + if (merged != existing) { + restrictionMap.update(currentOperatorId, merged) + adjacency + .get(currentOperatorId) + .foreach(_.foreach(nextOperator => queue.enqueue(nextOperator -> merged))) + } + } + + restrictionMap.toMap + } + def insertOperatorPortResultUri( eid: ExecutionIdentity, globalPortId: GlobalPortIdentity, @@ -695,12 +830,29 @@ class WorkflowExecutionsResource { @Auth user: SessionUser ): Response = { - if (request.operators.size <= 0) - Response + if (request.operators.isEmpty) { + return Response .status(Response.Status.BAD_REQUEST) .`type`(MediaType.APPLICATION_JSON) .entity(Map("error" -> "No operator selected").asJava) .build() + } + val datasetRestrictions = computeDatasetRestrictionMap(request.workflowId, user.user) + val restrictedOperators = request.operators.filter(op => datasetRestrictions.contains(op.id)) + if (restrictedOperators.nonEmpty) { + val errorMessage = restrictedOperators + .map { op => + val datasets = datasetRestrictions(op.id).map(_.label).toList.sorted + s"Operator ${op.id} cannot be exported because it depends on dataset(s): ${datasets.mkString(", ")}" + } + .mkString("; ") + + return Response + .status(Response.Status.FORBIDDEN) + .`type`(MediaType.APPLICATION_JSON) + .entity(Map("error" -> errorMessage).asJava) + .build() + } try { request.destination match { diff --git a/core/gui/src/app/workspace/component/result-exportation/result-exportation.component.html b/core/gui/src/app/workspace/component/result-exportation/result-exportation.component.html index 756e5f54112..344fd4ec89d 100644 --- a/core/gui/src/app/workspace/component/result-exportation/result-exportation.component.html +++ 
b/core/gui/src/app/workspace/component/result-exportation/result-exportation.component.html @@ -18,126 +18,144 @@ -->
-
-
- - - - Export Type - - - - - - - - - - - - - - - Filename - - - - - - - - - - Destination - - - - - - - - - -
-
- - - -
-
{{dataset.dataset.did?.toString()}}
+ *ngIf="hasPartialRestriction && blockingDatasetLabels.length > 0"> + +
+ +
+
+ + + + Export Type + + + + + + + + + + + + + + + Filename + + + + + + + + + + Destination + + + + + + + + + +
+
+
+ + + +
+
{{dataset.dataset.did?.toString()}}
- {{ dataset.dataset.name }} + {{ dataset.dataset.name }} - -
-
-
- + +
+
+
+ + +
-
- + + +
+ +
+
diff --git a/core/gui/src/app/workspace/component/result-exportation/result-exportation.component.ts b/core/gui/src/app/workspace/component/result-exportation/result-exportation.component.ts index c053b1545f4..a501bc5247f 100644 --- a/core/gui/src/app/workspace/component/result-exportation/result-exportation.component.ts +++ b/core/gui/src/app/workspace/component/result-exportation/result-exportation.component.ts @@ -52,6 +52,11 @@ export class ResultExportationComponent implements OnInit { containsBinaryData: boolean = false; inputDatasetName = ""; selectedComputingUnit: DashboardWorkflowComputingUnit | null = null; + exportableOperatorIds: string[] = []; + blockedOperatorIds: string[] = []; + isExportRestricted: boolean = false; + hasPartialRestriction: boolean = false; + blockingDatasetLabels: string[] = []; userAccessibleDatasets: DashboardDataset[] = []; filteredUserAccessibleDatasets: DashboardDataset[] = []; @@ -74,7 +79,11 @@ export class ResultExportationComponent implements OnInit { this.userAccessibleDatasets = datasets.filter(dataset => dataset.accessPrivilege === "WRITE"); this.filteredUserAccessibleDatasets = [...this.userAccessibleDatasets]; }); - this.updateOutputType(); + + this.workflowResultExportService + .refreshDatasetMetadata() + .pipe(untilDestroyed(this)) + .subscribe(() => this.updateOutputType()); this.computingUnitStatusService .getSelectedComputingUnit() @@ -98,6 +107,12 @@ export class ResultExportationComponent implements OnInit { operatorIds = this.workflowActionService.getJointGraphWrapper().getCurrentHighlightedOperatorIDs(); } + this.exportableOperatorIds = this.workflowResultExportService.getExportableOperatorIds(operatorIds); + this.blockedOperatorIds = this.workflowResultExportService.getBlockedOperatorIds(operatorIds); + this.blockingDatasetLabels = this.workflowResultExportService.getBlockingDatasets(operatorIds); + this.isExportRestricted = this.exportableOperatorIds.length === 0 && operatorIds.length > 0; + this.hasPartialRestriction = this.exportableOperatorIds.length > 0 && this.blockedOperatorIds.length > 0; + if (operatorIds.length === 0) { // No operators highlighted this.isTableOutput = false; @@ -106,13 +121,22 @@ export class ResultExportationComponent implements OnInit { return; } + if (this.isExportRestricted) { + this.isTableOutput = false; + this.isVisualizationOutput = false; + this.containsBinaryData = false; + return; + } + + const idsToEvaluate = this.exportableOperatorIds; + // Assume they're all table or visualization // until we find an operator that isn't let allTable = true; let allVisualization = true; let anyBinaryData = false; - for (const operatorId of operatorIds) { + for (const operatorId of idsToEvaluate) { const outputTypes = this.workflowResultService.determineOutputTypes(operatorId); if (!outputTypes.hasAnyResult) { continue; @@ -146,6 +170,9 @@ export class ResultExportationComponent implements OnInit { } onClickExportResult(destination: "dataset" | "local", dataset: DashboardDataset = {} as DashboardDataset) { + if (this.isExportRestricted) { + return; + } const datasetIds = destination === "dataset" ? 
[dataset.dataset.did].filter((id): id is number => id !== undefined) : []; this.workflowResultExportService.exportWorkflowExecutionResult( @@ -181,4 +208,8 @@ export class ResultExportationComponent implements OnInit { } }); } + + get blockingDatasetSummary(): string { + return this.blockingDatasetLabels.join(", "); + } } diff --git a/core/gui/src/app/workspace/service/workflow-result-export/workflow-result-export.service.ts b/core/gui/src/app/workspace/service/workflow-result-export/workflow-result-export.service.ts index d2a4b5ae4f2..3cf65533058 100644 --- a/core/gui/src/app/workspace/service/workflow-result-export/workflow-result-export.service.ts +++ b/core/gui/src/app/workspace/service/workflow-result-export/workflow-result-export.service.ts @@ -26,13 +26,15 @@ import { PaginatedResultEvent, ResultExportResponse } from "../../types/workflow import { NotificationService } from "../../../common/service/notification/notification.service"; import { ExecuteWorkflowService } from "../execute-workflow/execute-workflow.service"; import { ExecutionState, isNotInExecution } from "../../types/execute-workflow.interface"; -import { filter } from "rxjs/operators"; +import { catchError, filter, map, take, tap } from "rxjs/operators"; import { OperatorResultService, WorkflowResultService } from "../workflow-result/workflow-result.service"; import { DownloadService } from "../../../dashboard/service/user/download/download.service"; import { HttpResponse } from "@angular/common/http"; import { ExportWorkflowJsonResponse } from "../../../dashboard/service/user/download/download.service"; import { DashboardWorkflowComputingUnit } from "../../types/workflow-computing-unit"; import { GuiConfigService } from "../../../common/service/gui-config.service"; +import { DatasetService } from "../../../dashboard/service/user/dataset/dataset.service"; +import { parseFilePathToDatasetFile } from "../../../common/type/dataset-file"; @Injectable({ providedIn: "root", @@ -40,6 +42,10 @@ import { GuiConfigService } from "../../../common/service/gui-config.service"; export class WorkflowResultExportService { hasResultToExportOnHighlightedOperators: boolean = false; hasResultToExportOnAllOperators = new BehaviorSubject(false); + private datasetDownloadableMap = new Map(); + private datasetLabelMap = new Map(); + private restrictedOperatorMap = new Map>(); + private datasetListLoaded = false; constructor( private workflowWebsocketService: WorkflowWebsocketService, private workflowActionService: WorkflowActionService, @@ -47,9 +53,12 @@ export class WorkflowResultExportService { private executeWorkflowService: ExecuteWorkflowService, private workflowResultService: WorkflowResultService, private downloadService: DownloadService, + private datasetService: DatasetService, private config: GuiConfigService ) { this.registerResultToExportUpdateHandler(); + this.registerRestrictionRecomputeTriggers(); + this.refreshDatasetMetadata().subscribe(); } registerResultToExportUpdateHandler() { @@ -60,34 +69,217 @@ export class WorkflowResultExportService { this.workflowActionService.getJointGraphWrapper().getJointOperatorHighlightStream(), this.workflowActionService.getJointGraphWrapper().getJointOperatorUnhighlightStream() ).subscribe(() => { - // check if there are any results to export on highlighted operators (either paginated or snapshot) - this.hasResultToExportOnHighlightedOperators = - isNotInExecution(this.executeWorkflowService.getExecutionState().state) && - this.workflowActionService - .getJointGraphWrapper() - 
.getCurrentHighlightedOperatorIDs() - .filter( - operatorId => - this.workflowResultService.hasAnyResult(operatorId) || - this.workflowResultService.getResultService(operatorId)?.getCurrentResultSnapshot() !== undefined - ).length > 0; - - // check if there are any results to export on all operators (either paginated or snapshot) - let staticHasResultToExportOnAllOperators = - isNotInExecution(this.executeWorkflowService.getExecutionState().state) && - this.workflowActionService - .getTexeraGraph() - .getAllOperators() - .map(operator => operator.operatorID) - .filter( - operatorId => - this.workflowResultService.hasAnyResult(operatorId) || - this.workflowResultService.getResultService(operatorId)?.getCurrentResultSnapshot() !== undefined - ).length > 0; - - // Notify subscribers of changes - this.hasResultToExportOnAllOperators.next(staticHasResultToExportOnAllOperators); + this.updateExportAvailabilityFlags(); + }); + } + + private registerRestrictionRecomputeTriggers(): void { + const texeraGraph = this.workflowActionService.getTexeraGraph(); + merge( + texeraGraph.getOperatorAddStream(), + texeraGraph.getOperatorDeleteStream(), + texeraGraph.getOperatorPropertyChangeStream(), + texeraGraph.getLinkAddStream(), + texeraGraph.getLinkDeleteStream(), + texeraGraph.getDisabledOperatorsChangedStream() + ).subscribe(() => { + this.runRestrictionAnalysis(); + }); + } + + public refreshDatasetMetadata(): Observable { + this.datasetListLoaded = false; + return this.datasetService.retrieveAccessibleDatasets().pipe( + take(1), + tap(datasets => { + this.datasetDownloadableMap.clear(); + this.datasetLabelMap.clear(); + datasets.forEach(dataset => { + const key = this.buildDatasetKey(dataset.ownerEmail, dataset.dataset.name); + const isDownloadable = dataset.dataset.isDownloadable || dataset.isOwner; + this.datasetDownloadableMap.set(key, isDownloadable); + this.datasetLabelMap.set(key, `${dataset.dataset.name} (${dataset.ownerEmail})`); + }); + this.datasetListLoaded = true; + this.runRestrictionAnalysis(); + }), + map(() => undefined), + catchError(() => { + this.datasetDownloadableMap.clear(); + this.datasetLabelMap.clear(); + this.datasetListLoaded = true; + this.runRestrictionAnalysis(); + return of(undefined); + }) + ); + } + + private buildDatasetKey(ownerEmail: string, datasetName: string): string { + return `${ownerEmail.toLowerCase()}::${datasetName.toLowerCase()}`; + } + + private extractDatasetInfo(fileName: unknown): { key: string; label: string } | null { + if (typeof fileName !== "string") { + return null; + } + const trimmed = fileName.trim(); + if (!trimmed.startsWith("/")) { + return null; + } + try { + const { ownerEmail, datasetName } = parseFilePathToDatasetFile(trimmed); + if (!ownerEmail || !datasetName) { + return null; + } + const key = this.buildDatasetKey(ownerEmail, datasetName); + if (!this.datasetDownloadableMap.has(key)) { + return null; + } + const label = this.datasetLabelMap.get(key) ?? 
`${datasetName} (${ownerEmail})`; + return { key, label }; + } catch { + return null; + } + } + + private runRestrictionAnalysis(): void { + if (!this.datasetListLoaded) { + this.restrictedOperatorMap.clear(); + this.updateExportAvailabilityFlags(); + return; + } + + const texeraGraph = this.workflowActionService.getTexeraGraph(); + const allOperators = texeraGraph.getAllOperators(); + const operatorById = new Map(allOperators.map(op => [op.operatorID, op] as const)); + const enabledOperators = allOperators.filter(operator => !operator.isDisabled); + const datasetSources: Array<{ operatorId: string; label: string }> = []; + + enabledOperators.forEach(operator => { + const datasetInfo = this.extractDatasetInfo(operator.operatorProperties?.fileName); + if (!datasetInfo) { + return; + } + const isDownloadable = this.datasetDownloadableMap.get(datasetInfo.key); + if (isDownloadable === false) { + datasetSources.push({ operatorId: operator.operatorID, label: datasetInfo.label }); + } + }); + + const restrictions = new Map>(); + + if (datasetSources.length === 0) { + this.restrictedOperatorMap = restrictions; + this.updateExportAvailabilityFlags(); + return; + } + + const adjacency = new Map(); + texeraGraph.getAllLinks().forEach(link => { + const sourceId = link.source.operatorID; + const targetId = link.target.operatorID; + const sourceOperator = operatorById.get(sourceId); + const targetOperator = operatorById.get(targetId); + if (!sourceOperator || !targetOperator) { + return; + } + if (sourceOperator.isDisabled || targetOperator.isDisabled) { + return; + } + const neighbors = adjacency.get(sourceId); + if (neighbors) { + neighbors.push(targetId); + } else { + adjacency.set(sourceId, [targetId]); + } + }); + + const queue: Array<{ operatorId: string; datasets: Set }> = []; + datasetSources.forEach(source => { + queue.push({ operatorId: source.operatorId, datasets: new Set([source.label]) }); + }); + + while (queue.length > 0) { + const current = queue.shift()!; + const existing = restrictions.get(current.operatorId) ?? new Set(); + let updated = false; + current.datasets.forEach(label => { + if (!existing.has(label)) { + existing.add(label); + updated = true; + } + }); + if (updated || !restrictions.has(current.operatorId)) { + restrictions.set(current.operatorId, existing); + const neighbors = adjacency.get(current.operatorId) ?? 
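// When the operator has no outgoing links, fall back to an empty neighbor list so the
// propagation simply stops at this operator.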
[]; + neighbors.forEach(nextOperatorId => { + queue.push({ operatorId: nextOperatorId, datasets: new Set(existing) }); + }); + } + } + + this.restrictedOperatorMap = restrictions; + this.updateExportAvailabilityFlags(); + } + + private updateExportAvailabilityFlags(): void { + const executionIdle = isNotInExecution(this.executeWorkflowService.getExecutionState().state); + + const highlightedOperators = this.workflowActionService.getJointGraphWrapper().getCurrentHighlightedOperatorIDs(); + + const highlightedHasResult = highlightedOperators.some( + operatorId => + this.workflowResultService.hasAnyResult(operatorId) || + this.workflowResultService.getResultService(operatorId)?.getCurrentResultSnapshot() !== undefined + ); + + this.hasResultToExportOnHighlightedOperators = executionIdle && highlightedHasResult; + + const allOperatorIds = this.workflowActionService + .getTexeraGraph() + .getAllOperators() + .map(operator => operator.operatorID); + + const hasAnyResult = + executionIdle && + allOperatorIds.some( + operatorId => + this.workflowResultService.hasAnyResult(operatorId) || + this.workflowResultService.getResultService(operatorId)?.getCurrentResultSnapshot() !== undefined + ); + + this.hasResultToExportOnAllOperators.next(hasAnyResult); + } + + private isOperatorEligibleForExport(operatorId: string): boolean { + if (this.restrictedOperatorMap.has(operatorId)) { + return false; + } + return ( + this.workflowResultService.hasAnyResult(operatorId) || + this.workflowResultService.getResultService(operatorId)?.getCurrentResultSnapshot() !== undefined + ); + } + + public getExportableOperatorIds(operatorIds: readonly string[]): string[] { + return operatorIds.filter(operatorId => !this.restrictedOperatorMap.has(operatorId)); + } + + public getBlockedOperatorIds(operatorIds: readonly string[]): string[] { + return operatorIds.filter(operatorId => this.restrictedOperatorMap.has(operatorId)); + } + + public hasBlockedOperators(operatorIds: readonly string[]): boolean { + return operatorIds.some(operatorId => this.restrictedOperatorMap.has(operatorId)); + } + + public getBlockingDatasets(operatorIds: readonly string[]): string[] { + const labels = new Set(); + operatorIds.forEach(operatorId => { + const datasets = this.restrictedOperatorMap.get(operatorId); + datasets?.forEach(label => labels.add(label)); }); + return Array.from(labels); } /** @@ -105,6 +297,34 @@ export class WorkflowResultExportService { // which means export button is selected from context-menu destination: "dataset" | "local" = "dataset", // default to dataset unit: DashboardWorkflowComputingUnit | null // computing unit for cluster setting + ): void { + this.refreshDatasetMetadata() + .pipe(take(1)) + .subscribe(() => + this.performExport( + exportType, + workflowName, + datasetIds, + rowIndex, + columnIndex, + filename, + exportAll, + destination, + unit + ) + ); + } + + private performExport( + exportType: string, + workflowName: string, + datasetIds: number[], + rowIndex: number, + columnIndex: number, + filename: string, + exportAll: boolean, + destination: "dataset" | "local", + unit: DashboardWorkflowComputingUnit | null ): void { if (!this.config.env.exportExecutionResultEnabled) { return; @@ -128,17 +348,34 @@ export class WorkflowResultExportService { .map(operator => operator.operatorID) : [...this.workflowActionService.getJointGraphWrapper().getCurrentHighlightedOperatorIDs()]; - const operatorArray = operatorIds.map(operatorId => { - return { - id: operatorId, - outputType: 
this.workflowResultService.determineOutputExtension(operatorId, exportType), - }; - }); - if (operatorIds.length === 0) { return; } + const exportableOperatorIds = this.getExportableOperatorIds(operatorIds); + + if (exportableOperatorIds.length === 0) { + const datasets = this.getBlockingDatasets(operatorIds); + const suffix = datasets.length > 0 ? `: ${datasets.join(", ")}` : ""; + this.notificationService.error( + `Cannot export result: selection depends on dataset(s) that are not downloadable${suffix}` + ); + return; + } + + if (exportableOperatorIds.length < operatorIds.length) { + const datasets = this.getBlockingDatasets(operatorIds); + const suffix = datasets.length > 0 ? ` (${datasets.join(", ")})` : ""; + this.notificationService.warning( + `Some operators were skipped because their results depend on dataset(s) that are not downloadable${suffix}` + ); + } + + const operatorArray = exportableOperatorIds.map(operatorId => ({ + id: operatorId, + outputType: this.workflowResultService.determineOutputExtension(operatorId, exportType), + })); + // show loading this.notificationService.loading("Exporting..."); From 69dcee08d60e8eb8eba272af925759f1e51023c0 Mon Sep 17 00:00:00 2001 From: Seongjin Yoon Date: Fri, 26 Sep 2025 00:54:08 -0700 Subject: [PATCH 02/20] remove --- .../workflow-result-export.service.ts | 10 ---------- 1 file changed, 10 deletions(-) diff --git a/core/gui/src/app/workspace/service/workflow-result-export/workflow-result-export.service.ts b/core/gui/src/app/workspace/service/workflow-result-export/workflow-result-export.service.ts index 3cf65533058..5109fc37a42 100644 --- a/core/gui/src/app/workspace/service/workflow-result-export/workflow-result-export.service.ts +++ b/core/gui/src/app/workspace/service/workflow-result-export/workflow-result-export.service.ts @@ -251,16 +251,6 @@ export class WorkflowResultExportService { this.hasResultToExportOnAllOperators.next(hasAnyResult); } - private isOperatorEligibleForExport(operatorId: string): boolean { - if (this.restrictedOperatorMap.has(operatorId)) { - return false; - } - return ( - this.workflowResultService.hasAnyResult(operatorId) || - this.workflowResultService.getResultService(operatorId)?.getCurrentResultSnapshot() !== undefined - ); - } - public getExportableOperatorIds(operatorIds: readonly string[]): string[] { return operatorIds.filter(operatorId => !this.restrictedOperatorMap.has(operatorId)); } From 99bf2a210a37749676ff276e74c189eaf99d7ba0 Mon Sep 17 00:00:00 2001 From: Seongjin Yoon Date: Fri, 26 Sep 2025 10:20:24 -0700 Subject: [PATCH 03/20] fix test --- .../workflow-result-export.service.spec.ts | 25 ++++++++++++++++++- 1 file changed, 24 insertions(+), 1 deletion(-) diff --git a/core/gui/src/app/workspace/service/workflow-result-export/workflow-result-export.service.spec.ts b/core/gui/src/app/workspace/service/workflow-result-export/workflow-result-export.service.spec.ts index 7e45c34de2a..98706d26aa2 100644 --- a/core/gui/src/app/workspace/service/workflow-result-export/workflow-result-export.service.spec.ts +++ b/core/gui/src/app/workspace/service/workflow-result-export/workflow-result-export.service.spec.ts @@ -35,6 +35,7 @@ import { PaginatedResultEvent } from "../../types/workflow-websocket.interface"; import { ExecutionState } from "../../types/execute-workflow.interface"; import * as JSZip from "jszip"; import { DownloadService } from "src/app/dashboard/service/user/download/download.service"; +import { DatasetService } from "../../../dashboard/service/user/dataset/dataset.service"; import { 
commonTestProviders } from "../../../common/testing/test-utils"; describe("WorkflowResultExportService", () => { @@ -45,6 +46,7 @@ describe("WorkflowResultExportService", () => { let executeWorkflowServiceSpy: jasmine.SpyObj; let workflowResultServiceSpy: jasmine.SpyObj; let downloadServiceSpy: jasmine.SpyObj; + let datasetServiceSpy: jasmine.SpyObj; let jointGraphWrapperSpy: jasmine.SpyObj; let texeraGraphSpy: jasmine.SpyObj; @@ -60,8 +62,24 @@ describe("WorkflowResultExportService", () => { jointGraphWrapperSpy.getJointOperatorHighlightStream.and.returnValue(of()); jointGraphWrapperSpy.getJointOperatorUnhighlightStream.and.returnValue(of()); - texeraGraphSpy = jasmine.createSpyObj("TexeraGraph", ["getAllOperators"]); + texeraGraphSpy = jasmine.createSpyObj("TexeraGraph", [ + "getAllOperators", + "getOperatorAddStream", + "getOperatorDeleteStream", + "getOperatorPropertyChangeStream", + "getLinkAddStream", + "getLinkDeleteStream", + "getDisabledOperatorsChangedStream", + "getAllLinks" + ]); texeraGraphSpy.getAllOperators.and.returnValue([]); + texeraGraphSpy.getOperatorAddStream.and.returnValue(of()); + texeraGraphSpy.getOperatorDeleteStream.and.returnValue(of()); + texeraGraphSpy.getOperatorPropertyChangeStream.and.returnValue(of()); + texeraGraphSpy.getLinkAddStream.and.returnValue(of()); + texeraGraphSpy.getLinkDeleteStream.and.returnValue(of()); + texeraGraphSpy.getDisabledOperatorsChangedStream.and.returnValue(of()); + texeraGraphSpy.getAllLinks.and.returnValue([]); const wsSpy = jasmine.createSpyObj("WorkflowWebsocketService", ["subscribeToEvent", "send"]); wsSpy.subscribeToEvent.and.returnValue(of()); // Return an empty observable @@ -87,6 +105,9 @@ describe("WorkflowResultExportService", () => { const downloadSpy = jasmine.createSpyObj("DownloadService", ["downloadOperatorsResult"]); downloadSpy.downloadOperatorsResult.and.returnValue(of(new Blob())); + const datasetSpy = jasmine.createSpyObj("DatasetService", ["retrieveAccessibleDatasets"]); + datasetSpy.retrieveAccessibleDatasets.and.returnValue(of([])); + TestBed.configureTestingModule({ imports: [HttpClientTestingModule], providers: [ @@ -97,6 +118,7 @@ describe("WorkflowResultExportService", () => { { provide: ExecuteWorkflowService, useValue: ewSpy }, { provide: WorkflowResultService, useValue: wrSpy }, { provide: DownloadService, useValue: downloadSpy }, + { provide: DatasetService, useValue: datasetSpy }, ...commonTestProviders, ], }); @@ -109,6 +131,7 @@ describe("WorkflowResultExportService", () => { executeWorkflowServiceSpy = TestBed.inject(ExecuteWorkflowService) as jasmine.SpyObj; workflowResultServiceSpy = TestBed.inject(WorkflowResultService) as jasmine.SpyObj; downloadServiceSpy = TestBed.inject(DownloadService) as jasmine.SpyObj; + datasetServiceSpy = TestBed.inject(DatasetService) as jasmine.SpyObj; }); it("should be created", () => { From 7284e3387056d7187f987fd72e51c967143f847d Mon Sep 17 00:00:00 2001 From: Seongjin Yoon Date: Fri, 26 Sep 2025 10:59:55 -0700 Subject: [PATCH 04/20] format --- .../workflow-result-export.service.spec.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/core/gui/src/app/workspace/service/workflow-result-export/workflow-result-export.service.spec.ts b/core/gui/src/app/workspace/service/workflow-result-export/workflow-result-export.service.spec.ts index 98706d26aa2..7dea1f097eb 100644 --- a/core/gui/src/app/workspace/service/workflow-result-export/workflow-result-export.service.spec.ts +++ 
b/core/gui/src/app/workspace/service/workflow-result-export/workflow-result-export.service.spec.ts @@ -70,7 +70,7 @@ describe("WorkflowResultExportService", () => { "getLinkAddStream", "getLinkDeleteStream", "getDisabledOperatorsChangedStream", - "getAllLinks" + "getAllLinks", ]); texeraGraphSpy.getAllOperators.and.returnValue([]); texeraGraphSpy.getOperatorAddStream.and.returnValue(of()); From 34219c374fd37a181a8e01ffda1894447c03dfd2 Mon Sep 17 00:00:00 2001 From: Seongjin Yoon Date: Sat, 27 Sep 2025 18:26:21 -0700 Subject: [PATCH 05/20] add comment --- .../workflow/WorkflowExecutionsResource.scala | 35 ++++++ .../result-exportation.component.ts | 6 ++ .../workflow-result-export.service.ts | 100 +++++++++++++++++- 3 files changed, 140 insertions(+), 1 deletion(-) diff --git a/core/amber/src/main/scala/edu/uci/ics/texera/web/resource/dashboard/user/workflow/WorkflowExecutionsResource.scala b/core/amber/src/main/scala/edu/uci/ics/texera/web/resource/dashboard/user/workflow/WorkflowExecutionsResource.scala index 4ee80cbc690..27e147a3c98 100644 --- a/core/amber/src/main/scala/edu/uci/ics/texera/web/resource/dashboard/user/workflow/WorkflowExecutionsResource.scala +++ b/core/amber/src/main/scala/edu/uci/ics/texera/web/resource/dashboard/user/workflow/WorkflowExecutionsResource.scala @@ -103,11 +103,25 @@ object WorkflowExecutionsResource { } } + /** + * Represents a dataset that has access restrictions for export. + * Used to track which datasets are non-downloadable and owned by other users. + * + * @param ownerEmail The email of the dataset owner + * @param datasetName The name of the dataset + */ private case class RestrictedDataset(ownerEmail: String, datasetName: String) { def cacheKey: (String, String) = (ownerEmail.toLowerCase, datasetName.toLowerCase) def label: String = s"$datasetName ($ownerEmail)" } + /** + * Parses a file path to extract dataset information. + * Expected format: /ownerEmail/datasetName/... + * + * @param path The file path from operator properties + * @return Some(RestrictedDataset) if path is valid, None otherwise + */ private def parseDatasetPath(path: String): Option[RestrictedDataset] = { if (path == null) { return None @@ -125,6 +139,14 @@ object WorkflowExecutionsResource { Some(RestrictedDataset(ownerEmail, datasetName)) } + /** + * Checks if a dataset is downloadable by querying the database. + * Uses caching to avoid repeated database queries for the same dataset. + * + * @param dataset The dataset to check + * @param cache A cache to store lookup results + * @return Some(true) if downloadable, Some(false) if not, None if dataset doesn't exist + */ private def lookupDatasetDownloadable( dataset: RestrictedDataset, cache: mutable.Map[(String, String), Option[Boolean]] @@ -151,6 +173,19 @@ object WorkflowExecutionsResource { ) } + /** + * Computes which operators in a workflow are restricted due to dataset access controls. + * + * This function: + * 1. Parses the workflow JSON to find all operators and their dataset dependencies + * 2. Identifies operators using non-downloadable datasets that the user doesn't own + * 3. Uses BFS to propagate restrictions through the workflow graph + * 4. 
Returns a map of operator IDs to the restricted datasets they depend on + * + * @param wid The workflow ID + * @param currentUser The current user making the export request + * @return Map of operator ID -> Set of restricted datasets that block its export + */ private def computeDatasetRestrictionMap( wid: Int, currentUser: UserPojo diff --git a/core/gui/src/app/workspace/component/result-exportation/result-exportation.component.ts b/core/gui/src/app/workspace/component/result-exportation/result-exportation.component.ts index a501bc5247f..fef9fe1386d 100644 --- a/core/gui/src/app/workspace/component/result-exportation/result-exportation.component.ts +++ b/core/gui/src/app/workspace/component/result-exportation/result-exportation.component.ts @@ -209,6 +209,12 @@ export class ResultExportationComponent implements OnInit { }); } + /** + * Getter that returns a comma-separated string of blocking dataset labels. + * Used in the template to display which datasets are preventing export. + * + * @returns String like "Dataset1 (user1@example.com), Dataset2 (user2@example.com)" + */ get blockingDatasetSummary(): string { return this.blockingDatasetLabels.join(", "); } diff --git a/core/gui/src/app/workspace/service/workflow-result-export/workflow-result-export.service.ts b/core/gui/src/app/workspace/service/workflow-result-export/workflow-result-export.service.ts index 5109fc37a42..2511e9c268e 100644 --- a/core/gui/src/app/workspace/service/workflow-result-export/workflow-result-export.service.ts +++ b/core/gui/src/app/workspace/service/workflow-result-export/workflow-result-export.service.ts @@ -73,6 +73,10 @@ export class WorkflowResultExportService { }); } + /** + * Registers triggers to recompute dataset restrictions when the workflow graph changes. + * Monitors operator/link additions, deletions, property changes, and operator enable/disable events. + */ private registerRestrictionRecomputeTriggers(): void { const texeraGraph = this.workflowActionService.getTexeraGraph(); merge( @@ -87,6 +91,19 @@ export class WorkflowResultExportService { }); } + /** + * Refreshes dataset metadata from the server and rebuilds local caches. + * + * Fetches all accessible datasets and their permissions, then updates: + * - datasetDownloadableMap: tracks which datasets are downloadable + * - datasetLabelMap: stores human-readable dataset labels + * + * A dataset is considered downloadable if either: + * - The dataset's isDownloadable flag is true, OR + * - The current user is the dataset owner + * + * @returns Observable that completes when metadata is refreshed + */ public refreshDatasetMetadata(): Observable { this.datasetListLoaded = false; return this.datasetService.retrieveAccessibleDatasets().pipe( @@ -114,10 +131,27 @@ export class WorkflowResultExportService { ); } + /** + * Builds a normalized key for dataset lookup in caches. + * Converts both email and dataset name to lowercase for case-insensitive matching. + * + * @param ownerEmail The dataset owner's email + * @param datasetName The dataset name + * @returns Normalized key in format "email::dataset" + */ private buildDatasetKey(ownerEmail: string, datasetName: string): string { return `${ownerEmail.toLowerCase()}::${datasetName.toLowerCase()}`; } + /** + * Extracts dataset information from an operator's fileName property. + * + * Parses file paths in the expected format and validates that the dataset + * exists in our accessible datasets cache. 
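   * For example (hypothetical values), a fileName of "/alice@example.com/traffic-data/v1/records.csv"
   * would yield the cache key "alice@example.com::traffic-data" and the label
   * "traffic-data (alice@example.com)", assuming that dataset appears in datasetDownloadableMap.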
+ * + * @param fileName The fileName property from operator properties + * @returns Object with dataset key and label, or null if invalid/not found + */ private extractDatasetInfo(fileName: unknown): { key: string; label: string } | null { if (typeof fileName !== "string") { return null; @@ -142,6 +176,18 @@ export class WorkflowResultExportService { } } + /** + * Performs client-side restriction analysis to mirror backend validation. + * + * This function: + * 1. Identifies operators using non-downloadable datasets + * 2. Builds a workflow dependency graph from operator links + * 3. Uses BFS to propagate restrictions through the graph + * 4. Updates restrictedOperatorMap with results + * + * The analysis considers only enabled operators and ignores disabled ones. + * Restrictions flow downstream through operator dependencies. + */ private runRestrictionAnalysis(): void { if (!this.datasetListLoaded) { this.restrictedOperatorMap.clear(); @@ -155,6 +201,7 @@ export class WorkflowResultExportService { const enabledOperators = allOperators.filter(operator => !operator.isDisabled); const datasetSources: Array<{ operatorId: string; label: string }> = []; + // Identify source operators that use non-downloadable datasets enabledOperators.forEach(operator => { const datasetInfo = this.extractDatasetInfo(operator.operatorProperties?.fileName); if (!datasetInfo) { @@ -174,6 +221,7 @@ export class WorkflowResultExportService { return; } + // Build Workflow Dependency Graph const adjacency = new Map(); texeraGraph.getAllLinks().forEach(link => { const sourceId = link.source.operatorID; @@ -194,6 +242,7 @@ export class WorkflowResultExportService { } }); + // BFS const queue: Array<{ operatorId: string; datasets: Set }> = []; datasetSources.forEach(source => { queue.push({ operatorId: source.operatorId, datasets: new Set([source.label]) }); @@ -218,10 +267,20 @@ export class WorkflowResultExportService { } } + // Update State this.restrictedOperatorMap = restrictions; this.updateExportAvailabilityFlags(); } + /** + * Updates UI flags that control export button visibility and availability. + * + * Checks execution state and result availability to determine: + * - hasResultToExportOnHighlightedOperators: for context menu export button + * - hasResultToExportOnAllOperators: for top menu export button + * + * Export is only available when execution is idle and operators have results. + */ private updateExportAvailabilityFlags(): void { const executionIdle = isNotInExecution(this.executeWorkflowService.getExecutionState().state); @@ -251,18 +310,43 @@ export class WorkflowResultExportService { this.hasResultToExportOnAllOperators.next(hasAnyResult); } + /** + * Filters operator IDs to return only those that are not restricted by dataset access controls. + * + * @param operatorIds Array of operator IDs to filter + * @returns Array of operator IDs that can be exported + */ public getExportableOperatorIds(operatorIds: readonly string[]): string[] { return operatorIds.filter(operatorId => !this.restrictedOperatorMap.has(operatorId)); } + /** + * Filters operator IDs to return only those that are restricted by dataset access controls. + * + * @param operatorIds Array of operator IDs to filter + * @returns Array of operator IDs that are blocked from export + */ public getBlockedOperatorIds(operatorIds: readonly string[]): string[] { return operatorIds.filter(operatorId => this.restrictedOperatorMap.has(operatorId)); } + /** + * Checks if any of the provided operator IDs are blocked by dataset restrictions. 
+ * + * @param operatorIds Array of operator IDs to check + * @returns True if any operators are blocked, false otherwise + */ public hasBlockedOperators(operatorIds: readonly string[]): boolean { return operatorIds.some(operatorId => this.restrictedOperatorMap.has(operatorId)); } + /** + * Gets the list of dataset labels that are blocking export for the given operators. + * Used to display user-friendly error messages about which datasets are causing restrictions. + * + * @param operatorIds Array of operator IDs to check + * @returns Array of dataset labels (e.g., "Dataset1 (user@example.com)") + */ public getBlockingDatasets(operatorIds: readonly string[]): string[] { const labels = new Set(); operatorIds.forEach(operatorId => { @@ -305,6 +389,18 @@ export class WorkflowResultExportService { ); } + /** + * Performs the actual export operation with restriction validation. + * + * This method handles the core export logic: + * 1. Validates configuration and computing unit availability + * 2. Determines operator scope (all vs highlighted) + * 3. Applies restriction filtering with user feedback + * 4. Makes the export API call + * 5. Handles response and shows appropriate notifications + * + * Shows error messages if all operators are blocked, warning messages if some are blocked. + */ private performExport( exportType: string, workflowName: string, @@ -316,6 +412,7 @@ export class WorkflowResultExportService { destination: "dataset" | "local", unit: DashboardWorkflowComputingUnit | null ): void { + // Validates configuration and computing unit availability if (!this.config.env.exportExecutionResultEnabled) { return; } @@ -330,7 +427,7 @@ export class WorkflowResultExportService { return; } - // gather operator IDs + // Determines operator scope const operatorIds = exportAll ? this.workflowActionService .getTexeraGraph() @@ -342,6 +439,7 @@ export class WorkflowResultExportService { return; } + // Applies restriction filtering with user feedback const exportableOperatorIds = this.getExportableOperatorIds(operatorIds); if (exportableOperatorIds.length === 0) { From 145121a7b41e8bafb8d150f9b463358e76d014cd Mon Sep 17 00:00:00 2001 From: Seongjin Yoon Date: Tue, 30 Sep 2025 14:54:18 -0700 Subject: [PATCH 06/20] add comment --- .../user/workflow/WorkflowExecutionsResource.scala | 10 ++++++++++ .../workflow-result-export.service.ts | 10 ---------- 2 files changed, 10 insertions(+), 10 deletions(-) diff --git a/core/amber/src/main/scala/edu/uci/ics/texera/web/resource/dashboard/user/workflow/WorkflowExecutionsResource.scala b/core/amber/src/main/scala/edu/uci/ics/texera/web/resource/dashboard/user/workflow/WorkflowExecutionsResource.scala index 27e147a3c98..82eb4b8b645 100644 --- a/core/amber/src/main/scala/edu/uci/ics/texera/web/resource/dashboard/user/workflow/WorkflowExecutionsResource.scala +++ b/core/amber/src/main/scala/edu/uci/ics/texera/web/resource/dashboard/user/workflow/WorkflowExecutionsResource.scala @@ -103,6 +103,7 @@ object WorkflowExecutionsResource { } } +// /** * Represents a dataset that has access restrictions for export. * Used to track which datasets are non-downloadable and owned by other users. 
@@ -190,6 +191,7 @@ object WorkflowExecutionsResource { wid: Int, currentUser: UserPojo ): Map[String, Set[RestrictedDataset]] = { + // Load workflow val workflowRecord = context .select(WORKFLOW.CONTENT) .from(WORKFLOW) @@ -215,6 +217,7 @@ object WorkflowExecutionsResource { val operatorsNode = rootNode.path("operators") val linksNode = rootNode.path("links") + // Find source operators val datasetStatusCache = mutable.Map.empty[(String, String), Option[Boolean]] val restrictedSourceMap = mutable.Map.empty[String, RestrictedDataset] val adjacency = mutable.Map.empty[String, mutable.ListBuffer[String]] @@ -240,6 +243,7 @@ object WorkflowExecutionsResource { } } + // Build dependency graph linksNode.elements().asScala.foreach { linkNode => val sourceId = linkNode.path("source").path("operatorID").asText("") val targetId = linkNode.path("target").path("operatorID").asText("") @@ -249,6 +253,7 @@ object WorkflowExecutionsResource { } } + // BFS to propagate restrictions val restrictionMap = mutable.Map.empty[String, Set[RestrictedDataset]] val queue = mutable.Queue.empty[(String, Set[RestrictedDataset])] @@ -271,6 +276,7 @@ object WorkflowExecutionsResource { restrictionMap.toMap } + // def insertOperatorPortResultUri( eid: ExecutionIdentity, @@ -872,8 +878,12 @@ class WorkflowExecutionsResource { .entity(Map("error" -> "No operator selected").asJava) .build() } + + // Get ALL restrictions in workflow val datasetRestrictions = computeDatasetRestrictionMap(request.workflowId, user.user) + // Filter to only user's selection val restrictedOperators = request.operators.filter(op => datasetRestrictions.contains(op.id)) + // Check if any selected operator is restricted if (restrictedOperators.nonEmpty) { val errorMessage = restrictedOperators .map { op => diff --git a/core/gui/src/app/workspace/service/workflow-result-export/workflow-result-export.service.ts b/core/gui/src/app/workspace/service/workflow-result-export/workflow-result-export.service.ts index 2511e9c268e..59bc1a1b3a3 100644 --- a/core/gui/src/app/workspace/service/workflow-result-export/workflow-result-export.service.ts +++ b/core/gui/src/app/workspace/service/workflow-result-export/workflow-result-export.service.ts @@ -330,16 +330,6 @@ export class WorkflowResultExportService { return operatorIds.filter(operatorId => this.restrictedOperatorMap.has(operatorId)); } - /** - * Checks if any of the provided operator IDs are blocked by dataset restrictions. - * - * @param operatorIds Array of operator IDs to check - * @returns True if any operators are blocked, false otherwise - */ - public hasBlockedOperators(operatorIds: readonly string[]): boolean { - return operatorIds.some(operatorId => this.restrictedOperatorMap.has(operatorId)); - } - /** * Gets the list of dataset labels that are blocking export for the given operators. * Used to display user-friendly error messages about which datasets are causing restrictions. 
From 895f88b4e6fb06f5495b940e9f39860a5c70bd45 Mon Sep 17 00:00:00 2001 From: Seongjin Yoon Date: Tue, 30 Sep 2025 14:57:10 -0700 Subject: [PATCH 07/20] add comment --- .../dashboard/user/workflow/WorkflowExecutionsResource.scala | 2 -- 1 file changed, 2 deletions(-) diff --git a/core/amber/src/main/scala/edu/uci/ics/texera/web/resource/dashboard/user/workflow/WorkflowExecutionsResource.scala b/core/amber/src/main/scala/edu/uci/ics/texera/web/resource/dashboard/user/workflow/WorkflowExecutionsResource.scala index 82eb4b8b645..ce0fb5f14f9 100644 --- a/core/amber/src/main/scala/edu/uci/ics/texera/web/resource/dashboard/user/workflow/WorkflowExecutionsResource.scala +++ b/core/amber/src/main/scala/edu/uci/ics/texera/web/resource/dashboard/user/workflow/WorkflowExecutionsResource.scala @@ -103,7 +103,6 @@ object WorkflowExecutionsResource { } } -// /** * Represents a dataset that has access restrictions for export. * Used to track which datasets are non-downloadable and owned by other users. @@ -276,7 +275,6 @@ object WorkflowExecutionsResource { restrictionMap.toMap } - // def insertOperatorPortResultUri( eid: ExecutionIdentity, From b926ccc41dab99b1bfbb372c69a3b73054070284 Mon Sep 17 00:00:00 2001 From: Seongjin Yoon Date: Tue, 30 Sep 2025 18:37:01 -0700 Subject: [PATCH 08/20] rename funtions + remove dead code --- .../user/workflow/WorkflowExecutionsResource.scala | 6 +++--- .../result-exportation/result-exportation.component.ts | 3 --- 2 files changed, 3 insertions(+), 6 deletions(-) diff --git a/core/amber/src/main/scala/edu/uci/ics/texera/web/resource/dashboard/user/workflow/WorkflowExecutionsResource.scala b/core/amber/src/main/scala/edu/uci/ics/texera/web/resource/dashboard/user/workflow/WorkflowExecutionsResource.scala index ce0fb5f14f9..ecab446828a 100644 --- a/core/amber/src/main/scala/edu/uci/ics/texera/web/resource/dashboard/user/workflow/WorkflowExecutionsResource.scala +++ b/core/amber/src/main/scala/edu/uci/ics/texera/web/resource/dashboard/user/workflow/WorkflowExecutionsResource.scala @@ -186,7 +186,7 @@ object WorkflowExecutionsResource { * @param currentUser The current user making the export request * @return Map of operator ID -> Set of restricted datasets that block its export */ - private def computeDatasetRestrictionMap( + private def getNonDownloadableOperatorMap( wid: Int, currentUser: UserPojo ): Map[String, Set[RestrictedDataset]] = { @@ -877,8 +877,8 @@ class WorkflowExecutionsResource { .build() } - // Get ALL restrictions in workflow - val datasetRestrictions = computeDatasetRestrictionMap(request.workflowId, user.user) + // Get ALL non-downloadable in workflow + val datasetRestrictions = getNonDownloadableOperatorMap(request.workflowId, user.user) // Filter to only user's selection val restrictedOperators = request.operators.filter(op => datasetRestrictions.contains(op.id)) // Check if any selected operator is restricted diff --git a/core/gui/src/app/workspace/component/result-exportation/result-exportation.component.ts b/core/gui/src/app/workspace/component/result-exportation/result-exportation.component.ts index fef9fe1386d..74b1a75dbfa 100644 --- a/core/gui/src/app/workspace/component/result-exportation/result-exportation.component.ts +++ b/core/gui/src/app/workspace/component/result-exportation/result-exportation.component.ts @@ -170,9 +170,6 @@ export class ResultExportationComponent implements OnInit { } onClickExportResult(destination: "dataset" | "local", dataset: DashboardDataset = {} as DashboardDataset) { - if (this.isExportRestricted) { - return; - } 
const datasetIds = destination === "dataset" ? [dataset.dataset.did].filter((id): id is number => id !== undefined) : []; this.workflowResultExportService.exportWorkflowExecutionResult( From 0e6c62493d5df580685f2d98356fcd7c2cf40abc Mon Sep 17 00:00:00 2001 From: Seongjin Yoon Date: Tue, 30 Sep 2025 18:45:40 -0700 Subject: [PATCH 09/20] fix --- .../dashboard/user/workflow/WorkflowExecutionsResource.scala | 4 ++-- .../result-exportation/result-exportation.component.html | 2 +- .../result-exportation/result-exportation.component.ts | 4 ++-- 3 files changed, 5 insertions(+), 5 deletions(-) diff --git a/core/amber/src/main/scala/edu/uci/ics/texera/web/resource/dashboard/user/workflow/WorkflowExecutionsResource.scala b/core/amber/src/main/scala/edu/uci/ics/texera/web/resource/dashboard/user/workflow/WorkflowExecutionsResource.scala index ecab446828a..fca3848fd70 100644 --- a/core/amber/src/main/scala/edu/uci/ics/texera/web/resource/dashboard/user/workflow/WorkflowExecutionsResource.scala +++ b/core/amber/src/main/scala/edu/uci/ics/texera/web/resource/dashboard/user/workflow/WorkflowExecutionsResource.scala @@ -147,7 +147,7 @@ object WorkflowExecutionsResource { * @param cache A cache to store lookup results * @return Some(true) if downloadable, Some(false) if not, None if dataset doesn't exist */ - private def lookupDatasetDownloadable( + private def isDownloadableDataset( dataset: RestrictedDataset, cache: mutable.Map[(String, String), Option[Boolean]] ): Option[Boolean] = { @@ -231,7 +231,7 @@ object WorkflowExecutionsResource { Option(currentUser.getEmail) .exists(_.equalsIgnoreCase(dataset.ownerEmail)) if (!isOwner) { - lookupDatasetDownloadable(dataset, datasetStatusCache) match { + isDownloadableDataset(dataset, datasetStatusCache) match { case Some(value) if !value => restrictedSourceMap.update(operatorId, dataset) case _ => diff --git a/core/gui/src/app/workspace/component/result-exportation/result-exportation.component.html b/core/gui/src/app/workspace/component/result-exportation/result-exportation.component.html index 344fd4ec89d..1624a53a04d 100644 --- a/core/gui/src/app/workspace/component/result-exportation/result-exportation.component.html +++ b/core/gui/src/app/workspace/component/result-exportation/result-exportation.component.html @@ -20,7 +20,7 @@
+ *ngIf="hasPartialNonDownloadable && blockingDatasetLabels.length > 0"> 0; - this.hasPartialRestriction = this.exportableOperatorIds.length > 0 && this.blockedOperatorIds.length > 0; + this.hasPartialNonDownloadable = this.exportableOperatorIds.length > 0 && this.blockedOperatorIds.length > 0; if (operatorIds.length === 0) { // No operators highlighted From d3b5dfacaa863bdd07190e4266364b44b5ffcc3d Mon Sep 17 00:00:00 2001 From: Seongjin Yoon Date: Tue, 30 Sep 2025 18:49:01 -0700 Subject: [PATCH 10/20] fix --- .../dashboard/user/workflow/WorkflowExecutionsResource.scala | 3 +-- .../result-exportation/result-exportation.component.ts | 4 +--- 2 files changed, 2 insertions(+), 5 deletions(-) diff --git a/core/amber/src/main/scala/edu/uci/ics/texera/web/resource/dashboard/user/workflow/WorkflowExecutionsResource.scala b/core/amber/src/main/scala/edu/uci/ics/texera/web/resource/dashboard/user/workflow/WorkflowExecutionsResource.scala index fca3848fd70..51c9f73767d 100644 --- a/core/amber/src/main/scala/edu/uci/ics/texera/web/resource/dashboard/user/workflow/WorkflowExecutionsResource.scala +++ b/core/amber/src/main/scala/edu/uci/ics/texera/web/resource/dashboard/user/workflow/WorkflowExecutionsResource.scala @@ -869,13 +869,12 @@ class WorkflowExecutionsResource { @Auth user: SessionUser ): Response = { - if (request.operators.isEmpty) { + if (request.operators.size <= 0) return Response .status(Response.Status.BAD_REQUEST) .`type`(MediaType.APPLICATION_JSON) .entity(Map("error" -> "No operator selected").asJava) .build() - } // Get ALL non-downloadable in workflow val datasetRestrictions = getNonDownloadableOperatorMap(request.workflowId, user.user) diff --git a/core/gui/src/app/workspace/component/result-exportation/result-exportation.component.ts b/core/gui/src/app/workspace/component/result-exportation/result-exportation.component.ts index 1c11b5aa59c..e5834462611 100644 --- a/core/gui/src/app/workspace/component/result-exportation/result-exportation.component.ts +++ b/core/gui/src/app/workspace/component/result-exportation/result-exportation.component.ts @@ -128,15 +128,13 @@ export class ResultExportationComponent implements OnInit { return; } - const idsToEvaluate = this.exportableOperatorIds; - // Assume they're all table or visualization // until we find an operator that isn't let allTable = true; let allVisualization = true; let anyBinaryData = false; - for (const operatorId of idsToEvaluate) { + for (const operatorId of this.exportableOperatorIds) { const outputTypes = this.workflowResultService.determineOutputTypes(operatorId); if (!outputTypes.hasAnyResult) { continue; From 46ce1638a18a67c34f48b2776965d9f36395c4e3 Mon Sep 17 00:00:00 2001 From: Seongjin Yoon Date: Tue, 30 Sep 2025 18:52:37 -0700 Subject: [PATCH 11/20] fix --- .../dashboard/user/workflow/WorkflowExecutionsResource.scala | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/core/amber/src/main/scala/edu/uci/ics/texera/web/resource/dashboard/user/workflow/WorkflowExecutionsResource.scala b/core/amber/src/main/scala/edu/uci/ics/texera/web/resource/dashboard/user/workflow/WorkflowExecutionsResource.scala index 51c9f73767d..4a2e9d21017 100644 --- a/core/amber/src/main/scala/edu/uci/ics/texera/web/resource/dashboard/user/workflow/WorkflowExecutionsResource.scala +++ b/core/amber/src/main/scala/edu/uci/ics/texera/web/resource/dashboard/user/workflow/WorkflowExecutionsResource.scala @@ -194,7 +194,7 @@ object WorkflowExecutionsResource { val workflowRecord = context .select(WORKFLOW.CONTENT) .from(WORKFLOW) 
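// Note: the revised WHERE clause below also filters out rows whose CONTENT is NULL or empty,
// which is why the separate content == null || content.isEmpty check is removed in this patch.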
- .where(WORKFLOW.WID.eq(wid)) + .where(WORKFLOW.WID.eq(wid).and(WORKFLOW.CONTENT.isNotNull).and(WORKFLOW.CONTENT.ne(""))) .fetchOne() if (workflowRecord == null) { @@ -202,9 +202,6 @@ object WorkflowExecutionsResource { } val content = workflowRecord.value1() - if (content == null || content.isEmpty) { - return Map.empty - } val rootNode = try { From 5cdfc558c658e210c5ee0aefe447e8bdd6c8c854 Mon Sep 17 00:00:00 2001 From: Seongjin Yoon Date: Tue, 30 Sep 2025 19:04:14 -0700 Subject: [PATCH 12/20] remove RestrictedDataset --- .../workflow/WorkflowExecutionsResource.scala | 74 +++++++++---------- 1 file changed, 33 insertions(+), 41 deletions(-) diff --git a/core/amber/src/main/scala/edu/uci/ics/texera/web/resource/dashboard/user/workflow/WorkflowExecutionsResource.scala b/core/amber/src/main/scala/edu/uci/ics/texera/web/resource/dashboard/user/workflow/WorkflowExecutionsResource.scala index 4a2e9d21017..94b8d12db1e 100644 --- a/core/amber/src/main/scala/edu/uci/ics/texera/web/resource/dashboard/user/workflow/WorkflowExecutionsResource.scala +++ b/core/amber/src/main/scala/edu/uci/ics/texera/web/resource/dashboard/user/workflow/WorkflowExecutionsResource.scala @@ -103,26 +103,14 @@ object WorkflowExecutionsResource { } } - /** - * Represents a dataset that has access restrictions for export. - * Used to track which datasets are non-downloadable and owned by other users. - * - * @param ownerEmail The email of the dataset owner - * @param datasetName The name of the dataset - */ - private case class RestrictedDataset(ownerEmail: String, datasetName: String) { - def cacheKey: (String, String) = (ownerEmail.toLowerCase, datasetName.toLowerCase) - def label: String = s"$datasetName ($ownerEmail)" - } - /** * Parses a file path to extract dataset information. * Expected format: /ownerEmail/datasetName/... * * @param path The file path from operator properties - * @return Some(RestrictedDataset) if path is valid, None otherwise + * @return Some((ownerEmail, datasetName)) if path is valid, None otherwise */ - private def parseDatasetPath(path: String): Option[RestrictedDataset] = { + private def parseDatasetPath(path: String): Option[(String, String)] = { if (path == null) { return None } @@ -136,23 +124,26 @@ object WorkflowExecutionsResource { } val ownerEmail = segments(0) val datasetName = segments(1) - Some(RestrictedDataset(ownerEmail, datasetName)) + Some((ownerEmail, datasetName)) } /** * Checks if a dataset is downloadable by querying the database. * Uses caching to avoid repeated database queries for the same dataset. 
* - * @param dataset The dataset to check + * @param ownerEmail The email of the dataset owner + * @param datasetName The name of the dataset * @param cache A cache to store lookup results * @return Some(true) if downloadable, Some(false) if not, None if dataset doesn't exist */ private def isDownloadableDataset( - dataset: RestrictedDataset, + ownerEmail: String, + datasetName: String, cache: mutable.Map[(String, String), Option[Boolean]] ): Option[Boolean] = { + val cacheKey = (ownerEmail.toLowerCase, datasetName.toLowerCase) cache.getOrElseUpdate( - dataset.cacheKey, { + cacheKey, { val record = context .select(DATASET.IS_DOWNLOADABLE) .from(DATASET) @@ -160,8 +151,8 @@ object WorkflowExecutionsResource { .on(DATASET.OWNER_UID.eq(USER.UID)) .where( USER.EMAIL - .equalIgnoreCase(dataset.ownerEmail) - .and(DATASET.NAME.equalIgnoreCase(dataset.datasetName)) + .equalIgnoreCase(ownerEmail) + .and(DATASET.NAME.equalIgnoreCase(datasetName)) ) .fetchOne() if (record == null) { @@ -184,12 +175,12 @@ object WorkflowExecutionsResource { * * @param wid The workflow ID * @param currentUser The current user making the export request - * @return Map of operator ID -> Set of restricted datasets that block its export + * @return Map of operator ID -> Set of (ownerEmail, datasetName) tuples that block its export */ private def getNonDownloadableOperatorMap( wid: Int, currentUser: UserPojo - ): Map[String, Set[RestrictedDataset]] = { + ): Map[String, Set[(String, String)]] = { // Load workflow val workflowRecord = context .select(WORKFLOW.CONTENT) @@ -215,7 +206,7 @@ object WorkflowExecutionsResource { // Find source operators val datasetStatusCache = mutable.Map.empty[(String, String), Option[Boolean]] - val restrictedSourceMap = mutable.Map.empty[String, RestrictedDataset] + val restrictedSourceMap = mutable.Map.empty[String, (String, String)] val adjacency = mutable.Map.empty[String, mutable.ListBuffer[String]] operatorsNode.elements().asScala.foreach { operatorNode => @@ -223,17 +214,18 @@ object WorkflowExecutionsResource { if (operatorId.nonEmpty) { val fileNameNode = operatorNode.path("operatorProperties").path("fileName") if (fileNameNode.isTextual) { - parseDatasetPath(fileNameNode.asText()).foreach { dataset => - val isOwner = - Option(currentUser.getEmail) - .exists(_.equalsIgnoreCase(dataset.ownerEmail)) - if (!isOwner) { - isDownloadableDataset(dataset, datasetStatusCache) match { - case Some(value) if !value => - restrictedSourceMap.update(operatorId, dataset) - case _ => + parseDatasetPath(fileNameNode.asText()).foreach { + case (ownerEmail, datasetName) => + val isOwner = + Option(currentUser.getEmail) + .exists(_.equalsIgnoreCase(ownerEmail)) + if (!isOwner) { + isDownloadableDataset(ownerEmail, datasetName, datasetStatusCache) match { + case Some(value) if !value => + restrictedSourceMap.update(operatorId, (ownerEmail, datasetName)) + case _ => + } } - } } } } @@ -250,8 +242,8 @@ object WorkflowExecutionsResource { } // BFS to propagate restrictions - val restrictionMap = mutable.Map.empty[String, Set[RestrictedDataset]] - val queue = mutable.Queue.empty[(String, Set[RestrictedDataset])] + val restrictionMap = mutable.Map.empty[String, Set[(String, String)]] + val queue = mutable.Queue.empty[(String, Set[(String, String)])] restrictedSourceMap.foreach { case (operatorId, dataset) => @@ -879,17 +871,17 @@ class WorkflowExecutionsResource { val restrictedOperators = request.operators.filter(op => datasetRestrictions.contains(op.id)) // Check if any selected operator is restricted if 
(restrictedOperators.nonEmpty) { - val errorMessage = restrictedOperators - .map { op => - val datasets = datasetRestrictions(op.id).map(_.label).toList.sorted - s"Operator ${op.id} cannot be exported because it depends on dataset(s): ${datasets.mkString(", ")}" + val restrictedDatasets = restrictedOperators.flatMap { op => + datasetRestrictions(op.id).map { + case (ownerEmail, datasetName) => + Map("operatorId" -> op.id, "ownerEmail" -> ownerEmail, "datasetName" -> datasetName).asJava } - .mkString("; ") + } return Response .status(Response.Status.FORBIDDEN) .`type`(MediaType.APPLICATION_JSON) - .entity(Map("error" -> errorMessage).asJava) + .entity(Map("error" -> "Export blocked due to dataset restrictions", "restrictedDatasets" -> restrictedDatasets.asJava).asJava) .build() } From 227f3818011b5b86d2baddbe8b5736a57cc7c6c7 Mon Sep 17 00:00:00 2001 From: Seongjin Yoon Date: Tue, 30 Sep 2025 19:08:08 -0700 Subject: [PATCH 13/20] change location parseDatasetPath --- .../workflow/WorkflowExecutionsResource.scala | 28 ++----------------- .../ics/amber/core/storage/FileResolver.scala | 24 ++++++++++++++++ 2 files changed, 26 insertions(+), 26 deletions(-) diff --git a/core/amber/src/main/scala/edu/uci/ics/texera/web/resource/dashboard/user/workflow/WorkflowExecutionsResource.scala b/core/amber/src/main/scala/edu/uci/ics/texera/web/resource/dashboard/user/workflow/WorkflowExecutionsResource.scala index 94b8d12db1e..30329ea9df3 100644 --- a/core/amber/src/main/scala/edu/uci/ics/texera/web/resource/dashboard/user/workflow/WorkflowExecutionsResource.scala +++ b/core/amber/src/main/scala/edu/uci/ics/texera/web/resource/dashboard/user/workflow/WorkflowExecutionsResource.scala @@ -20,7 +20,7 @@ package edu.uci.ics.texera.web.resource.dashboard.user.workflow import edu.uci.ics.amber.core.storage.result.ExecutionResourcesMapping -import edu.uci.ics.amber.core.storage.{DocumentFactory, VFSResourceType, VFSURIFactory} +import edu.uci.ics.amber.core.storage.{DocumentFactory, FileResolver, VFSResourceType, VFSURIFactory} import edu.uci.ics.amber.core.tuple.Tuple import edu.uci.ics.amber.core.virtualidentity._ import edu.uci.ics.amber.core.workflow.{GlobalPortIdentity, PortIdentity} @@ -103,30 +103,6 @@ object WorkflowExecutionsResource { } } - /** - * Parses a file path to extract dataset information. - * Expected format: /ownerEmail/datasetName/... - * - * @param path The file path from operator properties - * @return Some((ownerEmail, datasetName)) if path is valid, None otherwise - */ - private def parseDatasetPath(path: String): Option[(String, String)] = { - if (path == null) { - return None - } - val trimmed = path.trim - if (!trimmed.startsWith("/")) { - return None - } - val segments = trimmed.split("/").filter(_.nonEmpty) - if (segments.length < 4) { - return None - } - val ownerEmail = segments(0) - val datasetName = segments(1) - Some((ownerEmail, datasetName)) - } - /** * Checks if a dataset is downloadable by querying the database. * Uses caching to avoid repeated database queries for the same dataset. 
@@ -214,7 +190,7 @@ object WorkflowExecutionsResource { if (operatorId.nonEmpty) { val fileNameNode = operatorNode.path("operatorProperties").path("fileName") if (fileNameNode.isTextual) { - parseDatasetPath(fileNameNode.asText()).foreach { + FileResolver.parseDatasetPath(fileNameNode.asText()).foreach { case (ownerEmail, datasetName) => val isOwner = Option(currentUser.getEmail) diff --git a/core/workflow-core/src/main/scala/edu/uci/ics/amber/core/storage/FileResolver.scala b/core/workflow-core/src/main/scala/edu/uci/ics/amber/core/storage/FileResolver.scala index 533b84f9861..7643c72c312 100644 --- a/core/workflow-core/src/main/scala/edu/uci/ics/amber/core/storage/FileResolver.scala +++ b/core/workflow-core/src/main/scala/edu/uci/ics/amber/core/storage/FileResolver.scala @@ -168,4 +168,28 @@ object FileResolver { case _: Exception => false // Invalid URI format } } + + /** + * Parses a dataset file path to extract owner email and dataset name. + * Expected format: /ownerEmail/datasetName/versionName/fileRelativePath + * + * @param path The file path from operator properties + * @return Some((ownerEmail, datasetName)) if path is valid, None otherwise + */ + def parseDatasetPath(path: String): Option[(String, String)] = { + if (path == null) { + return None + } + val trimmed = path.trim + if (!trimmed.startsWith("/")) { + return None + } + val segments = trimmed.split("/").filter(_.nonEmpty) + if (segments.length < 4) { + return None + } + val ownerEmail = segments(0) + val datasetName = segments(1) + Some((ownerEmail, datasetName)) + } } From 3afbd419441024a4f2b85726d73ac3d0f0faca90 Mon Sep 17 00:00:00 2001 From: Seongjin Yoon Date: Wed, 1 Oct 2025 15:03:12 -0700 Subject: [PATCH 14/20] single query to check whether each dataset is downloadable or not --- .../workflow/WorkflowExecutionsResource.scala | 105 +++++++++--------- 1 file changed, 53 insertions(+), 52 deletions(-) diff --git a/core/amber/src/main/scala/edu/uci/ics/texera/web/resource/dashboard/user/workflow/WorkflowExecutionsResource.scala b/core/amber/src/main/scala/edu/uci/ics/texera/web/resource/dashboard/user/workflow/WorkflowExecutionsResource.scala index 30329ea9df3..c469bea2331 100644 --- a/core/amber/src/main/scala/edu/uci/ics/texera/web/resource/dashboard/user/workflow/WorkflowExecutionsResource.scala +++ b/core/amber/src/main/scala/edu/uci/ics/texera/web/resource/dashboard/user/workflow/WorkflowExecutionsResource.scala @@ -20,7 +20,12 @@ package edu.uci.ics.texera.web.resource.dashboard.user.workflow import edu.uci.ics.amber.core.storage.result.ExecutionResourcesMapping -import edu.uci.ics.amber.core.storage.{DocumentFactory, FileResolver, VFSResourceType, VFSURIFactory} +import edu.uci.ics.amber.core.storage.{ + DocumentFactory, + FileResolver, + VFSResourceType, + VFSURIFactory +} import edu.uci.ics.amber.core.tuple.Tuple import edu.uci.ics.amber.core.virtualidentity._ import edu.uci.ics.amber.core.workflow.{GlobalPortIdentity, PortIdentity} @@ -103,43 +108,6 @@ object WorkflowExecutionsResource { } } - /** - * Checks if a dataset is downloadable by querying the database. - * Uses caching to avoid repeated database queries for the same dataset. 
- * - * @param ownerEmail The email of the dataset owner - * @param datasetName The name of the dataset - * @param cache A cache to store lookup results - * @return Some(true) if downloadable, Some(false) if not, None if dataset doesn't exist - */ - private def isDownloadableDataset( - ownerEmail: String, - datasetName: String, - cache: mutable.Map[(String, String), Option[Boolean]] - ): Option[Boolean] = { - val cacheKey = (ownerEmail.toLowerCase, datasetName.toLowerCase) - cache.getOrElseUpdate( - cacheKey, { - val record = context - .select(DATASET.IS_DOWNLOADABLE) - .from(DATASET) - .join(USER) - .on(DATASET.OWNER_UID.eq(USER.UID)) - .where( - USER.EMAIL - .equalIgnoreCase(ownerEmail) - .and(DATASET.NAME.equalIgnoreCase(datasetName)) - ) - .fetchOne() - if (record == null) { - None - } else { - Option(record.value1()) - } - } - ) - } - /** * Computes which operators in a workflow are restricted due to dataset access controls. * @@ -180,10 +148,8 @@ object WorkflowExecutionsResource { val operatorsNode = rootNode.path("operators") val linksNode = rootNode.path("links") - // Find source operators - val datasetStatusCache = mutable.Map.empty[(String, String), Option[Boolean]] - val restrictedSourceMap = mutable.Map.empty[String, (String, String)] - val adjacency = mutable.Map.empty[String, mutable.ListBuffer[String]] + // Collect all datasets used by operators (that user doesn't own) + val operatorDatasets = mutable.Map.empty[String, (String, String)] operatorsNode.elements().asScala.foreach { operatorNode => val operatorId = operatorNode.path("operatorID").asText("") @@ -196,24 +162,50 @@ object WorkflowExecutionsResource { Option(currentUser.getEmail) .exists(_.equalsIgnoreCase(ownerEmail)) if (!isOwner) { - isDownloadableDataset(ownerEmail, datasetName, datasetStatusCache) match { - case Some(value) if !value => - restrictedSourceMap.update(operatorId, (ownerEmail, datasetName)) - case _ => - } + operatorDatasets.update(operatorId, (ownerEmail, datasetName)) } } } } } + if (operatorDatasets.isEmpty) { + return Map.empty + } + + // Query all datasets + val uniqueDatasets = operatorDatasets.values.toSet + val conditions = uniqueDatasets.map { + case (ownerEmail, datasetName) => + USER.EMAIL.equalIgnoreCase(ownerEmail).and(DATASET.NAME.equalIgnoreCase(datasetName)) + } + + val nonDownloadableDatasets = context + .select(USER.EMAIL, DATASET.NAME) + .from(DATASET) + .join(USER) + .on(DATASET.OWNER_UID.eq(USER.UID)) + .where(conditions.reduce((a, b) => a.or(b))) + .and(DATASET.IS_DOWNLOADABLE.eq(false)) + .fetch() + .asScala + .map(record => (record.value1().toLowerCase, record.value2().toLowerCase)) + .toSet + + // Filter to only operators with non-downloadable datasets + val restrictedSourceMap = operatorDatasets.filter { + case (_, (ownerEmail, datasetName)) => + nonDownloadableDatasets.contains((ownerEmail.toLowerCase, datasetName.toLowerCase)) + } + // Build dependency graph + val adjacency = mutable.Map.empty[String, mutable.ListBuffer[String]] + linksNode.elements().asScala.foreach { linkNode => val sourceId = linkNode.path("source").path("operatorID").asText("") val targetId = linkNode.path("target").path("operatorID").asText("") if (sourceId.nonEmpty && targetId.nonEmpty) { - val targets = adjacency.getOrElseUpdate(sourceId, mutable.ListBuffer.empty[String]) - targets += targetId + adjacency.getOrElseUpdate(sourceId, mutable.ListBuffer.empty[String]) += targetId } } @@ -834,7 +826,7 @@ class WorkflowExecutionsResource { @Auth user: SessionUser ): Response = { - if 
(request.operators.size <= 0) + if (request.operators.size <= 0) return Response .status(Response.Status.BAD_REQUEST) .`type`(MediaType.APPLICATION_JSON) @@ -850,14 +842,23 @@ class WorkflowExecutionsResource { val restrictedDatasets = restrictedOperators.flatMap { op => datasetRestrictions(op.id).map { case (ownerEmail, datasetName) => - Map("operatorId" -> op.id, "ownerEmail" -> ownerEmail, "datasetName" -> datasetName).asJava + Map( + "operatorId" -> op.id, + "ownerEmail" -> ownerEmail, + "datasetName" -> datasetName + ).asJava } } return Response .status(Response.Status.FORBIDDEN) .`type`(MediaType.APPLICATION_JSON) - .entity(Map("error" -> "Export blocked due to dataset restrictions", "restrictedDatasets" -> restrictedDatasets.asJava).asJava) + .entity( + Map( + "error" -> "Export blocked due to dataset restrictions", + "restrictedDatasets" -> restrictedDatasets.asJava + ).asJava + ) .build() } From 91ed30c19558b0cfd1b0dbf521aaaed4e11bee89 Mon Sep 17 00:00:00 2001 From: Seongjin Yoon Date: Wed, 1 Oct 2025 15:25:15 -0700 Subject: [PATCH 15/20] removed tolower --- .../user/workflow/WorkflowExecutionsResource.scala | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/core/amber/src/main/scala/edu/uci/ics/texera/web/resource/dashboard/user/workflow/WorkflowExecutionsResource.scala b/core/amber/src/main/scala/edu/uci/ics/texera/web/resource/dashboard/user/workflow/WorkflowExecutionsResource.scala index c469bea2331..df924775e2f 100644 --- a/core/amber/src/main/scala/edu/uci/ics/texera/web/resource/dashboard/user/workflow/WorkflowExecutionsResource.scala +++ b/core/amber/src/main/scala/edu/uci/ics/texera/web/resource/dashboard/user/workflow/WorkflowExecutionsResource.scala @@ -189,13 +189,13 @@ object WorkflowExecutionsResource { .and(DATASET.IS_DOWNLOADABLE.eq(false)) .fetch() .asScala - .map(record => (record.value1().toLowerCase, record.value2().toLowerCase)) + .map(record => (record.value1(), record.value2())) .toSet // Filter to only operators with non-downloadable datasets val restrictedSourceMap = operatorDatasets.filter { - case (_, (ownerEmail, datasetName)) => - nonDownloadableDatasets.contains((ownerEmail.toLowerCase, datasetName.toLowerCase)) + case (_, dataset) => + nonDownloadableDatasets.contains(dataset) } // Build dependency graph From e91bb1836579129fd1a8e2f9c285e9c544841d4f Mon Sep 17 00:00:00 2001 From: Seongjin Yoon Date: Wed, 1 Oct 2025 16:25:35 -0700 Subject: [PATCH 16/20] reuse code --- .../workflow/WorkflowExecutionsResource.scala | 4 +- .../ics/amber/core/storage/FileResolver.scala | 52 ++++++++++++------- 2 files changed, 36 insertions(+), 20 deletions(-) diff --git a/core/amber/src/main/scala/edu/uci/ics/texera/web/resource/dashboard/user/workflow/WorkflowExecutionsResource.scala b/core/amber/src/main/scala/edu/uci/ics/texera/web/resource/dashboard/user/workflow/WorkflowExecutionsResource.scala index df924775e2f..ad4814d8e68 100644 --- a/core/amber/src/main/scala/edu/uci/ics/texera/web/resource/dashboard/user/workflow/WorkflowExecutionsResource.scala +++ b/core/amber/src/main/scala/edu/uci/ics/texera/web/resource/dashboard/user/workflow/WorkflowExecutionsResource.scala @@ -156,7 +156,7 @@ object WorkflowExecutionsResource { if (operatorId.nonEmpty) { val fileNameNode = operatorNode.path("operatorProperties").path("fileName") if (fileNameNode.isTextual) { - FileResolver.parseDatasetPath(fileNameNode.asText()).foreach { + FileResolver.parseDatasetOwnerAndName(fileNameNode.asText()).foreach { case (ownerEmail, datasetName) => val isOwner = 
Option(currentUser.getEmail) @@ -827,7 +827,7 @@ class WorkflowExecutionsResource { ): Response = { if (request.operators.size <= 0) - return Response + Response .status(Response.Status.BAD_REQUEST) .`type`(MediaType.APPLICATION_JSON) .entity(Map("error" -> "No operator selected").asJava) diff --git a/core/workflow-core/src/main/scala/edu/uci/ics/amber/core/storage/FileResolver.scala b/core/workflow-core/src/main/scala/edu/uci/ics/amber/core/storage/FileResolver.scala index 7643c72c312..70d1cccab5e 100644 --- a/core/workflow-core/src/main/scala/edu/uci/ics/amber/core/storage/FileResolver.scala +++ b/core/workflow-core/src/main/scala/edu/uci/ics/amber/core/storage/FileResolver.scala @@ -75,6 +75,31 @@ object FileResolver { filePath.toUri } + /** + * Parses a dataset file path and extracts its components. + * Expected format: /ownerEmail/datasetName/versionName/fileRelativePath + * + * @param fileName The file path to parse + * @return Some((ownerEmail, datasetName, versionName, fileRelativePath)) if valid, None otherwise + */ + private def parseDatasetFilePath( + fileName: String + ): Option[(String, String, String, Array[String])] = { + val filePath = Paths.get(fileName) + val pathSegments = (0 until filePath.getNameCount).map(filePath.getName(_).toString).toArray + + if (pathSegments.length < 4) { + return None + } + + val ownerEmail = pathSegments(0) + val datasetName = pathSegments(1) + val versionName = pathSegments(2) + val fileRelativePathSegments = pathSegments.drop(3) + + Some((ownerEmail, datasetName, versionName, fileRelativePathSegments)) + } + /** * Attempts to resolve a given fileName to a URI. * @@ -88,14 +113,13 @@ object FileResolver { * @throws FileNotFoundException if the dataset file does not exist or cannot be created */ private def datasetResolveFunc(fileName: String): URI = { - val filePath = Paths.get(fileName) - val pathSegments = (0 until filePath.getNameCount).map(filePath.getName(_).toString).toArray + val (ownerEmail, datasetName, versionName, fileRelativePathSegments) = + parseDatasetFilePath(fileName).getOrElse( + throw new FileNotFoundException(s"Dataset file $fileName not found.") + ) - // extract info from the user-given fileName - val ownerEmail = pathSegments(0) - val datasetName = pathSegments(1) - val versionName = pathSegments(2) - val fileRelativePath = Paths.get(pathSegments.drop(3).head, pathSegments.drop(3).tail: _*) + val fileRelativePath = + Paths.get(fileRelativePathSegments.head, fileRelativePathSegments.tail: _*) // fetch the dataset and version from DB to get dataset ID and version hash val (dataset, datasetVersion) = @@ -176,20 +200,12 @@ object FileResolver { * @param path The file path from operator properties * @return Some((ownerEmail, datasetName)) if path is valid, None otherwise */ - def parseDatasetPath(path: String): Option[(String, String)] = { + def parseDatasetOwnerAndName(path: String): Option[(String, String)] = { if (path == null) { return None } - val trimmed = path.trim - if (!trimmed.startsWith("/")) { - return None - } - val segments = trimmed.split("/").filter(_.nonEmpty) - if (segments.length < 4) { - return None + parseDatasetFilePath(path).map { + case (ownerEmail, datasetName, _, _) => (ownerEmail, datasetName) } - val ownerEmail = segments(0) - val datasetName = segments(1) - Some((ownerEmail, datasetName)) } } From 9e07a000d4d538bc48d570ea3580dc643920a827 Mon Sep 17 00:00:00 2001 From: Seongjin Yoon Date: Sun, 5 Oct 2025 16:11:17 -0700 Subject: [PATCH 17/20] fix --- .../result-exportation.component.ts | 28 ++- 
.../workflow-result-export.service.ts | 161 +++++++++--------- 2 files changed, 106 insertions(+), 83 deletions(-) diff --git a/core/gui/src/app/workspace/component/result-exportation/result-exportation.component.ts b/core/gui/src/app/workspace/component/result-exportation/result-exportation.component.ts index e5834462611..993598891d3 100644 --- a/core/gui/src/app/workspace/component/result-exportation/result-exportation.component.ts +++ b/core/gui/src/app/workspace/component/result-exportation/result-exportation.component.ts @@ -19,7 +19,10 @@ import { UntilDestroy, untilDestroyed } from "@ngneat/until-destroy"; import { Component, inject, Input, OnInit } from "@angular/core"; -import { WorkflowResultExportService } from "../../service/workflow-result-export/workflow-result-export.service"; +import { + WorkflowResultExportService, + RestrictionAnalysisResult, +} from "../../service/workflow-result-export/workflow-result-export.service"; import { DashboardDataset } from "../../../dashboard/type/dashboard-dataset.interface"; import { DatasetService } from "../../../dashboard/service/user/dataset/dataset.service"; import { NZ_MODAL_DATA, NzModalRef, NzModalService } from "ng-zorro-antd/modal"; @@ -57,6 +60,7 @@ export class ResultExportationComponent implements OnInit { isExportRestricted: boolean = false; hasPartialNonDownloadable: boolean = false; blockingDatasetLabels: string[] = []; + restrictedOperatorMap = new Map>(); userAccessibleDatasets: DashboardDataset[] = []; filteredUserAccessibleDatasets: DashboardDataset[] = []; @@ -81,9 +85,12 @@ export class ResultExportationComponent implements OnInit { }); this.workflowResultExportService - .refreshDatasetMetadata() + .computeRestrictionAnalysis() .pipe(untilDestroyed(this)) - .subscribe(() => this.updateOutputType()); + .subscribe(restrictionResult => { + this.restrictedOperatorMap = restrictionResult.restrictedOperatorMap; + this.updateOutputType(); + }); this.computingUnitStatusService .getSelectedComputingUnit() @@ -107,9 +114,18 @@ export class ResultExportationComponent implements OnInit { operatorIds = this.workflowActionService.getJointGraphWrapper().getCurrentHighlightedOperatorIDs(); } - this.exportableOperatorIds = this.workflowResultExportService.getExportableOperatorIds(operatorIds); - this.blockedOperatorIds = this.workflowResultExportService.getBlockedOperatorIds(operatorIds); - this.blockingDatasetLabels = this.workflowResultExportService.getBlockingDatasets(operatorIds); + this.exportableOperatorIds = this.workflowResultExportService.getExportableOperatorIds( + operatorIds, + this.restrictedOperatorMap + ); + this.blockedOperatorIds = this.workflowResultExportService.getBlockedOperatorIds( + operatorIds, + this.restrictedOperatorMap + ); + this.blockingDatasetLabels = this.workflowResultExportService.getBlockingDatasets( + operatorIds, + this.restrictedOperatorMap + ); this.isExportRestricted = this.exportableOperatorIds.length === 0 && operatorIds.length > 0; this.hasPartialNonDownloadable = this.exportableOperatorIds.length > 0 && this.blockedOperatorIds.length > 0; diff --git a/core/gui/src/app/workspace/service/workflow-result-export/workflow-result-export.service.ts b/core/gui/src/app/workspace/service/workflow-result-export/workflow-result-export.service.ts index 59bc1a1b3a3..1afd1186b3e 100644 --- a/core/gui/src/app/workspace/service/workflow-result-export/workflow-result-export.service.ts +++ b/core/gui/src/app/workspace/service/workflow-result-export/workflow-result-export.service.ts @@ -36,16 +36,18 @@ import { 
GuiConfigService } from "../../../common/service/gui-config.service"; import { DatasetService } from "../../../dashboard/service/user/dataset/dataset.service"; import { parseFilePathToDatasetFile } from "../../../common/type/dataset-file"; +export interface RestrictionAnalysisResult { + restrictedOperatorMap: Map>; + datasetDownloadableMap: Map; + datasetLabelMap: Map; +} + @Injectable({ providedIn: "root", }) export class WorkflowResultExportService { hasResultToExportOnHighlightedOperators: boolean = false; hasResultToExportOnAllOperators = new BehaviorSubject(false); - private datasetDownloadableMap = new Map(); - private datasetLabelMap = new Map(); - private restrictedOperatorMap = new Map>(); - private datasetListLoaded = false; constructor( private workflowWebsocketService: WorkflowWebsocketService, private workflowActionService: WorkflowActionService, @@ -57,8 +59,6 @@ export class WorkflowResultExportService { private config: GuiConfigService ) { this.registerResultToExportUpdateHandler(); - this.registerRestrictionRecomputeTriggers(); - this.refreshDatasetMetadata().subscribe(); } registerResultToExportUpdateHandler() { @@ -74,59 +74,43 @@ export class WorkflowResultExportService { } /** - * Registers triggers to recompute dataset restrictions when the workflow graph changes. - * Monitors operator/link additions, deletions, property changes, and operator enable/disable events. - */ - private registerRestrictionRecomputeTriggers(): void { - const texeraGraph = this.workflowActionService.getTexeraGraph(); - merge( - texeraGraph.getOperatorAddStream(), - texeraGraph.getOperatorDeleteStream(), - texeraGraph.getOperatorPropertyChangeStream(), - texeraGraph.getLinkAddStream(), - texeraGraph.getLinkDeleteStream(), - texeraGraph.getDisabledOperatorsChangedStream() - ).subscribe(() => { - this.runRestrictionAnalysis(); - }); - } - - /** - * Refreshes dataset metadata from the server and rebuilds local caches. + * Computes restriction analysis on-demand by fetching dataset metadata and analyzing workflow graph. 
* - * Fetches all accessible datasets and their permissions, then updates: - * - datasetDownloadableMap: tracks which datasets are downloadable - * - datasetLabelMap: stores human-readable dataset labels + * Fetches all accessible datasets and their permissions, then: + * - Builds datasetDownloadableMap: tracks which datasets are downloadable + * - Builds datasetLabelMap: stores human-readable dataset labels + * - Performs restriction analysis to identify operators blocked by dataset access controls * * A dataset is considered downloadable if either: * - The dataset's isDownloadable flag is true, OR * - The current user is the dataset owner * - * @returns Observable that completes when metadata is refreshed + * @returns Observable that emits the restriction analysis result */ - public refreshDatasetMetadata(): Observable { - this.datasetListLoaded = false; + public computeRestrictionAnalysis(): Observable { return this.datasetService.retrieveAccessibleDatasets().pipe( take(1), - tap(datasets => { - this.datasetDownloadableMap.clear(); - this.datasetLabelMap.clear(); + map(datasets => { + const datasetDownloadableMap = new Map(); + const datasetLabelMap = new Map(); + datasets.forEach(dataset => { const key = this.buildDatasetKey(dataset.ownerEmail, dataset.dataset.name); const isDownloadable = dataset.dataset.isDownloadable || dataset.isOwner; - this.datasetDownloadableMap.set(key, isDownloadable); - this.datasetLabelMap.set(key, `${dataset.dataset.name} (${dataset.ownerEmail})`); + datasetDownloadableMap.set(key, isDownloadable); + datasetLabelMap.set(key, `${dataset.dataset.name} (${dataset.ownerEmail})`); }); - this.datasetListLoaded = true; - this.runRestrictionAnalysis(); + + const restrictedOperatorMap = this.runRestrictionAnalysis(datasetDownloadableMap, datasetLabelMap); + + return { restrictedOperatorMap, datasetDownloadableMap, datasetLabelMap }; }), - map(() => undefined), catchError(() => { - this.datasetDownloadableMap.clear(); - this.datasetLabelMap.clear(); - this.datasetListLoaded = true; - this.runRestrictionAnalysis(); - return of(undefined); + return of({ + restrictedOperatorMap: new Map>(), + datasetDownloadableMap: new Map(), + datasetLabelMap: new Map(), + }); }) ); } @@ -147,12 +131,18 @@ export class WorkflowResultExportService { * Extracts dataset information from an operator's fileName property. * * Parses file paths in the expected format and validates that the dataset - * exists in our accessible datasets cache. + * exists in the provided dataset maps. * * @param fileName The fileName property from operator properties + * @param datasetDownloadableMap Map tracking which datasets are downloadable + * @param datasetLabelMap Map storing human-readable dataset labels * @returns Object with dataset key and label, or null if invalid/not found */ - private extractDatasetInfo(fileName: unknown): { key: string; label: string } | null { + private extractDatasetInfo( + fileName: unknown, + datasetDownloadableMap: Map, + datasetLabelMap: Map + ): { key: string; label: string } | null { if (typeof fileName !== "string") { return null; } @@ -166,10 +156,10 @@ export class WorkflowResultExportService { return null; } const key = this.buildDatasetKey(ownerEmail, datasetName); - if (!this.datasetDownloadableMap.has(key)) { + if (!datasetDownloadableMap.has(key)) { return null; } - const label = this.datasetLabelMap.get(key) ?? `${datasetName} (${ownerEmail})`; + const label = datasetLabelMap.get(key) ?? 
`${datasetName} (${ownerEmail})`; return { key, label }; } catch { return null; @@ -183,18 +173,19 @@ export class WorkflowResultExportService { * 1. Identifies operators using non-downloadable datasets * 2. Builds a workflow dependency graph from operator links * 3. Uses BFS to propagate restrictions through the graph - * 4. Updates restrictedOperatorMap with results + * 4. Returns a map of restricted operators * * The analysis considers only enabled operators and ignores disabled ones. * Restrictions flow downstream through operator dependencies. + * + * @param datasetDownloadableMap Map tracking which datasets are downloadable + * @param datasetLabelMap Map storing human-readable dataset labels + * @returns Map of operator IDs to sets of blocking dataset labels */ - private runRestrictionAnalysis(): void { - if (!this.datasetListLoaded) { - this.restrictedOperatorMap.clear(); - this.updateExportAvailabilityFlags(); - return; - } - + private runRestrictionAnalysis( + datasetDownloadableMap: Map, + datasetLabelMap: Map + ): Map> { const texeraGraph = this.workflowActionService.getTexeraGraph(); const allOperators = texeraGraph.getAllOperators(); const operatorById = new Map(allOperators.map(op => [op.operatorID, op] as const)); @@ -203,11 +194,15 @@ export class WorkflowResultExportService { // Identify source operators that use non-downloadable datasets enabledOperators.forEach(operator => { - const datasetInfo = this.extractDatasetInfo(operator.operatorProperties?.fileName); + const datasetInfo = this.extractDatasetInfo( + operator.operatorProperties?.fileName, + datasetDownloadableMap, + datasetLabelMap + ); if (!datasetInfo) { return; } - const isDownloadable = this.datasetDownloadableMap.get(datasetInfo.key); + const isDownloadable = datasetDownloadableMap.get(datasetInfo.key); if (isDownloadable === false) { datasetSources.push({ operatorId: operator.operatorID, label: datasetInfo.label }); } @@ -216,9 +211,7 @@ export class WorkflowResultExportService { const restrictions = new Map>(); if (datasetSources.length === 0) { - this.restrictedOperatorMap = restrictions; - this.updateExportAvailabilityFlags(); - return; + return restrictions; } // Build Workflow Dependency Graph @@ -267,9 +260,7 @@ export class WorkflowResultExportService { } } - // Update State - this.restrictedOperatorMap = restrictions; - this.updateExportAvailabilityFlags(); + return restrictions; } /** @@ -314,20 +305,28 @@ export class WorkflowResultExportService { * Filters operator IDs to return only those that are not restricted by dataset access controls. * * @param operatorIds Array of operator IDs to filter + * @param restrictedOperatorMap Map of restricted operators to blocking dataset labels * @returns Array of operator IDs that can be exported */ - public getExportableOperatorIds(operatorIds: readonly string[]): string[] { - return operatorIds.filter(operatorId => !this.restrictedOperatorMap.has(operatorId)); + public getExportableOperatorIds( + operatorIds: readonly string[], + restrictedOperatorMap: Map> + ): string[] { + return operatorIds.filter(operatorId => !restrictedOperatorMap.has(operatorId)); } /** * Filters operator IDs to return only those that are restricted by dataset access controls. 
* * @param operatorIds Array of operator IDs to filter + * @param restrictedOperatorMap Map of restricted operators to blocking dataset labels * @returns Array of operator IDs that are blocked from export */ - public getBlockedOperatorIds(operatorIds: readonly string[]): string[] { - return operatorIds.filter(operatorId => this.restrictedOperatorMap.has(operatorId)); + public getBlockedOperatorIds( + operatorIds: readonly string[], + restrictedOperatorMap: Map> + ): string[] { + return operatorIds.filter(operatorId => restrictedOperatorMap.has(operatorId)); } /** @@ -335,12 +334,16 @@ export class WorkflowResultExportService { * Used to display user-friendly error messages about which datasets are causing restrictions. * * @param operatorIds Array of operator IDs to check + * @param restrictedOperatorMap Map of restricted operators to blocking dataset labels * @returns Array of dataset labels (e.g., "Dataset1 (user@example.com)") */ - public getBlockingDatasets(operatorIds: readonly string[]): string[] { + public getBlockingDatasets( + operatorIds: readonly string[], + restrictedOperatorMap: Map> + ): string[] { const labels = new Set(); operatorIds.forEach(operatorId => { - const datasets = this.restrictedOperatorMap.get(operatorId); + const datasets = restrictedOperatorMap.get(operatorId); datasets?.forEach(label => labels.add(label)); }); return Array.from(labels); @@ -362,9 +365,9 @@ export class WorkflowResultExportService { destination: "dataset" | "local" = "dataset", // default to dataset unit: DashboardWorkflowComputingUnit | null // computing unit for cluster setting ): void { - this.refreshDatasetMetadata() + this.computeRestrictionAnalysis() .pipe(take(1)) - .subscribe(() => + .subscribe(restrictionResult => this.performExport( exportType, workflowName, @@ -374,7 +377,8 @@ export class WorkflowResultExportService { filename, exportAll, destination, - unit + unit, + restrictionResult.restrictedOperatorMap ) ); } @@ -390,6 +394,8 @@ export class WorkflowResultExportService { * 5. Handles response and shows appropriate notifications * * Shows error messages if all operators are blocked, warning messages if some are blocked. + * + * @param restrictedOperatorMap Map of restricted operators from restriction analysis */ private performExport( exportType: string, @@ -400,7 +406,8 @@ export class WorkflowResultExportService { filename: string, exportAll: boolean, destination: "dataset" | "local", - unit: DashboardWorkflowComputingUnit | null + unit: DashboardWorkflowComputingUnit | null, + restrictedOperatorMap: Map> ): void { // Validates configuration and computing unit availability if (!this.config.env.exportExecutionResultEnabled) { @@ -430,10 +437,10 @@ export class WorkflowResultExportService { } // Applies restriction filtering with user feedback - const exportableOperatorIds = this.getExportableOperatorIds(operatorIds); + const exportableOperatorIds = this.getExportableOperatorIds(operatorIds, restrictedOperatorMap); if (exportableOperatorIds.length === 0) { - const datasets = this.getBlockingDatasets(operatorIds); + const datasets = this.getBlockingDatasets(operatorIds, restrictedOperatorMap); const suffix = datasets.length > 0 ? 
`: ${datasets.join(", ")}` : ""; this.notificationService.error( `Cannot export result: selection depends on dataset(s) that are not downloadable${suffix}` @@ -442,7 +449,7 @@ export class WorkflowResultExportService { } if (exportableOperatorIds.length < operatorIds.length) { - const datasets = this.getBlockingDatasets(operatorIds); + const datasets = this.getBlockingDatasets(operatorIds, restrictedOperatorMap); const suffix = datasets.length > 0 ? ` (${datasets.join(", ")})` : ""; this.notificationService.warning( `Some operators were skipped because their results depend on dataset(s) that are not downloadable${suffix}` From c2a42d82e08c9f062db197181ed177e3def5bc65 Mon Sep 17 00:00:00 2001 From: Seongjin Yoon Date: Mon, 6 Oct 2025 11:34:00 -0700 Subject: [PATCH 18/20] fix --- .../workflow/WorkflowExecutionsResource.scala | 31 +++ .../service/user/download/download.service.ts | 17 ++ .../result-exportation.component.ts | 2 +- .../workflow-result-export.service.ts | 211 +++--------------- 4 files changed, 80 insertions(+), 181 deletions(-) diff --git a/core/amber/src/main/scala/edu/uci/ics/texera/web/resource/dashboard/user/workflow/WorkflowExecutionsResource.scala b/core/amber/src/main/scala/edu/uci/ics/texera/web/resource/dashboard/user/workflow/WorkflowExecutionsResource.scala index ad4814d8e68..94f421a4119 100644 --- a/core/amber/src/main/scala/edu/uci/ics/texera/web/resource/dashboard/user/workflow/WorkflowExecutionsResource.scala +++ b/core/amber/src/main/scala/edu/uci/ics/texera/web/resource/dashboard/user/workflow/WorkflowExecutionsResource.scala @@ -818,6 +818,37 @@ class WorkflowExecutionsResource { executionsDao.update(execution) } + /** + * Returns which operators are restricted from export due to dataset access controls. + * This endpoint allows the frontend to check restrictions before attempting export. 
+ * + * @param wid The workflow ID to check + * @param user The authenticated user + * @return JSON map of operator ID -> array of {ownerEmail, datasetName} that block its export + */ + @GET + @Path("/{wid}/result/downloadability") + @Produces(Array(MediaType.APPLICATION_JSON)) + @RolesAllowed(Array("REGULAR", "ADMIN")) + def getWorkflowResultDownloadability( + @PathParam("wid") wid: Integer, + @Auth user: SessionUser + ): Response = { + validateUserCanAccessWorkflow(user.getUser.getUid, wid) + + val datasetRestrictions = getNonDownloadableOperatorMap(wid, user.user) + + // Convert to frontend-friendly format: Map[operatorId -> Array[datasetLabel]] + val restrictionMap = datasetRestrictions.map { + case (operatorId, datasets) => + operatorId -> datasets.map { + case (ownerEmail, datasetName) => s"$datasetName ($ownerEmail)" + }.toArray + }.asJava + + Response.ok(restrictionMap).build() + } + @POST @Path("/result/export") @RolesAllowed(Array("REGULAR", "ADMIN")) diff --git a/core/gui/src/app/dashboard/service/user/download/download.service.ts b/core/gui/src/app/dashboard/service/user/download/download.service.ts index f02a2411b54..cca7e7ac462 100644 --- a/core/gui/src/app/dashboard/service/user/download/download.service.ts +++ b/core/gui/src/app/dashboard/service/user/download/download.service.ts @@ -33,6 +33,7 @@ import { DashboardWorkflowComputingUnit } from "../../../../workspace/types/work var contentDisposition = require("content-disposition"); export const EXPORT_BASE_URL = "result/export"; +export const DOWNLOADABILITY_BASE_URL = "result/downloadability"; interface DownloadableItem { blob: Blob; @@ -44,6 +45,10 @@ export interface ExportWorkflowJsonResponse { message: string; } +export interface WorkflowResultDownloadability { + [operatorId: string]: string[]; // operatorId -> array of dataset labels blocking export +} + @Injectable({ providedIn: "root", }) @@ -115,6 +120,18 @@ export class DownloadService { ); } + /** + * Retrieves workflow result downloadability information from the backend. + * Returns a map of operator IDs to arrays of dataset labels that block their export. + * + * @param workflowId The workflow ID to check + * @returns Observable of downloadability information + */ + public getWorkflowResultDownloadability(workflowId: number): Observable { + const urlPath = `${WORKFLOW_EXECUTIONS_API_BASE_URL}/${workflowId}/${DOWNLOADABILITY_BASE_URL}`; + return this.http.get(urlPath); + } + /** * Export the workflow result. If destination = "local", the server returns a BLOB (file). * Otherwise, it returns JSON with a status message. 
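The downloadability response above is a plain JSON object keyed by operator ID, where each value lists the dataset labels blocking that operator's export; the frontend later converts it into a Map of Sets before filtering the user's selection. Below is a minimal standalone TypeScript sketch of that consumption pattern — the operator IDs and dataset label in the sample payload are hypothetical and serve only to illustrate the shape:

// Shape returned by GET /{wid}/result/downloadability (matches the interface above).
type DownloadabilityResponse = { [operatorId: string]: string[] };

// Hypothetical sample payload, for illustration only.
const sample: DownloadabilityResponse = {
  "CSVFileScan-operator-1": ["sensitive_data (owner@example.com)"],
  "Projection-operator-2": ["sensitive_data (owner@example.com)"],
};

// Convert to Map<operatorId, Set<datasetLabel>>, mirroring the export service's conversion.
const restricted = new Map<string, Set<string>>(
  Object.entries(sample).map(([opId, labels]) => [opId, new Set(labels)] as [string, Set<string>])
);

// Split a hypothetical selection into exportable and blocked operator IDs.
const selection = ["CSVFileScan-operator-1", "Aggregate-operator-3"];
const exportable = selection.filter(id => !restricted.has(id));
const blocked = selection.filter(id => restricted.has(id));
console.log(exportable, blocked); // ["Aggregate-operator-3"] ["CSVFileScan-operator-1"]
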
diff --git a/core/gui/src/app/workspace/component/result-exportation/result-exportation.component.ts b/core/gui/src/app/workspace/component/result-exportation/result-exportation.component.ts index 993598891d3..90a3a46ae6d 100644 --- a/core/gui/src/app/workspace/component/result-exportation/result-exportation.component.ts +++ b/core/gui/src/app/workspace/component/result-exportation/result-exportation.component.ts @@ -21,7 +21,7 @@ import { UntilDestroy, untilDestroyed } from "@ngneat/until-destroy"; import { Component, inject, Input, OnInit } from "@angular/core"; import { WorkflowResultExportService, - RestrictionAnalysisResult, + WorkflowResultDownloadability, } from "../../service/workflow-result-export/workflow-result-export.service"; import { DashboardDataset } from "../../../dashboard/type/dashboard-dataset.interface"; import { DatasetService } from "../../../dashboard/service/user/dataset/dataset.service"; diff --git a/core/gui/src/app/workspace/service/workflow-result-export/workflow-result-export.service.ts b/core/gui/src/app/workspace/service/workflow-result-export/workflow-result-export.service.ts index 1afd1186b3e..ba1c7db7c80 100644 --- a/core/gui/src/app/workspace/service/workflow-result-export/workflow-result-export.service.ts +++ b/core/gui/src/app/workspace/service/workflow-result-export/workflow-result-export.service.ts @@ -33,13 +33,23 @@ import { HttpResponse } from "@angular/common/http"; import { ExportWorkflowJsonResponse } from "../../../dashboard/service/user/download/download.service"; import { DashboardWorkflowComputingUnit } from "../../types/workflow-computing-unit"; import { GuiConfigService } from "../../../common/service/gui-config.service"; -import { DatasetService } from "../../../dashboard/service/user/dataset/dataset.service"; -import { parseFilePathToDatasetFile } from "../../../common/type/dataset-file"; -export interface RestrictionAnalysisResult { +/** + * Result of workflow result downloadability analysis. + * Contains information about which operators are restricted from exporting + * due to non-downloadable dataset dependencies. + */ +export interface WorkflowResultDownloadability { + /** + * Map of operator IDs to sets of blocking dataset labels. + * Key: Operator ID + * Value: Set of human-readable dataset labels (e.g., "dataset_name (owner@email.com)") + * that are blocking this operator from being exported + * + * An operator appears in this map if it directly uses or depends on (through data flow) + * one or more datasets that the current user is not allowed to download. + */ restrictedOperatorMap: Map>; - datasetDownloadableMap: Map; - datasetLabelMap: Map; } @Injectable({ @@ -55,7 +65,6 @@ export class WorkflowResultExportService { private executeWorkflowService: ExecuteWorkflowService, private workflowResultService: WorkflowResultService, private downloadService: DownloadService, - private datasetService: DatasetService, private config: GuiConfigService ) { this.registerResultToExportUpdateHandler(); @@ -74,195 +83,37 @@ export class WorkflowResultExportService { } /** - * Computes restriction analysis on-demand by fetching dataset metadata and analyzing workflow graph. - * - * Fetches all accessible datasets and their permissions, then: - * - Builds datasetDownloadableMap: tracks which datasets are downloadable - * - Builds datasetLabelMap: stores human-readable dataset labels - * - Performs restriction analysis to identify operators blocked by dataset access controls + * Computes restriction analysis by calling the backend API. 
* - * A dataset is considered downloadable if either: - * - The dataset's isDownloadable flag is true, OR - * - The current user is the dataset owner + * The backend analyzes the workflow to identify operators that are restricted from export + * due to non-downloadable dataset dependencies. The restriction propagates through the + * workflow graph via data flow. * * @returns Observable that emits the restriction analysis result */ - public computeRestrictionAnalysis(): Observable { - return this.datasetService.retrieveAccessibleDatasets().pipe( - take(1), - map(datasets => { - const datasetDownloadableMap = new Map(); - const datasetLabelMap = new Map(); + public computeRestrictionAnalysis(): Observable { + const workflowId = this.workflowActionService.getWorkflow().wid; + if (!workflowId) { + return of({ restrictedOperatorMap: new Map>() }); + } - datasets.forEach(dataset => { - const key = this.buildDatasetKey(dataset.ownerEmail, dataset.dataset.name); - const isDownloadable = dataset.dataset.isDownloadable || dataset.isOwner; - datasetDownloadableMap.set(key, isDownloadable); - datasetLabelMap.set(key, `${dataset.dataset.name} (${dataset.ownerEmail})`); + return this.downloadService.getWorkflowResultDownloadability(workflowId).pipe( + map(backendResponse => { + // Convert backend format to Map> + const restrictedOperatorMap = new Map>(); + Object.entries(backendResponse).forEach(([operatorId, datasetLabels]) => { + restrictedOperatorMap.set(operatorId, new Set(datasetLabels)); }); - - const restrictedOperatorMap = this.runRestrictionAnalysis(datasetDownloadableMap, datasetLabelMap); - - return { restrictedOperatorMap, datasetDownloadableMap, datasetLabelMap }; + return { restrictedOperatorMap }; }), catchError(() => { return of({ restrictedOperatorMap: new Map>(), - datasetDownloadableMap: new Map(), - datasetLabelMap: new Map(), }); }) ); } - /** - * Builds a normalized key for dataset lookup in caches. - * Converts both email and dataset name to lowercase for case-insensitive matching. - * - * @param ownerEmail The dataset owner's email - * @param datasetName The dataset name - * @returns Normalized key in format "email::dataset" - */ - private buildDatasetKey(ownerEmail: string, datasetName: string): string { - return `${ownerEmail.toLowerCase()}::${datasetName.toLowerCase()}`; - } - - /** - * Extracts dataset information from an operator's fileName property. - * - * Parses file paths in the expected format and validates that the dataset - * exists in the provided dataset maps. - * - * @param fileName The fileName property from operator properties - * @param datasetDownloadableMap Map tracking which datasets are downloadable - * @param datasetLabelMap Map storing human-readable dataset labels - * @returns Object with dataset key and label, or null if invalid/not found - */ - private extractDatasetInfo( - fileName: unknown, - datasetDownloadableMap: Map, - datasetLabelMap: Map - ): { key: string; label: string } | null { - if (typeof fileName !== "string") { - return null; - } - const trimmed = fileName.trim(); - if (!trimmed.startsWith("/")) { - return null; - } - try { - const { ownerEmail, datasetName } = parseFilePathToDatasetFile(trimmed); - if (!ownerEmail || !datasetName) { - return null; - } - const key = this.buildDatasetKey(ownerEmail, datasetName); - if (!datasetDownloadableMap.has(key)) { - return null; - } - const label = datasetLabelMap.get(key) ?? 
`${datasetName} (${ownerEmail})`; - return { key, label }; - } catch { - return null; - } - } - - /** - * Performs client-side restriction analysis to mirror backend validation. - * - * This function: - * 1. Identifies operators using non-downloadable datasets - * 2. Builds a workflow dependency graph from operator links - * 3. Uses BFS to propagate restrictions through the graph - * 4. Returns a map of restricted operators - * - * The analysis considers only enabled operators and ignores disabled ones. - * Restrictions flow downstream through operator dependencies. - * - * @param datasetDownloadableMap Map tracking which datasets are downloadable - * @param datasetLabelMap Map storing human-readable dataset labels - * @returns Map of operator IDs to sets of blocking dataset labels - */ - private runRestrictionAnalysis( - datasetDownloadableMap: Map, - datasetLabelMap: Map - ): Map> { - const texeraGraph = this.workflowActionService.getTexeraGraph(); - const allOperators = texeraGraph.getAllOperators(); - const operatorById = new Map(allOperators.map(op => [op.operatorID, op] as const)); - const enabledOperators = allOperators.filter(operator => !operator.isDisabled); - const datasetSources: Array<{ operatorId: string; label: string }> = []; - - // Identify source operators that use non-downloadable datasets - enabledOperators.forEach(operator => { - const datasetInfo = this.extractDatasetInfo( - operator.operatorProperties?.fileName, - datasetDownloadableMap, - datasetLabelMap - ); - if (!datasetInfo) { - return; - } - const isDownloadable = datasetDownloadableMap.get(datasetInfo.key); - if (isDownloadable === false) { - datasetSources.push({ operatorId: operator.operatorID, label: datasetInfo.label }); - } - }); - - const restrictions = new Map>(); - - if (datasetSources.length === 0) { - return restrictions; - } - - // Build Workflow Dependency Graph - const adjacency = new Map(); - texeraGraph.getAllLinks().forEach(link => { - const sourceId = link.source.operatorID; - const targetId = link.target.operatorID; - const sourceOperator = operatorById.get(sourceId); - const targetOperator = operatorById.get(targetId); - if (!sourceOperator || !targetOperator) { - return; - } - if (sourceOperator.isDisabled || targetOperator.isDisabled) { - return; - } - const neighbors = adjacency.get(sourceId); - if (neighbors) { - neighbors.push(targetId); - } else { - adjacency.set(sourceId, [targetId]); - } - }); - - // BFS - const queue: Array<{ operatorId: string; datasets: Set }> = []; - datasetSources.forEach(source => { - queue.push({ operatorId: source.operatorId, datasets: new Set([source.label]) }); - }); - - while (queue.length > 0) { - const current = queue.shift()!; - const existing = restrictions.get(current.operatorId) ?? new Set(); - let updated = false; - current.datasets.forEach(label => { - if (!existing.has(label)) { - existing.add(label); - updated = true; - } - }); - if (updated || !restrictions.has(current.operatorId)) { - restrictions.set(current.operatorId, existing); - const neighbors = adjacency.get(current.operatorId) ?? []; - neighbors.forEach(nextOperatorId => { - queue.push({ operatorId: nextOperatorId, datasets: new Set(existing) }); - }); - } - } - - return restrictions; - } - /** * Updates UI flags that control export button visibility and availability. 
* From 48d3e0985aeea502af934eadbd94c1e0b10f875a Mon Sep 17 00:00:00 2001 From: Seongjin Yoon Date: Fri, 10 Oct 2025 09:47:09 -0700 Subject: [PATCH 19/20] fix --- .../service/user/download/download.service.ts | 6 +- .../result-exportation.component.ts | 96 +++++++++------ .../workflow-result-export.service.ts | 110 ++++++++---------- 3 files changed, 114 insertions(+), 98 deletions(-) diff --git a/core/gui/src/app/dashboard/service/user/download/download.service.ts b/core/gui/src/app/dashboard/service/user/download/download.service.ts index cca7e7ac462..aafb14a44c6 100644 --- a/core/gui/src/app/dashboard/service/user/download/download.service.ts +++ b/core/gui/src/app/dashboard/service/user/download/download.service.ts @@ -45,7 +45,7 @@ export interface ExportWorkflowJsonResponse { message: string; } -export interface WorkflowResultDownloadability { +export interface WorkflowResultDownloadabilityResponse { [operatorId: string]: string[]; // operatorId -> array of dataset labels blocking export } @@ -127,9 +127,9 @@ export class DownloadService { * @param workflowId The workflow ID to check * @returns Observable of downloadability information */ - public getWorkflowResultDownloadability(workflowId: number): Observable { + public getWorkflowResultDownloadability(workflowId: number): Observable { const urlPath = `${WORKFLOW_EXECUTIONS_API_BASE_URL}/${workflowId}/${DOWNLOADABILITY_BASE_URL}`; - return this.http.get(urlPath); + return this.http.get(urlPath); } /** diff --git a/core/gui/src/app/workspace/component/result-exportation/result-exportation.component.ts b/core/gui/src/app/workspace/component/result-exportation/result-exportation.component.ts index 90a3a46ae6d..4783bd93f23 100644 --- a/core/gui/src/app/workspace/component/result-exportation/result-exportation.component.ts +++ b/core/gui/src/app/workspace/component/result-exportation/result-exportation.component.ts @@ -55,16 +55,65 @@ export class ResultExportationComponent implements OnInit { containsBinaryData: boolean = false; inputDatasetName = ""; selectedComputingUnit: DashboardWorkflowComputingUnit | null = null; - exportableOperatorIds: string[] = []; - blockedOperatorIds: string[] = []; - isExportRestricted: boolean = false; - hasPartialNonDownloadable: boolean = false; - blockingDatasetLabels: string[] = []; - restrictedOperatorMap = new Map>(); + downloadability?: WorkflowResultDownloadability; userAccessibleDatasets: DashboardDataset[] = []; filteredUserAccessibleDatasets: DashboardDataset[] = []; + /** + * Gets the operator IDs to check for restrictions based on the source trigger. 
+ * Menu: all operators, Context menu: highlighted operators only + */ + private getOperatorIdsToCheck(): readonly string[] { + if (this.sourceTriggered === "menu") { + return this.workflowActionService + .getTexeraGraph() + .getAllOperators() + .map(op => op.operatorID); + } else { + return this.workflowActionService.getJointGraphWrapper().getCurrentHighlightedOperatorIDs(); + } + } + + /** + * Computed property: operator IDs that can be exported + */ + get exportableOperatorIds(): string[] { + if (!this.downloadability) return []; + return this.downloadability.getExportableOperatorIds(this.getOperatorIdsToCheck()); + } + + /** + * Computed property: operator IDs that are blocked from export + */ + get blockedOperatorIds(): string[] { + if (!this.downloadability) return []; + return this.downloadability.getBlockedOperatorIds(this.getOperatorIdsToCheck()); + } + + /** + * Computed property: whether all selected operators are blocked + */ + get isExportRestricted(): boolean { + const operatorIds = this.getOperatorIdsToCheck(); + return this.exportableOperatorIds.length === 0 && operatorIds.length > 0; + } + + /** + * Computed property: whether some (but not all) operators are blocked + */ + get hasPartialNonDownloadable(): boolean { + return this.exportableOperatorIds.length > 0 && this.blockedOperatorIds.length > 0; + } + + /** + * Computed property: dataset labels that are blocking export + */ + get blockingDatasetLabels(): string[] { + if (!this.downloadability) return []; + return this.downloadability.getBlockingDatasets(this.getOperatorIdsToCheck()); + } + constructor( public workflowResultExportService: WorkflowResultExportService, private modalRef: NzModalRef, @@ -87,8 +136,8 @@ export class ResultExportationComponent implements OnInit { this.workflowResultExportService .computeRestrictionAnalysis() .pipe(untilDestroyed(this)) - .subscribe(restrictionResult => { - this.restrictedOperatorMap = restrictionResult.restrictedOperatorMap; + .subscribe(downloadability => { + this.downloadability = downloadability; this.updateOutputType(); }); @@ -101,33 +150,11 @@ export class ResultExportationComponent implements OnInit { } updateOutputType(): void { - // Determine if the caller of this component is menu or context menu - // if its menu then we need to export all operators else we need to export only highlighted operators - - let operatorIds: readonly string[]; - if (this.sourceTriggered === "menu") { - operatorIds = this.workflowActionService - .getTexeraGraph() - .getAllOperators() - .map(op => op.operatorID); - } else { - operatorIds = this.workflowActionService.getJointGraphWrapper().getCurrentHighlightedOperatorIDs(); + if (!this.downloadability) { + return; } - this.exportableOperatorIds = this.workflowResultExportService.getExportableOperatorIds( - operatorIds, - this.restrictedOperatorMap - ); - this.blockedOperatorIds = this.workflowResultExportService.getBlockedOperatorIds( - operatorIds, - this.restrictedOperatorMap - ); - this.blockingDatasetLabels = this.workflowResultExportService.getBlockingDatasets( - operatorIds, - this.restrictedOperatorMap - ); - this.isExportRestricted = this.exportableOperatorIds.length === 0 && operatorIds.length > 0; - this.hasPartialNonDownloadable = this.exportableOperatorIds.length > 0 && this.blockedOperatorIds.length > 0; + const operatorIds = this.getOperatorIdsToCheck(); if (operatorIds.length === 0) { // No operators highlighted @@ -144,8 +171,7 @@ export class ResultExportationComponent implements OnInit { return; } - // Assume they're all 
table or visualization - // until we find an operator that isn't + // Assume they're all table or visualization until we find an operator that isn't let allTable = true; let allVisualization = true; let anyBinaryData = false; diff --git a/core/gui/src/app/workspace/service/workflow-result-export/workflow-result-export.service.ts b/core/gui/src/app/workspace/service/workflow-result-export/workflow-result-export.service.ts index ba1c7db7c80..1abad167ec5 100644 --- a/core/gui/src/app/workspace/service/workflow-result-export/workflow-result-export.service.ts +++ b/core/gui/src/app/workspace/service/workflow-result-export/workflow-result-export.service.ts @@ -39,7 +39,7 @@ import { GuiConfigService } from "../../../common/service/gui-config.service"; * Contains information about which operators are restricted from exporting * due to non-downloadable dataset dependencies. */ -export interface WorkflowResultDownloadability { +export class WorkflowResultDownloadability { /** * Map of operator IDs to sets of blocking dataset labels. * Key: Operator ID @@ -50,6 +50,46 @@ export interface WorkflowResultDownloadability { * one or more datasets that the current user is not allowed to download. */ restrictedOperatorMap: Map>; + + constructor(restrictedOperatorMap: Map>) { + this.restrictedOperatorMap = restrictedOperatorMap; + } + + /** + * Filters operator IDs to return only those that are not restricted by dataset access controls. + * + * @param operatorIds Array of operator IDs to filter + * @returns Array of operator IDs that can be exported + */ + getExportableOperatorIds(operatorIds: readonly string[]): string[] { + return operatorIds.filter(operatorId => !this.restrictedOperatorMap.has(operatorId)); + } + + /** + * Filters operator IDs to return only those that are restricted by dataset access controls. + * + * @param operatorIds Array of operator IDs to filter + * @returns Array of operator IDs that are blocked from export + */ + getBlockedOperatorIds(operatorIds: readonly string[]): string[] { + return operatorIds.filter(operatorId => this.restrictedOperatorMap.has(operatorId)); + } + + /** + * Gets the list of dataset labels that are blocking export for the given operators. + * Used to display user-friendly error messages about which datasets are causing restrictions. 
+  /**
+   * Gets the list of dataset labels that are blocking export for the given operators.
+   * Used to display user-friendly error messages about which datasets are causing restrictions.
+   *
+   * @param operatorIds Array of operator IDs to check
+   * @returns Array of dataset labels (e.g., "Dataset1 (user@example.com)")
+   */
+  getBlockingDatasets(operatorIds: readonly string[]): string[] {
+    const labels = new Set<string>();
+    operatorIds.forEach(operatorId => {
+      const datasets = this.restrictedOperatorMap.get(operatorId);
+      datasets?.forEach(label => labels.add(label));
+    });
+    return Array.from(labels);
+  }
 }
 
 @Injectable({
@@ -94,7 +134,7 @@
   public computeRestrictionAnalysis(): Observable<WorkflowResultDownloadability> {
     const workflowId = this.workflowActionService.getWorkflow().wid;
     if (!workflowId) {
-      return of({ restrictedOperatorMap: new Map<string, Set<string>>() });
+      return of(new WorkflowResultDownloadability(new Map<string, Set<string>>()));
     }
 
     return this.downloadService.getWorkflowResultDownloadability(workflowId).pipe(
@@ -104,12 +144,10 @@
           Object.entries(backendResponse).forEach(([operatorId, datasetLabels]) => {
             restrictedOperatorMap.set(operatorId, new Set(datasetLabels));
           });
-          return { restrictedOperatorMap };
+          return new WorkflowResultDownloadability(restrictedOperatorMap);
         }),
         catchError(() => {
-          return of({
-            restrictedOperatorMap: new Map<string, Set<string>>(),
-          });
+          return of(new WorkflowResultDownloadability(new Map<string, Set<string>>()));
         })
       );
   }
@@ -152,54 +190,6 @@
     this.hasResultToExportOnAllOperators.next(hasAnyResult);
   }
 
-  /**
-   * Filters operator IDs to return only those that are not restricted by dataset access controls.
-   *
-   * @param operatorIds Array of operator IDs to filter
-   * @param restrictedOperatorMap Map of restricted operators to blocking dataset labels
-   * @returns Array of operator IDs that can be exported
-   */
-  public getExportableOperatorIds(
-    operatorIds: readonly string[],
-    restrictedOperatorMap: Map<string, Set<string>>
-  ): string[] {
-    return operatorIds.filter(operatorId => !restrictedOperatorMap.has(operatorId));
-  }
-
-  /**
-   * Filters operator IDs to return only those that are restricted by dataset access controls.
-   *
-   * @param operatorIds Array of operator IDs to filter
-   * @param restrictedOperatorMap Map of restricted operators to blocking dataset labels
-   * @returns Array of operator IDs that are blocked from export
-   */
-  public getBlockedOperatorIds(
-    operatorIds: readonly string[],
-    restrictedOperatorMap: Map<string, Set<string>>
-  ): string[] {
-    return operatorIds.filter(operatorId => restrictedOperatorMap.has(operatorId));
-  }
-
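computeRestrictionAnalysis above treats the backend response as a plain JSON object keyed by operator ID, with an array of blocking dataset labels per key. The payload shape is inferred from the mapping code and the values in this sketch are invented:

    // Hypothetical response body from getWorkflowResultDownloadability:
    const backendResponse: Record<string, string[]> = {
      "CSVFileScan-operator-1a2b": ["Dataset1 (owner@example.com)"],
    };
    // Each entry becomes a Set of blocking dataset labels, as in the map() callback above:
    const restrictedOperatorMap = new Map<string, Set<string>>();
    Object.entries(backendResponse).forEach(([operatorId, datasetLabels]) => {
      restrictedOperatorMap.set(operatorId, new Set(datasetLabels));
    });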
-  /**
-   * Gets the list of dataset labels that are blocking export for the given operators.
-   * Used to display user-friendly error messages about which datasets are causing restrictions.
-   *
-   * @param operatorIds Array of operator IDs to check
-   * @param restrictedOperatorMap Map of restricted operators to blocking dataset labels
-   * @returns Array of dataset labels (e.g., "Dataset1 (user@example.com)")
-   */
-  public getBlockingDatasets(
-    operatorIds: readonly string[],
-    restrictedOperatorMap: Map<string, Set<string>>
-  ): string[] {
-    const labels = new Set<string>();
-    operatorIds.forEach(operatorId => {
-      const datasets = restrictedOperatorMap.get(operatorId);
-      datasets?.forEach(label => labels.add(label));
-    });
-    return Array.from(labels);
-  }
-
   /**
    * export the workflow execution result according the export type
    */
@@ -229,7 +219,7 @@
           exportAll,
           destination,
           unit,
-          restrictionResult.restrictedOperatorMap
+          restrictionResult
         )
       );
   }
@@ -246,7 +236,7 @@
    *
    * Shows error messages if all operators are blocked, warning messages if some are blocked.
    *
-   * @param restrictedOperatorMap Map of restricted operators from restriction analysis
+   * @param downloadability Downloadability analysis result containing restriction information
    */
   private performExport(
     exportType: string,
@@ -258,7 +248,7 @@
     exportAll: boolean,
     destination: "dataset" | "local",
     unit: DashboardWorkflowComputingUnit | null,
-    restrictedOperatorMap: Map<string, Set<string>>
+    downloadability: WorkflowResultDownloadability
   ): void {
     // Validates configuration and computing unit availability
     if (!this.config.env.exportExecutionResultEnabled) {
@@ -288,10 +278,10 @@
     }
 
     // Applies restriction filtering with user feedback
-    const exportableOperatorIds = this.getExportableOperatorIds(operatorIds, restrictedOperatorMap);
+    const exportableOperatorIds = downloadability.getExportableOperatorIds(operatorIds);
 
     if (exportableOperatorIds.length === 0) {
-      const datasets = this.getBlockingDatasets(operatorIds, restrictedOperatorMap);
+      const datasets = downloadability.getBlockingDatasets(operatorIds);
       const suffix = datasets.length > 0 ? `: ${datasets.join(", ")}` : "";
       this.notificationService.error(
         `Cannot export result: selection depends on dataset(s) that are not downloadable${suffix}`
@@ -300,7 +290,7 @@
     }
 
     if (exportableOperatorIds.length < operatorIds.length) {
-      const datasets = this.getBlockingDatasets(operatorIds, restrictedOperatorMap);
+      const datasets = downloadability.getBlockingDatasets(operatorIds);
       const suffix = datasets.length > 0 ? 
` (${datasets.join(", ")})` : ""; this.notificationService.warning( `Some operators were skipped because their results depend on dataset(s) that are not downloadable${suffix}` From 124035bf0bd5c782e6a0be717a334cb1007e8009 Mon Sep 17 00:00:00 2001 From: Seongjin Yoon Date: Fri, 10 Oct 2025 14:41:24 -0700 Subject: [PATCH 20/20] fix --- .../dashboard/user/workflow/WorkflowExecutionsResource.scala | 5 ----- 1 file changed, 5 deletions(-) diff --git a/core/amber/src/main/scala/edu/uci/ics/texera/web/resource/dashboard/user/workflow/WorkflowExecutionsResource.scala b/core/amber/src/main/scala/edu/uci/ics/texera/web/resource/dashboard/user/workflow/WorkflowExecutionsResource.scala index 2839c9a5011..7192f7f3fd6 100644 --- a/core/amber/src/main/scala/edu/uci/ics/texera/web/resource/dashboard/user/workflow/WorkflowExecutionsResource.scala +++ b/core/amber/src/main/scala/edu/uci/ics/texera/web/resource/dashboard/user/workflow/WorkflowExecutionsResource.scala @@ -19,7 +19,6 @@ package edu.uci.ics.texera.web.resource.dashboard.user.workflow -import edu.uci.ics.amber.core.storage.result.ExecutionResourcesMapping import edu.uci.ics.amber.core.storage.{ DocumentFactory, FileResolver, @@ -40,10 +39,6 @@ import edu.uci.ics.texera.dao.SqlServer.withTransaction import edu.uci.ics.texera.dao.jooq.generated.Tables._ import edu.uci.ics.texera.dao.jooq.generated.tables.daos.WorkflowExecutionsDao import edu.uci.ics.texera.dao.jooq.generated.tables.pojos.{User => UserPojo, WorkflowExecutions} -import edu.uci.ics.texera.auth.SessionUser -import edu.uci.ics.texera.config.UserSystemConfig -import edu.uci.ics.texera.dao.SqlServer.withTransaction -import edu.uci.ics.texera.dao.jooq.generated.tables.pojos.WorkflowExecutions import edu.uci.ics.texera.web.model.http.request.result.ResultExportRequest import edu.uci.ics.texera.web.resource.dashboard.user.workflow.WorkflowExecutionsResource._ import edu.uci.ics.texera.web.service.{ExecutionsMetadataPersistService, ResultExportService}
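Taken together, the restriction filtering in performExport earlier in this series reduces to the following decision; this is a condensed, illustrative sketch rather than the code shipped in the patch, and the variable names are only placeholders:

    const exportable = downloadability.getExportableOperatorIds(operatorIds);
    if (exportable.length === 0) {
      // Every selected operator depends on a non-downloadable dataset: show an error, export nothing.
    } else if (exportable.length < operatorIds.length) {
      // Only part of the selection is blocked: warn that those operators are skipped, export the rest.
    } else {
      // Nothing is blocked: export the full selection.
    }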