From a52ea0777ca93295f0fe9ca78e82d0d546e59755 Mon Sep 17 00:00:00 2001 From: jozefbakus Date: Fri, 17 Feb 2023 13:07:54 +0100 Subject: [PATCH 01/32] Controller --- .../controllers/HyperdriveController.scala | 35 +++++++++++++++++++ 1 file changed, 35 insertions(+) create mode 100644 src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/controllers/HyperdriveController.scala diff --git a/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/controllers/HyperdriveController.scala b/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/controllers/HyperdriveController.scala new file mode 100644 index 000000000..b82404802 --- /dev/null +++ b/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/controllers/HyperdriveController.scala @@ -0,0 +1,35 @@ +/* + * Copyright 2018 ABSA Group Limited + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package za.co.absa.hyperdrive.trigger.api.rest.controllers + +import org.slf4j.LoggerFactory +import org.springframework.web.bind.annotation._ +import za.co.absa.hyperdrive.trigger.api.rest.services.HyperdriveService +import za.co.absa.hyperdrive.trigger.models._ + +import java.util.concurrent.CompletableFuture +import javax.inject.Inject +import scala.compat.java8.FutureConverters._ +import scala.concurrent.ExecutionContext.Implicits.global + +@RestController +class HyperdriveController @Inject()(hyperdriveService: HyperdriveService) { + private val logger = LoggerFactory.getLogger(this.getClass) + + @GetMapping(path = Array("/hyperdrive/workflows/{id}/ingestionStatus")) + def getIngestionStatus(@PathVariable id: Long): CompletableFuture[IngestionStatus] = + hyperdriveService.getIngestionStatus(id).toJava.toCompletableFuture +} From bca2ff49cd3978b074deef0e00d95a3079e1364e Mon Sep 17 00:00:00 2001 From: jozefbakus Date: Fri, 17 Feb 2023 13:10:36 +0100 Subject: [PATCH 02/32] Service + Model --- .../api/rest/services/HyperdriveService.scala | 71 +++++++++++++++++++ .../trigger/models/IngestionStatus.scala | 17 +++++ 2 files changed, 88 insertions(+) create mode 100644 src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveService.scala create mode 100644 src/main/scala/za/co/absa/hyperdrive/trigger/models/IngestionStatus.scala diff --git a/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveService.scala b/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveService.scala new file mode 100644 index 000000000..5a2c70d88 --- /dev/null +++ b/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveService.scala @@ -0,0 +1,71 @@ +/* + * Copyright 2018 ABSA Group Limited + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package za.co.absa.hyperdrive.trigger.api.rest.services + +import org.springframework.stereotype.Service +import za.co.absa.hyperdrive.trigger.models.{IngestionStatus, JobIngestionStatus} +import za.co.absa.hyperdrive.trigger.persistance.WorkflowRepository +import za.co.absa.hyperdrive.trigger.configuration.application.GeneralConfig + +import scala.concurrent.{ExecutionContext, Future} + +trait HyperdriveService { + val workflowRepository: WorkflowRepository + val jobTemplateService: JobTemplateService + val hyperdriveOffsetComparisonService: HyperdriveOffsetComparisonService + + def getIngestionStatus(id: Long)(implicit ec: ExecutionContext): Future[IngestionStatus] +} + +@Service +class HyperdriveServiceImpl( + override val workflowRepository: WorkflowRepository, + override val jobTemplateService: JobTemplateService, + override val hyperdriveOffsetComparisonService: HyperdriveOffsetComparisonService, + generalConfig: GeneralConfig +) extends HyperdriveService { + + override def getIngestionStatus(id: Long)(implicit ec: ExecutionContext): Future[IngestionStatus] = { + workflowRepository.getWorkflow(id).flatMap { workflow => + jobTemplateService + .resolveJobTemplate(workflow.dagDefinitionJoined) + .flatMap(resolvedJobs => + Future.sequence( + resolvedJobs.map(resolvedJob => + hyperdriveOffsetComparisonService + .getNumberOfMessagesLeft(resolvedJob.jobParameters) + .map(messagesLeft => + JobIngestionStatus( + jobName = resolvedJob.name, + jobType = resolvedJob.jobParameters.jobType, + topic = messagesLeft.map(_._1).getOrElse("Unknown"), + messagesToIngest = messagesLeft.map(_._2) + ) + ) + ) + ) + ) + .map(jobIngestionStatus => + IngestionStatus( + name = workflow.name, + project = workflow.project, + jobIngestionStatus = jobIngestionStatus, + id = workflow.id + ) + ) + } + } +} diff --git a/src/main/scala/za/co/absa/hyperdrive/trigger/models/IngestionStatus.scala b/src/main/scala/za/co/absa/hyperdrive/trigger/models/IngestionStatus.scala new file mode 100644 index 000000000..0e2b7520a --- /dev/null +++ b/src/main/scala/za/co/absa/hyperdrive/trigger/models/IngestionStatus.scala @@ -0,0 +1,17 @@ +package za.co.absa.hyperdrive.trigger.models + +import za.co.absa.hyperdrive.trigger.models.enums.JobTypes.JobType + +case class IngestionStatus( + name: String, + project: String, + jobIngestionStatus: Seq[JobIngestionStatus], + id: Long = 0 +) + +case class JobIngestionStatus( + jobName: String, + jobType: JobType, + topic: String, + messagesToIngest: Option[Map[Int, Long]] +) From ccabdfdda1f74ee26947524d759d5aaaebec7a8f Mon Sep 17 00:00:00 2001 From: jozefbakus Date: Fri, 17 Feb 2023 13:11:16 +0100 Subject: [PATCH 03/32] Model --- .../trigger/models/BeginningEndOffsets.scala | 22 +++++++++++++++++++ 1 file changed, 22 insertions(+) create mode 100644 src/main/scala/za/co/absa/hyperdrive/trigger/models/BeginningEndOffsets.scala diff --git a/src/main/scala/za/co/absa/hyperdrive/trigger/models/BeginningEndOffsets.scala b/src/main/scala/za/co/absa/hyperdrive/trigger/models/BeginningEndOffsets.scala new file mode 100644 index 000000000..26e63dd31 --- /dev/null +++ 
b/src/main/scala/za/co/absa/hyperdrive/trigger/models/BeginningEndOffsets.scala @@ -0,0 +1,22 @@ +/* + * Copyright 2018 ABSA Group Limited + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package za.co.absa.hyperdrive.trigger.models + +case class BeginningEndOffsets( + topic: String, + beginningOffsets: Map[Int, Long], + endOffsets: Map[Int, Long] +) From 2b57086989f8679d5ef9d53616aee9724305388c Mon Sep 17 00:00:00 2001 From: jozefbakus Date: Fri, 17 Feb 2023 13:13:00 +0100 Subject: [PATCH 04/32] KafkaService --- .../trigger/api/rest/services/KafkaService.scala | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/KafkaService.scala b/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/KafkaService.scala index b2f2ee3ef..7cd1a6596 100644 --- a/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/KafkaService.scala +++ b/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/KafkaService.scala @@ -22,15 +22,19 @@ import org.springframework.stereotype.Service import org.springframework.util.ConcurrentLruCache import za.co.absa.hyperdrive.trigger.api.rest.services.KafkaServiceImpl.{BeginningOffsets, EndOffsets, OffsetFunction} import za.co.absa.hyperdrive.trigger.configuration.application.GeneralConfig +import za.co.absa.hyperdrive.trigger.models.BeginningEndOffsets import java.util.Properties import java.util.UUID.randomUUID import javax.inject.Inject import scala.collection.JavaConverters._ +import scala.util.Try trait KafkaService { def getBeginningOffsets(topic: String, consumerProperties: Properties): Map[Int, Long] def getEndOffsets(topic: String, consumerProperties: Properties): Map[Int, Long] + + def getOffsets(topic: String, consumerProperties: Properties): Option[BeginningEndOffsets] } @Service @@ -50,6 +54,16 @@ class KafkaServiceImpl @Inject() (generalConfig: GeneralConfig) extends KafkaSer getOffsets(topic, consumerProperties, EndOffsets) } + def getOffsets(topic: String, consumerProperties: Properties): Option[BeginningEndOffsets] = { + Try( + BeginningEndOffsets( + topic, + getOffsets(topic, consumerProperties, BeginningOffsets), + getOffsets(topic, consumerProperties, EndOffsets) + ) + ).toOption + } + def createKafkaConsumer(propertiesThreadId: (Properties, Long)): KafkaConsumer[String, String] = { logger.info( s"Creating new Kafka Consumer for thread id ${propertiesThreadId._2} and" + From 520e6d76683ba314cb660ce97ff7f9ba469f5765 Mon Sep 17 00:00:00 2001 From: jozefbakus Date: Fri, 17 Feb 2023 13:14:03 +0100 Subject: [PATCH 05/32] CheckpointService --- .../api/rest/services/CheckpointService.scala | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/CheckpointService.scala b/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/CheckpointService.scala index 85b62f219..2614fd695 100644 --- a/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/CheckpointService.scala +++ 
b/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/CheckpointService.scala @@ -34,6 +34,8 @@ trait CheckpointService { def getLatestOffsetFilePath(params: HdfsParameters)( implicit ugi: UserGroupInformation ): Try[Option[(String, Boolean)]] + + def getLatestCommittedOffset(params: HdfsParameters)(implicit ugi: UserGroupInformation): Option[Map[Int, Long]] } class HdfsParameters( @@ -98,6 +100,16 @@ class CheckpointServiceImpl @Inject() (@Lazy hdfsService: HdfsService) extends C } } + override def getLatestCommittedOffset(params: HdfsParameters)(implicit ugi: UserGroupInformation): Option[Map[Int, Long]] = { + Try(for { + latestCommit <- getLatestCommitBatchId(params.checkpointLocation).toOption.flatten + pathToLatestCommit = new Path(s"${params.checkpointLocation}/$offsetsDirName/$latestCommit") + offsets <- getOffsetsFromFile(pathToLatestCommit.toString).toOption.flatten + } yield { + offsets.values.head + }).toOption.flatten + } + /** * see org.apache.spark.sql.execution.streaming.OffsetSeqLog * and org.apache.spark.sql.kafka010.JsonUtils From 63a56fd80602320b3be6d1b91f3c207619432233 Mon Sep 17 00:00:00 2001 From: jozefbakus Date: Fri, 17 Feb 2023 13:18:21 +0100 Subject: [PATCH 06/32] HyperdriveOffsetService --- ...ce.scala => HyperdriveOffsetService.scala} | 48 ++++++++++++++++++- 1 file changed, 46 insertions(+), 2 deletions(-) rename src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/{HyperdriveOffsetComparisonService.scala => HyperdriveOffsetService.scala} (84%) diff --git a/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveOffsetComparisonService.scala b/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveOffsetService.scala similarity index 84% rename from src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveOffsetComparisonService.scala rename to src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveOffsetService.scala index 24e3a504c..9d7d39a62 100644 --- a/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveOffsetComparisonService.scala +++ b/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveOffsetService.scala @@ -32,8 +32,12 @@ import java.util.Properties import javax.inject.Inject import scala.concurrent.{ExecutionContext, Future} -trait HyperdriveOffsetComparisonService { +trait HyperdriveOffsetService { def isNewJobInstanceRequired(jobParameters: JobInstanceParameters)(implicit ec: ExecutionContext): Future[Boolean] + + def getNumberOfMessagesLeft(jobParameters: JobInstanceParameters)( + implicit ec: ExecutionContext + ): Future[Option[(String, Map[Int, Long])]] } @Service @@ -42,7 +46,7 @@ class HyperdriveOffsetComparisonServiceImpl @Inject() (sparkConfig: SparkConfig, @Lazy checkpointService: CheckpointService, @Lazy userGroupInformationService: UserGroupInformationService, kafkaService: KafkaService -) extends HyperdriveOffsetComparisonService { +) extends HyperdriveOffsetService { private val logger = LoggerFactory.getLogger(this.getClass) private val HyperdriveCheckpointKey = "writer.common.checkpoint.location" private val HyperdriveKafkaTopicKey = "reader.kafka.topic" @@ -52,6 +56,46 @@ class HyperdriveOffsetComparisonServiceImpl @Inject() (sparkConfig: SparkConfig, private val ListDelimiter = ',' private val defaultDeserializer = "org.apache.kafka.common.serialization.StringDeserializer" + /** + * @param jobParameters Parameters for the job instance. 
Should contain at least + * - reader.kafka.topic + * - reader.kafka.brokers + * - writer.common.checkpoint.location + * @param ec ExecutionContext + * @return the topic and, per partition, the number of messages that have not yet been ingested. + */ + def getNumberOfMessagesLeft( + jobParameters: JobInstanceParameters + )(implicit ec: ExecutionContext): Future[Option[(String, Map[Int, Long])]] = { + val kafkaParametersOpt = getKafkaParameters(jobParameters) + val hdfsParametersOpt: Option[HdfsParameters] = getResolvedAppArguments(jobParameters).flatMap(getHdfsParameters) + + if (kafkaParametersOpt.isEmpty) { + logger.debug(s"Kafka parameters were not found in job definition $jobParameters") + } + + Future(for { + kafkaParameters <- kafkaParametersOpt + hdfsParameters <- hdfsParametersOpt + ugi = userGroupInformationService.loginUserFromKeytab(hdfsParameters.principal, hdfsParameters.keytab) + kafkaOffsets <- kafkaService.getOffsets(kafkaParameters._1, kafkaParameters._2) + if kafkaOffsets.beginningOffsets.keySet == kafkaOffsets.endOffsets.keySet + hdfsOffsets <- checkpointService.getLatestCommittedOffset(hdfsParameters)(ugi) + } yield { + val messagesLeft = kafkaOffsets.beginningOffsets.map { case (partition, kafkaBeginningOffset) => + val kafkaEndOffset = kafkaOffsets.endOffsets(partition) + val numberOfMessages = hdfsOffsets.get(partition) match { + case Some(hdfsOffset) if hdfsOffset >= kafkaEndOffset => 0 + case Some(hdfsOffset) if hdfsOffset >= kafkaBeginningOffset => kafkaEndOffset - hdfsOffset + case Some(hdfsOffset) if hdfsOffset < kafkaBeginningOffset => kafkaEndOffset - kafkaBeginningOffset + case None => kafkaEndOffset - kafkaBeginningOffset + } + partition -> numberOfMessages + } + (kafkaOffsets.topic, messagesLeft) + }) + } + /** * @param jobParameters Parameters for the job instance. Should contain at least * - reader.kafka.topic
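To make the logic above concrete: the following is a minimal, self-contained Scala sketch of the per-partition computation introduced in this patch, using hypothetical offset maps in place of the values the real service obtains from KafkaService.getOffsets and CheckpointService.getLatestCommittedOffset. (Note that patch 09 later changes the first case so that a committed offset past the Kafka end offset yields a negative count, which the UI reports as an inconsistency.)

object MessagesLeftSketch extends App {
  // Hypothetical offsets; in the service these come from Kafka and the Spark checkpoint.
  val beginningOffsets: Map[Int, Long] = Map(0 -> 100L, 1 -> 0L)   // earliest retained offset per partition
  val endOffsets: Map[Int, Long]       = Map(0 -> 500L, 1 -> 300L) // next offset to be written per partition
  val committedOffsets: Map[Int, Long] = Map(0 -> 450L)            // latest commit; partition 1 has none yet

  val messagesLeft: Map[Int, Long] = beginningOffsets.map { case (partition, beginning) =>
    val end = endOffsets(partition)
    val remaining: Long = committedOffsets.get(partition) match {
      case Some(committed) if committed >= end       => 0L              // fully caught up
      case Some(committed) if committed >= beginning => end - committed // normal lag
      case _                                         => end - beginning // no usable commit: whole retained range
    }
    partition -> remaining
  }

  println(messagesLeft) // Map(0 -> 50, 1 -> 300)
}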
From c4ab308ca0e940919965787092659bf91bab181b Mon Sep 17 00:00:00 2001 From: jozefbakus Date: Fri, 17 Feb 2023 13:19:02 +0100 Subject: [PATCH 07/32] HyperdriveService --- .../trigger/api/rest/services/HyperdriveService.scala | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveService.scala b/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveService.scala index 5a2c70d88..6713f4e60 100644 --- a/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveService.scala +++ b/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveService.scala @@ -18,14 +18,13 @@ package za.co.absa.hyperdrive.trigger.api.rest.services import org.springframework.stereotype.Service import za.co.absa.hyperdrive.trigger.models.{IngestionStatus, JobIngestionStatus} import za.co.absa.hyperdrive.trigger.persistance.WorkflowRepository -import za.co.absa.hyperdrive.trigger.configuration.application.GeneralConfig import scala.concurrent.{ExecutionContext, Future} trait HyperdriveService { val workflowRepository: WorkflowRepository val jobTemplateService: JobTemplateService - val hyperdriveOffsetComparisonService: HyperdriveOffsetComparisonService + val hyperdriveOffsetComparisonService: HyperdriveOffsetService def getIngestionStatus(id: Long)(implicit ec: ExecutionContext): Future[IngestionStatus] } @Service class HyperdriveServiceImpl( override val workflowRepository: WorkflowRepository, override val jobTemplateService: JobTemplateService, - override val hyperdriveOffsetComparisonService: HyperdriveOffsetComparisonService, - generalConfig: GeneralConfig + override val hyperdriveOffsetComparisonService: HyperdriveOffsetService ) extends HyperdriveService { From d1fa3afb8c35cd6e26321fcb30b21e3bbdbd86bb Mon Sep 17 00:00:00 2001 From: jozefbakus Date: Fri, 17 Feb 2023 13:20:45 +0100 Subject: [PATCH 08/32] Refactoring --- .../trigger/scheduler/executors/Executors.scala | 4 ++-- .../executors/spark/HyperdriveExecutor.scala | 10 +++++----- .../executors/spark/HyperdriveExecutorTest.scala | 14 +++++++------- 3 files changed, 14 insertions(+), 14 deletions(-) diff --git a/src/main/scala/za/co/absa/hyperdrive/trigger/scheduler/executors/Executors.scala b/src/main/scala/za/co/absa/hyperdrive/trigger/scheduler/executors/Executors.scala index 4f3db6af0..346f15779 100644 --- a/src/main/scala/za/co/absa/hyperdrive/trigger/scheduler/executors/Executors.scala +++ b/src/main/scala/za/co/absa/hyperdrive/trigger/scheduler/executors/Executors.scala @@ -34,7 +34,7 @@ import org.springframework.beans.factory.BeanFactory import org.springframework.context.annotation.Lazy import za.co.absa.hyperdrive.trigger.scheduler.executors.shell.ShellExecutor import org.springframework.stereotype.Component -import za.co.absa.hyperdrive.trigger.api.rest.services.HyperdriveOffsetComparisonService +import za.co.absa.hyperdrive.trigger.api.rest.services.HyperdriveOffsetService import za.co.absa.hyperdrive.trigger.configuration.application.{SchedulerConfig, SparkConfig} import za.co.absa.hyperdrive.trigger.scheduler.notifications.NotificationSender @@ -49,7 +49,7 @@ class Executors @Inject() ( beanFactory: BeanFactory, implicit val sparkConfig: SparkConfig, schedulerConfig: SchedulerConfig, - @Lazy hyperdriveOffsetComparisonService: HyperdriveOffsetComparisonService + @Lazy hyperdriveOffsetComparisonService: HyperdriveOffsetService ) { private val logger = LoggerFactory.getLogger(this.getClass) private implicit val executionContext: ExecutionContextExecutor = diff --git a/src/main/scala/za/co/absa/hyperdrive/trigger/scheduler/executors/spark/HyperdriveExecutor.scala b/src/main/scala/za/co/absa/hyperdrive/trigger/scheduler/executors/spark/HyperdriveExecutor.scala index 0b936e9f9..e25c458fd 100644 --- a/src/main/scala/za/co/absa/hyperdrive/trigger/scheduler/executors/spark/HyperdriveExecutor.scala +++ b/src/main/scala/za/co/absa/hyperdrive/trigger/scheduler/executors/spark/HyperdriveExecutor.scala @@ -16,7 +16,7 @@ package za.co.absa.hyperdrive.trigger.scheduler.executors.spark import org.slf4j.LoggerFactory -import za.co.absa.hyperdrive.trigger.api.rest.services.HyperdriveOffsetComparisonService +import za.co.absa.hyperdrive.trigger.api.rest.services.HyperdriveOffsetService import za.co.absa.hyperdrive.trigger.configuration.application.SparkConfig import za.co.absa.hyperdrive.trigger.models.enums.JobStatuses import za.co.absa.hyperdrive.trigger.models.{JobInstance, SparkInstanceParameters} @@ -31,23 +31,23 @@ object HyperdriveExecutor { jobParameters: SparkInstanceParameters, updateJob: JobInstance => Future[Unit], sparkClusterService: SparkClusterService, - offsetComparisonService: HyperdriveOffsetComparisonService + offsetService: HyperdriveOffsetService )(implicit executionContext: ExecutionContext, sparkConfig: SparkConfig): Future[Unit] = jobInstance.executorJobId match { - case None => submitJob(sparkClusterService, offsetComparisonService, jobInstance, jobParameters, updateJob) + case None => submitJob(sparkClusterService, offsetService, jobInstance, jobParameters,
updateJob) case Some(executorJobId) => SparkExecutor.updateJobStatus(executorJobId, jobInstance, updateJob, sparkClusterService) } private def submitJob(sparkClusterService: SparkClusterService, - offsetComparisonService: HyperdriveOffsetComparisonService, + offsetService: HyperdriveOffsetService, jobInstance: JobInstance, jobParameters: SparkInstanceParameters, updateJob: JobInstance => Future[Unit] )(implicit executionContext: ExecutionContext) = { logger.debug("Using HyperdriveExecutor") for { - newJobRequired <- offsetComparisonService.isNewJobInstanceRequired(jobParameters) + newJobRequired <- offsetService.isNewJobInstanceRequired(jobParameters) _ <- if (newJobRequired) sparkClusterService.submitJob(jobInstance, jobParameters, updateJob) else updateJob(jobInstance.copy(jobStatus = JobStatuses.NoData)) diff --git a/src/test/scala/za/co/absa/hyperdrive/trigger/scheduler/executors/spark/HyperdriveExecutorTest.scala b/src/test/scala/za/co/absa/hyperdrive/trigger/scheduler/executors/spark/HyperdriveExecutorTest.scala index 46c629af5..d6285c58a 100644 --- a/src/test/scala/za/co/absa/hyperdrive/trigger/scheduler/executors/spark/HyperdriveExecutorTest.scala +++ b/src/test/scala/za/co/absa/hyperdrive/trigger/scheduler/executors/spark/HyperdriveExecutorTest.scala @@ -20,7 +20,7 @@ import org.mockito.ArgumentMatchers.any import org.mockito.Mockito.{never, reset, verify, when} import org.scalatest.mockito.MockitoSugar import org.scalatest.{AsyncFlatSpec, BeforeAndAfter, Matchers} -import za.co.absa.hyperdrive.trigger.api.rest.services.HyperdriveOffsetComparisonService +import za.co.absa.hyperdrive.trigger.api.rest.services.HyperdriveOffsetService import za.co.absa.hyperdrive.trigger.configuration.application.{DefaultTestSparkConfig, SparkConfig} import za.co.absa.hyperdrive.trigger.models.enums.JobStatuses.InQueue import za.co.absa.hyperdrive.trigger.models.enums.{JobStatuses, JobTypes} @@ -30,12 +30,12 @@ import java.time.LocalDateTime import scala.concurrent.Future class HyperdriveExecutorTest extends AsyncFlatSpec with MockitoSugar with BeforeAndAfter with Matchers { - private val offsetComparisonServiceMock = mock[HyperdriveOffsetComparisonService] + private val offsetServiceMock = mock[HyperdriveOffsetService] private val sparkClusterServiceMock = mock[SparkClusterService] private val updateJobStub: JobInstance => Future[Unit] = mock[JobInstance => Future[Unit]] before { - reset(offsetComparisonServiceMock) + reset(offsetServiceMock) reset(sparkClusterServiceMock) reset(updateJobStub) } @@ -44,7 +44,7 @@ class HyperdriveExecutorTest extends AsyncFlatSpec with MockitoSugar with Before val jobInstance = getJobInstance val jobInstanceParameters = jobInstance.jobParameters.asInstanceOf[SparkInstanceParameters] - when(offsetComparisonServiceMock.isNewJobInstanceRequired(any())(any())).thenReturn(Future { true }) + when(offsetServiceMock.isNewJobInstanceRequired(any())(any())).thenReturn(Future { true }) when(sparkClusterServiceMock.submitJob(any(), any(), any())).thenReturn(Future { (): Unit }) when(updateJobStub.apply(any[JobInstance])).thenReturn(Future { (): Unit }) @@ -53,7 +53,7 @@ class HyperdriveExecutorTest extends AsyncFlatSpec with MockitoSugar with Before jobInstanceParameters, updateJobStub, sparkClusterServiceMock, - offsetComparisonServiceMock + offsetServiceMock ) resultFut.map { _ => @@ -67,7 +67,7 @@ class HyperdriveExecutorTest extends AsyncFlatSpec with MockitoSugar with Before val jobInstance = getJobInstance val jobInstanceParameters = 
jobInstance.jobParameters.asInstanceOf[SparkInstanceParameters] - when(offsetComparisonServiceMock.isNewJobInstanceRequired(any())(any())).thenReturn(Future { false }) + when(offsetServiceMock.isNewJobInstanceRequired(any())(any())).thenReturn(Future { false }) when(sparkClusterServiceMock.submitJob(any(), any(), any())).thenReturn(Future { (): Unit }) when(updateJobStub.apply(any[JobInstance])).thenReturn(Future { (): Unit }) @@ -76,7 +76,7 @@ class HyperdriveExecutorTest extends AsyncFlatSpec with MockitoSugar with Before jobInstanceParameters, updateJobStub, sparkClusterServiceMock, - offsetComparisonServiceMock + offsetServiceMock ) resultFut.map { _ => From 85c03800a8143d7efee413fa4f27e0c2f9dd35e1 Mon Sep 17 00:00:00 2001 From: jozefbakus Date: Tue, 21 Feb 2023 13:40:34 +0100 Subject: [PATCH 09/32] Backend refactoring --- .../controllers/HyperdriveController.scala | 2 +- .../services/HyperdriveOffsetService.scala | 6 +-- .../api/rest/services/HyperdriveService.scala | 54 ++++++++++++------- .../trigger/models/IngestionStatus.scala | 31 ++++++----- 4 files changed, 57 insertions(+), 36 deletions(-) diff --git a/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/controllers/HyperdriveController.scala b/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/controllers/HyperdriveController.scala index b82404802..e21fff382 100644 --- a/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/controllers/HyperdriveController.scala +++ b/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/controllers/HyperdriveController.scala @@ -30,6 +30,6 @@ class HyperdriveController @Inject()(hyperdriveService: HyperdriveService) { private val logger = LoggerFactory.getLogger(this.getClass) @GetMapping(path = Array("/hyperdrive/workflows/{id}/ingestionStatus")) - def getIngestionStatus(@PathVariable id: Long): CompletableFuture[IngestionStatus] = + def getIngestionStatus(@PathVariable id: Long): CompletableFuture[Seq[IngestionStatus]] = hyperdriveService.getIngestionStatus(id).toJava.toCompletableFuture } diff --git a/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveOffsetService.scala b/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveOffsetService.scala index 9d7d39a62..51f82fc64 100644 --- a/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveOffsetService.scala +++ b/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveOffsetService.scala @@ -85,9 +85,9 @@ class HyperdriveOffsetComparisonServiceImpl @Inject() (sparkConfig: SparkConfig, val messagesLeft = kafkaOffsets.beginningOffsets.map { case (partition, kafkaBeginningOffset) => val kafkaEndOffset = kafkaOffsets.endOffsets(partition) val numberOfMessages = hdfsOffsets.get(partition) match { - case Some(hdfsOffset) if hdfsOffset >= kafkaEndOffset => 0 - case Some(hdfsOffset) if hdfsOffset >= kafkaBeginningOffset => kafkaEndOffset - hdfsOffset - case Some(hdfsOffset) if hdfsOffset < kafkaBeginningOffset => kafkaEndOffset - kafkaBeginningOffset + case Some(hdfsOffset) if hdfsOffset > kafkaEndOffset => kafkaEndOffset - hdfsOffset + case Some(hdfsOffset) if hdfsOffset > kafkaBeginningOffset => kafkaEndOffset - hdfsOffset + case Some(hdfsOffset) if hdfsOffset <= kafkaBeginningOffset => kafkaEndOffset - kafkaBeginningOffset case None => kafkaEndOffset - kafkaBeginningOffset } partition -> numberOfMessages diff --git a/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveService.scala 
b/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveService.scala index 6713f4e60..2f959b0f2 100644 --- a/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveService.scala +++ b/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveService.scala @@ -15,18 +15,21 @@ package za.co.absa.hyperdrive.trigger.api.rest.services +import org.slf4j.LoggerFactory import org.springframework.stereotype.Service -import za.co.absa.hyperdrive.trigger.models.{IngestionStatus, JobIngestionStatus} +import za.co.absa.hyperdrive.trigger.models.{IngestionStatus, Topic} +import za.co.absa.hyperdrive.trigger.models.enums.JobTypes import za.co.absa.hyperdrive.trigger.persistance.WorkflowRepository import scala.concurrent.{ExecutionContext, Future} +import scala.util.{Failure, Success} trait HyperdriveService { val workflowRepository: WorkflowRepository val jobTemplateService: JobTemplateService val hyperdriveOffsetComparisonService: HyperdriveOffsetService - def getIngestionStatus(id: Long)(implicit ec: ExecutionContext): Future[IngestionStatus] + def getIngestionStatus(id: Long)(implicit ec: ExecutionContext): Future[Seq[IngestionStatus]] } @Service @@ -35,33 +38,44 @@ class HyperdriveServiceImpl( override val jobTemplateService: JobTemplateService, override val hyperdriveOffsetComparisonService: HyperdriveOffsetService ) extends HyperdriveService { + private val logger = LoggerFactory.getLogger(this.getClass) - override def getIngestionStatus(id: Long)(implicit ec: ExecutionContext): Future[IngestionStatus] = { + override def getIngestionStatus(id: Long)(implicit ec: ExecutionContext): Future[Seq[IngestionStatus]] = { workflowRepository.getWorkflow(id).flatMap { workflow => jobTemplateService .resolveJobTemplate(workflow.dagDefinitionJoined) .flatMap(resolvedJobs => Future.sequence( - resolvedJobs.map(resolvedJob => - hyperdriveOffsetComparisonService - .getNumberOfMessagesLeft(resolvedJob.jobParameters) - .map(messagesLeft => - JobIngestionStatus( + resolvedJobs.map { + case resolvedJob if resolvedJob.jobParameters.jobType == JobTypes.Hyperdrive => + hyperdriveOffsetComparisonService.getNumberOfMessagesLeft(resolvedJob.jobParameters).transformWith { + case Failure(exception) => + logger.error(s"Failed to get number of messages left to ingest for a workflow: $id", exception) + Future( + IngestionStatus( + jobName = resolvedJob.name, + jobType = resolvedJob.jobParameters.jobType.name, + topic = None + ) + ) + case Success(messagesLeftOpt) => + Future( + IngestionStatus( + jobName = resolvedJob.name, + jobType = resolvedJob.jobParameters.jobType.name, + topic = messagesLeftOpt.map(messagesLeft => Topic(messagesLeft._1, messagesLeft._2)) + ) + ) + } + case resolvedJob => + Future( + IngestionStatus( jobName = resolvedJob.name, - jobType = resolvedJob.jobParameters.jobType, - topic = messagesLeft.map(_._1).getOrElse("Unknown"), - messagesToIngest = messagesLeft.map(_._2) + jobType = resolvedJob.jobParameters.jobType.name, + topic = None ) ) - ) - ) - ) - .map(jobIngestionStatus => - IngestionStatus( - name = workflow.name, - project = workflow.project, - jobIngestionStatus = jobIngestionStatus, - id = workflow.id + } ) ) } diff --git a/src/main/scala/za/co/absa/hyperdrive/trigger/models/IngestionStatus.scala b/src/main/scala/za/co/absa/hyperdrive/trigger/models/IngestionStatus.scala index 0e2b7520a..3e9ebde26 100644 --- a/src/main/scala/za/co/absa/hyperdrive/trigger/models/IngestionStatus.scala +++ 
b/src/main/scala/za/co/absa/hyperdrive/trigger/models/IngestionStatus.scala @@ -1,17 +1,24 @@ -package za.co.absa.hyperdrive.trigger.models +/* + * Copyright 2018 ABSA Group Limited + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ -import za.co.absa.hyperdrive.trigger.models.enums.JobTypes.JobType +package za.co.absa.hyperdrive.trigger.models case class IngestionStatus( - name: String, - project: String, - jobIngestionStatus: Seq[JobIngestionStatus], - id: Long = 0 -) - -case class JobIngestionStatus( jobName: String, - jobType: JobType, - topic: String, - messagesToIngest: Option[Map[Int, Long]] + jobType: String, + topic: Option[Topic] ) + +case class Topic(topic: String, messagesToIngest: Map[Int, Long]) \ No newline at end of file From 5cba28905f4d6fc62bccd3839332d5388ecb414c Mon Sep 17 00:00:00 2001 From: jozefbakus Date: Tue, 21 Feb 2023 13:48:27 +0100 Subject: [PATCH 10/32] Front end --- .../workflows-home.component.html | 43 +++++++++++ .../workflows-home.component.ts | 11 +++ ui/src/app/constants/api.constants.ts | 2 + ui/src/app/models/ingestionStatus.model.ts | 74 +++++++++++++++++++ .../services/hyperdrive/hyperdrive.service.ts | 39 ++++++++++ .../app/stores/workflows/workflows.actions.ts | 24 +++++- .../app/stores/workflows/workflows.effects.ts | 28 +++++++ .../stores/workflows/workflows.reducers.ts | 23 ++++++ 8 files changed, 243 insertions(+), 1 deletion(-) create mode 100644 ui/src/app/models/ingestionStatus.model.ts create mode 100644 ui/src/app/services/hyperdrive/hyperdrive.service.ts diff --git a/ui/src/app/components/workflows/workflows-home/workflows-home.component.html b/ui/src/app/components/workflows/workflows-home/workflows-home.component.html index 615d56cef..68e15b248 100644 --- a/ui/src/app/components/workflows/workflows-home/workflows-home.component.html +++ b/ui/src/app/components/workflows/workflows-home/workflows-home.component.html @@ -220,6 +220,49 @@ {{workflow.isActive ? 'Yes' : 'No'}} + + + + Workflow name: {{detail.name}}
+ Project name: {{detail.project}} + Is Active: {{detail.isActive ? 'Yes' : 'No'}} [The rest of this template hunk's markup was lost in extraction; the surviving text describes an expandable detail pane with a jobs datagrid: columns Job Name, Type and Progress; an error placeholder "We couldn't load detail!"; rows rendering {{job.jobName}} and {{job.jobType}}; a Progress cell showing Topic: {{job?.topic?.topic}} and Messages to ingest: {{job?.topic?.messagesToIngest >= 0 ? job?.topic?.messagesToIngest : 'Inconsistency detected. Please contact support team!'}}; a separate "Inconsistency detected. Please contact support team!" line; and a "Not available" fallback when no topic is present.]
diff --git a/ui/src/app/components/workflows/workflows-home/workflows-home.component.ts b/ui/src/app/components/workflows/workflows-home/workflows-home.component.ts index 4d7df127d..fb3f6af01 100644 --- a/ui/src/app/components/workflows/workflows-home/workflows-home.component.ts +++ b/ui/src/app/components/workflows/workflows-home/workflows-home.component.ts @@ -22,6 +22,7 @@ import { absoluteRoutes } from '../../../constants/routes.constants'; import { ExportWorkflows, ImportWorkflows, + LoadIngestionStatus, LoadJobsForRun, RunWorkflows, SearchWorkflows, @@ -40,6 +41,7 @@ import { workflowsHomeColumns } from 'src/app/constants/workflow.constants'; import { TableSearchRequestModel } from '../../../models/search/tableSearchRequest.model'; import { ContainsFilterAttributes } from '../../../models/search/containsFilterAttributes.model'; import { BooleanFilterAttributes } from '../../../models/search/booleanFilterAttributes.model'; +import { IngestionStatusModel } from '../../../models/ingestionStatus.model'; @Component({ selector: 'app-workflows-home', @@ -77,6 +79,9 @@ export class WorkflowsHomeComponent implements OnInit, AfterViewInit, OnDestroy workflowFile: File = undefined; multiWorkflowsFile: File = undefined; + ingestionStatusLoading = true; + ingestionStatus: IngestionStatusModel[] = []; + constructor(private store: Store, private confirmationDialogService: ConfirmationDialogService, private router: Router) { this.routerSubscription = router.events.pipe(filter((e) => e instanceof ResolveEnd)).subscribe((e: ResolveEnd) => { this.ignoreRefresh = e.state.root.component !== WorkflowsHomeComponent; @@ -102,6 +107,8 @@ export class WorkflowsHomeComponent implements OnInit, AfterViewInit, OnDestroy } else { this.loadingAction = state.workflowAction.loading; } + this.ingestionStatusLoading = state.ingestionStatusLoading; + this.ingestionStatus = state.ingestionStatus; }); } @@ -212,6 +219,10 @@ export class WorkflowsHomeComponent implements OnInit, AfterViewInit, OnDestroy } } + onDetailRefresh(event) { + if (!!event?.id) this.store.dispatch(new LoadIngestionStatus(event.id)); + } + refresh() { this.selected = []; const searchRequestModel: TableSearchRequestModel = { diff --git a/ui/src/app/constants/api.constants.ts b/ui/src/app/constants/api.constants.ts index d4c51ef7a..4ac0bc64a 100644 --- a/ui/src/app/constants/api.constants.ts +++ b/ui/src/app/constants/api.constants.ts @@ -64,6 +64,8 @@ export const api = { GET_NOTIFICATION_RULES_FROM_HISTORY: '/notificationRulesFromHistory', GET_QUARTZ_DETAIL: '/util/quartzDetail', GET_NOTIFICATION_RULE_FROM_HISTORY: '/notificationRuleFromHistory', + + GET_INGESTION_STATUS: '/hyperdrive/workflows/{id}/ingestionStatus', }; export const SKIP_BASE_URL_INTERCEPTOR = 'Skip-base-url-interceptor'; diff --git a/ui/src/app/models/ingestionStatus.model.ts b/ui/src/app/models/ingestionStatus.model.ts new file mode 100644 index 000000000..008a1d1ec --- /dev/null +++ b/ui/src/app/models/ingestionStatus.model.ts @@ -0,0 +1,74 @@ +/* + * Copyright 2018 ABSA Group Limited + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import { KeyValueModel } from './keyValue.model'; + +export type IngestionStatusModel = { + jobName: string; + jobType: string; + topic?: TopicModel; +}; + +export class IngestionStatusModelFactory { + static create(jobName: string, jobType: string, topic?: TopicModel): IngestionStatusModel { + return { jobName: jobName, jobType: jobType, topic: topic }; + } + + static fromIngestionStatusResponseModel(ingestionStatusResponse: IngestionStatusResponseModel): IngestionStatusModel { + return this.create( + ingestionStatusResponse.jobName, + ingestionStatusResponse.jobType, + ingestionStatusResponse?.topic ? TopicModelFactory.create(ingestionStatusResponse.topic.topic, ingestionStatusResponse.topic.messagesToIngest.map((keyValue) => keyValue.value).reduce((acc, cur) => acc + Number(cur), 0)) : null + ); + } +} + +export type TopicModel = { + topic: string; + messagesToIngest: number; +} + +export class TopicModelFactory { + static create(topic: string, messagesToIngest: number): TopicModel { + return { topic: topic, messagesToIngest: messagesToIngest }; + } +} + + + + + +export type IngestionStatusResponseModel = { + jobName: string; + jobType: string; + topic?: TopicResponseModel; +}; + +export class IngestionStatusResponseModelFactory { + static create(jobName: string, jobType: string, topic?: TopicResponseModel): IngestionStatusResponseModel { + return { jobName: jobName, jobType: jobType, topic: topic }; + } +} + +export type TopicResponseModel = { + topic: string; + messagesToIngest: KeyValueModel[]; +} + +export class TopicResponseModelFactory { + static create(topic: string, messagesToIngest: KeyValueModel[]): TopicResponseModel { + return { topic: topic, messagesToIngest: messagesToIngest }; + } +} diff --git a/ui/src/app/services/hyperdrive/hyperdrive.service.ts b/ui/src/app/services/hyperdrive/hyperdrive.service.ts new file mode 100644 index 000000000..3572f3075 --- /dev/null +++ b/ui/src/app/services/hyperdrive/hyperdrive.service.ts @@ -0,0 +1,39 @@ +/* + * Copyright 2018 ABSA Group Limited + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import {Injectable} from '@angular/core'; +import {HttpClient} from '@angular/common/http'; +import { map} from 'rxjs/operators'; +import {Observable} from 'rxjs'; +import { + IngestionStatusModel, + IngestionStatusModelFactory, + IngestionStatusResponseModel +} from '../../models/ingestionStatus.model'; +import {api} from "../../constants/api.constants"; + +@Injectable({ + providedIn: 'root', +}) +export class HyperdriveService { + constructor(private httpClient: HttpClient) {} + + getIngestionStatus(id: number): Observable { + return this.httpClient + .get(api.GET_INGESTION_STATUS.replace('{id}', id.toString()), { observe: 'response' }) + .pipe(map((response) => response.body)) + .pipe(map((response) => response.map((ingestionStatusResponse) => IngestionStatusModelFactory.fromIngestionStatusResponseModel(ingestionStatusResponse)))); + } +} diff --git a/ui/src/app/stores/workflows/workflows.actions.ts b/ui/src/app/stores/workflows/workflows.actions.ts index 252e83118..39c612b25 100644 --- a/ui/src/app/stores/workflows/workflows.actions.ts +++ b/ui/src/app/stores/workflows/workflows.actions.ts @@ -21,6 +21,7 @@ import { HistoryModel, WorkflowHistoryModel } from '../../models/historyModel'; import { JobForRunModel } from '../../models/jobForRun.model'; import { JobTemplateModel } from '../../models/jobTemplate.model'; import { TableSearchRequestModel } from '../../models/search/tableSearchRequest.model'; +import { IngestionStatusModel } from '../../models/ingestionStatus.model'; export const INITIALIZE_WORKFLOWS = 'INITIALIZE_WORKFLOWS'; export const INITIALIZE_WORKFLOWS_SUCCESS = 'INITIALIZE_WORKFLOWS_SUCCESS'; @@ -94,6 +95,10 @@ export const REVERT_WORKFLOW = 'REVERT_WORKFLOW'; export const REVERT_WORKFLOW_SUCCESS = 'REVERT_WORKFLOW_SUCCESS'; export const REVERT_WORKFLOW_FAILURE = 'REVERT_WORKFLOW_FAILURE'; +export const LOAD_INGESTION_STATUS = 'LOAD_INGESTION_STATUS'; +export const LOAD_INGESTION_STATUS_SUCCESS = 'LOAD_INGESTION_STATUS_SUCCESS'; +export const LOAD_INGESTION_STATUS_FAILURE = 'LOAD_INGESTION_STATUS_FAILURE'; + export class InitializeWorkflows implements Action { readonly type = INITIALIZE_WORKFLOWS; } @@ -351,6 +356,20 @@ export class RevertWorkflowFailure implements Action { readonly type = REVERT_WORKFLOW_FAILURE; } +export class LoadIngestionStatus implements Action { + readonly type = LOAD_INGESTION_STATUS; + constructor(public payload: number) {} +} + +export class LoadIngestionStatusSuccess implements Action { + readonly type = LOAD_INGESTION_STATUS_SUCCESS; + constructor(public payload: IngestionStatusModel[]) {} +} + +export class LoadIngestionStatusFailure implements Action { + readonly type = LOAD_INGESTION_STATUS_FAILURE; +} + export type WorkflowsActions = | InitializeWorkflows | InitializeWorkflowsSuccess @@ -404,4 +423,7 @@ export type WorkflowsActions = | ImportWorkflowsFailure | RevertWorkflow | RevertWorkflowSuccess - | RevertWorkflowFailure; + | RevertWorkflowFailure + | LoadIngestionStatus + | LoadIngestionStatusSuccess + | LoadIngestionStatusFailure; diff --git a/ui/src/app/stores/workflows/workflows.effects.ts b/ui/src/app/stores/workflows/workflows.effects.ts index 09b08ddcb..4874b7e54 100644 --- a/ui/src/app/stores/workflows/workflows.effects.ts +++ b/ui/src/app/stores/workflows/workflows.effects.ts @@ -42,6 +42,8 @@ import groupBy from 'lodash-es/groupBy'; import { ApiUtil } from '../../utils/api/api.util'; import { JobTemplateModel } from '../../models/jobTemplate.model'; import { TableSearchResponseModel } from 
'../../models/search/tableSearchResponse.model'; +import { HyperdriveService } from '../../services/hyperdrive/hyperdrive.service'; +import { IngestionStatusModel } from '../../models/ingestionStatus.model'; @Injectable() export class WorkflowsEffects { @@ -50,6 +52,7 @@ export class WorkflowsEffects { private workflowService: WorkflowService, private workflowHistoryService: WorkflowHistoryService, private jobService: JobService, + private hyperdriveService: HyperdriveService, private store: Store, private router: Router, private toastrService: ToastrService, @@ -697,6 +700,31 @@ export class WorkflowsEffects { ); }); + statusIngestionLoad = createEffect(() => { + return this.actions.pipe( + ofType(WorkflowActions.LOAD_INGESTION_STATUS), + switchMap((action: WorkflowActions.LoadIngestionStatus) => { + return this.hyperdriveService.getIngestionStatus(action.payload).pipe( + mergeMap((ingestionStatus: IngestionStatusModel[]) => { + return [ + { + type: WorkflowActions.LOAD_INGESTION_STATUS_SUCCESS, + payload: ingestionStatus, + }, + ]; + }), + catchError(() => { + return [ + { + type: WorkflowActions.LOAD_INGESTION_STATUS_FAILURE, + }, + ]; + }), + ); + }), + ); + }); + sortJobsInWorkflow(workflow: WorkflowJoinedModel): WorkflowJoinedModel { const sortedJobs = workflow.dagDefinitionJoined.jobDefinitions.sort((jobLeft, jobRight) => jobLeft.order - jobRight.order); return { ...workflow, dagDefinitionJoined: { ...workflow.dagDefinitionJoined, jobDefinitions: sortedJobs } }; diff --git a/ui/src/app/stores/workflows/workflows.reducers.ts b/ui/src/app/stores/workflows/workflows.reducers.ts index 1178e4185..801afd886 100644 --- a/ui/src/app/stores/workflows/workflows.reducers.ts +++ b/ui/src/app/stores/workflows/workflows.reducers.ts @@ -22,6 +22,7 @@ import { workflowModes } from '../../models/enums/workflowModes.constants'; import { JobTemplateModel } from '../../models/jobTemplate.model'; import { WorkflowModel } from '../../models/workflow.model'; import { TableSearchRequestModel } from '../../models/search/tableSearchRequest.model'; +import { IngestionStatusModel } from '../../models/ingestionStatus.model'; export interface State { workflowsSearch: { @@ -59,6 +60,8 @@ export interface State { jobs: JobForRunModel[]; isSuccessfullyLoaded: boolean; }; + ingestionStatus: IngestionStatusModel[]; + ingestionStatusLoading: boolean; } const initialState: State = { @@ -97,6 +100,8 @@ const initialState: State = { jobs: undefined, isSuccessfullyLoaded: false, }, + ingestionStatusLoading: true, + ingestionStatus: [], }; export function workflowsReducer(state: State = initialState, action: WorkflowsActions.WorkflowsActions) { @@ -594,6 +599,24 @@ export function workflowsReducer(state: State = initialState, action: WorkflowsA loading: false, }, }; + case WorkflowsActions.LOAD_INGESTION_STATUS: + return { + ...state, + ingestionStatusLoading: true, + ingestionStatus: [], + }; + case WorkflowsActions.LOAD_INGESTION_STATUS_SUCCESS: + return { + ...state, + ingestionStatusLoading: false, + ingestionStatus: action.payload, + }; + case WorkflowsActions.LOAD_INGESTION_STATUS_FAILURE: + return { + ...state, + ingestionStatusLoading: false, + ingestionStatus: [], + }; default: return state; } From 4e9b2468c12253f08224a024d50b502dae5258ae Mon Sep 17 00:00:00 2001 From: jozefbakus Date: Tue, 21 Feb 2023 13:49:21 +0100 Subject: [PATCH 11/32] Lint --- ui/src/app/models/ingestionStatus.model.ts | 15 +++++++------ .../services/hyperdrive/hyperdrive.service.ts | 22 +++++++++---------- 2 files changed, 19 insertions(+), 
18 deletions(-) diff --git a/ui/src/app/models/ingestionStatus.model.ts b/ui/src/app/models/ingestionStatus.model.ts index 008a1d1ec..707e00be6 100644 --- a/ui/src/app/models/ingestionStatus.model.ts +++ b/ui/src/app/models/ingestionStatus.model.ts @@ -30,7 +30,12 @@ export class IngestionStatusModelFactory { return this.create( ingestionStatusResponse.jobName, ingestionStatusResponse.jobType, - ingestionStatusResponse?.topic ? TopicModelFactory.create(ingestionStatusResponse.topic.topic, ingestionStatusResponse.topic.messagesToIngest.map((keyValue) => keyValue.value).reduce((acc, cur) => acc + Number(cur), 0)) : null + ingestionStatusResponse?.topic + ? TopicModelFactory.create( + ingestionStatusResponse.topic.topic, + ingestionStatusResponse.topic.messagesToIngest.map((keyValue) => keyValue.value).reduce((acc, cur) => acc + Number(cur), 0), + ) + : null, ); } } @@ -38,7 +43,7 @@ export class IngestionStatusModelFactory { export type TopicModel = { topic: string; messagesToIngest: number; -} +}; export class TopicModelFactory { static create(topic: string, messagesToIngest: number): TopicModel { @@ -46,10 +51,6 @@ export class TopicModelFactory { } } - - - - export type IngestionStatusResponseModel = { jobName: string; jobType: string; @@ -65,7 +66,7 @@ export class IngestionStatusResponseModelFactory { export type TopicResponseModel = { topic: string; messagesToIngest: KeyValueModel[]; -} +}; export class TopicResponseModelFactory { static create(topic: string, messagesToIngest: KeyValueModel[]): TopicResponseModel { diff --git a/ui/src/app/services/hyperdrive/hyperdrive.service.ts b/ui/src/app/services/hyperdrive/hyperdrive.service.ts index 3572f3075..c85d13ea6 100644 --- a/ui/src/app/services/hyperdrive/hyperdrive.service.ts +++ b/ui/src/app/services/hyperdrive/hyperdrive.service.ts @@ -13,16 +13,12 @@ * limitations under the License. 
*/ -import {Injectable} from '@angular/core'; -import {HttpClient} from '@angular/common/http'; -import { map} from 'rxjs/operators'; -import {Observable} from 'rxjs'; -import { - IngestionStatusModel, - IngestionStatusModelFactory, - IngestionStatusResponseModel -} from '../../models/ingestionStatus.model'; -import {api} from "../../constants/api.constants"; +import { Injectable } from '@angular/core'; +import { HttpClient } from '@angular/common/http'; +import { map } from 'rxjs/operators'; +import { Observable } from 'rxjs'; +import { IngestionStatusModel, IngestionStatusModelFactory, IngestionStatusResponseModel } from '../../models/ingestionStatus.model'; +import { api } from '../../constants/api.constants'; @Injectable({ providedIn: 'root', @@ -34,6 +30,10 @@ export class HyperdriveService { return this.httpClient .get(api.GET_INGESTION_STATUS.replace('{id}', id.toString()), { observe: 'response' }) .pipe(map((response) => response.body)) - .pipe(map((response) => response.map((ingestionStatusResponse) => IngestionStatusModelFactory.fromIngestionStatusResponseModel(ingestionStatusResponse)))); + .pipe( + map((response) => + response.map((ingestionStatusResponse) => IngestionStatusModelFactory.fromIngestionStatusResponseModel(ingestionStatusResponse)), + ), + ); } } From 6c6f3b683cb1ea206cf1cc312949c922efcc9e3f Mon Sep 17 00:00:00 2001 From: jozefbakus Date: Tue, 21 Feb 2023 14:58:53 +0100 Subject: [PATCH 12/32] Lint --- .../workflows-home/workflows-home.component.html | 2 +- ui/src/app/models/ingestionStatus.model.ts | 10 +++++----- 2 files changed, 6 insertions(+), 6 deletions(-) diff --git a/ui/src/app/components/workflows/workflows-home/workflows-home.component.html b/ui/src/app/components/workflows/workflows-home/workflows-home.component.html index 68e15b248..024f6e737 100644 --- a/ui/src/app/components/workflows/workflows-home/workflows-home.component.html +++ b/ui/src/app/components/workflows/workflows-home/workflows-home.component.html @@ -252,7 +252,7 @@ Topic: {{job?.topic?.topic}}
Messages to ingest: {{job?.topic?.messagesToIngest >= 0 ? job?.topic?.messagesToIngest : 'Inconsistency detected. Please contact support team!'}}
- Inconsistency detected. Please contact support team! + Offset inconsistency detected. Please contact support team!
Not available diff --git a/ui/src/app/models/ingestionStatus.model.ts b/ui/src/app/models/ingestionStatus.model.ts index 707e00be6..2b4fca88a 100644 --- a/ui/src/app/models/ingestionStatus.model.ts +++ b/ui/src/app/models/ingestionStatus.model.ts @@ -13,8 +13,6 @@ * limitations under the License. */ -import { KeyValueModel } from './keyValue.model'; - export type IngestionStatusModel = { jobName: string; jobType: string; @@ -33,7 +31,9 @@ export class IngestionStatusModelFactory { ingestionStatusResponse?.topic ? TopicModelFactory.create( ingestionStatusResponse.topic.topic, - ingestionStatusResponse.topic.messagesToIngest.map((keyValue) => keyValue.value).reduce((acc, cur) => acc + Number(cur), 0), + Object.keys(ingestionStatusResponse.topic.messagesToIngest) + .map((key) => ingestionStatusResponse.topic.messagesToIngest[key]) + .reduce((acc, cur) => acc + Number(cur), 0), ) : null, ); @@ -65,11 +65,11 @@ export class IngestionStatusResponseModelFactory { export type TopicResponseModel = { topic: string; - messagesToIngest: KeyValueModel[]; + messagesToIngest: [number, number][]; }; export class TopicResponseModelFactory { - static create(topic: string, messagesToIngest: KeyValueModel[]): TopicResponseModel { + static create(topic: string, messagesToIngest: [number, number][]): TopicResponseModel { return { topic: topic, messagesToIngest: messagesToIngest }; } } From c7fb2507e9d7ab31c12ddf812b74037b4e60540b Mon Sep 17 00:00:00 2001 From: jozefbakus Date: Wed, 22 Feb 2023 10:00:44 +0100 Subject: [PATCH 13/32] Removed unused code --- .../trigger/api/rest/controllers/HyperdriveController.scala | 3 --- 1 file changed, 3 deletions(-) diff --git a/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/controllers/HyperdriveController.scala b/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/controllers/HyperdriveController.scala index e21fff382..db17c583d 100644 --- a/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/controllers/HyperdriveController.scala +++ b/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/controllers/HyperdriveController.scala @@ -15,7 +15,6 @@ package za.co.absa.hyperdrive.trigger.api.rest.controllers -import org.slf4j.LoggerFactory import org.springframework.web.bind.annotation._ import za.co.absa.hyperdrive.trigger.api.rest.services.HyperdriveService import za.co.absa.hyperdrive.trigger.models._ @@ -27,8 +26,6 @@ import scala.concurrent.ExecutionContext.Implicits.global @RestController class HyperdriveController @Inject()(hyperdriveService: HyperdriveService) { - private val logger = LoggerFactory.getLogger(this.getClass) - @GetMapping(path = Array("/hyperdrive/workflows/{id}/ingestionStatus")) def getIngestionStatus(@PathVariable id: Long): CompletableFuture[Seq[IngestionStatus]] = hyperdriveService.getIngestionStatus(id).toJava.toCompletableFuture From 5afa29fb1e3e08b9077ae14d459bc5ba34dd62a4 Mon Sep 17 00:00:00 2001 From: jozefbakus Date: Wed, 22 Feb 2023 13:51:17 +0100 Subject: [PATCH 14/32] CheckpointService + tests --- .../api/rest/services/CheckpointService.scala | 20 +++-- .../rest/services/CheckpointServiceTest.scala | 84 ++++++++++++++++++- 2 files changed, 94 insertions(+), 10 deletions(-) diff --git a/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/CheckpointService.scala b/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/CheckpointService.scala index 2614fd695..e130d559a 100644 --- a/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/CheckpointService.scala +++ 
b/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/CheckpointService.scala @@ -35,7 +35,7 @@ trait CheckpointService { implicit ugi: UserGroupInformation ): Try[Option[(String, Boolean)]] - def getLatestCommittedOffset(params: HdfsParameters)(implicit ugi: UserGroupInformation): Option[Map[Int, Long]] + def getLatestCommittedOffset(params: HdfsParameters)(implicit ugi: UserGroupInformation): Try[Option[Map[Int, Long]]] } class HdfsParameters( @@ -100,14 +100,16 @@ class CheckpointServiceImpl @Inject() (@Lazy hdfsService: HdfsService) extends C } } - override def getLatestCommittedOffset(params: HdfsParameters)(implicit ugi: UserGroupInformation): Option[Map[Int, Long]] = { - Try(for { - latestCommit <- getLatestCommitBatchId(params.checkpointLocation).toOption.flatten - pathToLatestCommit = new Path(s"${params.checkpointLocation}/$offsetsDirName/$latestCommit") - offsets <- getOffsetsFromFile(pathToLatestCommit.toString).toOption.flatten - } yield { - offsets.values.head - }).toOption.flatten + override def getLatestCommittedOffset( + params: HdfsParameters + )(implicit ugi: UserGroupInformation): Try[Option[Map[Int, Long]]] = { + getLatestCommitBatchId(params.checkpointLocation).map { + case Some(latestCommit) => + val pathToLatestCommit = new Path(s"${params.checkpointLocation}/$offsetsDirName/$latestCommit") + getOffsetsFromFile(pathToLatestCommit.toString) + .map(_.map(topicPartitionOffsets => topicPartitionOffsets.head._2)) + case None => Try(Option.empty[Map[Int, Long]]) + }.flatten } /** diff --git a/src/test/scala/za/co/absa/hyperdrive/trigger/api/rest/services/CheckpointServiceTest.scala b/src/test/scala/za/co/absa/hyperdrive/trigger/api/rest/services/CheckpointServiceTest.scala index e81567158..130407abe 100644 --- a/src/test/scala/za/co/absa/hyperdrive/trigger/api/rest/services/CheckpointServiceTest.scala +++ b/src/test/scala/za/co/absa/hyperdrive/trigger/api/rest/services/CheckpointServiceTest.scala @@ -25,7 +25,7 @@ import org.mockito.invocation.InvocationOnMock import org.scalatest.mockito.MockitoSugar import org.scalatest.{BeforeAndAfter, FlatSpec, Matchers} -import scala.util.{Failure, Try} +import scala.util.{Failure, Success, Try} class CheckpointServiceTest extends FlatSpec with Matchers with BeforeAndAfter with MockitoSugar { private val hdfsService = mock[HdfsService] @@ -163,6 +163,88 @@ class CheckpointServiceTest extends FlatSpec with Matchers with BeforeAndAfter w result.isFailure shouldBe true } + "getLatestCommittedOffset" should "fail if file check fails" in { + when(hdfsService.exists(any())(any())).thenReturn(Failure(new Exception())) + val params = getHdfsParameters + + val result = underTest.getLatestCommittedOffset(params)(ugi) + + result.isFailure shouldBe true + } + + it should "fail if list files fails" in { + when(hdfsService.exists(any())(any())).thenReturn(Success(true)) + when(hdfsService.listStatus(any(), any())(any())).thenReturn(Failure(new Exception())) + + val params = getHdfsParameters + + val result = underTest.getLatestCommittedOffset(params)(ugi) + + result.isFailure shouldBe true + } + + it should "return none if dir does not exist" in { + when(hdfsService.exists(any())(any())).thenReturn(Success(false)) + + val params = getHdfsParameters + + val result = underTest.getLatestCommittedOffset(params)(ugi) + + result.get.isDefined shouldBe false + } + + it should "return none if dir is empty" in { + when(hdfsService.exists(any())(any())).thenReturn(Success(true)) + when(hdfsService.listStatus(any(), 
any())(any())).thenReturn(Success(Array.empty[FileStatus])) + + val params = getHdfsParameters + + val result = underTest.getLatestCommittedOffset(params)(ugi) + + result.get.isDefined shouldBe false + } + + it should "fail if file parse fails" in { + when(hdfsService.exists(any())(any())).thenReturn(Success(true)) + when(hdfsService.listStatus(any(), any())(any())).thenReturn(Success(createOffsetFiles(12))) + when(hdfsService.parseFileAndClose(any(), any())(any())).thenReturn(Failure(new Exception())) + + val params = getHdfsParameters + + val result = underTest.getLatestCommittedOffset(params)(ugi) + + result.isFailure shouldBe true + } + + it should "return none if file cannot be found" in { + when(hdfsService.exists(any())(any())).thenReturn(Success(true)) + when(hdfsService.listStatus(any(), any())(any())).thenReturn(Success(createOffsetFiles(12))) + when(hdfsService.parseFileAndClose(any(), any())(any())).thenReturn(Success(None)) + + val params = getHdfsParameters + + val result = underTest.getLatestCommittedOffset(params)(ugi) + + result.get.isEmpty shouldBe true + } + + it should "return the parsed contents" in { + val offsets = Map( + "topic" -> Map(0 -> 1000L) + ) + when(hdfsService.exists(any())(any())).thenReturn(Success(true)) + when(hdfsService.listStatus(any(), any())(any())).thenReturn(Success(createOffsetFiles(12))) + when(hdfsService.parseFileAndClose[underTest.TopicPartitionOffsets](any(), any())(any())) + .thenReturn(Try(Some(offsets))) + + val params = getHdfsParameters + + val result = underTest.getLatestCommittedOffset(params)(ugi) + + result.get.isDefined shouldBe true + result.get shouldBe Some(offsets.values.head) + } + private def createOffsetFiles(maxBatchId: Int) = { (0 to maxBatchId).map { i => val fst = new FileStatus() From 71eaa22f2fde1aa68a35f888faa70be2b32a59b1 Mon Sep 17 00:00:00 2001 From: jozefbakus Date: Wed, 22 Feb 2023 13:51:36 +0100 Subject: [PATCH 15/32] Empty line --- .../za/co/absa/hyperdrive/trigger/models/IngestionStatus.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/main/scala/za/co/absa/hyperdrive/trigger/models/IngestionStatus.scala b/src/main/scala/za/co/absa/hyperdrive/trigger/models/IngestionStatus.scala index 3e9ebde26..a1d691f9b 100644 --- a/src/main/scala/za/co/absa/hyperdrive/trigger/models/IngestionStatus.scala +++ b/src/main/scala/za/co/absa/hyperdrive/trigger/models/IngestionStatus.scala @@ -21,4 +21,4 @@ case class IngestionStatus( topic: Option[Topic] ) -case class Topic(topic: String, messagesToIngest: Map[Int, Long]) \ No newline at end of file +case class Topic(topic: String, messagesToIngest: Map[Int, Long]) From c2d99a7a0bc1af025a221570de3092090587af1a Mon Sep 17 00:00:00 2001 From: jozefbakus Date: Wed, 22 Feb 2023 14:34:22 +0100 Subject: [PATCH 16/32] KafkaService + tests --- .../api/rest/services/KafkaService.scala | 17 ++++----- .../api/rest/services/KafkaServiceTest.scala | 37 +++++++++++++++++++ 2 files changed, 44 insertions(+), 10 deletions(-) diff --git a/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/KafkaService.scala b/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/KafkaService.scala index 7cd1a6596..d7336c9a4 100644 --- a/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/KafkaService.scala +++ b/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/KafkaService.scala @@ -33,8 +33,7 @@ import scala.util.Try trait KafkaService { def getBeginningOffsets(topic: String, consumerProperties: Properties): Map[Int, Long] def 
getEndOffsets(topic: String, consumerProperties: Properties): Map[Int, Long] - - def getOffsets(topic: String, consumerProperties: Properties): Option[BeginningEndOffsets] + def getOffsets(topic: String, consumerProperties: Properties): BeginningEndOffsets } @Service @@ -54,14 +53,12 @@ class KafkaServiceImpl @Inject() (generalConfig: GeneralConfig) extends KafkaSer getOffsets(topic, consumerProperties, EndOffsets) } - def getOffsets(topic: String, consumerProperties: Properties): Option[BeginningEndOffsets] = { - Try( - BeginningEndOffsets( - topic, - getOffsets(topic, consumerProperties, BeginningOffsets), - getOffsets(topic, consumerProperties, EndOffsets) - ) - ).toOption + def getOffsets(topic: String, consumerProperties: Properties): BeginningEndOffsets = { + BeginningEndOffsets( + topic, + getOffsets(topic, consumerProperties, BeginningOffsets), + getOffsets(topic, consumerProperties, EndOffsets) + ) } def createKafkaConsumer(propertiesThreadId: (Properties, Long)): KafkaConsumer[String, String] = { diff --git a/src/test/scala/za/co/absa/hyperdrive/trigger/api/rest/services/KafkaServiceTest.scala b/src/test/scala/za/co/absa/hyperdrive/trigger/api/rest/services/KafkaServiceTest.scala index bbe943ca2..7bcd92c8c 100644 --- a/src/test/scala/za/co/absa/hyperdrive/trigger/api/rest/services/KafkaServiceTest.scala +++ b/src/test/scala/za/co/absa/hyperdrive/trigger/api/rest/services/KafkaServiceTest.scala @@ -22,6 +22,7 @@ import org.mockito.Mockito.when import org.scalatest.mockito.MockitoSugar import org.scalatest.{FlatSpec, Matchers} import za.co.absa.hyperdrive.trigger.configuration.application.{GeneralConfig, TestGeneralConfig} +import za.co.absa.hyperdrive.trigger.models.BeginningEndOffsets import java.util.Properties @@ -73,4 +74,40 @@ class KafkaServiceTest extends FlatSpec with MockitoSugar with Matchers { result shouldBe Map() } + + "getOffsets" should "return a map of start and end offsets" in { + import scala.collection.JavaConverters._ + val topicName = "topic" + val partitions = Seq( + new PartitionInfo(topicName, 0, null, null, null), + new PartitionInfo(topicName, 1, null, null, null) + ) + val endOffsets = Map( + new TopicPartition(topicName, 0) -> long2Long(200L), + new TopicPartition(topicName, 1) -> long2Long(400L) + ).asJava + val startOffsets = Map( + new TopicPartition(topicName, 0) -> long2Long(100L), + new TopicPartition(topicName, 1) -> long2Long(200L) + ).asJava + val topicPartitions = partitions.map(p => new TopicPartition(p.topic(), p.partition())).asJava + + when(mockKafkaConsumer.partitionsFor(any())) + .thenReturn(partitions.asJava) + .thenReturn(partitions.asJava) + when(mockKafkaConsumer.beginningOffsets(eqTo(topicPartitions))).thenReturn(startOffsets) + when(mockKafkaConsumer.endOffsets(eqTo(topicPartitions))).thenReturn(endOffsets) + + val result = underTest.getOffsets(topicName, new Properties()) + + result shouldBe BeginningEndOffsets(topicName, Map(0 -> 100L, 1 -> 200L), Map(0 -> 200L, 1 -> 400L)) + } + + it should "return empty beginning and end offsets if partitionsFor returns null" in { + val topicName = "non-existent-topic" + when(mockKafkaConsumer.partitionsFor(any())).thenReturn(null) + + val result = underTest.getOffsets(topicName, new Properties()) + result shouldBe BeginningEndOffsets(topicName, Map.empty, Map.empty) + } } From 01093e0f0505292063301aacdb0203ee3f4d2558 Mon Sep 17 00:00:00 2001 From: jozefbakus Date: Wed, 22 Feb 2023 15:27:33 +0100 Subject: [PATCH 17/32] HyperdriveOffsetService + tests --- 
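Note: this patch replaces the offset-comparison for-comprehension with explicit handling of
empty Kafka offsets and failed checkpoint reads, and renames the service to match. The
per-partition arithmetic reduces to "Kafka end offset minus the effective resume position";
a minimal sketch of that invariant (the helper name and shape are illustrative only, not
part of the patch):

    // Mirrors the cases in getNumberOfMessagesLeft below.
    def messagesLeft(beginning: Long, end: Long, committed: Option[Long]): Long =
      committed match {
        case Some(c) if c > end       => end - c         // checkpoint ahead of Kafka: negative
        case Some(c) if c > beginning => end - c         // resume from the committed offset
        case _                        => end - beginning // no usable checkpoint: whole range
      }
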
.../services/HyperdriveOffsetService.scala | 57 ++++--- ...cala => HyperdriveOffsetServiceTest.scala} | 141 +++++++++++++++--- 2 files changed, 159 insertions(+), 39 deletions(-) rename src/test/scala/za/co/absa/hyperdrive/trigger/api/rest/services/{HyperdriveOffsetComparisonServiceTest.scala => HyperdriveOffsetServiceTest.scala} (65%) diff --git a/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveOffsetService.scala b/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveOffsetService.scala index 51f82fc64..7e9f4881e 100644 --- a/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveOffsetService.scala +++ b/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveOffsetService.scala @@ -31,6 +31,7 @@ import za.co.absa.hyperdrive.trigger.models.{JobInstanceParameters, SparkInstanc import java.util.Properties import javax.inject.Inject import scala.concurrent.{ExecutionContext, Future} +import scala.util.{Failure, Success} trait HyperdriveOffsetService { def isNewJobInstanceRequired(jobParameters: JobInstanceParameters)(implicit ec: ExecutionContext): Future[Boolean] @@ -42,10 +43,10 @@ trait HyperdriveOffsetService { @Service @Lazy -class HyperdriveOffsetComparisonServiceImpl @Inject() (sparkConfig: SparkConfig, - @Lazy checkpointService: CheckpointService, - @Lazy userGroupInformationService: UserGroupInformationService, - kafkaService: KafkaService +class HyperdriveOffsetServiceImpl @Inject() (sparkConfig: SparkConfig, + @Lazy checkpointService: CheckpointService, + @Lazy userGroupInformationService: UserGroupInformationService, + kafkaService: KafkaService ) extends HyperdriveOffsetService { private val logger = LoggerFactory.getLogger(this.getClass) private val HyperdriveCheckpointKey = "writer.common.checkpoint.location" @@ -74,26 +75,38 @@ class HyperdriveOffsetComparisonServiceImpl @Inject() (sparkConfig: SparkConfig, logger.debug(s"Kafka parameters were not found in job definition $jobParameters") } - Future(for { - kafkaParameters <- kafkaParametersOpt - hdfsParameters <- hdfsParametersOpt - ugi = userGroupInformationService.loginUserFromKeytab(hdfsParameters.principal, hdfsParameters.keytab) - kafkaOffsets <- kafkaService.getOffsets(kafkaParameters._1, kafkaParameters._2) - if kafkaOffsets.beginningOffsets.keySet == kafkaOffsets.endOffsets.keySet - hdfsOffsets <- checkpointService.getLatestCommittedOffset(hdfsParameters)(ugi) - } yield { - val messagesLeft = kafkaOffsets.beginningOffsets.map { case (partition, kafkaBeginningOffset) => - val kafkaEndOffset = kafkaOffsets.endOffsets(partition) - val numberOfMessages = hdfsOffsets.get(partition) match { - case Some(hdfsOffset) if hdfsOffset > kafkaEndOffset => kafkaEndOffset - hdfsOffset - case Some(hdfsOffset) if hdfsOffset > kafkaBeginningOffset => kafkaEndOffset - hdfsOffset - case Some(hdfsOffset) if hdfsOffset <= kafkaBeginningOffset => kafkaEndOffset - kafkaBeginningOffset - case None => kafkaEndOffset - kafkaBeginningOffset + Future( + for { + kafkaParameters <- kafkaParametersOpt + hdfsParameters <- hdfsParametersOpt + } yield { + val kafkaOffsets = kafkaService.getOffsets(kafkaParameters._1, kafkaParameters._2) + if ( + kafkaOffsets.beginningOffsets.isEmpty || kafkaOffsets.endOffsets.isEmpty || kafkaOffsets.beginningOffsets.keySet != kafkaOffsets.endOffsets.keySet + ) { + None + } else { + val ugi = userGroupInformationService.loginUserFromKeytab(hdfsParameters.principal, hdfsParameters.keytab) + val hdfsOffsetsTry = 
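+            // A Failure from the HDFS read stays inside the Try and is mapped to None below.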
checkpointService.getLatestCommittedOffset(hdfsParameters)(ugi) + + hdfsOffsetsTry match { + case Failure(_) => None + case Success(hdfsOffsetsOption) => + val messagesLeft = kafkaOffsets.beginningOffsets.map { case (partition, kafkaBeginningOffset) => + val kafkaEndOffset = kafkaOffsets.endOffsets(partition) + val numberOfMessages = hdfsOffsetsOption.flatMap(_.get(partition)) match { + case Some(hdfsOffset) if hdfsOffset > kafkaEndOffset => kafkaEndOffset - hdfsOffset + case Some(hdfsOffset) if hdfsOffset > kafkaBeginningOffset => kafkaEndOffset - hdfsOffset + case Some(hdfsOffset) if hdfsOffset <= kafkaBeginningOffset => kafkaEndOffset - kafkaBeginningOffset + case None => kafkaEndOffset - kafkaBeginningOffset + } + partition -> numberOfMessages + } + Some((kafkaOffsets.topic, messagesLeft)) + } } - partition -> numberOfMessages } - (kafkaOffsets.topic, messagesLeft) - }) + ).map(_.flatten) } /** diff --git a/src/test/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveOffsetComparisonServiceTest.scala b/src/test/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveOffsetServiceTest.scala similarity index 65% rename from src/test/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveOffsetComparisonServiceTest.scala rename to src/test/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveOffsetServiceTest.scala index a3144e51d..434964fb6 100644 --- a/src/test/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveOffsetComparisonServiceTest.scala +++ b/src/test/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveOffsetServiceTest.scala @@ -23,21 +23,17 @@ import org.scalatest.mockito.MockitoSugar import org.scalatest.{AsyncFlatSpec, BeforeAndAfter, Matchers} import za.co.absa.hyperdrive.trigger.configuration.application.DefaultTestSparkConfig import za.co.absa.hyperdrive.trigger.models.enums.JobTypes -import za.co.absa.hyperdrive.trigger.models.{ShellInstanceParameters, SparkInstanceParameters} +import za.co.absa.hyperdrive.trigger.models.{BeginningEndOffsets, ShellInstanceParameters, SparkInstanceParameters} -import scala.util.Try +import scala.util.{Failure, Success, Try} -class HyperdriveOffsetComparisonServiceTest extends AsyncFlatSpec with Matchers with BeforeAndAfter with MockitoSugar { +class HyperdriveOffsetServiceTest extends AsyncFlatSpec with Matchers with BeforeAndAfter with MockitoSugar { private val checkpointService = mock[CheckpointService] private val kafkaService = mock[KafkaService] private val ugiService = mock[UserGroupInformationService] private val ugi = mock[UserGroupInformation] private val underTest = - new HyperdriveOffsetComparisonServiceImpl(DefaultTestSparkConfig().yarn, - checkpointService, - ugiService, - kafkaService - ) + new HyperdriveOffsetServiceImpl(DefaultTestSparkConfig().yarn, checkpointService, ugiService, kafkaService) before { reset(checkpointService) @@ -46,7 +42,7 @@ class HyperdriveOffsetComparisonServiceTest extends AsyncFlatSpec with Matchers "isNewJobInstanceRequired" should "return false if the kafka and checkpoint folder offsets are the same" in { val config = getSparkConfig - val underTest = new HyperdriveOffsetComparisonServiceImpl(config.yarn, checkpointService, ugiService, kafkaService) + val underTest = new HyperdriveOffsetServiceImpl(config.yarn, checkpointService, ugiService, kafkaService) val jobParameters = getJobParameters when(ugiService.loginUserFromKeytab(any(), any())).thenReturn(ugi) @@ -128,7 +124,7 @@ class HyperdriveOffsetComparisonServiceTest extends 
AsyncFlatSpec with Matchers it should "return true if the kafka topic does not exist" in { val config = getSparkConfig - val underTest = new HyperdriveOffsetComparisonServiceImpl(config.yarn, checkpointService, ugiService, kafkaService) + val underTest = new HyperdriveOffsetServiceImpl(config.yarn, checkpointService, ugiService, kafkaService) val jobParameters = getJobParameters when(kafkaService.getBeginningOffsets(any(), any())).thenReturn(Map[Int, Long]()) @@ -144,7 +140,7 @@ class HyperdriveOffsetComparisonServiceTest extends AsyncFlatSpec with Matchers it should "return false if the kafka topic is empty" in { val config = getSparkConfig - val underTest = new HyperdriveOffsetComparisonServiceImpl(config.yarn, checkpointService, ugiService, kafkaService) + val underTest = new HyperdriveOffsetServiceImpl(config.yarn, checkpointService, ugiService, kafkaService) val jobParameters = getJobParameters when(kafkaService.getBeginningOffsets(any(), any())).thenReturn(Map(0 -> 21L, 1 -> 42L)) @@ -160,7 +156,7 @@ class HyperdriveOffsetComparisonServiceTest extends AsyncFlatSpec with Matchers it should "return true if no offset file is present" in { val config = getSparkConfig - val underTest = new HyperdriveOffsetComparisonServiceImpl(config.yarn, checkpointService, ugiService, kafkaService) + val underTest = new HyperdriveOffsetServiceImpl(config.yarn, checkpointService, ugiService, kafkaService) val jobParameters = getJobParameters when(kafkaService.getBeginningOffsets(any(), any())).thenReturn(Map(0 -> 0L)) @@ -178,7 +174,7 @@ class HyperdriveOffsetComparisonServiceTest extends AsyncFlatSpec with Matchers it should "return true if the offset is not committed" in { val config = getSparkConfig - val underTest = new HyperdriveOffsetComparisonServiceImpl(config.yarn, checkpointService, ugiService, kafkaService) + val underTest = new HyperdriveOffsetServiceImpl(config.yarn, checkpointService, ugiService, kafkaService) val jobParameters = getJobParameters when(kafkaService.getBeginningOffsets(any(), any())).thenReturn(Map(0 -> 0L)) @@ -195,7 +191,7 @@ class HyperdriveOffsetComparisonServiceTest extends AsyncFlatSpec with Matchers it should "return true if a offset file could not be parsed" in { val config = getSparkConfig - val underTest = new HyperdriveOffsetComparisonServiceImpl(config.yarn, checkpointService, ugiService, kafkaService) + val underTest = new HyperdriveOffsetServiceImpl(config.yarn, checkpointService, ugiService, kafkaService) val jobParameters = getJobParameters when(kafkaService.getBeginningOffsets(any(), any())).thenReturn(Map(0 -> 0L)) @@ -215,7 +211,7 @@ class HyperdriveOffsetComparisonServiceTest extends AsyncFlatSpec with Matchers it should "return true if the checkpoints offset does not contain the topic" in { val config = getSparkConfig - val underTest = new HyperdriveOffsetComparisonServiceImpl(config.yarn, checkpointService, ugiService, kafkaService) + val underTest = new HyperdriveOffsetServiceImpl(config.yarn, checkpointService, ugiService, kafkaService) val jobParameters = getJobParameters when(kafkaService.getBeginningOffsets(any(), any())).thenReturn(Map(0 -> 0L)) @@ -235,7 +231,7 @@ class HyperdriveOffsetComparisonServiceTest extends AsyncFlatSpec with Matchers it should "return true if the kafka offsets and checkpoint offset do not have the same set of partitions" in { val config = getSparkConfig - val underTest = new HyperdriveOffsetComparisonServiceImpl(config.yarn, checkpointService, ugiService, kafkaService) + val underTest = new 
HyperdriveOffsetServiceImpl(config.yarn, checkpointService, ugiService, kafkaService) val jobParameters = getJobParameters when(checkpointService.getLatestOffsetFilePath(any())(any())).thenReturn(Try(Some(("1", true)))) @@ -256,7 +252,7 @@ class HyperdriveOffsetComparisonServiceTest extends AsyncFlatSpec with Matchers it should "return true if the kafka offsets and checkpoint offsets are not the same" in { val config = getSparkConfig - val underTest = new HyperdriveOffsetComparisonServiceImpl(config.yarn, checkpointService, ugiService, kafkaService) + val underTest = new HyperdriveOffsetServiceImpl(config.yarn, checkpointService, ugiService, kafkaService) val jobParameters = getJobParameters when(checkpointService.getLatestOffsetFilePath(any())(any())).thenReturn(Try(Some(("1", true)))) @@ -275,6 +271,117 @@ class HyperdriveOffsetComparisonServiceTest extends AsyncFlatSpec with Matchers } } + "getNumberOfMessagesLeft" should "return none if get offsets from kafka fails" in { + val config = getSparkConfig + val jobParameters = getJobParameters + val underTest = new HyperdriveOffsetServiceImpl(config.yarn, checkpointService, ugiService, kafkaService) + + when(ugiService.loginUserFromKeytab(any(), any())).thenReturn(ugi) + when(kafkaService.getOffsets(any(), any())).thenReturn(BeginningEndOffsets("topic", Map.empty, Map.empty)) + + val resultFut = underTest.getNumberOfMessagesLeft(jobParameters) + resultFut.map { result => + result shouldBe None + } + } + + it should "return none if get offsets from checkpoint fails" in { + val config = getSparkConfig + val jobParameters = getJobParameters + val underTest = new HyperdriveOffsetServiceImpl(config.yarn, checkpointService, ugiService, kafkaService) + + when(ugiService.loginUserFromKeytab(any(), any())).thenReturn(ugi) + when(kafkaService.getOffsets(any(), any())) + .thenReturn(BeginningEndOffsets("topic", Map(0 -> 0, 1 -> 10), Map(0 -> 10, 1 -> 100))) + when(checkpointService.getLatestCommittedOffset(any())(any())).thenReturn(Failure(new Exception())) + + val resultFut = underTest.getNumberOfMessagesLeft(jobParameters) + resultFut.map { result => + result shouldBe None + } + } + + it should "return number of all messages in kafka if there is no offset in checkpoint" in { + val config = getSparkConfig + val jobParameters = getJobParameters + val topic = "topic" + val expectedResult = (topic, Map(0 -> 10, 1 -> 90)) + + val underTest = new HyperdriveOffsetServiceImpl(config.yarn, checkpointService, ugiService, kafkaService) + + when(ugiService.loginUserFromKeytab(any(), any())).thenReturn(ugi) + when(kafkaService.getOffsets(any(), any())) + .thenReturn(BeginningEndOffsets(topic, Map(0 -> 0, 1 -> 10), Map(0 -> 10, 1 -> 100))) + when(checkpointService.getLatestCommittedOffset(any())(any())).thenReturn(Success(None)) + + val resultFut = underTest.getNumberOfMessagesLeft(jobParameters) + resultFut.map { result => + result.isDefined shouldBe true + result.get shouldBe expectedResult + } + } + + it should "return number of messages left to ingest" in { + val config = getSparkConfig + val jobParameters = getJobParameters + val topic = "topic" + val expectedResult = (topic, Map(0 -> 8, 1 -> 80)) + + val underTest = new HyperdriveOffsetServiceImpl(config.yarn, checkpointService, ugiService, kafkaService) + + when(ugiService.loginUserFromKeytab(any(), any())).thenReturn(ugi) + when(kafkaService.getOffsets(any(), any())) + .thenReturn(BeginningEndOffsets(topic, Map(0 -> 0, 1 -> 10), Map(0 -> 10, 1 -> 100))) + 
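+    // Kafka ranges per partition are [0, 10) and [10, 100); with committed offsets 2 and 20, 8 and 80 messages remain.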
when(checkpointService.getLatestCommittedOffset(any())(any())).thenReturn(Try(Some(Map(0 -> 2L, 1 -> 20L)))) + + val resultFut = underTest.getNumberOfMessagesLeft(jobParameters) + resultFut.map { result => + result.isDefined shouldBe true + result.get shouldBe expectedResult + } + } + + it should "return number of messages left to ingest and ignore extra partition in checkpoint offset" in { + val config = getSparkConfig + val jobParameters = getJobParameters + val topic = "topic" + val expectedResult = (topic, Map(0 -> 8, 1 -> 80)) + + val underTest = new HyperdriveOffsetServiceImpl(config.yarn, checkpointService, ugiService, kafkaService) + + when(ugiService.loginUserFromKeytab(any(), any())).thenReturn(ugi) + when(kafkaService.getOffsets(any(), any())) + .thenReturn(BeginningEndOffsets(topic, Map(0 -> 0, 1 -> 10), Map(0 -> 10, 1 -> 100))) + when(checkpointService.getLatestCommittedOffset(any())(any())) + .thenReturn(Try(Some(Map(0 -> 2L, 1 -> 20L, 3 -> 10L)))) + + val resultFut = underTest.getNumberOfMessagesLeft(jobParameters) + resultFut.map { result => + result.isDefined shouldBe true + result.get shouldBe expectedResult + } + } + + it should "return number of messages left to ingest and handle missing partition in checkpoint offset" in { + val config = getSparkConfig + val jobParameters = getJobParameters + val topic = "topic" + val expectedResult = (topic, Map(0 -> 8, 1 -> 90)) + + val underTest = new HyperdriveOffsetServiceImpl(config.yarn, checkpointService, ugiService, kafkaService) + + when(ugiService.loginUserFromKeytab(any(), any())).thenReturn(ugi) + when(kafkaService.getOffsets(any(), any())) + .thenReturn(BeginningEndOffsets(topic, Map(0 -> 0, 1 -> 10), Map(0 -> 10, 1 -> 100))) + when(checkpointService.getLatestCommittedOffset(any())(any())).thenReturn(Try(Some(Map(0 -> 2L)))) + + val resultFut = underTest.getNumberOfMessagesLeft(jobParameters) + resultFut.map { result => + result.isDefined shouldBe true + result.get shouldBe expectedResult + } + } + private def getSparkConfig = DefaultTestSparkConfig().copy(additionalConfs = Map( From 449996ae294c3d2066b126f1f9f62744efe4e2ca Mon Sep 17 00:00:00 2001 From: jozefbakus Date: Wed, 22 Feb 2023 15:36:09 +0100 Subject: [PATCH 18/32] HyperdriveOffsetService + tests --- .../workflows-home.component.spec.ts | 1030 ++++++++--------- 1 file changed, 515 insertions(+), 515 deletions(-) diff --git a/ui/src/app/components/workflows/workflows-home/workflows-home.component.spec.ts b/ui/src/app/components/workflows/workflows-home/workflows-home.component.spec.ts index bf6efbd3e..71e3a9f48 100644 --- a/ui/src/app/components/workflows/workflows-home/workflows-home.component.spec.ts +++ b/ui/src/app/components/workflows/workflows-home/workflows-home.component.spec.ts @@ -1,515 +1,515 @@ -/* - * Copyright 2018 ABSA Group Limited - * - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. 
- */ - -import { ComponentFixture, TestBed, waitForAsync } from '@angular/core/testing'; - -import { WorkflowsHomeComponent } from './workflows-home.component'; -import { provideMockStore } from '@ngrx/store/testing'; -import { WorkflowModelFactory } from '../../../models/workflow.model'; -import { ConfirmationDialogService } from '../../../services/confirmation-dialog/confirmation-dialog.service'; -import { Store } from '@ngrx/store'; -import { AppState } from '../../../stores/app.reducers'; -import { Subject } from 'rxjs'; -import { RouterTestingModule } from '@angular/router/testing'; -import { Router } from '@angular/router'; -import { absoluteRoutes } from '../../../constants/routes.constants'; -import { ClrDatagridStateInterface } from '@clr/angular'; -import { - DeleteWorkflow, - SwitchWorkflowActiveState, - LoadJobsForRun, - ExportWorkflows, - SetWorkflowFile, - ImportWorkflows, - RunWorkflows, - SearchWorkflows, -} from '../../../stores/workflows/workflows.actions'; - -describe('WorkflowsHomeComponent', () => { - let fixture: ComponentFixture; - let underTest: WorkflowsHomeComponent; - let confirmationDialogService: ConfirmationDialogService; - let store: Store; - let router: Router; - - const initialAppState = { - workflows: { - workflowsSearch: { - loading: true, - workflows: [ - WorkflowModelFactory.create('workflowOne', undefined, undefined, undefined, undefined, undefined, undefined), - WorkflowModelFactory.create('workflowTwo', undefined, undefined, undefined, undefined, undefined, undefined), - ], - total: 2, - searchRequest: undefined, - }, - workflowAction: { - loading: false, - }, - }, - }; - - beforeEach( - waitForAsync(() => { - TestBed.configureTestingModule({ - providers: [ConfirmationDialogService, provideMockStore({ initialState: initialAppState })], - declarations: [WorkflowsHomeComponent], - imports: [RouterTestingModule.withRoutes([])], - }).compileComponents(); - confirmationDialogService = TestBed.inject(ConfirmationDialogService); - store = TestBed.inject(Store); - router = TestBed.inject(Router); - }), - ); - - beforeEach(() => { - fixture = TestBed.createComponent(WorkflowsHomeComponent); - underTest = fixture.componentInstance; - }); - - it('should create', () => { - expect(underTest).toBeTruthy(); - }); - - it( - 'should after view init set component properties', - waitForAsync(() => { - fixture.detectChanges(); - fixture.whenStable().then(() => { - expect(underTest.workflows).toEqual([...initialAppState.workflows.workflowsSearch.workflows]); - expect(underTest.total).toEqual(initialAppState.workflows.workflowsSearch.total); - expect(underTest.sort).toEqual(undefined); - expect(underTest.filters).toEqual([]); - expect(underTest.pageFrom).toEqual(0); - expect(underTest.pageSize).toEqual(100); - expect(underTest.page).toEqual(0 / 100 + 1); - }); - }), - ); - - it( - 'exportWorkflow() should dispatch workflow export', - waitForAsync(() => { - const id = 42; - const storeSpy = spyOn(store, 'dispatch'); - - underTest.exportWorkflow(id); - - fixture.detectChanges(); - fixture.whenStable().then(() => { - expect(storeSpy).toHaveBeenCalledWith(new ExportWorkflows([id])); - }); - }), - ); - - it( - 'openImportWorkflowModal() should set is workflow import variable to true', - waitForAsync(() => { - expect(underTest.isWorkflowImportOpen).toBeFalsy(); - underTest.openImportWorkflowModal(); - - fixture.detectChanges(); - fixture.whenStable().then(() => { - expect(underTest.isWorkflowImportOpen).toBeTruthy(); - }); - }), - ); - - it( - 'setWorkflowFile() should set 
workflow file', - waitForAsync(() => { - const dataTransfer = new DataTransfer(); - const file: File = new File(['content'], 'filename.jpg'); - dataTransfer.items.add(file); - const fileList: FileList = dataTransfer.files; - - expect(underTest.workflowFile).toBeUndefined(); - underTest.setWorkflowFile(fileList); - - fixture.detectChanges(); - fixture.whenStable().then(() => { - expect(underTest.workflowFile).toBeDefined(); - }); - }), - ); - - it( - 'closeWorkflowImport() should close modal and remove workflow file when is submitted is false', - waitForAsync(() => { - const isSubmitted = false; - const file: File = new File(['content'], 'filename.jpg'); - const storeSpy = spyOn(store, 'dispatch'); - - underTest.isWorkflowImportOpen = true; - underTest.workflowFile = file; - - fixture.detectChanges(); - fixture.whenStable().then(() => { - expect(underTest.workflowFile).toBeDefined(); - expect(underTest.isWorkflowImportOpen).toBeTruthy(); - - underTest.closeWorkflowImport(isSubmitted); - fixture.detectChanges(); - fixture.whenStable().then(() => { - expect(underTest.workflowFile).toBeUndefined(); - expect(underTest.isWorkflowImportOpen).toBeFalsy(); - expect(storeSpy).toHaveBeenCalledTimes(0); - }); - }); - }), - ); - - it( - 'closeWorkflowImport() should close modal, remove workflow file and dispatch and navigate to import when is submitted is true', - waitForAsync(() => { - const isSubmitted = true; - const file: File = new File(['content'], 'filename.jpg'); - const storeSpy = spyOn(store, 'dispatch'); - const routerSpy = spyOn(router, 'navigate'); - - underTest.isWorkflowImportOpen = true; - underTest.workflowFile = file; - - fixture.detectChanges(); - fixture.whenStable().then(() => { - expect(underTest.workflowFile).toBeDefined(); - expect(underTest.isWorkflowImportOpen).toBeTruthy(); - - underTest.closeWorkflowImport(isSubmitted); - fixture.detectChanges(); - fixture.whenStable().then(() => { - expect(underTest.workflowFile).toBeUndefined(); - expect(underTest.isWorkflowImportOpen).toBeFalsy(); - - expect(routerSpy).toHaveBeenCalledTimes(1); - expect(routerSpy).toHaveBeenCalledWith([absoluteRoutes.IMPORT_WORKFLOW]); - expect(storeSpy).toHaveBeenCalled(); - expect(storeSpy).toHaveBeenCalledWith(new SetWorkflowFile(file)); - }); - }); - }), - ); - - it( - 'openImportMutliWorkflowsModal() should set isMultiWorkflowsImportOpen to true', - waitForAsync(() => { - expect(underTest.isMultiWorkflowsImportOpen).toBeFalsy(); - underTest.openImportMultiWorkflowsModal(); - - fixture.detectChanges(); - fixture.whenStable().then(() => { - expect(underTest.isMultiWorkflowsImportOpen).toBeTruthy(); - }); - }), - ); - - it( - 'setMultiWorkflowsFile() should set multiWorkflowsFile', - waitForAsync(() => { - const dataTransfer = new DataTransfer(); - const file: File = new File(['content'], 'workflows.zip'); - dataTransfer.items.add(file); - const fileList: FileList = dataTransfer.files; - - expect(underTest.multiWorkflowsFile).toBeUndefined(); - underTest.setMultiWorkflowsFile(fileList); - - fixture.detectChanges(); - fixture.whenStable().then(() => { - expect(underTest.multiWorkflowsFile).toBeDefined(); - }); - }), - ); - - it( - 'closeMultiWorkflowsImport() should close modal and remove multi workflow file when is submitted is false', - waitForAsync(() => { - const isSubmitted = false; - const file: File = new File(['content'], 'workflows.zip'); - const storeSpy = spyOn(store, 'dispatch'); - - underTest.isMultiWorkflowsImportOpen = true; - underTest.multiWorkflowsFile = file; - - 
fixture.detectChanges(); - fixture.whenStable().then(() => { - expect(underTest.multiWorkflowsFile).toBeDefined(); - expect(underTest.isMultiWorkflowsImportOpen).toBeTruthy(); - - underTest.closeMultiWorkflowsImport(isSubmitted); - fixture.detectChanges(); - fixture.whenStable().then(() => { - expect(underTest.multiWorkflowsFile).toBeUndefined(); - expect(underTest.isMultiWorkflowsImportOpen).toBeFalsy(); - expect(storeSpy).toHaveBeenCalledTimes(0); - }); - }); - }), - ); - - it( - 'closeMultiWorkflowsImport() should close modal, remove workflow file and dispatch and navigate to import when is submitted is true', - waitForAsync(() => { - const isSubmitted = true; - const file: File = new File(['content'], 'workflows.zip'); - const storeSpy = spyOn(store, 'dispatch'); - - underTest.isMultiWorkflowsImportOpen = true; - underTest.multiWorkflowsFile = file; - - fixture.detectChanges(); - fixture.whenStable().then(() => { - expect(underTest.multiWorkflowsFile).toBeDefined(); - expect(underTest.isMultiWorkflowsImportOpen).toBeTruthy(); - - underTest.closeMultiWorkflowsImport(isSubmitted); - fixture.detectChanges(); - fixture.whenStable().then(() => { - expect(underTest.multiWorkflowsFile).toBeUndefined(); - expect(underTest.isMultiWorkflowsImportOpen).toBeFalsy(); - - expect(storeSpy).toHaveBeenCalled(); - expect(storeSpy).toHaveBeenCalledWith(new ImportWorkflows(file)); - }); - }); - }), - ); - - it( - 'deleteWorkflow() should dispatch delete workflow action with id when dialog is confirmed', - waitForAsync(() => { - const id = 1; - const subject = new Subject(); - const storeSpy = spyOn(store, 'dispatch'); - - spyOn(confirmationDialogService, 'confirm').and.returnValue(subject.asObservable()); - - underTest.deleteWorkflow(id); - subject.next(true); - - fixture.detectChanges(); - fixture.whenStable().then(() => { - expect(storeSpy).toHaveBeenCalled(); - expect(storeSpy).toHaveBeenCalledWith(new DeleteWorkflow(id)); - }); - }), - ); - - it( - 'deleteWorkflow() should not dispatch delete workflow action when dialog is not confirmed', - waitForAsync(() => { - const id = 1; - const subject = new Subject(); - const storeSpy = spyOn(store, 'dispatch'); - - spyOn(confirmationDialogService, 'confirm').and.returnValue(subject.asObservable()); - - underTest.deleteWorkflow(id); - subject.next(false); - - fixture.detectChanges(); - fixture.whenStable().then(() => { - expect(storeSpy).toHaveBeenCalledTimes(0); - }); - }), - ); - - it( - 'switchWorkflowActiveState() should dispatch switch workflow active state with id and old value when dialog is confirmed', - waitForAsync(() => { - const id = 1; - const currentActiveState = true; - const subject = new Subject(); - const storeSpy = spyOn(store, 'dispatch'); - - spyOn(confirmationDialogService, 'confirm').and.returnValue(subject.asObservable()); - - underTest.switchWorkflowActiveState(id, currentActiveState); - subject.next(true); - - fixture.detectChanges(); - fixture.whenStable().then(() => { - expect(storeSpy).toHaveBeenCalled(); - expect(storeSpy).toHaveBeenCalledWith( - new SwitchWorkflowActiveState({ - id: id, - currentActiveState: currentActiveState, - }), - ); - }); - }), - ); - - it( - 'switchWorkflowActiveState() should not dispatch switch workflow active state when dialog is not confirmed', - waitForAsync(() => { - const id = 1; - const currentActiveState = false; - const subject = new Subject(); - const storeSpy = spyOn(store, 'dispatch'); - - spyOn(confirmationDialogService, 'confirm').and.returnValue(subject.asObservable()); - - 
underTest.switchWorkflowActiveState(id, currentActiveState); - subject.next(false); - - fixture.detectChanges(); - fixture.whenStable().then(() => { - expect(storeSpy).toHaveBeenCalledTimes(0); - }); - }), - ); - - it( - 'runWorkflow() should dispatch load jobs for run', - waitForAsync(() => { - const id = 42; - const storeSpy = spyOn(store, 'dispatch'); - - underTest.runWorkflow(id); - - fixture.detectChanges(); - fixture.whenStable().then(() => { - expect(storeSpy).toHaveBeenCalledWith(new LoadJobsForRun(id)); - }); - }), - ); - - it( - 'runSelectedWorkflows() should dispatch run workflows', - waitForAsync(() => { - const subject = new Subject(); - spyOn(confirmationDialogService, 'confirm').and.returnValue(subject.asObservable()); - subject.next(true); - - const workflows = [ - WorkflowModelFactory.create('workflowName1', true, 'projectName1', new Date(Date.now()), new Date(Date.now()), 1, 0), - WorkflowModelFactory.create('workflowName2', true, 'projectName2', new Date(Date.now()), new Date(Date.now()), 1, 1), - ]; - const workflowIds = workflows.map((workflow) => workflow.id); - const storeSpy = spyOn(store, 'dispatch'); - - underTest.runSelectedWorkflows(workflows); - subject.next(true); - - fixture.detectChanges(); - fixture.whenStable().then(() => { - expect(storeSpy).toHaveBeenCalledWith(new RunWorkflows(workflowIds)); - }); - }), - ); - - it( - 'runSelectedWorkflows() should not dispatch run workflows when confirmation dialog is not confirmed', - waitForAsync(() => { - const subject = new Subject(); - spyOn(confirmationDialogService, 'confirm').and.returnValue(subject.asObservable()); - subject.next(true); - - const workflows = [ - WorkflowModelFactory.create('workflowName1', true, 'projectName1', new Date(Date.now()), new Date(Date.now()), 1, 0), - WorkflowModelFactory.create('workflowName2', true, 'projectName2', new Date(Date.now()), new Date(Date.now()), 1, 1), - ]; - const storeSpy = spyOn(store, 'dispatch'); - - underTest.runSelectedWorkflows(workflows); - subject.next(false); - - fixture.detectChanges(); - fixture.whenStable().then(() => { - expect(storeSpy).toHaveBeenCalledTimes(0); - }); - }), - ); - - it( - 'showWorkflow() should navigate to show workflow page', - waitForAsync(() => { - const id = 42; - const routerSpy = spyOn(router, 'navigate'); - - underTest.showWorkflow(id); - - expect(routerSpy).toHaveBeenCalledTimes(1); - expect(routerSpy).toHaveBeenCalledWith([absoluteRoutes.SHOW_WORKFLOW, id]); - }), - ); - - describe('onClarityDgRefresh', () => { - it( - 'should dispatch SearchWorkflows when ignoreRefresh is false', - waitForAsync(() => { - underTest.ignoreRefresh = false; - underTest.loading = true; - underTest.filters = []; - - const subject = new Subject(); - const storeSpy = spyOn(store, 'dispatch'); - - underTest.refresh(); - subject.next(true); - - fixture.detectChanges(); - - fixture.whenStable().then(() => { - expect(storeSpy).toHaveBeenCalled(); - expect(storeSpy).toHaveBeenCalledWith( - new SearchWorkflows({ from: 0, size: 0, sort: undefined, containsFilterAttributes: [], booleanFilterAttributes: [] }), - ); - }); - }), - ); - - it( - 'onClarityDgRefresh() should not dispatch SearchWorkflows when ignoreRefresh is true', - waitForAsync(() => { - underTest.ignoreRefresh = true; - - const subject = new Subject(); - const storeSpy = spyOn(store, 'dispatch'); - const state: ClrDatagridStateInterface = {}; - - underTest.onClarityDgRefresh(state); - subject.next(true); - - fixture.detectChanges(); - expect(underTest.ignoreRefresh).toBeTrue(); - 
expect(storeSpy).not.toHaveBeenCalled(); - }), - ); - }); - - describe('isRunSelectedWorkflowsDisabled', () => { - it('should return false when at least two workflows are selected', () => { - const workflows = [ - WorkflowModelFactory.create('workflowName1', true, 'projectName1', new Date(Date.now()), new Date(Date.now()), 1, 0), - WorkflowModelFactory.create('workflowName2', true, 'projectName1', new Date(Date.now()), new Date(Date.now()), 1, 1), - ]; - expect(underTest.isRunSelectedWorkflowsDisabled(workflows)).toBeFalse(); - }); - - it('should return true when only one workflows is selected', () => { - const workflows = [ - WorkflowModelFactory.create('workflowName1', true, 'projectName1', new Date(Date.now()), new Date(Date.now()), 1, 0), - ]; - expect(underTest.isRunSelectedWorkflowsDisabled(workflows)).toBeTrue(); - }); - - it('should return true when no workflow is selected', () => { - const workflows = []; - expect(underTest.isRunSelectedWorkflowsDisabled(workflows)).toBeTrue(); - }); - }); -}); +// /* +// * Copyright 2018 ABSA Group Limited +// * +// * Licensed under the Apache License, Version 2.0 (the "License"); +// * you may not use this file except in compliance with the License. +// * You may obtain a copy of the License at +// * http://www.apache.org/licenses/LICENSE-2.0 +// * +// * Unless required by applicable law or agreed to in writing, software +// * distributed under the License is distributed on an "AS IS" BASIS, +// * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// * See the License for the specific language governing permissions and +// * limitations under the License. +// */ +// +// import { ComponentFixture, TestBed, waitForAsync } from '@angular/core/testing'; +// +// import { WorkflowsHomeComponent } from './workflows-home.component'; +// import { provideMockStore } from '@ngrx/store/testing'; +// import { WorkflowModelFactory } from '../../../models/workflow.model'; +// import { ConfirmationDialogService } from '../../../services/confirmation-dialog/confirmation-dialog.service'; +// import { Store } from '@ngrx/store'; +// import { AppState } from '../../../stores/app.reducers'; +// import { Subject } from 'rxjs'; +// import { RouterTestingModule } from '@angular/router/testing'; +// import { Router } from '@angular/router'; +// import { absoluteRoutes } from '../../../constants/routes.constants'; +// import { ClrDatagridStateInterface } from '@clr/angular'; +// import { +// DeleteWorkflow, +// SwitchWorkflowActiveState, +// LoadJobsForRun, +// ExportWorkflows, +// SetWorkflowFile, +// ImportWorkflows, +// RunWorkflows, +// SearchWorkflows, +// } from '../../../stores/workflows/workflows.actions'; +// +// describe('WorkflowsHomeComponent', () => { +// let fixture: ComponentFixture; +// let underTest: WorkflowsHomeComponent; +// let confirmationDialogService: ConfirmationDialogService; +// let store: Store; +// let router: Router; +// +// const initialAppState = { +// workflows: { +// workflowsSearch: { +// loading: true, +// workflows: [ +// WorkflowModelFactory.create('workflowOne', undefined, undefined, undefined, undefined, undefined, undefined), +// WorkflowModelFactory.create('workflowTwo', undefined, undefined, undefined, undefined, undefined, undefined), +// ], +// total: 2, +// searchRequest: undefined, +// }, +// workflowAction: { +// loading: false, +// }, +// }, +// }; +// +// beforeEach( +// waitForAsync(() => { +// TestBed.configureTestingModule({ +// providers: [ConfirmationDialogService, provideMockStore({ 
initialState: initialAppState })], +// declarations: [WorkflowsHomeComponent], +// imports: [RouterTestingModule.withRoutes([])], +// }).compileComponents(); +// confirmationDialogService = TestBed.inject(ConfirmationDialogService); +// store = TestBed.inject(Store); +// router = TestBed.inject(Router); +// }), +// ); +// +// beforeEach(() => { +// fixture = TestBed.createComponent(WorkflowsHomeComponent); +// underTest = fixture.componentInstance; +// }); +// +// it('should create', () => { +// expect(underTest).toBeTruthy(); +// }); +// +// it( +// 'should after view init set component properties', +// waitForAsync(() => { +// fixture.detectChanges(); +// fixture.whenStable().then(() => { +// expect(underTest.workflows).toEqual([...initialAppState.workflows.workflowsSearch.workflows]); +// expect(underTest.total).toEqual(initialAppState.workflows.workflowsSearch.total); +// expect(underTest.sort).toEqual(undefined); +// expect(underTest.filters).toEqual([]); +// expect(underTest.pageFrom).toEqual(0); +// expect(underTest.pageSize).toEqual(100); +// expect(underTest.page).toEqual(0 / 100 + 1); +// }); +// }), +// ); +// +// it( +// 'exportWorkflow() should dispatch workflow export', +// waitForAsync(() => { +// const id = 42; +// const storeSpy = spyOn(store, 'dispatch'); +// +// underTest.exportWorkflow(id); +// +// fixture.detectChanges(); +// fixture.whenStable().then(() => { +// expect(storeSpy).toHaveBeenCalledWith(new ExportWorkflows([id])); +// }); +// }), +// ); +// +// it( +// 'openImportWorkflowModal() should set is workflow import variable to true', +// waitForAsync(() => { +// expect(underTest.isWorkflowImportOpen).toBeFalsy(); +// underTest.openImportWorkflowModal(); +// +// fixture.detectChanges(); +// fixture.whenStable().then(() => { +// expect(underTest.isWorkflowImportOpen).toBeTruthy(); +// }); +// }), +// ); +// +// it( +// 'setWorkflowFile() should set workflow file', +// waitForAsync(() => { +// const dataTransfer = new DataTransfer(); +// const file: File = new File(['content'], 'filename.jpg'); +// dataTransfer.items.add(file); +// const fileList: FileList = dataTransfer.files; +// +// expect(underTest.workflowFile).toBeUndefined(); +// underTest.setWorkflowFile(fileList); +// +// fixture.detectChanges(); +// fixture.whenStable().then(() => { +// expect(underTest.workflowFile).toBeDefined(); +// }); +// }), +// ); +// +// it( +// 'closeWorkflowImport() should close modal and remove workflow file when is submitted is false', +// waitForAsync(() => { +// const isSubmitted = false; +// const file: File = new File(['content'], 'filename.jpg'); +// const storeSpy = spyOn(store, 'dispatch'); +// +// underTest.isWorkflowImportOpen = true; +// underTest.workflowFile = file; +// +// fixture.detectChanges(); +// fixture.whenStable().then(() => { +// expect(underTest.workflowFile).toBeDefined(); +// expect(underTest.isWorkflowImportOpen).toBeTruthy(); +// +// underTest.closeWorkflowImport(isSubmitted); +// fixture.detectChanges(); +// fixture.whenStable().then(() => { +// expect(underTest.workflowFile).toBeUndefined(); +// expect(underTest.isWorkflowImportOpen).toBeFalsy(); +// expect(storeSpy).toHaveBeenCalledTimes(0); +// }); +// }); +// }), +// ); +// +// it( +// 'closeWorkflowImport() should close modal, remove workflow file and dispatch and navigate to import when is submitted is true', +// waitForAsync(() => { +// const isSubmitted = true; +// const file: File = new File(['content'], 'filename.jpg'); +// const storeSpy = spyOn(store, 'dispatch'); +// const routerSpy = 
spyOn(router, 'navigate'); +// +// underTest.isWorkflowImportOpen = true; +// underTest.workflowFile = file; +// +// fixture.detectChanges(); +// fixture.whenStable().then(() => { +// expect(underTest.workflowFile).toBeDefined(); +// expect(underTest.isWorkflowImportOpen).toBeTruthy(); +// +// underTest.closeWorkflowImport(isSubmitted); +// fixture.detectChanges(); +// fixture.whenStable().then(() => { +// expect(underTest.workflowFile).toBeUndefined(); +// expect(underTest.isWorkflowImportOpen).toBeFalsy(); +// +// expect(routerSpy).toHaveBeenCalledTimes(1); +// expect(routerSpy).toHaveBeenCalledWith([absoluteRoutes.IMPORT_WORKFLOW]); +// expect(storeSpy).toHaveBeenCalled(); +// expect(storeSpy).toHaveBeenCalledWith(new SetWorkflowFile(file)); +// }); +// }); +// }), +// ); +// +// it( +// 'openImportMutliWorkflowsModal() should set isMultiWorkflowsImportOpen to true', +// waitForAsync(() => { +// expect(underTest.isMultiWorkflowsImportOpen).toBeFalsy(); +// underTest.openImportMultiWorkflowsModal(); +// +// fixture.detectChanges(); +// fixture.whenStable().then(() => { +// expect(underTest.isMultiWorkflowsImportOpen).toBeTruthy(); +// }); +// }), +// ); +// +// it( +// 'setMultiWorkflowsFile() should set multiWorkflowsFile', +// waitForAsync(() => { +// const dataTransfer = new DataTransfer(); +// const file: File = new File(['content'], 'workflows.zip'); +// dataTransfer.items.add(file); +// const fileList: FileList = dataTransfer.files; +// +// expect(underTest.multiWorkflowsFile).toBeUndefined(); +// underTest.setMultiWorkflowsFile(fileList); +// +// fixture.detectChanges(); +// fixture.whenStable().then(() => { +// expect(underTest.multiWorkflowsFile).toBeDefined(); +// }); +// }), +// ); +// +// it( +// 'closeMultiWorkflowsImport() should close modal and remove multi workflow file when is submitted is false', +// waitForAsync(() => { +// const isSubmitted = false; +// const file: File = new File(['content'], 'workflows.zip'); +// const storeSpy = spyOn(store, 'dispatch'); +// +// underTest.isMultiWorkflowsImportOpen = true; +// underTest.multiWorkflowsFile = file; +// +// fixture.detectChanges(); +// fixture.whenStable().then(() => { +// expect(underTest.multiWorkflowsFile).toBeDefined(); +// expect(underTest.isMultiWorkflowsImportOpen).toBeTruthy(); +// +// underTest.closeMultiWorkflowsImport(isSubmitted); +// fixture.detectChanges(); +// fixture.whenStable().then(() => { +// expect(underTest.multiWorkflowsFile).toBeUndefined(); +// expect(underTest.isMultiWorkflowsImportOpen).toBeFalsy(); +// expect(storeSpy).toHaveBeenCalledTimes(0); +// }); +// }); +// }), +// ); +// +// it( +// 'closeMultiWorkflowsImport() should close modal, remove workflow file and dispatch and navigate to import when is submitted is true', +// waitForAsync(() => { +// const isSubmitted = true; +// const file: File = new File(['content'], 'workflows.zip'); +// const storeSpy = spyOn(store, 'dispatch'); +// +// underTest.isMultiWorkflowsImportOpen = true; +// underTest.multiWorkflowsFile = file; +// +// fixture.detectChanges(); +// fixture.whenStable().then(() => { +// expect(underTest.multiWorkflowsFile).toBeDefined(); +// expect(underTest.isMultiWorkflowsImportOpen).toBeTruthy(); +// +// underTest.closeMultiWorkflowsImport(isSubmitted); +// fixture.detectChanges(); +// fixture.whenStable().then(() => { +// expect(underTest.multiWorkflowsFile).toBeUndefined(); +// expect(underTest.isMultiWorkflowsImportOpen).toBeFalsy(); +// +// expect(storeSpy).toHaveBeenCalled(); +// 
expect(storeSpy).toHaveBeenCalledWith(new ImportWorkflows(file)); +// }); +// }); +// }), +// ); +// +// it( +// 'deleteWorkflow() should dispatch delete workflow action with id when dialog is confirmed', +// waitForAsync(() => { +// const id = 1; +// const subject = new Subject(); +// const storeSpy = spyOn(store, 'dispatch'); +// +// spyOn(confirmationDialogService, 'confirm').and.returnValue(subject.asObservable()); +// +// underTest.deleteWorkflow(id); +// subject.next(true); +// +// fixture.detectChanges(); +// fixture.whenStable().then(() => { +// expect(storeSpy).toHaveBeenCalled(); +// expect(storeSpy).toHaveBeenCalledWith(new DeleteWorkflow(id)); +// }); +// }), +// ); +// +// it( +// 'deleteWorkflow() should not dispatch delete workflow action when dialog is not confirmed', +// waitForAsync(() => { +// const id = 1; +// const subject = new Subject(); +// const storeSpy = spyOn(store, 'dispatch'); +// +// spyOn(confirmationDialogService, 'confirm').and.returnValue(subject.asObservable()); +// +// underTest.deleteWorkflow(id); +// subject.next(false); +// +// fixture.detectChanges(); +// fixture.whenStable().then(() => { +// expect(storeSpy).toHaveBeenCalledTimes(0); +// }); +// }), +// ); +// +// it( +// 'switchWorkflowActiveState() should dispatch switch workflow active state with id and old value when dialog is confirmed', +// waitForAsync(() => { +// const id = 1; +// const currentActiveState = true; +// const subject = new Subject(); +// const storeSpy = spyOn(store, 'dispatch'); +// +// spyOn(confirmationDialogService, 'confirm').and.returnValue(subject.asObservable()); +// +// underTest.switchWorkflowActiveState(id, currentActiveState); +// subject.next(true); +// +// fixture.detectChanges(); +// fixture.whenStable().then(() => { +// expect(storeSpy).toHaveBeenCalled(); +// expect(storeSpy).toHaveBeenCalledWith( +// new SwitchWorkflowActiveState({ +// id: id, +// currentActiveState: currentActiveState, +// }), +// ); +// }); +// }), +// ); +// +// it( +// 'switchWorkflowActiveState() should not dispatch switch workflow active state when dialog is not confirmed', +// waitForAsync(() => { +// const id = 1; +// const currentActiveState = false; +// const subject = new Subject(); +// const storeSpy = spyOn(store, 'dispatch'); +// +// spyOn(confirmationDialogService, 'confirm').and.returnValue(subject.asObservable()); +// +// underTest.switchWorkflowActiveState(id, currentActiveState); +// subject.next(false); +// +// fixture.detectChanges(); +// fixture.whenStable().then(() => { +// expect(storeSpy).toHaveBeenCalledTimes(0); +// }); +// }), +// ); +// +// it( +// 'runWorkflow() should dispatch load jobs for run', +// waitForAsync(() => { +// const id = 42; +// const storeSpy = spyOn(store, 'dispatch'); +// +// underTest.runWorkflow(id); +// +// fixture.detectChanges(); +// fixture.whenStable().then(() => { +// expect(storeSpy).toHaveBeenCalledWith(new LoadJobsForRun(id)); +// }); +// }), +// ); +// +// it( +// 'runSelectedWorkflows() should dispatch run workflows', +// waitForAsync(() => { +// const subject = new Subject(); +// spyOn(confirmationDialogService, 'confirm').and.returnValue(subject.asObservable()); +// subject.next(true); +// +// const workflows = [ +// WorkflowModelFactory.create('workflowName1', true, 'projectName1', new Date(Date.now()), new Date(Date.now()), 1, 0), +// WorkflowModelFactory.create('workflowName2', true, 'projectName2', new Date(Date.now()), new Date(Date.now()), 1, 1), +// ]; +// const workflowIds = workflows.map((workflow) => workflow.id); +// 
const storeSpy = spyOn(store, 'dispatch'); +// +// underTest.runSelectedWorkflows(workflows); +// subject.next(true); +// +// fixture.detectChanges(); +// fixture.whenStable().then(() => { +// expect(storeSpy).toHaveBeenCalledWith(new RunWorkflows(workflowIds)); +// }); +// }), +// ); +// +// it( +// 'runSelectedWorkflows() should not dispatch run workflows when confirmation dialog is not confirmed', +// waitForAsync(() => { +// const subject = new Subject(); +// spyOn(confirmationDialogService, 'confirm').and.returnValue(subject.asObservable()); +// subject.next(true); +// +// const workflows = [ +// WorkflowModelFactory.create('workflowName1', true, 'projectName1', new Date(Date.now()), new Date(Date.now()), 1, 0), +// WorkflowModelFactory.create('workflowName2', true, 'projectName2', new Date(Date.now()), new Date(Date.now()), 1, 1), +// ]; +// const storeSpy = spyOn(store, 'dispatch'); +// +// underTest.runSelectedWorkflows(workflows); +// subject.next(false); +// +// fixture.detectChanges(); +// fixture.whenStable().then(() => { +// expect(storeSpy).toHaveBeenCalledTimes(0); +// }); +// }), +// ); +// +// it( +// 'showWorkflow() should navigate to show workflow page', +// waitForAsync(() => { +// const id = 42; +// const routerSpy = spyOn(router, 'navigate'); +// +// underTest.showWorkflow(id); +// +// expect(routerSpy).toHaveBeenCalledTimes(1); +// expect(routerSpy).toHaveBeenCalledWith([absoluteRoutes.SHOW_WORKFLOW, id]); +// }), +// ); +// +// describe('onClarityDgRefresh', () => { +// it( +// 'should dispatch SearchWorkflows when ignoreRefresh is false', +// waitForAsync(() => { +// underTest.ignoreRefresh = false; +// underTest.loading = true; +// underTest.filters = []; +// +// const subject = new Subject(); +// const storeSpy = spyOn(store, 'dispatch'); +// +// underTest.refresh(); +// subject.next(true); +// +// fixture.detectChanges(); +// +// fixture.whenStable().then(() => { +// expect(storeSpy).toHaveBeenCalled(); +// expect(storeSpy).toHaveBeenCalledWith( +// new SearchWorkflows({ from: 0, size: 0, sort: undefined, containsFilterAttributes: [], booleanFilterAttributes: [] }), +// ); +// }); +// }), +// ); +// +// it( +// 'onClarityDgRefresh() should not dispatch SearchWorkflows when ignoreRefresh is true', +// waitForAsync(() => { +// underTest.ignoreRefresh = true; +// +// const subject = new Subject(); +// const storeSpy = spyOn(store, 'dispatch'); +// const state: ClrDatagridStateInterface = {}; +// +// underTest.onClarityDgRefresh(state); +// subject.next(true); +// +// fixture.detectChanges(); +// expect(underTest.ignoreRefresh).toBeTrue(); +// expect(storeSpy).not.toHaveBeenCalled(); +// }), +// ); +// }); +// +// describe('isRunSelectedWorkflowsDisabled', () => { +// it('should return false when at least two workflows are selected', () => { +// const workflows = [ +// WorkflowModelFactory.create('workflowName1', true, 'projectName1', new Date(Date.now()), new Date(Date.now()), 1, 0), +// WorkflowModelFactory.create('workflowName2', true, 'projectName1', new Date(Date.now()), new Date(Date.now()), 1, 1), +// ]; +// expect(underTest.isRunSelectedWorkflowsDisabled(workflows)).toBeFalse(); +// }); +// +// it('should return true when only one workflows is selected', () => { +// const workflows = [ +// WorkflowModelFactory.create('workflowName1', true, 'projectName1', new Date(Date.now()), new Date(Date.now()), 1, 0), +// ]; +// expect(underTest.isRunSelectedWorkflowsDisabled(workflows)).toBeTrue(); +// }); +// +// it('should return true when no workflow is selected', () 
=> { +// const workflows = []; +// expect(underTest.isRunSelectedWorkflowsDisabled(workflows)).toBeTrue(); +// }); +// }); +// }); From 0891835c5c2292ae7df60f5127681e3f38784ae4 Mon Sep 17 00:00:00 2001 From: jozefbakus Date: Wed, 22 Feb 2023 15:36:24 +0100 Subject: [PATCH 19/32] HyperdriveOffsetService + tests --- .../HyperdriveOffsetServiceTest.scala | 20 +++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/src/test/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveOffsetServiceTest.scala b/src/test/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveOffsetServiceTest.scala index 434964fb6..a0bc1ab4b 100644 --- a/src/test/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveOffsetServiceTest.scala +++ b/src/test/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveOffsetServiceTest.scala @@ -382,6 +382,26 @@ class HyperdriveOffsetServiceTest extends AsyncFlatSpec with Matchers with Befor } } + it should "return negative numbers if kafka offsets are smaller than checkpoint offsets" in { + val config = getSparkConfig + val jobParameters = getJobParameters + val topic = "topic" + val expectedResult = (topic, Map(0 -> -10, 1 -> -100)) + + val underTest = new HyperdriveOffsetServiceImpl(config.yarn, checkpointService, ugiService, kafkaService) + + when(ugiService.loginUserFromKeytab(any(), any())).thenReturn(ugi) + when(kafkaService.getOffsets(any(), any())) + .thenReturn(BeginningEndOffsets(topic, Map(0 -> 0, 1 -> 10), Map(0 -> 10, 1 -> 100))) + when(checkpointService.getLatestCommittedOffset(any())(any())).thenReturn(Try(Some(Map(0 -> 20L, 1 -> 200L)))) + + val resultFut = underTest.getNumberOfMessagesLeft(jobParameters) + resultFut.map { result => + result.isDefined shouldBe true + result.get shouldBe expectedResult + } + } + private def getSparkConfig = DefaultTestSparkConfig().copy(additionalConfs = Map( From a3a3d15cafa227e2a6d17ba1cdc80075d7ca028c Mon Sep 17 00:00:00 2001 From: jozefbakus Date: Thu, 23 Feb 2023 09:23:57 +0100 Subject: [PATCH 20/32] HyperdriveService + tests --- .../api/rest/services/HyperdriveService.scala | 6 +- .../rest/services/HyperdriveServiceTest.scala | 104 ++++++++++++++++++ 2 files changed, 107 insertions(+), 3 deletions(-) create mode 100644 src/test/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveServiceTest.scala diff --git a/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveService.scala b/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveService.scala index 2f959b0f2..00ab0d21e 100644 --- a/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveService.scala +++ b/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveService.scala @@ -27,7 +27,7 @@ import scala.util.{Failure, Success} trait HyperdriveService { val workflowRepository: WorkflowRepository val jobTemplateService: JobTemplateService - val hyperdriveOffsetComparisonService: HyperdriveOffsetService + val hyperdriveOffsetService: HyperdriveOffsetService def getIngestionStatus(id: Long)(implicit ec: ExecutionContext): Future[Seq[IngestionStatus]] } @@ -36,7 +36,7 @@ trait HyperdriveService { class HyperdriveServiceImpl( override val workflowRepository: WorkflowRepository, override val jobTemplateService: JobTemplateService, - override val hyperdriveOffsetComparisonService: HyperdriveOffsetService + override val hyperdriveOffsetService: HyperdriveOffsetService ) extends HyperdriveService { private val logger = 
LoggerFactory.getLogger(this.getClass) @@ -48,7 +48,7 @@ class HyperdriveServiceImpl( Future.sequence( resolvedJobs.map { case resolvedJob if resolvedJob.jobParameters.jobType == JobTypes.Hyperdrive => - hyperdriveOffsetComparisonService.getNumberOfMessagesLeft(resolvedJob.jobParameters).transformWith { + hyperdriveOffsetService.getNumberOfMessagesLeft(resolvedJob.jobParameters).transformWith { case Failure(exception) => logger.error(s"Failed to get number of messages left to ingest for a workflow: $id", exception) Future( diff --git a/src/test/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveServiceTest.scala b/src/test/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveServiceTest.scala new file mode 100644 index 000000000..4f29f610f --- /dev/null +++ b/src/test/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveServiceTest.scala @@ -0,0 +1,104 @@ +/* + * Copyright 2018 ABSA Group Limited + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +package za.co.absa.hyperdrive.trigger.api.rest.services + +import org.mockito.ArgumentMatchers.any +import org.mockito.Mockito.{reset, when} +import org.scalatest.mockito.MockitoSugar +import org.scalatest.{AsyncFlatSpec, BeforeAndAfter, Matchers} +import za.co.absa.hyperdrive.trigger.TestUtils.await +import za.co.absa.hyperdrive.trigger.models.enums.JobTypes +import za.co.absa.hyperdrive.trigger.models.{ResolvedJobDefinition, ShellInstanceParameters, SparkInstanceParameters} +import za.co.absa.hyperdrive.trigger.models.{IngestionStatus, Topic} +import za.co.absa.hyperdrive.trigger.persistance.WorkflowRepository + +import scala.concurrent.Future + +class HyperdriveServiceTest extends AsyncFlatSpec with Matchers with BeforeAndAfter with MockitoSugar { + private val workflowRepository = mock[WorkflowRepository] + private val jobTemplateService = mock[JobTemplateService] + private val hyperdriveOffsetService = mock[HyperdriveOffsetService] + private val underTest = new HyperdriveServiceImpl(workflowRepository, jobTemplateService, hyperdriveOffsetService) + + before { + reset(workflowRepository) + reset(jobTemplateService) + reset(hyperdriveOffsetService) + } + + "getIngestionStatus" should "fail on get workflow failure" in { + val id = 1 + val error = "error" + when(workflowRepository.getWorkflow(any())(any())).thenReturn(Future.failed(new Exception(error))) + + val result = the[Exception] thrownBy await(underTest.getIngestionStatus(id)) + result.getMessage shouldBe error + } + + it should "fail on resolve job template failure" in { + val id = 1 + val error = "error" + + when(workflowRepository.getWorkflow(any())(any())).thenReturn(Future(WorkflowFixture.createWorkflowJoined())) + when(jobTemplateService.resolveJobTemplate(any())(any())).thenReturn(Future.failed(new Exception(error))) + + recoverToSucceededIf[Exception] { + underTest.getIngestionStatus(id) + } + } + + it should "succeed" in { + val id = 1 + val error = "error" + val resolvedJobDefinitions = Seq( + ResolvedJobDefinition( + name = "JobA", + 
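+        // fixture note: two Hyperdrive jobs (the mocked offset lookup succeeds once, then fails), plus Spark and Shell jobs that bypass the lookup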
jobParameters = SparkInstanceParameters(jobType = JobTypes.Hyperdrive, jobJar = "", mainClass = ""), + order = 0 + ), + ResolvedJobDefinition( + name = "JobB", + jobParameters = SparkInstanceParameters(jobType = JobTypes.Hyperdrive, jobJar = "", mainClass = ""), + order = 1 + ), + ResolvedJobDefinition( + name = "JobC", + jobParameters = SparkInstanceParameters(jobType = JobTypes.Spark, jobJar = "", mainClass = ""), + order = 2 + ), + ResolvedJobDefinition(name = "JobD", jobParameters = ShellInstanceParameters(scriptLocation = ""), order = 3) + ) + val expectedResult = Seq( + IngestionStatus( + jobName = "JobA", + JobTypes.Hyperdrive.name, + topic = Some(Topic(topic = "topic", messagesToIngest = Map.empty)) + ), + IngestionStatus(jobName = "JobB", JobTypes.Hyperdrive.name, topic = None), + IngestionStatus(jobName = "JobC", JobTypes.Spark.name, topic = None), + IngestionStatus(jobName = "JobD", JobTypes.Shell.name, topic = None) + ) + when(workflowRepository.getWorkflow(any())(any())).thenReturn(Future(WorkflowFixture.createWorkflowJoined())) + when(jobTemplateService.resolveJobTemplate(any())(any())).thenReturn(Future(resolvedJobDefinitions)) + when(hyperdriveOffsetService.getNumberOfMessagesLeft(any())(any())) + .thenReturn(Future(Some(("topic", Map.empty[Int, Long])))) + .thenReturn(Future.failed(new Exception(error))) + + underTest.getIngestionStatus(id).map { result => + result shouldBe expectedResult + } + } +} From bfcd428763b3446f4985c698d3bb63b80e001bac Mon Sep 17 00:00:00 2001 From: jozefbakus Date: Thu, 23 Feb 2023 10:23:32 +0100 Subject: [PATCH 21/32] UI tests --- .../workflows-home.component.spec.ts | 1066 +++++++++-------- .../hyperdrive/hyperdrive.service.spec.ts | 56 + .../workflows/workflows.effects.spec.ts | 43 + 3 files changed, 650 insertions(+), 515 deletions(-) create mode 100644 ui/src/app/services/hyperdrive/hyperdrive.service.spec.ts diff --git a/ui/src/app/components/workflows/workflows-home/workflows-home.component.spec.ts b/ui/src/app/components/workflows/workflows-home/workflows-home.component.spec.ts index 71e3a9f48..6f258336a 100644 --- a/ui/src/app/components/workflows/workflows-home/workflows-home.component.spec.ts +++ b/ui/src/app/components/workflows/workflows-home/workflows-home.component.spec.ts @@ -1,515 +1,551 @@ -// /* -// * Copyright 2018 ABSA Group Limited -// * -// * Licensed under the Apache License, Version 2.0 (the "License"); -// * you may not use this file except in compliance with the License. -// * You may obtain a copy of the License at -// * http://www.apache.org/licenses/LICENSE-2.0 -// * -// * Unless required by applicable law or agreed to in writing, software -// * distributed under the License is distributed on an "AS IS" BASIS, -// * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// * See the License for the specific language governing permissions and -// * limitations under the License. 
-// */ -// -// import { ComponentFixture, TestBed, waitForAsync } from '@angular/core/testing'; -// -// import { WorkflowsHomeComponent } from './workflows-home.component'; -// import { provideMockStore } from '@ngrx/store/testing'; -// import { WorkflowModelFactory } from '../../../models/workflow.model'; -// import { ConfirmationDialogService } from '../../../services/confirmation-dialog/confirmation-dialog.service'; -// import { Store } from '@ngrx/store'; -// import { AppState } from '../../../stores/app.reducers'; -// import { Subject } from 'rxjs'; -// import { RouterTestingModule } from '@angular/router/testing'; -// import { Router } from '@angular/router'; -// import { absoluteRoutes } from '../../../constants/routes.constants'; -// import { ClrDatagridStateInterface } from '@clr/angular'; -// import { -// DeleteWorkflow, -// SwitchWorkflowActiveState, -// LoadJobsForRun, -// ExportWorkflows, -// SetWorkflowFile, -// ImportWorkflows, -// RunWorkflows, -// SearchWorkflows, -// } from '../../../stores/workflows/workflows.actions'; -// -// describe('WorkflowsHomeComponent', () => { -// let fixture: ComponentFixture; -// let underTest: WorkflowsHomeComponent; -// let confirmationDialogService: ConfirmationDialogService; -// let store: Store; -// let router: Router; -// -// const initialAppState = { -// workflows: { -// workflowsSearch: { -// loading: true, -// workflows: [ -// WorkflowModelFactory.create('workflowOne', undefined, undefined, undefined, undefined, undefined, undefined), -// WorkflowModelFactory.create('workflowTwo', undefined, undefined, undefined, undefined, undefined, undefined), -// ], -// total: 2, -// searchRequest: undefined, -// }, -// workflowAction: { -// loading: false, -// }, -// }, -// }; -// -// beforeEach( -// waitForAsync(() => { -// TestBed.configureTestingModule({ -// providers: [ConfirmationDialogService, provideMockStore({ initialState: initialAppState })], -// declarations: [WorkflowsHomeComponent], -// imports: [RouterTestingModule.withRoutes([])], -// }).compileComponents(); -// confirmationDialogService = TestBed.inject(ConfirmationDialogService); -// store = TestBed.inject(Store); -// router = TestBed.inject(Router); -// }), -// ); -// -// beforeEach(() => { -// fixture = TestBed.createComponent(WorkflowsHomeComponent); -// underTest = fixture.componentInstance; -// }); -// -// it('should create', () => { -// expect(underTest).toBeTruthy(); -// }); -// -// it( -// 'should after view init set component properties', -// waitForAsync(() => { -// fixture.detectChanges(); -// fixture.whenStable().then(() => { -// expect(underTest.workflows).toEqual([...initialAppState.workflows.workflowsSearch.workflows]); -// expect(underTest.total).toEqual(initialAppState.workflows.workflowsSearch.total); -// expect(underTest.sort).toEqual(undefined); -// expect(underTest.filters).toEqual([]); -// expect(underTest.pageFrom).toEqual(0); -// expect(underTest.pageSize).toEqual(100); -// expect(underTest.page).toEqual(0 / 100 + 1); -// }); -// }), -// ); -// -// it( -// 'exportWorkflow() should dispatch workflow export', -// waitForAsync(() => { -// const id = 42; -// const storeSpy = spyOn(store, 'dispatch'); -// -// underTest.exportWorkflow(id); -// -// fixture.detectChanges(); -// fixture.whenStable().then(() => { -// expect(storeSpy).toHaveBeenCalledWith(new ExportWorkflows([id])); -// }); -// }), -// ); -// -// it( -// 'openImportWorkflowModal() should set is workflow import variable to true', -// waitForAsync(() => { -// 
expect(underTest.isWorkflowImportOpen).toBeFalsy(); -// underTest.openImportWorkflowModal(); -// -// fixture.detectChanges(); -// fixture.whenStable().then(() => { -// expect(underTest.isWorkflowImportOpen).toBeTruthy(); -// }); -// }), -// ); -// -// it( -// 'setWorkflowFile() should set workflow file', -// waitForAsync(() => { -// const dataTransfer = new DataTransfer(); -// const file: File = new File(['content'], 'filename.jpg'); -// dataTransfer.items.add(file); -// const fileList: FileList = dataTransfer.files; -// -// expect(underTest.workflowFile).toBeUndefined(); -// underTest.setWorkflowFile(fileList); -// -// fixture.detectChanges(); -// fixture.whenStable().then(() => { -// expect(underTest.workflowFile).toBeDefined(); -// }); -// }), -// ); -// -// it( -// 'closeWorkflowImport() should close modal and remove workflow file when is submitted is false', -// waitForAsync(() => { -// const isSubmitted = false; -// const file: File = new File(['content'], 'filename.jpg'); -// const storeSpy = spyOn(store, 'dispatch'); -// -// underTest.isWorkflowImportOpen = true; -// underTest.workflowFile = file; -// -// fixture.detectChanges(); -// fixture.whenStable().then(() => { -// expect(underTest.workflowFile).toBeDefined(); -// expect(underTest.isWorkflowImportOpen).toBeTruthy(); -// -// underTest.closeWorkflowImport(isSubmitted); -// fixture.detectChanges(); -// fixture.whenStable().then(() => { -// expect(underTest.workflowFile).toBeUndefined(); -// expect(underTest.isWorkflowImportOpen).toBeFalsy(); -// expect(storeSpy).toHaveBeenCalledTimes(0); -// }); -// }); -// }), -// ); -// -// it( -// 'closeWorkflowImport() should close modal, remove workflow file and dispatch and navigate to import when is submitted is true', -// waitForAsync(() => { -// const isSubmitted = true; -// const file: File = new File(['content'], 'filename.jpg'); -// const storeSpy = spyOn(store, 'dispatch'); -// const routerSpy = spyOn(router, 'navigate'); -// -// underTest.isWorkflowImportOpen = true; -// underTest.workflowFile = file; -// -// fixture.detectChanges(); -// fixture.whenStable().then(() => { -// expect(underTest.workflowFile).toBeDefined(); -// expect(underTest.isWorkflowImportOpen).toBeTruthy(); -// -// underTest.closeWorkflowImport(isSubmitted); -// fixture.detectChanges(); -// fixture.whenStable().then(() => { -// expect(underTest.workflowFile).toBeUndefined(); -// expect(underTest.isWorkflowImportOpen).toBeFalsy(); -// -// expect(routerSpy).toHaveBeenCalledTimes(1); -// expect(routerSpy).toHaveBeenCalledWith([absoluteRoutes.IMPORT_WORKFLOW]); -// expect(storeSpy).toHaveBeenCalled(); -// expect(storeSpy).toHaveBeenCalledWith(new SetWorkflowFile(file)); -// }); -// }); -// }), -// ); -// -// it( -// 'openImportMutliWorkflowsModal() should set isMultiWorkflowsImportOpen to true', -// waitForAsync(() => { -// expect(underTest.isMultiWorkflowsImportOpen).toBeFalsy(); -// underTest.openImportMultiWorkflowsModal(); -// -// fixture.detectChanges(); -// fixture.whenStable().then(() => { -// expect(underTest.isMultiWorkflowsImportOpen).toBeTruthy(); -// }); -// }), -// ); -// -// it( -// 'setMultiWorkflowsFile() should set multiWorkflowsFile', -// waitForAsync(() => { -// const dataTransfer = new DataTransfer(); -// const file: File = new File(['content'], 'workflows.zip'); -// dataTransfer.items.add(file); -// const fileList: FileList = dataTransfer.files; -// -// expect(underTest.multiWorkflowsFile).toBeUndefined(); -// underTest.setMultiWorkflowsFile(fileList); -// -// fixture.detectChanges(); -// 
fixture.whenStable().then(() => { -// expect(underTest.multiWorkflowsFile).toBeDefined(); -// }); -// }), -// ); -// -// it( -// 'closeMultiWorkflowsImport() should close modal and remove multi workflow file when is submitted is false', -// waitForAsync(() => { -// const isSubmitted = false; -// const file: File = new File(['content'], 'workflows.zip'); -// const storeSpy = spyOn(store, 'dispatch'); -// -// underTest.isMultiWorkflowsImportOpen = true; -// underTest.multiWorkflowsFile = file; -// -// fixture.detectChanges(); -// fixture.whenStable().then(() => { -// expect(underTest.multiWorkflowsFile).toBeDefined(); -// expect(underTest.isMultiWorkflowsImportOpen).toBeTruthy(); -// -// underTest.closeMultiWorkflowsImport(isSubmitted); -// fixture.detectChanges(); -// fixture.whenStable().then(() => { -// expect(underTest.multiWorkflowsFile).toBeUndefined(); -// expect(underTest.isMultiWorkflowsImportOpen).toBeFalsy(); -// expect(storeSpy).toHaveBeenCalledTimes(0); -// }); -// }); -// }), -// ); -// -// it( -// 'closeMultiWorkflowsImport() should close modal, remove workflow file and dispatch and navigate to import when is submitted is true', -// waitForAsync(() => { -// const isSubmitted = true; -// const file: File = new File(['content'], 'workflows.zip'); -// const storeSpy = spyOn(store, 'dispatch'); -// -// underTest.isMultiWorkflowsImportOpen = true; -// underTest.multiWorkflowsFile = file; -// -// fixture.detectChanges(); -// fixture.whenStable().then(() => { -// expect(underTest.multiWorkflowsFile).toBeDefined(); -// expect(underTest.isMultiWorkflowsImportOpen).toBeTruthy(); -// -// underTest.closeMultiWorkflowsImport(isSubmitted); -// fixture.detectChanges(); -// fixture.whenStable().then(() => { -// expect(underTest.multiWorkflowsFile).toBeUndefined(); -// expect(underTest.isMultiWorkflowsImportOpen).toBeFalsy(); -// -// expect(storeSpy).toHaveBeenCalled(); -// expect(storeSpy).toHaveBeenCalledWith(new ImportWorkflows(file)); -// }); -// }); -// }), -// ); -// -// it( -// 'deleteWorkflow() should dispatch delete workflow action with id when dialog is confirmed', -// waitForAsync(() => { -// const id = 1; -// const subject = new Subject(); -// const storeSpy = spyOn(store, 'dispatch'); -// -// spyOn(confirmationDialogService, 'confirm').and.returnValue(subject.asObservable()); -// -// underTest.deleteWorkflow(id); -// subject.next(true); -// -// fixture.detectChanges(); -// fixture.whenStable().then(() => { -// expect(storeSpy).toHaveBeenCalled(); -// expect(storeSpy).toHaveBeenCalledWith(new DeleteWorkflow(id)); -// }); -// }), -// ); -// -// it( -// 'deleteWorkflow() should not dispatch delete workflow action when dialog is not confirmed', -// waitForAsync(() => { -// const id = 1; -// const subject = new Subject(); -// const storeSpy = spyOn(store, 'dispatch'); -// -// spyOn(confirmationDialogService, 'confirm').and.returnValue(subject.asObservable()); -// -// underTest.deleteWorkflow(id); -// subject.next(false); -// -// fixture.detectChanges(); -// fixture.whenStable().then(() => { -// expect(storeSpy).toHaveBeenCalledTimes(0); -// }); -// }), -// ); -// -// it( -// 'switchWorkflowActiveState() should dispatch switch workflow active state with id and old value when dialog is confirmed', -// waitForAsync(() => { -// const id = 1; -// const currentActiveState = true; -// const subject = new Subject(); -// const storeSpy = spyOn(store, 'dispatch'); -// -// spyOn(confirmationDialogService, 'confirm').and.returnValue(subject.asObservable()); -// -// 
underTest.switchWorkflowActiveState(id, currentActiveState); -// subject.next(true); -// -// fixture.detectChanges(); -// fixture.whenStable().then(() => { -// expect(storeSpy).toHaveBeenCalled(); -// expect(storeSpy).toHaveBeenCalledWith( -// new SwitchWorkflowActiveState({ -// id: id, -// currentActiveState: currentActiveState, -// }), -// ); -// }); -// }), -// ); -// -// it( -// 'switchWorkflowActiveState() should not dispatch switch workflow active state when dialog is not confirmed', -// waitForAsync(() => { -// const id = 1; -// const currentActiveState = false; -// const subject = new Subject(); -// const storeSpy = spyOn(store, 'dispatch'); -// -// spyOn(confirmationDialogService, 'confirm').and.returnValue(subject.asObservable()); -// -// underTest.switchWorkflowActiveState(id, currentActiveState); -// subject.next(false); -// -// fixture.detectChanges(); -// fixture.whenStable().then(() => { -// expect(storeSpy).toHaveBeenCalledTimes(0); -// }); -// }), -// ); -// -// it( -// 'runWorkflow() should dispatch load jobs for run', -// waitForAsync(() => { -// const id = 42; -// const storeSpy = spyOn(store, 'dispatch'); -// -// underTest.runWorkflow(id); -// -// fixture.detectChanges(); -// fixture.whenStable().then(() => { -// expect(storeSpy).toHaveBeenCalledWith(new LoadJobsForRun(id)); -// }); -// }), -// ); -// -// it( -// 'runSelectedWorkflows() should dispatch run workflows', -// waitForAsync(() => { -// const subject = new Subject(); -// spyOn(confirmationDialogService, 'confirm').and.returnValue(subject.asObservable()); -// subject.next(true); -// -// const workflows = [ -// WorkflowModelFactory.create('workflowName1', true, 'projectName1', new Date(Date.now()), new Date(Date.now()), 1, 0), -// WorkflowModelFactory.create('workflowName2', true, 'projectName2', new Date(Date.now()), new Date(Date.now()), 1, 1), -// ]; -// const workflowIds = workflows.map((workflow) => workflow.id); -// const storeSpy = spyOn(store, 'dispatch'); -// -// underTest.runSelectedWorkflows(workflows); -// subject.next(true); -// -// fixture.detectChanges(); -// fixture.whenStable().then(() => { -// expect(storeSpy).toHaveBeenCalledWith(new RunWorkflows(workflowIds)); -// }); -// }), -// ); -// -// it( -// 'runSelectedWorkflows() should not dispatch run workflows when confirmation dialog is not confirmed', -// waitForAsync(() => { -// const subject = new Subject(); -// spyOn(confirmationDialogService, 'confirm').and.returnValue(subject.asObservable()); -// subject.next(true); -// -// const workflows = [ -// WorkflowModelFactory.create('workflowName1', true, 'projectName1', new Date(Date.now()), new Date(Date.now()), 1, 0), -// WorkflowModelFactory.create('workflowName2', true, 'projectName2', new Date(Date.now()), new Date(Date.now()), 1, 1), -// ]; -// const storeSpy = spyOn(store, 'dispatch'); -// -// underTest.runSelectedWorkflows(workflows); -// subject.next(false); -// -// fixture.detectChanges(); -// fixture.whenStable().then(() => { -// expect(storeSpy).toHaveBeenCalledTimes(0); -// }); -// }), -// ); -// -// it( -// 'showWorkflow() should navigate to show workflow page', -// waitForAsync(() => { -// const id = 42; -// const routerSpy = spyOn(router, 'navigate'); -// -// underTest.showWorkflow(id); -// -// expect(routerSpy).toHaveBeenCalledTimes(1); -// expect(routerSpy).toHaveBeenCalledWith([absoluteRoutes.SHOW_WORKFLOW, id]); -// }), -// ); -// -// describe('onClarityDgRefresh', () => { -// it( -// 'should dispatch SearchWorkflows when ignoreRefresh is false', -// waitForAsync(() => { -// 
underTest.ignoreRefresh = false; -// underTest.loading = true; -// underTest.filters = []; -// -// const subject = new Subject(); -// const storeSpy = spyOn(store, 'dispatch'); -// -// underTest.refresh(); -// subject.next(true); -// -// fixture.detectChanges(); -// -// fixture.whenStable().then(() => { -// expect(storeSpy).toHaveBeenCalled(); -// expect(storeSpy).toHaveBeenCalledWith( -// new SearchWorkflows({ from: 0, size: 0, sort: undefined, containsFilterAttributes: [], booleanFilterAttributes: [] }), -// ); -// }); -// }), -// ); -// -// it( -// 'onClarityDgRefresh() should not dispatch SearchWorkflows when ignoreRefresh is true', -// waitForAsync(() => { -// underTest.ignoreRefresh = true; -// -// const subject = new Subject(); -// const storeSpy = spyOn(store, 'dispatch'); -// const state: ClrDatagridStateInterface = {}; -// -// underTest.onClarityDgRefresh(state); -// subject.next(true); -// -// fixture.detectChanges(); -// expect(underTest.ignoreRefresh).toBeTrue(); -// expect(storeSpy).not.toHaveBeenCalled(); -// }), -// ); -// }); -// -// describe('isRunSelectedWorkflowsDisabled', () => { -// it('should return false when at least two workflows are selected', () => { -// const workflows = [ -// WorkflowModelFactory.create('workflowName1', true, 'projectName1', new Date(Date.now()), new Date(Date.now()), 1, 0), -// WorkflowModelFactory.create('workflowName2', true, 'projectName1', new Date(Date.now()), new Date(Date.now()), 1, 1), -// ]; -// expect(underTest.isRunSelectedWorkflowsDisabled(workflows)).toBeFalse(); -// }); -// -// it('should return true when only one workflows is selected', () => { -// const workflows = [ -// WorkflowModelFactory.create('workflowName1', true, 'projectName1', new Date(Date.now()), new Date(Date.now()), 1, 0), -// ]; -// expect(underTest.isRunSelectedWorkflowsDisabled(workflows)).toBeTrue(); -// }); -// -// it('should return true when no workflow is selected', () => { -// const workflows = []; -// expect(underTest.isRunSelectedWorkflowsDisabled(workflows)).toBeTrue(); -// }); -// }); -// }); +/* + * Copyright 2018 ABSA Group Limited + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import { ComponentFixture, TestBed, waitForAsync } from '@angular/core/testing'; + +import { WorkflowsHomeComponent } from './workflows-home.component'; +import { provideMockStore } from '@ngrx/store/testing'; +import { WorkflowModelFactory } from '../../../models/workflow.model'; +import { ConfirmationDialogService } from '../../../services/confirmation-dialog/confirmation-dialog.service'; +import { Store } from '@ngrx/store'; +import { AppState } from '../../../stores/app.reducers'; +import { Subject } from 'rxjs'; +import { RouterTestingModule } from '@angular/router/testing'; +import { Router } from '@angular/router'; +import { absoluteRoutes } from '../../../constants/routes.constants'; +import { ClrDatagridStateInterface } from '@clr/angular'; +import { + DeleteWorkflow, + SwitchWorkflowActiveState, + LoadJobsForRun, + ExportWorkflows, + SetWorkflowFile, + ImportWorkflows, + RunWorkflows, + SearchWorkflows, + LoadIngestionStatus, +} from '../../../stores/workflows/workflows.actions'; + +describe('WorkflowsHomeComponent', () => { + let fixture: ComponentFixture; + let underTest: WorkflowsHomeComponent; + let confirmationDialogService: ConfirmationDialogService; + let store: Store; + let router: Router; + + const initialAppState = { + workflows: { + workflowsSearch: { + loading: true, + workflows: [ + WorkflowModelFactory.create('workflowOne', undefined, undefined, undefined, undefined, undefined, undefined), + WorkflowModelFactory.create('workflowTwo', undefined, undefined, undefined, undefined, undefined, undefined), + ], + total: 2, + searchRequest: undefined, + }, + workflowAction: { + loading: false, + }, + ingestionStatusLoading: true, + ingestionStatus: [], + }, + }; + + beforeEach( + waitForAsync(() => { + TestBed.configureTestingModule({ + providers: [ConfirmationDialogService, provideMockStore({ initialState: initialAppState })], + declarations: [WorkflowsHomeComponent], + imports: [RouterTestingModule.withRoutes([])], + }).compileComponents(); + confirmationDialogService = TestBed.inject(ConfirmationDialogService); + store = TestBed.inject(Store); + router = TestBed.inject(Router); + }), + ); + + beforeEach(() => { + fixture = TestBed.createComponent(WorkflowsHomeComponent); + underTest = fixture.componentInstance; + }); + + it('should create', () => { + expect(underTest).toBeTruthy(); + }); + + it( + 'should after view init set component properties', + waitForAsync(() => { + fixture.detectChanges(); + fixture.whenStable().then(() => { + expect(underTest.workflows).toEqual([...initialAppState.workflows.workflowsSearch.workflows]); + expect(underTest.total).toEqual(initialAppState.workflows.workflowsSearch.total); + expect(underTest.sort).toEqual(undefined); + expect(underTest.filters).toEqual([]); + expect(underTest.pageFrom).toEqual(0); + expect(underTest.pageSize).toEqual(100); + expect(underTest.ingestionStatusLoading).toEqual(true); + expect(underTest.ingestionStatus).toEqual([]); + }); + }), + ); + + it( + 'exportWorkflow() should dispatch workflow export', + waitForAsync(() => { + const id = 42; + const storeSpy = spyOn(store, 'dispatch'); + + underTest.exportWorkflow(id); + + fixture.detectChanges(); + fixture.whenStable().then(() => { + expect(storeSpy).toHaveBeenCalledWith(new ExportWorkflows([id])); + }); + }), + ); + + it( + 'openImportWorkflowModal() should set is workflow import variable to true', + waitForAsync(() => { + expect(underTest.isWorkflowImportOpen).toBeFalsy(); + underTest.openImportWorkflowModal(); + + fixture.detectChanges(); + 
fixture.whenStable().then(() => {
+        expect(underTest.isWorkflowImportOpen).toBeTruthy();
+      });
+    }),
+  );
+
+  it(
+    'setWorkflowFile() should set workflow file',
+    waitForAsync(() => {
+      const dataTransfer = new DataTransfer();
+      const file: File = new File(['content'], 'filename.jpg');
+      dataTransfer.items.add(file);
+      const fileList: FileList = dataTransfer.files;
+
+      expect(underTest.workflowFile).toBeUndefined();
+      underTest.setWorkflowFile(fileList);
+
+      fixture.detectChanges();
+      fixture.whenStable().then(() => {
+        expect(underTest.workflowFile).toBeDefined();
+      });
+    }),
+  );
+
+  it(
+    'closeWorkflowImport() should close modal and remove workflow file when is submitted is false',
+    waitForAsync(() => {
+      const isSubmitted = false;
+      const file: File = new File(['content'], 'filename.jpg');
+      const storeSpy = spyOn(store, 'dispatch');
+
+      underTest.isWorkflowImportOpen = true;
+      underTest.workflowFile = file;
+
+      fixture.detectChanges();
+      fixture.whenStable().then(() => {
+        expect(underTest.workflowFile).toBeDefined();
+        expect(underTest.isWorkflowImportOpen).toBeTruthy();
+
+        underTest.closeWorkflowImport(isSubmitted);
+        fixture.detectChanges();
+        fixture.whenStable().then(() => {
+          expect(underTest.workflowFile).toBeUndefined();
+          expect(underTest.isWorkflowImportOpen).toBeFalsy();
+          expect(storeSpy).toHaveBeenCalledTimes(0);
+        });
+      });
+    }),
+  );
+
+  it(
+    'closeWorkflowImport() should close modal, remove workflow file and dispatch and navigate to import when is submitted is true',
+    waitForAsync(() => {
+      const isSubmitted = true;
+      const file: File = new File(['content'], 'filename.jpg');
+      const storeSpy = spyOn(store, 'dispatch');
+      const routerSpy = spyOn(router, 'navigate');
+
+      underTest.isWorkflowImportOpen = true;
+      underTest.workflowFile = file;
+
+      fixture.detectChanges();
+      fixture.whenStable().then(() => {
+        expect(underTest.workflowFile).toBeDefined();
+        expect(underTest.isWorkflowImportOpen).toBeTruthy();
+
+        underTest.closeWorkflowImport(isSubmitted);
+        fixture.detectChanges();
+        fixture.whenStable().then(() => {
+          expect(underTest.workflowFile).toBeUndefined();
+          expect(underTest.isWorkflowImportOpen).toBeFalsy();
+
+          expect(routerSpy).toHaveBeenCalledTimes(1);
+          expect(routerSpy).toHaveBeenCalledWith([absoluteRoutes.IMPORT_WORKFLOW]);
+          expect(storeSpy).toHaveBeenCalled();
+          expect(storeSpy).toHaveBeenCalledWith(new SetWorkflowFile(file));
+        });
+      });
+    }),
+  );
+
+  it(
+    'openImportMultiWorkflowsModal() should set isMultiWorkflowsImportOpen to true',
+    waitForAsync(() => {
+      expect(underTest.isMultiWorkflowsImportOpen).toBeFalsy();
+      underTest.openImportMultiWorkflowsModal();
+
+      fixture.detectChanges();
+      fixture.whenStable().then(() => {
+        expect(underTest.isMultiWorkflowsImportOpen).toBeTruthy();
+      });
+    }),
+  );
+
+  it(
+    'setMultiWorkflowsFile() should set multiWorkflowsFile',
+    waitForAsync(() => {
+      const dataTransfer = new DataTransfer();
+      const file: File = new File(['content'], 'workflows.zip');
+      dataTransfer.items.add(file);
+      const fileList: FileList = dataTransfer.files;
+
+      expect(underTest.multiWorkflowsFile).toBeUndefined();
+      underTest.setMultiWorkflowsFile(fileList);
+
+      fixture.detectChanges();
+      fixture.whenStable().then(() => {
+        expect(underTest.multiWorkflowsFile).toBeDefined();
+      });
+    }),
+  );
+
+  it(
+    'closeMultiWorkflowsImport() should close modal and remove multi workflow file when is submitted is false',
+    waitForAsync(() => {
+      const isSubmitted = false;
+      const file: File = new File(['content'], 'workflows.zip');
+      const
storeSpy = spyOn(store, 'dispatch'); + + underTest.isMultiWorkflowsImportOpen = true; + underTest.multiWorkflowsFile = file; + + fixture.detectChanges(); + fixture.whenStable().then(() => { + expect(underTest.multiWorkflowsFile).toBeDefined(); + expect(underTest.isMultiWorkflowsImportOpen).toBeTruthy(); + + underTest.closeMultiWorkflowsImport(isSubmitted); + fixture.detectChanges(); + fixture.whenStable().then(() => { + expect(underTest.multiWorkflowsFile).toBeUndefined(); + expect(underTest.isMultiWorkflowsImportOpen).toBeFalsy(); + expect(storeSpy).toHaveBeenCalledTimes(0); + }); + }); + }), + ); + + it( + 'closeMultiWorkflowsImport() should close modal, remove workflow file and dispatch and navigate to import when is submitted is true', + waitForAsync(() => { + const isSubmitted = true; + const file: File = new File(['content'], 'workflows.zip'); + const storeSpy = spyOn(store, 'dispatch'); + + underTest.isMultiWorkflowsImportOpen = true; + underTest.multiWorkflowsFile = file; + + fixture.detectChanges(); + fixture.whenStable().then(() => { + expect(underTest.multiWorkflowsFile).toBeDefined(); + expect(underTest.isMultiWorkflowsImportOpen).toBeTruthy(); + + underTest.closeMultiWorkflowsImport(isSubmitted); + fixture.detectChanges(); + fixture.whenStable().then(() => { + expect(underTest.multiWorkflowsFile).toBeUndefined(); + expect(underTest.isMultiWorkflowsImportOpen).toBeFalsy(); + + expect(storeSpy).toHaveBeenCalled(); + expect(storeSpy).toHaveBeenCalledWith(new ImportWorkflows(file)); + }); + }); + }), + ); + + it( + 'deleteWorkflow() should dispatch delete workflow action with id when dialog is confirmed', + waitForAsync(() => { + const id = 1; + const subject = new Subject(); + const storeSpy = spyOn(store, 'dispatch'); + + spyOn(confirmationDialogService, 'confirm').and.returnValue(subject.asObservable()); + + underTest.deleteWorkflow(id); + subject.next(true); + + fixture.detectChanges(); + fixture.whenStable().then(() => { + expect(storeSpy).toHaveBeenCalled(); + expect(storeSpy).toHaveBeenCalledWith(new DeleteWorkflow(id)); + }); + }), + ); + + it( + 'deleteWorkflow() should not dispatch delete workflow action when dialog is not confirmed', + waitForAsync(() => { + const id = 1; + const subject = new Subject(); + const storeSpy = spyOn(store, 'dispatch'); + + spyOn(confirmationDialogService, 'confirm').and.returnValue(subject.asObservable()); + + underTest.deleteWorkflow(id); + subject.next(false); + + fixture.detectChanges(); + fixture.whenStable().then(() => { + expect(storeSpy).toHaveBeenCalledTimes(0); + }); + }), + ); + + it( + 'switchWorkflowActiveState() should dispatch switch workflow active state with id and old value when dialog is confirmed', + waitForAsync(() => { + const id = 1; + const currentActiveState = true; + const subject = new Subject(); + const storeSpy = spyOn(store, 'dispatch'); + + spyOn(confirmationDialogService, 'confirm').and.returnValue(subject.asObservable()); + + underTest.switchWorkflowActiveState(id, currentActiveState); + subject.next(true); + + fixture.detectChanges(); + fixture.whenStable().then(() => { + expect(storeSpy).toHaveBeenCalled(); + expect(storeSpy).toHaveBeenCalledWith( + new SwitchWorkflowActiveState({ + id: id, + currentActiveState: currentActiveState, + }), + ); + }); + }), + ); + + it( + 'switchWorkflowActiveState() should not dispatch switch workflow active state when dialog is not confirmed', + waitForAsync(() => { + const id = 1; + const currentActiveState = false; + const subject = new Subject(); + const storeSpy = 
spyOn(store, 'dispatch'); + + spyOn(confirmationDialogService, 'confirm').and.returnValue(subject.asObservable()); + + underTest.switchWorkflowActiveState(id, currentActiveState); + subject.next(false); + + fixture.detectChanges(); + fixture.whenStable().then(() => { + expect(storeSpy).toHaveBeenCalledTimes(0); + }); + }), + ); + + it( + 'runWorkflow() should dispatch load jobs for run', + waitForAsync(() => { + const id = 42; + const storeSpy = spyOn(store, 'dispatch'); + + underTest.runWorkflow(id); + + fixture.detectChanges(); + fixture.whenStable().then(() => { + expect(storeSpy).toHaveBeenCalledWith(new LoadJobsForRun(id)); + }); + }), + ); + + it( + 'runSelectedWorkflows() should dispatch run workflows', + waitForAsync(() => { + const subject = new Subject(); + spyOn(confirmationDialogService, 'confirm').and.returnValue(subject.asObservable()); + subject.next(true); + + const workflows = [ + WorkflowModelFactory.create('workflowName1', true, 'projectName1', new Date(Date.now()), new Date(Date.now()), 1, 0), + WorkflowModelFactory.create('workflowName2', true, 'projectName2', new Date(Date.now()), new Date(Date.now()), 1, 1), + ]; + const workflowIds = workflows.map((workflow) => workflow.id); + const storeSpy = spyOn(store, 'dispatch'); + + underTest.runSelectedWorkflows(workflows); + subject.next(true); + + fixture.detectChanges(); + fixture.whenStable().then(() => { + expect(storeSpy).toHaveBeenCalledWith(new RunWorkflows(workflowIds)); + }); + }), + ); + + it( + 'runSelectedWorkflows() should not dispatch run workflows when confirmation dialog is not confirmed', + waitForAsync(() => { + const subject = new Subject(); + spyOn(confirmationDialogService, 'confirm').and.returnValue(subject.asObservable()); + subject.next(true); + + const workflows = [ + WorkflowModelFactory.create('workflowName1', true, 'projectName1', new Date(Date.now()), new Date(Date.now()), 1, 0), + WorkflowModelFactory.create('workflowName2', true, 'projectName2', new Date(Date.now()), new Date(Date.now()), 1, 1), + ]; + const storeSpy = spyOn(store, 'dispatch'); + + underTest.runSelectedWorkflows(workflows); + subject.next(false); + + fixture.detectChanges(); + fixture.whenStable().then(() => { + expect(storeSpy).toHaveBeenCalledTimes(0); + }); + }), + ); + + it( + 'showWorkflow() should navigate to show workflow page', + waitForAsync(() => { + const id = 42; + const routerSpy = spyOn(router, 'navigate'); + + underTest.showWorkflow(id); + + expect(routerSpy).toHaveBeenCalledTimes(1); + expect(routerSpy).toHaveBeenCalledWith([absoluteRoutes.SHOW_WORKFLOW, id]); + }), + ); + + describe('onClarityDgRefresh', () => { + it( + 'should dispatch SearchWorkflows when ignoreRefresh is false', + waitForAsync(() => { + underTest.ignoreRefresh = false; + underTest.loading = true; + underTest.filters = []; + + const subject = new Subject(); + const storeSpy = spyOn(store, 'dispatch'); + + underTest.refresh(); + subject.next(true); + + fixture.detectChanges(); + + fixture.whenStable().then(() => { + expect(storeSpy).toHaveBeenCalled(); + expect(storeSpy).toHaveBeenCalledWith( + new SearchWorkflows({ from: 0, size: 0, sort: undefined, containsFilterAttributes: [], booleanFilterAttributes: [] }), + ); + }); + }), + ); + + it( + 'onClarityDgRefresh() should not dispatch SearchWorkflows when ignoreRefresh is true', + waitForAsync(() => { + underTest.ignoreRefresh = true; + + const subject = new Subject(); + const storeSpy = spyOn(store, 'dispatch'); + const state: ClrDatagridStateInterface = {}; + + 
underTest.onClarityDgRefresh(state);
+        subject.next(true);
+
+        fixture.detectChanges();
+        expect(underTest.ignoreRefresh).toBeTrue();
+        expect(storeSpy).not.toHaveBeenCalled();
+      }),
+    );
+  });
+
+  describe('isRunSelectedWorkflowsDisabled', () => {
+    it('should return false when at least two workflows are selected', () => {
+      const workflows = [
+        WorkflowModelFactory.create('workflowName1', true, 'projectName1', new Date(Date.now()), new Date(Date.now()), 1, 0),
+        WorkflowModelFactory.create('workflowName2', true, 'projectName1', new Date(Date.now()), new Date(Date.now()), 1, 1),
+      ];
+      expect(underTest.isRunSelectedWorkflowsDisabled(workflows)).toBeFalse();
+    });
+
+    it('should return true when only one workflow is selected', () => {
+      const workflows = [
+        WorkflowModelFactory.create('workflowName1', true, 'projectName1', new Date(Date.now()), new Date(Date.now()), 1, 0),
+      ];
+      expect(underTest.isRunSelectedWorkflowsDisabled(workflows)).toBeTrue();
+    });
+
+    it('should return true when no workflow is selected', () => {
+      const workflows = [];
+      expect(underTest.isRunSelectedWorkflowsDisabled(workflows)).toBeTrue();
+    });
+  });
+
+  describe('onDetailRefresh', () => {
+    it(
+      'should dispatch load ingestion status',
+      waitForAsync(() => {
+        const id = 42;
+        const storeSpy = spyOn(store, 'dispatch');
+
+        underTest.onDetailRefresh({ id: id });
+
+        fixture.detectChanges();
+        fixture.whenStable().then(() => {
+          expect(storeSpy).toHaveBeenCalledWith(new LoadIngestionStatus(id));
+        });
+      }),
+    );
+
+    it(
+      'should not dispatch load ingestion status if id is null',
+      waitForAsync(() => {
+        const id = 42;
+        const storeSpy = spyOn(store, 'dispatch');
+
+        underTest.onDetailRefresh({ id: null });
+
+        fixture.detectChanges();
+        fixture.whenStable().then(() => {
+          expect(storeSpy).toHaveBeenCalledTimes(0);
+        });
+      }),
+    );
+  });
+});
diff --git a/ui/src/app/services/hyperdrive/hyperdrive.service.spec.ts b/ui/src/app/services/hyperdrive/hyperdrive.service.spec.ts
new file mode 100644
index 000000000..216db511a
--- /dev/null
+++ b/ui/src/app/services/hyperdrive/hyperdrive.service.spec.ts
@@ -0,0 +1,56 @@
+/*
+ * Copyright 2018 ABSA Group Limited
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import { TestBed } from '@angular/core/testing';
+
+import { HttpClientTestingModule, HttpTestingController } from '@angular/common/http/testing';
+import { api } from '../../constants/api.constants';
+import { HyperdriveService } from './hyperdrive.service';
+import { IngestionStatusModelFactory } from '../../models/ingestionStatus.model';
+
+describe('HyperdriveService', () => {
+  let underTest: HyperdriveService;
+  let httpTestingController: HttpTestingController;
+
+  beforeEach(() => {
+    TestBed.configureTestingModule({
+      providers: [HyperdriveService],
+      imports: [HttpClientTestingModule],
+    });
+    underTest = TestBed.inject(HyperdriveService);
+    httpTestingController = TestBed.inject(HttpTestingController);
+  });
+
+  afterEach(() => {
+    httpTestingController.verify();
+  });
+
+  it('should be created', () => {
+    expect(underTest).toBeTruthy();
+  });
+
+  it('getIngestionStatus() should return ingestion status', () => {
+    const ingestionStatus = [IngestionStatusModelFactory.create('jobName', 'Hyperdrive', null)];
+    const id = 1;
+    underTest.getIngestionStatus(id).subscribe(
+      (data) => expect(data).toEqual(ingestionStatus),
+      (error) => fail(error),
+    );
+
+    const req = httpTestingController.expectOne(api.GET_INGESTION_STATUS.replace('{id}', id.toString()));
+    expect(req.request.method).toEqual('GET');
+    req.flush([...ingestionStatus]);
+  });
+});
diff --git a/ui/src/app/stores/workflows/workflows.effects.spec.ts b/ui/src/app/stores/workflows/workflows.effects.spec.ts
index 84aa0932b..15776d9ad 100644
--- a/ui/src/app/stores/workflows/workflows.effects.spec.ts
+++ b/ui/src/app/stores/workflows/workflows.effects.spec.ts
@@ -37,6 +37,7 @@ import {
   RunWorkflows,
   RevertWorkflow,
   SearchWorkflows,
+  LoadIngestionStatus,
 } from './workflows.actions';
 
 import { WorkflowsEffects } from './workflows.effects';
@@ -67,12 +68,15 @@ import { SparkTemplateParametersModel } from '../../models/jobTemplateParameters
 import * as WorkflowActions from './workflows.actions';
 import { TableSearchRequestModelFactory } from '../../models/search/tableSearchRequest.model';
 import { TableSearchResponseModel } from '../../models/search/tableSearchResponse.model';
+import { HyperdriveService } from '../../services/hyperdrive/hyperdrive.service';
+import { IngestionStatusModel, IngestionStatusModelFactory, TopicModel, TopicModelFactory } from '../../models/ingestionStatus.model';
 
 describe('WorkflowsEffects', () => {
   let underTest: WorkflowsEffects;
   let workflowService: WorkflowService;
   let workflowHistoryService: WorkflowHistoryService;
   let jobService: JobService;
+  let hyperdriveService: HyperdriveService;
   let mockActions: Observable;
   let mockStore: MockStore;
   let toastrService: ToastrService;
@@ -106,6 +110,7 @@ describe('WorkflowsEffects', () => {
     underTest = TestBed.inject(WorkflowsEffects);
     workflowService = TestBed.inject(WorkflowService);
     workflowHistoryService = TestBed.inject(WorkflowHistoryService);
+    hyperdriveService = TestBed.inject(HyperdriveService);
     jobService = TestBed.inject(JobService);
     mockActions = TestBed.inject(Actions);
     mockStore = TestBed.inject(MockStore);
@@ -1223,4 +1228,42 @@ describe('WorkflowsEffects', () => {
       expect(result).toEqual(inputWorkflow);
     });
   });
+
+  describe('statusIngestionLoad', () => {
+    it('should load ingestion status', () => {
+      const payload = 1;
+      const response = [IngestionStatusModelFactory.create('jobName', 'jobType', TopicModelFactory.create('topic', 10))];
+
+      const action = new LoadIngestionStatus(payload);
+      mockActions = cold('-a', { a: action });
+      const
getIngestionStatusResponse = cold('-a|', { a: response }); + const expected = cold('--a', { + a: { + type: WorkflowActions.LOAD_INGESTION_STATUS_SUCCESS, + payload: response, + }, + }); + + spyOn(hyperdriveService, 'getIngestionStatus').and.returnValue(getIngestionStatusResponse); + + expect(underTest.statusIngestionLoad).toBeObservable(expected); + }); + + it('should catch failure when service fails to load ingestion status', () => { + const payload = 1; + + const action = new LoadIngestionStatus(payload); + mockActions = cold('-a', { a: action }); + + const getIngestionStatusResponse = cold('-#|'); + spyOn(hyperdriveService, 'getIngestionStatus').and.returnValue(getIngestionStatusResponse); + + const expected = cold('--a', { + a: { + type: WorkflowActions.LOAD_INGESTION_STATUS_FAILURE, + }, + }); + expect(underTest.statusIngestionLoad).toBeObservable(expected); + }); + }); }); From aac15c746bf5d17257d64d02978f595443efee96 Mon Sep 17 00:00:00 2001 From: jozefbakus Date: Thu, 23 Feb 2023 10:29:59 +0100 Subject: [PATCH 22/32] Formatting --- .../trigger/api/rest/controllers/HyperdriveController.scala | 2 +- .../trigger/api/rest/services/CheckpointService.scala | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/controllers/HyperdriveController.scala b/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/controllers/HyperdriveController.scala index db17c583d..055e0a074 100644 --- a/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/controllers/HyperdriveController.scala +++ b/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/controllers/HyperdriveController.scala @@ -25,7 +25,7 @@ import scala.compat.java8.FutureConverters._ import scala.concurrent.ExecutionContext.Implicits.global @RestController -class HyperdriveController @Inject()(hyperdriveService: HyperdriveService) { +class HyperdriveController @Inject() (hyperdriveService: HyperdriveService) { @GetMapping(path = Array("/hyperdrive/workflows/{id}/ingestionStatus")) def getIngestionStatus(@PathVariable id: Long): CompletableFuture[Seq[IngestionStatus]] = hyperdriveService.getIngestionStatus(id).toJava.toCompletableFuture diff --git a/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/CheckpointService.scala b/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/CheckpointService.scala index e130d559a..82dab7a80 100644 --- a/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/CheckpointService.scala +++ b/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/CheckpointService.scala @@ -103,7 +103,7 @@ class CheckpointServiceImpl @Inject() (@Lazy hdfsService: HdfsService) extends C override def getLatestCommittedOffset( params: HdfsParameters )(implicit ugi: UserGroupInformation): Try[Option[Map[Int, Long]]] = { - getLatestCommitBatchId(params.checkpointLocation).map { + getLatestCommitBatchId(params.checkpointLocation).map { case Some(latestCommit) => val pathToLatestCommit = new Path(s"${params.checkpointLocation}/$offsetsDirName/$latestCommit") getOffsetsFromFile(pathToLatestCommit.toString) From 54d2150316beb8c5809b49f5547399b445162aed Mon Sep 17 00:00:00 2001 From: jozefbakus Date: Fri, 24 Feb 2023 10:22:10 +0100 Subject: [PATCH 23/32] PR fixes --- .../api/rest/services/CheckpointService.scala | 4 ++-- .../services/HyperdriveOffsetService.scala | 4 ++-- .../api/rest/services/HyperdriveService.scala | 20 +++++++++---------- .../api/rest/services/KafkaService.scala | 5 ++--- .../trigger/models/IngestionStatus.scala | 
4 ++--
 .../HyperdriveOffsetServiceTest.scala         | 14 ++++++-------
 .../rest/services/HyperdriveServiceTest.scala | 10 +++++-----
 .../api/rest/services/KafkaServiceTest.scala  |  6 +++---
 8 files changed, 33 insertions(+), 34 deletions(-)

diff --git a/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/CheckpointService.scala b/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/CheckpointService.scala
index 82dab7a80..3a58927cf 100644
--- a/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/CheckpointService.scala
+++ b/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/CheckpointService.scala
@@ -103,13 +103,13 @@ class CheckpointServiceImpl @Inject() (@Lazy hdfsService: HdfsService) extends C
   override def getLatestCommittedOffset(
     params: HdfsParameters
   )(implicit ugi: UserGroupInformation): Try[Option[Map[Int, Long]]] = {
-    getLatestCommitBatchId(params.checkpointLocation).map {
+    getLatestCommitBatchId(params.checkpointLocation).flatMap {
       case Some(latestCommit) =>
         val pathToLatestCommit = new Path(s"${params.checkpointLocation}/$offsetsDirName/$latestCommit")
         getOffsetsFromFile(pathToLatestCommit.toString)
           .map(_.map(topicPartitionOffsets => topicPartitionOffsets.head._2))
       case None => Try(Option.empty[Map[Int, Long]])
-    }.flatten
+    }
   }
 
   /**
diff --git a/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveOffsetService.scala b/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveOffsetService.scala
index 7e9f4881e..6c27075b4 100644
--- a/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveOffsetService.scala
+++ b/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveOffsetService.scala
@@ -63,7 +63,7 @@ class HyperdriveOffsetServiceImpl @Inject() (sparkConfig: SparkConfig,
    *   - reader.kafka.brokers
    *   - writer.common.checkpoint.location
    * @param ec ExecutionContext
-   * @return - number not ingested messages.
+   * @return - number of messages not yet ingested, for each topic and partition.
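+   *           For example (a worked case mirroring this patch's unit tests): if Kafka reports
+   *           end offsets Map(0 -> 10, 1 -> 100) and the latest committed checkpoint offsets are
+   *           Map(0 -> 2, 1 -> 20), the result is Some((topic, Map(0 -> 8, 1 -> 80))).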
*/ def getNumberOfMessagesLeft( jobParameters: JobInstanceParameters @@ -80,7 +80,7 @@ class HyperdriveOffsetServiceImpl @Inject() (sparkConfig: SparkConfig, kafkaParameters <- kafkaParametersOpt hdfsParameters <- hdfsParametersOpt } yield { - val kafkaOffsets = kafkaService.getOffsets(kafkaParameters._1, kafkaParameters._2) + val kafkaOffsets = kafkaService.getBeginningEndOffsets(kafkaParameters._1, kafkaParameters._2) if ( kafkaOffsets.beginningOffsets.isEmpty || kafkaOffsets.endOffsets.isEmpty || kafkaOffsets.beginningOffsets.keySet != kafkaOffsets.endOffsets.keySet ) { diff --git a/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveService.scala b/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveService.scala index 00ab0d21e..90f5b68d2 100644 --- a/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveService.scala +++ b/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveService.scala @@ -17,7 +17,7 @@ package za.co.absa.hyperdrive.trigger.api.rest.services import org.slf4j.LoggerFactory import org.springframework.stereotype.Service -import za.co.absa.hyperdrive.trigger.models.{IngestionStatus, Topic} +import za.co.absa.hyperdrive.trigger.models.{IngestionStatus, TopicStatus} import za.co.absa.hyperdrive.trigger.models.enums.JobTypes import za.co.absa.hyperdrive.trigger.persistance.WorkflowRepository @@ -25,18 +25,18 @@ import scala.concurrent.{ExecutionContext, Future} import scala.util.{Failure, Success} trait HyperdriveService { - val workflowRepository: WorkflowRepository - val jobTemplateService: JobTemplateService - val hyperdriveOffsetService: HyperdriveOffsetService + protected val workflowRepository: WorkflowRepository + protected val jobTemplateService: JobTemplateService + protected val hyperdriveOffsetService: HyperdriveOffsetService def getIngestionStatus(id: Long)(implicit ec: ExecutionContext): Future[Seq[IngestionStatus]] } @Service class HyperdriveServiceImpl( - override val workflowRepository: WorkflowRepository, - override val jobTemplateService: JobTemplateService, - override val hyperdriveOffsetService: HyperdriveOffsetService + override protected val workflowRepository: WorkflowRepository, + override protected val jobTemplateService: JobTemplateService, + override protected val hyperdriveOffsetService: HyperdriveOffsetService ) extends HyperdriveService { private val logger = LoggerFactory.getLogger(this.getClass) @@ -55,7 +55,7 @@ class HyperdriveServiceImpl( IngestionStatus( jobName = resolvedJob.name, jobType = resolvedJob.jobParameters.jobType.name, - topic = None + topicStatus = None ) ) case Success(messagesLeftOpt) => @@ -63,7 +63,7 @@ class HyperdriveServiceImpl( IngestionStatus( jobName = resolvedJob.name, jobType = resolvedJob.jobParameters.jobType.name, - topic = messagesLeftOpt.map(messagesLeft => Topic(messagesLeft._1, messagesLeft._2)) + topicStatus = messagesLeftOpt.map(messagesLeft => TopicStatus(messagesLeft._1, messagesLeft._2)) ) ) } @@ -72,7 +72,7 @@ class HyperdriveServiceImpl( IngestionStatus( jobName = resolvedJob.name, jobType = resolvedJob.jobParameters.jobType.name, - topic = None + topicStatus = None ) ) } diff --git a/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/KafkaService.scala b/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/KafkaService.scala index d7336c9a4..18428a872 100644 --- a/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/KafkaService.scala +++ 
b/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/KafkaService.scala @@ -28,12 +28,11 @@ import java.util.Properties import java.util.UUID.randomUUID import javax.inject.Inject import scala.collection.JavaConverters._ -import scala.util.Try trait KafkaService { def getBeginningOffsets(topic: String, consumerProperties: Properties): Map[Int, Long] def getEndOffsets(topic: String, consumerProperties: Properties): Map[Int, Long] - def getOffsets(topic: String, consumerProperties: Properties): BeginningEndOffsets + def getBeginningEndOffsets(topic: String, consumerProperties: Properties): BeginningEndOffsets } @Service @@ -53,7 +52,7 @@ class KafkaServiceImpl @Inject() (generalConfig: GeneralConfig) extends KafkaSer getOffsets(topic, consumerProperties, EndOffsets) } - def getOffsets(topic: String, consumerProperties: Properties): BeginningEndOffsets = { + def getBeginningEndOffsets(topic: String, consumerProperties: Properties): BeginningEndOffsets = { BeginningEndOffsets( topic, getOffsets(topic, consumerProperties, BeginningOffsets), diff --git a/src/main/scala/za/co/absa/hyperdrive/trigger/models/IngestionStatus.scala b/src/main/scala/za/co/absa/hyperdrive/trigger/models/IngestionStatus.scala index a1d691f9b..35a835da8 100644 --- a/src/main/scala/za/co/absa/hyperdrive/trigger/models/IngestionStatus.scala +++ b/src/main/scala/za/co/absa/hyperdrive/trigger/models/IngestionStatus.scala @@ -18,7 +18,7 @@ package za.co.absa.hyperdrive.trigger.models case class IngestionStatus( jobName: String, jobType: String, - topic: Option[Topic] + topicStatus: Option[TopicStatus] ) -case class Topic(topic: String, messagesToIngest: Map[Int, Long]) +case class TopicStatus(topic: String, messagesToIngest: Map[Int, Long]) diff --git a/src/test/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveOffsetServiceTest.scala b/src/test/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveOffsetServiceTest.scala index a0bc1ab4b..ffe81bfa4 100644 --- a/src/test/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveOffsetServiceTest.scala +++ b/src/test/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveOffsetServiceTest.scala @@ -277,7 +277,7 @@ class HyperdriveOffsetServiceTest extends AsyncFlatSpec with Matchers with Befor val underTest = new HyperdriveOffsetServiceImpl(config.yarn, checkpointService, ugiService, kafkaService) when(ugiService.loginUserFromKeytab(any(), any())).thenReturn(ugi) - when(kafkaService.getOffsets(any(), any())).thenReturn(BeginningEndOffsets("topic", Map.empty, Map.empty)) + when(kafkaService.getBeginningEndOffsets(any(), any())).thenReturn(BeginningEndOffsets("topic", Map.empty, Map.empty)) val resultFut = underTest.getNumberOfMessagesLeft(jobParameters) resultFut.map { result => @@ -291,7 +291,7 @@ class HyperdriveOffsetServiceTest extends AsyncFlatSpec with Matchers with Befor val underTest = new HyperdriveOffsetServiceImpl(config.yarn, checkpointService, ugiService, kafkaService) when(ugiService.loginUserFromKeytab(any(), any())).thenReturn(ugi) - when(kafkaService.getOffsets(any(), any())) + when(kafkaService.getBeginningEndOffsets(any(), any())) .thenReturn(BeginningEndOffsets("topic", Map(0 -> 0, 1 -> 10), Map(0 -> 10, 1 -> 100))) when(checkpointService.getLatestCommittedOffset(any())(any())).thenReturn(Failure(new Exception())) @@ -310,7 +310,7 @@ class HyperdriveOffsetServiceTest extends AsyncFlatSpec with Matchers with Befor val underTest = new HyperdriveOffsetServiceImpl(config.yarn, checkpointService, ugiService, 
kafkaService) when(ugiService.loginUserFromKeytab(any(), any())).thenReturn(ugi) - when(kafkaService.getOffsets(any(), any())) + when(kafkaService.getBeginningEndOffsets(any(), any())) .thenReturn(BeginningEndOffsets(topic, Map(0 -> 0, 1 -> 10), Map(0 -> 10, 1 -> 100))) when(checkpointService.getLatestCommittedOffset(any())(any())).thenReturn(Success(None)) @@ -330,7 +330,7 @@ class HyperdriveOffsetServiceTest extends AsyncFlatSpec with Matchers with Befor val underTest = new HyperdriveOffsetServiceImpl(config.yarn, checkpointService, ugiService, kafkaService) when(ugiService.loginUserFromKeytab(any(), any())).thenReturn(ugi) - when(kafkaService.getOffsets(any(), any())) + when(kafkaService.getBeginningEndOffsets(any(), any())) .thenReturn(BeginningEndOffsets(topic, Map(0 -> 0, 1 -> 10), Map(0 -> 10, 1 -> 100))) when(checkpointService.getLatestCommittedOffset(any())(any())).thenReturn(Try(Some(Map(0 -> 2L, 1 -> 20L)))) @@ -350,7 +350,7 @@ class HyperdriveOffsetServiceTest extends AsyncFlatSpec with Matchers with Befor val underTest = new HyperdriveOffsetServiceImpl(config.yarn, checkpointService, ugiService, kafkaService) when(ugiService.loginUserFromKeytab(any(), any())).thenReturn(ugi) - when(kafkaService.getOffsets(any(), any())) + when(kafkaService.getBeginningEndOffsets(any(), any())) .thenReturn(BeginningEndOffsets(topic, Map(0 -> 0, 1 -> 10), Map(0 -> 10, 1 -> 100))) when(checkpointService.getLatestCommittedOffset(any())(any())) .thenReturn(Try(Some(Map(0 -> 2L, 1 -> 20L, 3 -> 10L)))) @@ -371,7 +371,7 @@ class HyperdriveOffsetServiceTest extends AsyncFlatSpec with Matchers with Befor val underTest = new HyperdriveOffsetServiceImpl(config.yarn, checkpointService, ugiService, kafkaService) when(ugiService.loginUserFromKeytab(any(), any())).thenReturn(ugi) - when(kafkaService.getOffsets(any(), any())) + when(kafkaService.getBeginningEndOffsets(any(), any())) .thenReturn(BeginningEndOffsets(topic, Map(0 -> 0, 1 -> 10), Map(0 -> 10, 1 -> 100))) when(checkpointService.getLatestCommittedOffset(any())(any())).thenReturn(Try(Some(Map(0 -> 2L)))) @@ -391,7 +391,7 @@ class HyperdriveOffsetServiceTest extends AsyncFlatSpec with Matchers with Befor val underTest = new HyperdriveOffsetServiceImpl(config.yarn, checkpointService, ugiService, kafkaService) when(ugiService.loginUserFromKeytab(any(), any())).thenReturn(ugi) - when(kafkaService.getOffsets(any(), any())) + when(kafkaService.getBeginningEndOffsets(any(), any())) .thenReturn(BeginningEndOffsets(topic, Map(0 -> 0, 1 -> 10), Map(0 -> 10, 1 -> 100))) when(checkpointService.getLatestCommittedOffset(any())(any())).thenReturn(Try(Some(Map(0 -> 20L, 1 -> 200L)))) diff --git a/src/test/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveServiceTest.scala b/src/test/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveServiceTest.scala index 4f29f610f..4d9a17349 100644 --- a/src/test/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveServiceTest.scala +++ b/src/test/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveServiceTest.scala @@ -22,7 +22,7 @@ import org.scalatest.{AsyncFlatSpec, BeforeAndAfter, Matchers} import za.co.absa.hyperdrive.trigger.TestUtils.await import za.co.absa.hyperdrive.trigger.models.enums.JobTypes import za.co.absa.hyperdrive.trigger.models.{ResolvedJobDefinition, ShellInstanceParameters, SparkInstanceParameters} -import za.co.absa.hyperdrive.trigger.models.{IngestionStatus, Topic} +import za.co.absa.hyperdrive.trigger.models.{IngestionStatus, TopicStatus} import 
za.co.absa.hyperdrive.trigger.persistance.WorkflowRepository import scala.concurrent.Future @@ -85,11 +85,11 @@ class HyperdriveServiceTest extends AsyncFlatSpec with Matchers with BeforeAndAf IngestionStatus( jobName = "JobA", JobTypes.Hyperdrive.name, - topic = Some(Topic(topic = "topic", messagesToIngest = Map.empty)) + topicStatus = Some(TopicStatus(topic = "topic", messagesToIngest = Map.empty)) ), - IngestionStatus(jobName = "JobB", JobTypes.Hyperdrive.name, topic = None), - IngestionStatus(jobName = "JobC", JobTypes.Spark.name, topic = None), - IngestionStatus(jobName = "JobD", JobTypes.Shell.name, topic = None) + IngestionStatus(jobName = "JobB", JobTypes.Hyperdrive.name, topicStatus = None), + IngestionStatus(jobName = "JobC", JobTypes.Spark.name, topicStatus = None), + IngestionStatus(jobName = "JobD", JobTypes.Shell.name, topicStatus = None) ) when(workflowRepository.getWorkflow(any())(any())).thenReturn(Future(WorkflowFixture.createWorkflowJoined())) when(jobTemplateService.resolveJobTemplate(any())(any())).thenReturn(Future(resolvedJobDefinitions)) diff --git a/src/test/scala/za/co/absa/hyperdrive/trigger/api/rest/services/KafkaServiceTest.scala b/src/test/scala/za/co/absa/hyperdrive/trigger/api/rest/services/KafkaServiceTest.scala index 7bcd92c8c..743c15190 100644 --- a/src/test/scala/za/co/absa/hyperdrive/trigger/api/rest/services/KafkaServiceTest.scala +++ b/src/test/scala/za/co/absa/hyperdrive/trigger/api/rest/services/KafkaServiceTest.scala @@ -75,7 +75,7 @@ class KafkaServiceTest extends FlatSpec with MockitoSugar with Matchers { result shouldBe Map() } - "getOffsets" should "return a map of start and end offsets" in { + "getBeginningEndOffsets" should "return a map of start and end offsets" in { import scala.collection.JavaConverters._ val topicName = "topic" val partitions = Seq( @@ -98,7 +98,7 @@ class KafkaServiceTest extends FlatSpec with MockitoSugar with Matchers { when(mockKafkaConsumer.beginningOffsets(eqTo(topicPartitions))).thenReturn(startOffsets) when(mockKafkaConsumer.endOffsets(eqTo(topicPartitions))).thenReturn(endOffsets) - val result = underTest.getOffsets(topicName, new Properties()) + val result = underTest.getBeginningEndOffsets(topicName, new Properties()) result shouldBe BeginningEndOffsets(topicName, Map(0 -> 100L, 1 -> 200L), Map(0 -> 200L, 1 -> 400L)) } @@ -107,7 +107,7 @@ class KafkaServiceTest extends FlatSpec with MockitoSugar with Matchers { val topicName = "non-existent-topic" when(mockKafkaConsumer.partitionsFor(any())).thenReturn(null) - val result = underTest.getOffsets(topicName, new Properties()) + val result = underTest.getBeginningEndOffsets(topicName, new Properties()) result shouldBe BeginningEndOffsets(topicName, Map.empty, Map.empty) } } From 732738ba41a73565d4ea4b7ddc24e92b9b1a0d11 Mon Sep 17 00:00:00 2001 From: jozefbakus Date: Fri, 24 Feb 2023 10:29:16 +0100 Subject: [PATCH 24/32] PR fixes --- .../trigger/api/rest/services/HyperdriveOffsetService.scala | 1 + 1 file changed, 1 insertion(+) diff --git a/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveOffsetService.scala b/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveOffsetService.scala index 6c27075b4..71f03a11a 100644 --- a/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveOffsetService.scala +++ b/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveOffsetService.scala @@ -84,6 +84,7 @@ class HyperdriveOffsetServiceImpl @Inject() (sparkConfig: SparkConfig, if ( 
kafkaOffsets.beginningOffsets.isEmpty || kafkaOffsets.endOffsets.isEmpty || kafkaOffsets.beginningOffsets.keySet != kafkaOffsets.endOffsets.keySet ) { + logger.warn(s"Inconsistent response from kafka for topic: ${kafkaOffsets.topic}") None } else { val ugi = userGroupInformationService.loginUserFromKeytab(hdfsParameters.principal, hdfsParameters.keytab) From cd92e9c0ce77e0aeec8b9a30f1571d7ae42c7480 Mon Sep 17 00:00:00 2001 From: jozefbakus Date: Fri, 24 Feb 2023 10:31:16 +0100 Subject: [PATCH 25/32] PR fixes --- .../trigger/api/rest/services/HyperdriveServiceTest.scala | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/test/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveServiceTest.scala b/src/test/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveServiceTest.scala index 4d9a17349..eebe0c22b 100644 --- a/src/test/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveServiceTest.scala +++ b/src/test/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveServiceTest.scala @@ -19,7 +19,6 @@ import org.mockito.ArgumentMatchers.any import org.mockito.Mockito.{reset, when} import org.scalatest.mockito.MockitoSugar import org.scalatest.{AsyncFlatSpec, BeforeAndAfter, Matchers} -import za.co.absa.hyperdrive.trigger.TestUtils.await import za.co.absa.hyperdrive.trigger.models.enums.JobTypes import za.co.absa.hyperdrive.trigger.models.{ResolvedJobDefinition, ShellInstanceParameters, SparkInstanceParameters} import za.co.absa.hyperdrive.trigger.models.{IngestionStatus, TopicStatus} @@ -44,8 +43,9 @@ class HyperdriveServiceTest extends AsyncFlatSpec with Matchers with BeforeAndAf val error = "error" when(workflowRepository.getWorkflow(any())(any())).thenReturn(Future.failed(new Exception(error))) - val result = the[Exception] thrownBy await(underTest.getIngestionStatus(id)) - result.getMessage shouldBe error + recoverToSucceededIf[Exception] { + underTest.getIngestionStatus(id) + } } it should "fail on resolve job template failure" in { From b612fd4c9ff55ce08c5fd6f32c2cacdb3a7bc1ae Mon Sep 17 00:00:00 2001 From: jozefbakus Date: Fri, 24 Feb 2023 10:41:30 +0100 Subject: [PATCH 26/32] PR fixes --- .../api/rest/services/CheckpointService.scala | 8 ++++---- .../api/rest/services/HyperdriveOffsetService.scala | 2 +- .../api/rest/services/CheckpointServiceTest.scala | 2 +- .../rest/services/HyperdriveOffsetServiceTest.scala | 13 ++++++++----- 4 files changed, 14 insertions(+), 11 deletions(-) diff --git a/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/CheckpointService.scala b/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/CheckpointService.scala index 3a58927cf..c4d1d4438 100644 --- a/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/CheckpointService.scala +++ b/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/CheckpointService.scala @@ -35,7 +35,7 @@ trait CheckpointService { implicit ugi: UserGroupInformation ): Try[Option[(String, Boolean)]] - def getLatestCommittedOffset(params: HdfsParameters)(implicit ugi: UserGroupInformation): Try[Option[Map[Int, Long]]] + def getLatestCommittedOffset(params: HdfsParameters)(implicit ugi: UserGroupInformation): Try[Option[TopicPartitionOffsets]] } class HdfsParameters( @@ -102,13 +102,13 @@ class CheckpointServiceImpl @Inject() (@Lazy hdfsService: HdfsService) extends C override def getLatestCommittedOffset( params: HdfsParameters - )(implicit ugi: UserGroupInformation): Try[Option[Map[Int, Long]]] = { + )(implicit ugi: 
UserGroupInformation): Try[Option[TopicPartitionOffsets]] = { getLatestCommitBatchId(params.checkpointLocation).flatMap { case Some(latestCommit) => val pathToLatestCommit = new Path(s"${params.checkpointLocation}/$offsetsDirName/$latestCommit") getOffsetsFromFile(pathToLatestCommit.toString) - .map(_.map(topicPartitionOffsets => topicPartitionOffsets.head._2)) - case None => Try(Option.empty[Map[Int, Long]]) + .map(_.map(topicPartitionOffsets => topicPartitionOffsets)) + case None => Try(Option.empty[TopicPartitionOffsets]) } } diff --git a/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveOffsetService.scala b/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveOffsetService.scala index 71f03a11a..f4c1de107 100644 --- a/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveOffsetService.scala +++ b/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveOffsetService.scala @@ -88,7 +88,7 @@ class HyperdriveOffsetServiceImpl @Inject() (sparkConfig: SparkConfig, None } else { val ugi = userGroupInformationService.loginUserFromKeytab(hdfsParameters.principal, hdfsParameters.keytab) - val hdfsOffsetsTry = checkpointService.getLatestCommittedOffset(hdfsParameters)(ugi) + val hdfsOffsetsTry = checkpointService.getLatestCommittedOffset(hdfsParameters)(ugi).map(_.map(_.head._2)) hdfsOffsetsTry match { case Failure(_) => None diff --git a/src/test/scala/za/co/absa/hyperdrive/trigger/api/rest/services/CheckpointServiceTest.scala b/src/test/scala/za/co/absa/hyperdrive/trigger/api/rest/services/CheckpointServiceTest.scala index 130407abe..09abb94e0 100644 --- a/src/test/scala/za/co/absa/hyperdrive/trigger/api/rest/services/CheckpointServiceTest.scala +++ b/src/test/scala/za/co/absa/hyperdrive/trigger/api/rest/services/CheckpointServiceTest.scala @@ -242,7 +242,7 @@ class CheckpointServiceTest extends FlatSpec with Matchers with BeforeAndAfter w val result = underTest.getLatestCommittedOffset(params)(ugi) result.get.isDefined shouldBe true - result.get shouldBe Some(offsets.values.head) + result.get shouldBe Some(offsets) } private def createOffsetFiles(maxBatchId: Int) = { diff --git a/src/test/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveOffsetServiceTest.scala b/src/test/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveOffsetServiceTest.scala index ffe81bfa4..6baf9fd8e 100644 --- a/src/test/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveOffsetServiceTest.scala +++ b/src/test/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveOffsetServiceTest.scala @@ -277,7 +277,8 @@ class HyperdriveOffsetServiceTest extends AsyncFlatSpec with Matchers with Befor val underTest = new HyperdriveOffsetServiceImpl(config.yarn, checkpointService, ugiService, kafkaService) when(ugiService.loginUserFromKeytab(any(), any())).thenReturn(ugi) - when(kafkaService.getBeginningEndOffsets(any(), any())).thenReturn(BeginningEndOffsets("topic", Map.empty, Map.empty)) + when(kafkaService.getBeginningEndOffsets(any(), any())) + .thenReturn(BeginningEndOffsets("topic", Map.empty, Map.empty)) val resultFut = underTest.getNumberOfMessagesLeft(jobParameters) resultFut.map { result => @@ -332,7 +333,8 @@ class HyperdriveOffsetServiceTest extends AsyncFlatSpec with Matchers with Befor when(ugiService.loginUserFromKeytab(any(), any())).thenReturn(ugi) when(kafkaService.getBeginningEndOffsets(any(), any())) .thenReturn(BeginningEndOffsets(topic, Map(0 -> 0, 1 -> 10), Map(0 -> 10, 1 -> 100))) - 
when(checkpointService.getLatestCommittedOffset(any())(any())).thenReturn(Try(Some(Map(0 -> 2L, 1 -> 20L)))) + when(checkpointService.getLatestCommittedOffset(any())(any())) + .thenReturn(Try(Some(Map(topic -> Map(0 -> 2L, 1 -> 20L))))) val resultFut = underTest.getNumberOfMessagesLeft(jobParameters) resultFut.map { result => @@ -353,7 +355,7 @@ class HyperdriveOffsetServiceTest extends AsyncFlatSpec with Matchers with Befor when(kafkaService.getBeginningEndOffsets(any(), any())) .thenReturn(BeginningEndOffsets(topic, Map(0 -> 0, 1 -> 10), Map(0 -> 10, 1 -> 100))) when(checkpointService.getLatestCommittedOffset(any())(any())) - .thenReturn(Try(Some(Map(0 -> 2L, 1 -> 20L, 3 -> 10L)))) + .thenReturn(Try(Some(Map(topic -> Map(0 -> 2L, 1 -> 20L, 3 -> 10L))))) val resultFut = underTest.getNumberOfMessagesLeft(jobParameters) resultFut.map { result => @@ -373,7 +375,7 @@ class HyperdriveOffsetServiceTest extends AsyncFlatSpec with Matchers with Befor when(ugiService.loginUserFromKeytab(any(), any())).thenReturn(ugi) when(kafkaService.getBeginningEndOffsets(any(), any())) .thenReturn(BeginningEndOffsets(topic, Map(0 -> 0, 1 -> 10), Map(0 -> 10, 1 -> 100))) - when(checkpointService.getLatestCommittedOffset(any())(any())).thenReturn(Try(Some(Map(0 -> 2L)))) + when(checkpointService.getLatestCommittedOffset(any())(any())).thenReturn(Try(Some(Map(topic -> Map(0 -> 2L))))) val resultFut = underTest.getNumberOfMessagesLeft(jobParameters) resultFut.map { result => @@ -393,7 +395,8 @@ class HyperdriveOffsetServiceTest extends AsyncFlatSpec with Matchers with Befor when(ugiService.loginUserFromKeytab(any(), any())).thenReturn(ugi) when(kafkaService.getBeginningEndOffsets(any(), any())) .thenReturn(BeginningEndOffsets(topic, Map(0 -> 0, 1 -> 10), Map(0 -> 10, 1 -> 100))) - when(checkpointService.getLatestCommittedOffset(any())(any())).thenReturn(Try(Some(Map(0 -> 20L, 1 -> 200L)))) + when(checkpointService.getLatestCommittedOffset(any())(any())) + .thenReturn(Try(Some(Map(topic -> Map(0 -> 20L, 1 -> 200L))))) val resultFut = underTest.getNumberOfMessagesLeft(jobParameters) resultFut.map { result => From b6bef1c73321588c1887413bc8ecd4c4b89bd229 Mon Sep 17 00:00:00 2001 From: jozefbakus Date: Fri, 24 Feb 2023 10:46:50 +0100 Subject: [PATCH 27/32] PR fixes --- .../workflows/workflows-home/workflows-home.component.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/ui/src/app/components/workflows/workflows-home/workflows-home.component.html b/ui/src/app/components/workflows/workflows-home/workflows-home.component.html index 024f6e737..6f075df17 100644 --- a/ui/src/app/components/workflows/workflows-home/workflows-home.component.html +++ b/ui/src/app/components/workflows/workflows-home/workflows-home.component.html @@ -251,7 +251,7 @@ {{job.jobType}} Topic: {{job?.topic?.topic}}
-Messages to ingest: {{job?.topic?.messagesToIngest >= 0 ? job?.topic?.messagesToIngest : 'Inconsistency detected. Please contact support team!'}}
+Messages to ingest: {{job?.topic?.messagesToIngest}}
+Offset inconsistency detected. Please contact support team!
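Note on the template change above: the old inline ternary rendered the 'Inconsistency detected' text in place of the count, while the reworked template shows the raw count plus a separate "Offset inconsistency detected" line (presumably gated by a condition on the count; the surrounding HTML markup did not survive extraction, so the exact condition is not shown here). The sign convention driving this lives in the backend: when the committed HDFS offset is ahead of the Kafka end offset, messagesToIngest goes negative. A minimal sketch of the per-partition arithmetic, with assumed names (the real logic is the case arms in HyperdriveOffsetServiceImpl):

object MessagesLeftSketch {
  // Sketch only: beginning/end are the Kafka beginning and end offsets for one
  // partition; committed is the offset recovered from the HDFS checkpoint.
  def messagesLeft(beginning: Long, end: Long, committed: Option[Long]): Long =
    committed match {
      // Checkpoint ahead of Kafka: a negative count, surfaced by the UI
      // above as an offset inconsistency.
      case Some(hdfsOffset) if hdfsOffset > end       => end - hdfsOffset
      // Normal case: only the not-yet-committed tail of the partition is left.
      case Some(hdfsOffset) if hdfsOffset > beginning => end - hdfsOffset
      // No usable checkpoint, or it points before retention: the whole
      // retained range is still to ingest.
      case _                                          => end - beginning
    }
}

With the fixture values used in HyperdriveOffsetServiceTest (beginning offsets 0 and 10, end offsets 10 and 100, committed offsets 2 and 20), this yields 8 and 80 messages left on partitions 0 and 1.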
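A related note on the HyperdriveServiceTest change in PATCH 25 above: replacing the[Exception] thrownBy await(...) with recoverToSucceededIf moves the test from blocking on the future to asserting on it asynchronously. recoverToSucceededIf comes from ScalaTest's async suites and succeeds if and only if the future fails with the given exception type; it does not expose the throwable itself, which is why the getMessage assertion was dropped. A self-contained sketch of both styles, where failingCall is a hypothetical stand-in for underTest.getIngestionStatus(id):

import org.scalatest.{AsyncFlatSpec, Matchers}

import scala.concurrent.Future

class RecoverToSucceededIfSketch extends AsyncFlatSpec with Matchers {

  // Hypothetical stand-in for the failing service call.
  private def failingCall(): Future[Int] = Future.failed(new Exception("error"))

  it should "fail with an Exception" in {
    // Succeeds iff the future fails with (a subtype of) Exception.
    recoverToSucceededIf[Exception] {
      failingCall()
    }
  }

  it should "still allow inspecting the error message" in {
    // Non-blocking alternative that keeps the message assertion.
    failingCall().failed.map(_.getMessage shouldBe "error")
  }
}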
From 7a4aa2ea5131d381be9dcd43e68b644fd05b39ce Mon Sep 17 00:00:00 2001 From: jozefbakus Date: Fri, 24 Feb 2023 10:55:15 +0100 Subject: [PATCH 28/32] Formatting --- .../trigger/api/rest/services/CheckpointService.scala | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/CheckpointService.scala b/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/CheckpointService.scala index c4d1d4438..caba5d7d9 100644 --- a/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/CheckpointService.scala +++ b/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/CheckpointService.scala @@ -35,7 +35,9 @@ trait CheckpointService { implicit ugi: UserGroupInformation ): Try[Option[(String, Boolean)]] - def getLatestCommittedOffset(params: HdfsParameters)(implicit ugi: UserGroupInformation): Try[Option[TopicPartitionOffsets]] + def getLatestCommittedOffset(params: HdfsParameters)( + implicit ugi: UserGroupInformation + ): Try[Option[TopicPartitionOffsets]] } class HdfsParameters( From d7659165f53a13b7217b711e3192dce159374fab Mon Sep 17 00:00:00 2001 From: jozefbakus Date: Thu, 9 Mar 2023 15:02:06 +0100 Subject: [PATCH 29/32] PR Fixes --- .../services/HyperdriveOffsetService.scala | 45 +++++++++---------- 1 file changed, 22 insertions(+), 23 deletions(-) diff --git a/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveOffsetService.scala b/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveOffsetService.scala index f4c1de107..4695761fc 100644 --- a/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveOffsetService.scala +++ b/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveOffsetService.scala @@ -26,7 +26,7 @@ import org.springframework.context.annotation.Lazy import org.springframework.stereotype.Service import za.co.absa.hyperdrive.trigger.configuration.application.SparkConfig import za.co.absa.hyperdrive.trigger.models.enums.JobTypes -import za.co.absa.hyperdrive.trigger.models.{JobInstanceParameters, SparkInstanceParameters} +import za.co.absa.hyperdrive.trigger.models.{BeginningEndOffsets, JobInstanceParameters, SparkInstanceParameters} import java.util.Properties import javax.inject.Inject @@ -81,30 +81,29 @@ class HyperdriveOffsetServiceImpl @Inject() (sparkConfig: SparkConfig, hdfsParameters <- hdfsParametersOpt } yield { val kafkaOffsets = kafkaService.getBeginningEndOffsets(kafkaParameters._1, kafkaParameters._2) - if ( - kafkaOffsets.beginningOffsets.isEmpty || kafkaOffsets.endOffsets.isEmpty || kafkaOffsets.beginningOffsets.keySet != kafkaOffsets.endOffsets.keySet - ) { - logger.warn(s"Inconsistent response from kafka for topic: ${kafkaOffsets.topic}") - None - } else { - val ugi = userGroupInformationService.loginUserFromKeytab(hdfsParameters.principal, hdfsParameters.keytab) - val hdfsOffsetsTry = checkpointService.getLatestCommittedOffset(hdfsParameters)(ugi).map(_.map(_.head._2)) + kafkaOffsets match { + case BeginningEndOffsets(_, start, end) if start.nonEmpty && end.nonEmpty && start.keySet == end.keySet => + val ugi = userGroupInformationService.loginUserFromKeytab(hdfsParameters.principal, hdfsParameters.keytab) + val hdfsOffsetsTry = checkpointService.getLatestCommittedOffset(hdfsParameters)(ugi).map(_.map(_.head._2)) - hdfsOffsetsTry match { - case Failure(_) => None - case Success(hdfsOffsetsOption) => - val messagesLeft = kafkaOffsets.beginningOffsets.map { case (partition, kafkaBeginningOffset) 
=> - val kafkaEndOffset = kafkaOffsets.endOffsets(partition) - val numberOfMessages = hdfsOffsetsOption.flatMap(_.get(partition)) match { - case Some(hdfsOffset) if hdfsOffset > kafkaEndOffset => kafkaEndOffset - hdfsOffset - case Some(hdfsOffset) if hdfsOffset > kafkaBeginningOffset => kafkaEndOffset - hdfsOffset - case Some(hdfsOffset) if hdfsOffset <= kafkaBeginningOffset => kafkaEndOffset - kafkaBeginningOffset - case None => kafkaEndOffset - kafkaBeginningOffset + hdfsOffsetsTry match { + case Failure(_) => None + case Success(hdfsOffsetsOption) => + val messagesLeft = kafkaOffsets.beginningOffsets.map { case (partition, kafkaBeginningOffset) => + val kafkaEndOffset = kafkaOffsets.endOffsets(partition) + val numberOfMessages = hdfsOffsetsOption.flatMap(_.get(partition)) match { + case Some(hdfsOffset) if hdfsOffset > kafkaEndOffset => kafkaEndOffset - hdfsOffset + case Some(hdfsOffset) if hdfsOffset > kafkaBeginningOffset => kafkaEndOffset - hdfsOffset + case Some(hdfsOffset) if hdfsOffset <= kafkaBeginningOffset => kafkaEndOffset - kafkaBeginningOffset + case None => kafkaEndOffset - kafkaBeginningOffset + } + partition -> numberOfMessages } - partition -> numberOfMessages - } - Some((kafkaOffsets.topic, messagesLeft)) - } + Some((kafkaOffsets.topic, messagesLeft)) + } + case _ => + logger.warn(s"Inconsistent response from kafka for topic: ${kafkaOffsets.topic}") + None } } ).map(_.flatten) From 8669327478b7206c7b015dbed8b46cc244366e24 Mon Sep 17 00:00:00 2001 From: jozefbakus Date: Thu, 9 Mar 2023 15:11:39 +0100 Subject: [PATCH 30/32] PR Fixes --- .../trigger/api/rest/services/HyperdriveService.scala | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveService.scala b/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveService.scala index 90f5b68d2..bbb617ba1 100644 --- a/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveService.scala +++ b/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveService.scala @@ -51,7 +51,7 @@ class HyperdriveServiceImpl( hyperdriveOffsetService.getNumberOfMessagesLeft(resolvedJob.jobParameters).transformWith { case Failure(exception) => logger.error(s"Failed to get number of messages left to ingest for a workflow: $id", exception) - Future( + Future.successful( IngestionStatus( jobName = resolvedJob.name, jobType = resolvedJob.jobParameters.jobType.name, @@ -59,7 +59,7 @@ class HyperdriveServiceImpl( ) ) case Success(messagesLeftOpt) => - Future( + Future.successful( IngestionStatus( jobName = resolvedJob.name, jobType = resolvedJob.jobParameters.jobType.name, @@ -68,7 +68,7 @@ class HyperdriveServiceImpl( ) } case resolvedJob => - Future( + Future.successful( IngestionStatus( jobName = resolvedJob.name, jobType = resolvedJob.jobParameters.jobType.name, From bf47a0e5c05af003522425af764d3d8f49ddd3b3 Mon Sep 17 00:00:00 2001 From: jozefbakus Date: Thu, 9 Mar 2023 15:36:25 +0100 Subject: [PATCH 31/32] PR Fixes --- .../trigger/api/rest/services/CheckpointService.scala | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/CheckpointService.scala b/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/CheckpointService.scala index caba5d7d9..1baabdb4d 100644 --- a/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/CheckpointService.scala +++ 
b/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/CheckpointService.scala @@ -25,7 +25,7 @@ import org.springframework.stereotype.Service import za.co.absa.hyperdrive.trigger.api.rest.utils.ScalaUtil.swap import javax.inject.Inject -import scala.util.Try +import scala.util.{Success, Try} trait CheckpointService { type TopicPartitionOffsets = Map[String, Map[Int, Long]] @@ -106,11 +106,9 @@ class CheckpointServiceImpl @Inject() (@Lazy hdfsService: HdfsService) extends C params: HdfsParameters )(implicit ugi: UserGroupInformation): Try[Option[TopicPartitionOffsets]] = { getLatestCommitBatchId(params.checkpointLocation).flatMap { - case Some(latestCommit) => - val pathToLatestCommit = new Path(s"${params.checkpointLocation}/$offsetsDirName/$latestCommit") - getOffsetsFromFile(pathToLatestCommit.toString) - .map(_.map(topicPartitionOffsets => topicPartitionOffsets)) - case None => Try(Option.empty[TopicPartitionOffsets]) + _.map { latestCommit => + getOffsetsFromFile(new Path(s"${params.checkpointLocation}/$offsetsDirName/$latestCommit").toString) + }.getOrElse(Success(None)) } } From 806a29c1be448dbd056503fd221ba8824b5ea1bd Mon Sep 17 00:00:00 2001 From: jozefbakus Date: Thu, 9 Mar 2023 15:37:52 +0100 Subject: [PATCH 32/32] PR Fixes --- .../trigger/api/rest/services/HyperdriveOffsetService.scala | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveOffsetService.scala b/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveOffsetService.scala index 4695761fc..b8f1ee9d6 100644 --- a/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveOffsetService.scala +++ b/src/main/scala/za/co/absa/hyperdrive/trigger/api/rest/services/HyperdriveOffsetService.scala @@ -92,10 +92,10 @@ class HyperdriveOffsetServiceImpl @Inject() (sparkConfig: SparkConfig, val messagesLeft = kafkaOffsets.beginningOffsets.map { case (partition, kafkaBeginningOffset) => val kafkaEndOffset = kafkaOffsets.endOffsets(partition) val numberOfMessages = hdfsOffsetsOption.flatMap(_.get(partition)) match { - case Some(hdfsOffset) if hdfsOffset > kafkaEndOffset => kafkaEndOffset - hdfsOffset - case Some(hdfsOffset) if hdfsOffset > kafkaBeginningOffset => kafkaEndOffset - hdfsOffset + case Some(hdfsOffset) if hdfsOffset > kafkaEndOffset => kafkaEndOffset - hdfsOffset + case Some(hdfsOffset) if hdfsOffset > kafkaBeginningOffset => kafkaEndOffset - hdfsOffset case Some(hdfsOffset) if hdfsOffset <= kafkaBeginningOffset => kafkaEndOffset - kafkaBeginningOffset - case None => kafkaEndOffset - kafkaBeginningOffset + case None => kafkaEndOffset - kafkaBeginningOffset } partition -> numberOfMessages }
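A closing note on PATCH 31 above: getLatestCommitBatchId yields the latest batch id as an Option inside a Try, and the refactor folds the Option within flatMap so that "no commits yet" remains a successful empty result while an unreadable offset file remains a Failure. Roughly, under assumed and simplified types:

import scala.util.{Success, Try}

object CheckpointFoldSketch {
  type TopicPartitionOffsets = Map[String, Map[Int, Long]]

  // Sketch of the fold: route the Some case through the offset-file read,
  // default the None case to a successful empty result rather than a Failure.
  def latestCommittedOffset(
    latestBatchId: Try[Option[String]],
    readOffsets: String => Try[Option[TopicPartitionOffsets]]
  ): Try[Option[TopicPartitionOffsets]] =
    latestBatchId.flatMap(_.map(readOffsets).getOrElse(Success(None)))
}

Callers then reduce the per-topic result themselves, as in PATCH 26 above where HyperdriveOffsetServiceImpl applies .map(_.map(_.head._2)) to keep only the first topic's partition offsets.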