diff --git a/CHANGELOG.md b/CHANGELOG.md
index 2a43caa93d4..9d7babe76ab 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -33,8 +33,8 @@ be found [here](https://cromwell.readthedocs.io/en/stable/backends/HPC/#optional
 - Fixes the reference disk feature.
 - Fixes pulling Docker image metadata from private GCR repositories.
 - Fixed `google_project` and `google_compute_service_account` workflow options not taking effect when using GCP Batch backend
-- Added a way to use a custom LogsPolicy for the job execution, setting `backend.providers.batch.config.batch.logs-policy` to "CLOUD_LOGGING" (default) keeps the current behavior, or, set it to "PATH" to save the logs into the the mounted disk, at the end, this log file gets copied to the google cloud storage bucket with "task.log" as the name.
-- When "CLOUD_LOGGING" is used, many more Cromwell / WDL labels for workflow, root workflow, call, shard etc. are now assigned to GCP Batch log entries.
+- A task log file named "task.log", which combines standard output and standard error, is now streamed to the task directory in Google Cloud Storage.
+- When Cloud Logging is enabled, many more Cromwell / WDL labels (workflow, root workflow, call, shard, etc.) are now assigned to GCP Batch log entries.
 
 ### Improved handling of Life Sciences API quota errors
 
diff --git a/backend/src/main/scala/cromwell/backend/io/JobPaths.scala b/backend/src/main/scala/cromwell/backend/io/JobPaths.scala
index 05ad6a56dc1..817aa1dc76f 100644
--- a/backend/src/main/scala/cromwell/backend/io/JobPaths.scala
+++ b/backend/src/main/scala/cromwell/backend/io/JobPaths.scala
@@ -14,9 +14,9 @@ object JobPaths {
   val ScriptPathKey = "script"
   val StdoutPathKey = "stdout"
   val StdErrPathKey = "stderr"
+  val TaskLogPathKey = "taskLog"
   val ReturnCodePathKey = "returnCode"
   val CallRootPathKey = "callRootPath"
-  val DockerCidPathKey = "dockerCidPath"
 
   def callPathBuilder(root: Path, jobKey: JobKey, isCallCacheCopyAttempt: Boolean) = {
     val callName = jobKey.node.localName
@@ -43,7 +43,9 @@ trait JobPaths {
   def memoryRetryRCFilename: String = "memory_retry_rc"
   def defaultStdoutFilename = "stdout"
   def defaultStderrFilename = "stderr"
+  def defaultTaskLogFilename = "task.log"
   def isDocker: Boolean = false
+  def implementsTaskLogging: Boolean = false
 
   // In this non-Docker version of `JobPaths` there is no distinction between host and container roots so this is
   // just called 'rootWithSlash'.
@@ -73,7 +75,8 @@ trait JobPaths {
   // enable dynamic standard output and error file names for languages like CWL that support this feature.
   var standardPaths: StandardPaths = StandardPaths(
     output = callExecutionRoot.resolve(defaultStdoutFilename),
-    error = callExecutionRoot.resolve(defaultStderrFilename)
+    error = callExecutionRoot.resolve(defaultStderrFilename),
+    taskLog = callExecutionRoot.resolve(defaultTaskLogFilename)
   )
 
   lazy val script = callExecutionRoot.resolve(scriptFilename)
@@ -86,7 +89,7 @@ trait JobPaths {
   def standardOutputAndErrorPaths: Map[String, Path] = Map(
     CallMetadataKeys.Stdout -> standardPaths.output,
     CallMetadataKeys.Stderr -> standardPaths.error
-  )
+  ) ++ (if (implementsTaskLogging) Map(CallMetadataKeys.TaskLog -> standardPaths.taskLog) else Map.empty)
 
   private lazy val commonMetadataPaths: Map[String, Path] =
     standardOutputAndErrorPaths + (CallMetadataKeys.CallRoot -> callRoot)
@@ -99,7 +102,7 @@ trait JobPaths {
     JobPaths.StdoutPathKey -> standardPaths.output,
     JobPaths.StdErrPathKey -> standardPaths.error,
     JobPaths.ReturnCodePathKey -> returnCode
-  )
+  ) ++ (if (implementsTaskLogging) Map(JobPaths.TaskLogPathKey -> standardPaths.taskLog) else Map.empty)
 
   private lazy val commonLogPaths: Map[String, Path] = Map(
     JobPaths.StdoutPathKey -> standardPaths.output,
diff --git a/backend/src/main/scala/cromwell/backend/standard/StandardAsyncExecutionActor.scala b/backend/src/main/scala/cromwell/backend/standard/StandardAsyncExecutionActor.scala
index 8f8312023f1..4a9b75135e1 100644
--- a/backend/src/main/scala/cromwell/backend/standard/StandardAsyncExecutionActor.scala
+++ b/backend/src/main/scala/cromwell/backend/standard/StandardAsyncExecutionActor.scala
@@ -380,6 +380,7 @@ trait StandardAsyncExecutionActor
     instantiatedCommand.evaluatedStdoutOverride.getOrElse(jobPaths.defaultStdoutFilename) |> absolutizeContainerPath
   def executionStderr: String =
     instantiatedCommand.evaluatedStderrOverride.getOrElse(jobPaths.defaultStderrFilename) |> absolutizeContainerPath
+  def executionTaskLog: String = jobPaths.defaultTaskLogFilename |> absolutizeContainerPath
 
   /*
   * Ensures the standard paths are correct w.r.t overridden paths. This is called in two places: when generating the command and
@@ -393,9 +394,10 @@ trait StandardAsyncExecutionActor
       // .get's are safe on stdout and stderr after falling back to default names above.
       jobPaths.standardPaths = StandardPaths(
         output = hostPathFromContainerPath(executionStdout),
-        error = hostPathFromContainerPath(executionStderr)
+        error = hostPathFromContainerPath(executionStderr),
+        taskLog = hostPathFromContainerPath(executionTaskLog)
       )
-      // Re-publish stdout and stderr paths that were possibly just updated.
+      // Re-publish stdout, stderr and task log paths that were possibly just updated.
       tellMetadata(jobPaths.standardOutputAndErrorPaths)
       jobPathsUpdated = true
     }
@@ -423,6 +425,7 @@ trait StandardAsyncExecutionActor
     val stdinRedirection = executionStdin.map("< " + _.shellQuote).getOrElse("")
     val stdoutRedirection = executionStdout.shellQuote
     val stderrRedirection = executionStderr.shellQuote
+    val taskLogRedirection = executionTaskLog.shellQuote
     val rcTmpPath = rcPath.plusExt("tmp")
 
     val errorOrDirectoryOutputs: ErrorOr[List[WomUnlistedDirectory]] =
@@ -471,6 +474,10 @@ trait StandardAsyncExecutionActor
       }
     }
 
+    val taskLoggingCommand =
+      if (jobPaths.implementsTaskLogging) s"tail -q -f $stdoutRedirection $stderrRedirection > $taskLogRedirection &"
+      else ""
+
     // The `tee` trickery below is to be able to redirect to known filenames for CWL while also streaming
     // stdout and stderr for PAPI to periodically upload to cloud storage.
     // https://stackoverflow.com/questions/692000/how-do-i-write-stderr-to-a-file-while-using-tee-with-a-pipe
@@ -491,6 +498,7 @@ trait StandardAsyncExecutionActor
         |touch $stdoutRedirection $stderrRedirection
         |tee $stdoutRedirection < "$$$out" &
         |tee $stderrRedirection < "$$$err" >&2 &
+        |TASK_LOGGING_COMMAND
         |(
         |cd ${cwd.pathAsString}
         |ENVIRONMENT_VARIABLES
@@ -511,6 +519,7 @@ trait StandardAsyncExecutionActor
         .replace("INSTANTIATED_COMMAND", commandString)
         .replace("SCRIPT_EPILOGUE", scriptEpilogue)
         .replace("DOCKER_OUTPUT_DIR_LINK", dockerOutputDir)
+        .replace("TASK_LOGGING_COMMAND", taskLoggingCommand)
     )
   }
 
diff --git a/centaur/src/main/resources/standardTestCases/gcpbatch_exhaustive_delete.test b/centaur/src/main/resources/standardTestCases/gcpbatch_exhaustive_delete.test
index 0f00ec0a5ce..29f3cb54d43 100644
--- a/centaur/src/main/resources/standardTestCases/gcpbatch_exhaustive_delete.test
+++ b/centaur/src/main/resources/standardTestCases/gcpbatch_exhaustive_delete.test
@@ -18,7 +18,7 @@ metadata {
   fileSystemCheck: "gcs"
   outputExpectations: {
     "gs://cloud-cromwell-dev-self-cleaning/cromwell_execution/ci/exhaustive_delete/<>/call-exhaustive/delete.txt": 0
-    "gs://cloud-cromwell-dev-self-cleaning/cromwell_execution/ci/exhaustive_delete/<>/call-exhaustive/": 8
+    "gs://cloud-cromwell-dev-self-cleaning/cromwell_execution/ci/exhaustive_delete/<>/call-exhaustive/": 9
     "gs://cloud-cromwell-dev-self-cleaning/cromwell_execution/ci/exhaustive_delete/<>/call-exhaustive/gcs_delocalization.sh": 1
     "gs://cloud-cromwell-dev-self-cleaning/cromwell_execution/ci/exhaustive_delete/<>/call-exhaustive/gcs_localization.sh": 1
     "gs://cloud-cromwell-dev-self-cleaning/cromwell_execution/ci/exhaustive_delete/<>/call-exhaustive/gcs_transfer.sh": 1
diff --git a/centaur/src/main/resources/standardTestCases/gcpbatch_sub_workflow_delete.test b/centaur/src/main/resources/standardTestCases/gcpbatch_sub_workflow_delete.test
index cae387a6148..b68f4095055 100644
--- a/centaur/src/main/resources/standardTestCases/gcpbatch_sub_workflow_delete.test
+++ b/centaur/src/main/resources/standardTestCases/gcpbatch_sub_workflow_delete.test
@@ -22,7 +22,7 @@ metadata {
   fileSystemCheck: "gcs"
   outputExpectations: {
     # No current way to match on the subworkflow id, so for now just make sure the total directory count matches.
-    "gs://cloud-cromwell-dev-self-cleaning/cromwell_execution/ci/sub_workflow_delete/<>/call-sub_call/sub_workflow_delete_import/": 8
+    "gs://cloud-cromwell-dev-self-cleaning/cromwell_execution/ci/sub_workflow_delete/<>/call-sub_call/sub_workflow_delete_import/": 9
     #"gs://cloud-cromwell-dev-self-cleaning/cromwell_execution/ci/sub_workflow_delete/<>/call-sub_call/sub_workflow_delete_import/<>/call-sub_workflow_task/gcs_delocalization.sh": 1
     #"gs://cloud-cromwell-dev-self-cleaning/cromwell_execution/ci/sub_workflow_delete/<>/call-sub_call/sub_workflow_delete_import/<>/call-sub_workflow_task/gcs_localization.sh": 1
     #"gs://cloud-cromwell-dev-self-cleaning/cromwell_execution/ci/sub_workflow_delete/<>/call-sub_call/sub_workflow_delete_import/<>/call-sub_workflow_task/gcs_transfer.sh": 1
diff --git a/core/src/main/scala/cromwell/core/core.scala b/core/src/main/scala/cromwell/core/core.scala
index 60ead3fcf7a..c2da5d32e7e 100644
--- a/core/src/main/scala/cromwell/core/core.scala
+++ b/core/src/main/scala/cromwell/core/core.scala
@@ -8,7 +8,7 @@ import mouse.boolean._
 import scala.concurrent.duration.FiniteDuration
 import scala.util.control.NoStackTrace
 
-case class StandardPaths(output: Path, error: Path)
+case class StandardPaths(output: Path, error: Path, taskLog: Path)
 
 case class CallContext(root: Path, standardPaths: StandardPaths, isDocker: Boolean)
 
diff --git a/services/src/main/scala/cromwell/services/metadata/CallMetadataKeys.scala b/services/src/main/scala/cromwell/services/metadata/CallMetadataKeys.scala
index cb7f31730f0..bbab5db826d 100644
--- a/services/src/main/scala/cromwell/services/metadata/CallMetadataKeys.scala
+++ b/services/src/main/scala/cromwell/services/metadata/CallMetadataKeys.scala
@@ -16,6 +16,7 @@ object CallMetadataKeys {
   val Failures = "failures"
   val Stdout = "stdout"
   val Stderr = "stderr"
+  val TaskLog = "taskLog"
   val BackendLogsPrefix = "backendLogs"
   val BackendStatus = "backendStatus"
   val JobId = "jobId"
diff --git a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/actors/GcpBatchAsyncBackendJobExecutionActor.scala b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/actors/GcpBatchAsyncBackendJobExecutionActor.scala
index def59339847..1ef8e16f9c6 100644
--- a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/actors/GcpBatchAsyncBackendJobExecutionActor.scala
+++ b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/actors/GcpBatchAsyncBackendJobExecutionActor.scala
@@ -838,16 +838,6 @@ class GcpBatchAsyncBackendJobExecutionActor(override val standardParams: Standar
       contentType = plainTextContentType
     )
 
-    val logFileOutput = GcpBatchFileOutput(
-      logFilename,
-      logGcsPath,
-      DefaultPathBuilder.get(logFilename),
-      workingDisk,
-      optional = true,
-      secondary = false,
-      contentType = plainTextContentType
-    )
-
     val memoryRetryRCFileOutput = GcpBatchFileOutput(
       memoryRetryRCFilename,
       memoryRetryRCGcsPath,
@@ -864,7 +854,8 @@ class GcpBatchAsyncBackendJobExecutionActor(override val standardParams: Standar
 
     val standardStreams = List(
      StandardStream("stdout", _.output),
-     StandardStream("stderr", _.error)
+     StandardStream("stderr", _.error),
+     StandardStream("taskLog", _.taskLog)
    ) map { s =>
      GcpBatchFileOutput(
        s.name,
@@ -888,8 +879,7 @@ class GcpBatchAsyncBackendJobExecutionActor(override val standardParams: Standar
       DetritusOutputParameters(
         monitoringScriptOutputParameter = monitoringOutput,
         rcFileOutputParameter = rcFileOutput,
-        memoryRetryRCFileOutputParameter = memoryRetryRCFileOutput,
-        logFileOutputParameter = logFileOutput
+        memoryRetryRCFileOutputParameter = memoryRetryRCFileOutput
       ),
       List.empty
     )
@@ -908,10 +898,7 @@ class GcpBatchAsyncBackendJobExecutionActor(override val standardParams: Standar
       runtimeAttributes = runtimeAttributes,
       batchAttributes = batchAttributes,
       projectId = batchAttributes.project,
-      region = batchAttributes.location,
-      logfile = createParameters.commandScriptContainerPath.sibling(
-        batchParameters.detritusOutputParameters.logFileOutputParameter.name
-      )
+      region = batchAttributes.location
     )
 
     drsLocalizationManifestCloudPath = jobPaths.callExecutionRoot / GcpBatchJobPaths.DrsLocalizationManifestName
diff --git a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/actors/GcpBatchInitializationActor.scala b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/actors/GcpBatchInitializationActor.scala
index d78100dd340..95c0a74022c 100644
--- a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/actors/GcpBatchInitializationActor.scala
+++ b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/actors/GcpBatchInitializationActor.scala
@@ -23,6 +23,7 @@ import cromwell.backend.google.batch.models.GcpBatchConfigurationAttributes.{
 }
 import cromwell.backend.google.batch.models._
 import cromwell.backend.google.batch.runnable.WorkflowOptionKeys
+import cromwell.backend.io.JobPaths
 import cromwell.backend.standard.{
   StandardInitializationActor,
   StandardInitializationActorParams,
@@ -274,7 +275,7 @@ object GcpBatchInitializationActor {
 
   // For metadata publishing purposes default to using the name of a standard stream as the stream's filename.
   def defaultStandardStreamNameToFileNameMetadataMapper(gcpBatchJobPaths: GcpBatchJobPaths, streamName: String
-  ): String = streamName
+  ): String = if (streamName == JobPaths.TaskLogPathKey) gcpBatchJobPaths.batchLogFilename else streamName
 
   def encryptKms(keyName: String, credentials: OAuth2Credentials, plainText: String): String = {
     val httpCredentialsAdapter = new HttpCredentialsAdapter(credentials)
diff --git a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/actors/GcpBatchJobCachingActorHelper.scala b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/actors/GcpBatchJobCachingActorHelper.scala
index edb778d3928..773222fb07f 100644
--- a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/actors/GcpBatchJobCachingActorHelper.scala
+++ b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/actors/GcpBatchJobCachingActorHelper.scala
@@ -34,9 +34,6 @@ trait GcpBatchJobCachingActorHelper extends StandardCachingActorHelper {
   lazy val memoryRetryRCFilename: String = gcpBatchCallPaths.memoryRetryRCFilename
   lazy val memoryRetryRCGcsPath: Path = gcpBatchCallPaths.memoryRetryRC
 
-  lazy val logFilename: String = "task.log"
-  lazy val logGcsPath: Path = gcpBatchCallPaths.callExecutionRoot.resolve(logFilename)
-
   lazy val batchAttributes: GcpBatchConfigurationAttributes = batchConfiguration.batchAttributes
 
   lazy val defaultLabels: Labels = {
diff --git a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/api/GcpBatchRequestFactory.scala b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/api/GcpBatchRequestFactory.scala
index d2f8242b213..5aa633dab4b 100644
--- a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/api/GcpBatchRequestFactory.scala
+++ b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/api/GcpBatchRequestFactory.scala
@@ -41,11 +41,9 @@ object GcpBatchRequestFactory {
   case class DetritusOutputParameters(
     monitoringScriptOutputParameter: Option[GcpBatchFileOutput],
     rcFileOutputParameter: GcpBatchFileOutput,
-    memoryRetryRCFileOutputParameter: GcpBatchFileOutput,
-    logFileOutputParameter: GcpBatchFileOutput
+    memoryRetryRCFileOutputParameter: GcpBatchFileOutput
   ) {
     def all: List[GcpBatchFileOutput] = memoryRetryRCFileOutputParameter ::
-      logFileOutputParameter ::
       rcFileOutputParameter ::
       monitoringScriptOutputParameter.toList
   }
diff --git a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/api/GcpBatchRequestFactoryImpl.scala b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/api/GcpBatchRequestFactoryImpl.scala
index 8071acbf9e8..41c18e192e8 100644
--- a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/api/GcpBatchRequestFactoryImpl.scala
+++ b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/api/GcpBatchRequestFactoryImpl.scala
@@ -241,11 +241,6 @@ class GcpBatchRequestFactoryImpl()(implicit gcsTransferConfiguration: GcsTransfe
     val logsPolicy = data.gcpBatchParameters.batchAttributes.logsPolicy match {
       case GcpBatchLogsPolicy.CloudLogging =>
         LogsPolicy.newBuilder.setDestination(Destination.CLOUD_LOGGING).build
-      case GcpBatchLogsPolicy.Path =>
-        LogsPolicy.newBuilder
-          .setDestination(Destination.PATH)
-          .setLogsPath(data.gcpBatchParameters.logfile.toString)
-          .build
     }
 
     val googleLabels = data.createParameters.googleLabels.map(l => Label(l.key, l.value))
diff --git a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/CreateGcpBatchParameters.scala b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/CreateGcpBatchParameters.scala
index 0c47b95fb7d..456f7115ae8 100644
--- a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/CreateGcpBatchParameters.scala
+++ b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/CreateGcpBatchParameters.scala
@@ -1,12 +1,10 @@
 package cromwell.backend.google.batch.models
 
 import cromwell.backend.BackendJobDescriptor
-import cromwell.core.path.Path
 
 case class CreateGcpBatchParameters(jobDescriptor: BackendJobDescriptor,
                                     runtimeAttributes: GcpBatchRuntimeAttributes,
                                     batchAttributes: GcpBatchConfigurationAttributes,
                                     projectId: String,
-                                    region: String,
-                                    logfile: Path
+                                    region: String
 )
diff --git a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/GcpBatchConfigurationAttributes.scala b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/GcpBatchConfigurationAttributes.scala
index cf71a1a3426..e594819b10f 100644
--- a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/GcpBatchConfigurationAttributes.scala
+++ b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/GcpBatchConfigurationAttributes.scala
@@ -216,7 +216,6 @@ object GcpBatchConfigurationAttributes extends GcpBatchReferenceFilesMappingOper
     val logsPolicy: ErrorOr[GcpBatchLogsPolicy] = validate {
       backendConfig.as[Option[String]]("batch.logs-policy").getOrElse("CLOUD_LOGGING") match {
         case "CLOUD_LOGGING" => GcpBatchLogsPolicy.CloudLogging
-        case "PATH" => GcpBatchLogsPolicy.Path
         case other =>
           throw new IllegalArgumentException(
             s"Unrecognized logs policy entry: $other. Supported strategies are CLOUD_LOGGING and PATH."
diff --git a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/GcpBatchJobPaths.scala b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/GcpBatchJobPaths.scala
index 6da61d9bcf6..732a09a3213 100644
--- a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/GcpBatchJobPaths.scala
+++ b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/GcpBatchJobPaths.scala
@@ -21,14 +21,9 @@ case class GcpBatchJobPaths(override val workflowPaths: GcpBatchWorkflowPaths,
                             override val isCallCacheCopyAttempt: Boolean = false
 ) extends JobPaths {
 
-  def batchLogBasename = {
-    val index = jobKey.index
-      .map(s => s"-$s")
-      .getOrElse("")
-    s"${jobKey.node.localName}$index"
-  }
-
-  val batchLogFilename: String = s"$batchLogBasename.log"
+  override def implementsTaskLogging: Boolean = true
+
+  val batchLogFilename: String = "task.log"
   lazy val batchLogPath: Path = callExecutionRoot.resolve(batchLogFilename)
 
   val batchMonitoringLogFilename: String = s"${GcpBatchJobPaths.BatchMonitoringKey}.log"
diff --git a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/GcpBatchLogsPolicy.scala b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/GcpBatchLogsPolicy.scala
index bcc8fab8d13..d43714e4956 100644
--- a/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/GcpBatchLogsPolicy.scala
+++ b/supportedBackends/google/batch/src/main/scala/cromwell/backend/google/batch/models/GcpBatchLogsPolicy.scala
@@ -4,5 +4,4 @@ sealed trait GcpBatchLogsPolicy extends Product with Serializable
 
 object GcpBatchLogsPolicy {
   case object CloudLogging extends GcpBatchLogsPolicy
-  case object Path extends GcpBatchLogsPolicy
 }
diff --git a/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/actors/GcpBatchAsyncBackendJobExecutionActorSpec.scala b/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/actors/GcpBatchAsyncBackendJobExecutionActorSpec.scala
index a162165f765..74edb009ae4 100644
--- a/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/actors/GcpBatchAsyncBackendJobExecutionActorSpec.scala
+++ b/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/actors/GcpBatchAsyncBackendJobExecutionActorSpec.scala
@@ -1084,7 +1084,7 @@ class GcpBatchAsyncBackendJobExecutionActorSpec
       "gs://path/to/gcs_root/wf_hello/e6236763-c518-41d0-9688-432549a8bf7c/call-hello/stderr"
     batchBackend.gcpBatchCallPaths.batchLogPath should be(a[GcsPath])
     batchBackend.gcpBatchCallPaths.batchLogPath.pathAsString shouldBe
-      "gs://path/to/gcs_root/wf_hello/e6236763-c518-41d0-9688-432549a8bf7c/call-hello/hello.log"
+      "gs://path/to/gcs_root/wf_hello/e6236763-c518-41d0-9688-432549a8bf7c/call-hello/task.log"
   }
 
   it should "return Batch log paths for scattered call" in {
@@ -1132,7 +1132,7 @@ class GcpBatchAsyncBackendJobExecutionActorSpec
       "gs://path/to/gcs_root/w/e6236763-c518-41d0-9688-432549a8bf7d/call-B/shard-2/stderr"
     batchBackend.gcpBatchCallPaths.batchLogPath should be(a[GcsPath])
     batchBackend.gcpBatchCallPaths.batchLogPath.pathAsString shouldBe
-      "gs://path/to/gcs_root/w/e6236763-c518-41d0-9688-432549a8bf7d/call-B/shard-2/B-2.log"
+      "gs://path/to/gcs_root/w/e6236763-c518-41d0-9688-432549a8bf7d/call-B/shard-2/task.log"
   }
 
   it should "return the project from the workflow options in the start metadata" in {
@@ -1202,7 +1202,8 @@ class GcpBatchAsyncBackendJobExecutionActorSpec
         "runtimeAttributes:zones" -> "us-central1-b,us-central1-a",
         "runtimeAttributes:maxRetries" -> "0",
         "stderr" -> s"$batchGcsRoot/wf_hello/$workflowId/call-goodbye/stderr",
-        "stdout" -> s"$batchGcsRoot/wf_hello/$workflowId/call-goodbye/stdout"
+        "stdout" -> s"$batchGcsRoot/wf_hello/$workflowId/call-goodbye/stdout",
+        "taskLog" -> s"$batchGcsRoot/wf_hello/$workflowId/call-goodbye/task.log"
       )
     )
   }
diff --git a/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/models/GcpBatchConfigurationAttributesSpec.scala b/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/models/GcpBatchConfigurationAttributesSpec.scala
index 19805debbed..39dc0a69b13 100644
--- a/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/models/GcpBatchConfigurationAttributesSpec.scala
+++ b/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/models/GcpBatchConfigurationAttributesSpec.scala
@@ -135,12 +135,6 @@ class GcpBatchConfigurationAttributesSpec
     gcpBatchAttributes.logsPolicy should be(GcpBatchLogsPolicy.CloudLogging)
   }
 
-  it should "parse logs-policy = PATH" in {
-    val backendConfig = ConfigFactory.parseString(configString(batch = "logs-policy = PATH"))
-    val gcpBatchAttributes = GcpBatchConfigurationAttributes(googleConfig, backendConfig, "batch")
-    gcpBatchAttributes.logsPolicy should be(GcpBatchLogsPolicy.Path)
-  }
-
   it should "reject invalid logs-policy" in {
     val expected =
       "Google Cloud Batch configuration is not valid: Errors:\nUnrecognized logs policy entry: INVALID. Supported strategies are CLOUD_LOGGING and PATH."
diff --git a/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/models/GcpBatchJobPathsSpec.scala b/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/models/GcpBatchJobPathsSpec.scala
index 933d2958689..45e8241b66a 100644
--- a/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/models/GcpBatchJobPathsSpec.scala
+++ b/supportedBackends/google/batch/src/test/scala/cromwell/backend/google/batch/models/GcpBatchJobPathsSpec.scala
@@ -41,7 +41,7 @@ class GcpBatchJobPathsSpec extends TestKitSuite with AnyFlatSpecLike with Matche
     callPaths.returnCodeFilename should be("rc")
     callPaths.stderr.getFileName.pathAsString should be("gs://my-cromwell-workflows-bucket/stderr")
     callPaths.stdout.getFileName.pathAsString should be("gs://my-cromwell-workflows-bucket/stdout")
-    callPaths.batchLogFilename should be("hello.log")
+    callPaths.batchLogFilename should be("task.log")
   }
 
   it should "map the correct paths" in {
@@ -69,7 +69,7 @@ class GcpBatchJobPathsSpec extends TestKitSuite with AnyFlatSpecLike with Matche
     callPaths.stderr.pathAsString should
       be(s"gs://my-cromwell-workflows-bucket/wf_hello/${workflowDescriptor.id}/call-hello/stderr")
     callPaths.batchLogPath.pathAsString should
-      be(s"gs://my-cromwell-workflows-bucket/wf_hello/${workflowDescriptor.id}/call-hello/hello.log")
+      be(s"gs://my-cromwell-workflows-bucket/wf_hello/${workflowDescriptor.id}/call-hello/task.log")
   }
 
   it should "map the correct call context" in {