Skip to content

Commit 2357ffa

Browse files
Authored by fm3, normanrz, and coderabbitai[bot]
More logging in finishUpload to debug issues (#9248)
adds logging and error messages to finishUpload code to help debug https://scm.slack.com/archives/CMBMU5684/p1769589829329099 and https://scm.slack.com/archives/CMBMU5684/p1769544206045789 --------- Co-authored-by: Norman Rzepka <code@normanrz.com> Co-authored-by: coderabbitai[bot] <136622811+coderabbitai[bot]@users.noreply.github.com>
1 parent 30b82e4 commit 2357ffa

File tree

1 file changed

+13
-13
lines changed
  • webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading

1 file changed

+13
-13
lines changed

webknossos-datastore/app/com/scalableminds/webknossos/datastore/services/uploading/UploadService.scala

Lines changed: 13 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -22,7 +22,6 @@ import com.scalableminds.webknossos.datastore.models.datasource.UsableDataSource
2222
import com.scalableminds.webknossos.datastore.models.datasource._
2323
import com.scalableminds.webknossos.datastore.services.{DSRemoteWebknossosClient, DataSourceService, ManagedS3Service}
2424
import com.scalableminds.webknossos.datastore.storage.{DataStoreRedisStore, DataVaultService}
25-
import com.scalableminds.webknossos.datastore.slacknotification.DSSlackNotificationService
2625
import com.typesafe.scalalogging.LazyLogging
2726
import org.apache.commons.io.FileUtils
2827
import play.api.libs.json.{Json, OFormat, Reads}
@@ -97,7 +96,6 @@ class UploadService @Inject()(dataSourceService: DataSourceService,
9796
exploreLocalLayerService: ExploreLocalLayerService,
9897
dataStoreConfig: DataStoreConfig,
9998
managedS3Service: ManagedS3Service,
100-
slackNotificationService: DSSlackNotificationService,
10199
val remoteWebknossosClient: DSRemoteWebknossosClient)(implicit ec: ExecutionContext)
102100
extends DatasetDeleter
103101
with DirectoryConstants
@@ -315,8 +313,8 @@ class UploadService @Inject()(dataSourceService: DataSourceService,
315313
needsConversion = uploadInformation.needsConversion.getOrElse(false)
316314
uploadDir = uploadDirectoryFor(dataSourceId.organizationId, uploadId)
317315
_ <- backupRawUploadedData(uploadDir, uploadBackupDirectoryFor(dataSourceId.organizationId, uploadId), datasetId).toFox
318-
_ <- checkWithinRequestedFileSize(uploadDir, uploadId, datasetId)
319-
_ <- checkAllChunksUploaded(uploadId)
316+
_ <- checkWithinRequestedFileSize(uploadDir, uploadId, datasetId) ?~> "dataset.upload.fileSizeCheck.failed"
317+
_ <- checkAllChunksUploaded(uploadId) ?~> "dataset.upload.allChunksUploadedCheck.failed"
320318
unpackToDir = unpackToDirFor(dataSourceId)
321319
_ <- PathUtils.ensureDirectoryBox(unpackToDir.getParent).toFox ?~> "dataset.import.fileAccessDenied"
322320
unpackResult <- unpackDataset(uploadDir, unpackToDir, datasetId).shiftBox
@@ -332,8 +330,8 @@ class UploadService @Inject()(dataSourceService: DataSourceService,
332330
dataSourceId,
333331
needsConversion,
334332
label = s"processing dataset at $unpackToDir")
335-
datasetSizeBytes <- tryo(FileUtils.sizeOfDirectoryAsBigInteger(new File(unpackToDir.toString)).longValue).toFox
336-
dataSourceWithAbsolutePathsOpt <- moveUnpackedToTarget(unpackToDir, needsConversion, datasetId, dataSourceId)
333+
datasetSizeBytes <- tryo(FileUtils.sizeOfDirectoryAsBigInteger(new File(unpackToDir.toString)).longValue).toFox ?~> "dataset.upload.measureTotalSize.failed"
334+
dataSourceWithAbsolutePathsOpt <- moveUnpackedToTarget(unpackToDir, needsConversion, datasetId, dataSourceId) ?~> "dataset.upload.moveUnpackedToTarget.failed"
337335

338336
_ <- remoteWebknossosClient.reportUpload(
339337
datasetId,
@@ -343,16 +341,16 @@ class UploadService @Inject()(dataSourceService: DataSourceService,
343341
dataSourceWithAbsolutePathsOpt,
344342
linkedLayerIdentifiers.layersToLink.getOrElse(List.empty)
345343
)
346-
) ?~> "reportUpload.failed"
344+
) ?~> "dataset.upload.reportUpload.failed"
347345
} yield ()
348346
}
349347

350348
private def checkWithinRequestedFileSize(uploadDir: Path, uploadId: String, datasetId: ObjectId): Fox[Unit] =
351349
for {
352-
totalFileSizeInBytesOpt <- runningUploadMetadataStore.find(redisKeyForTotalFileSizeInBytes(uploadId))
350+
totalFileSizeInBytesOpt <- runningUploadMetadataStore.find(redisKeyForTotalFileSizeInBytes(uploadId)) ?~> "Could not look up reserved total file size"
353351
_ <- totalFileSizeInBytesOpt.map { reservedFileSize =>
354352
for {
355-
actualFileSize <- tryo(FileUtils.sizeOfDirectoryAsBigInteger(uploadDir.toFile).longValue).toFox
353+
actualFileSize <- tryo(FileUtils.sizeOfDirectoryAsBigInteger(uploadDir.toFile).longValue).toFox ?~> "Could not measure actual file size"
356354
_ <- if (actualFileSize > reservedFileSize.toLong) {
357355
cleanUpDatasetExceedingSize(uploadDir, uploadId)
358356
Fox.failure(
@@ -567,10 +565,10 @@ class UploadService @Inject()(dataSourceService: DataSourceService,
567565

568566
private def checkAllChunksUploaded(uploadId: String): Fox[Unit] =
569567
for {
570-
fileCountStringOpt <- runningUploadMetadataStore.find(redisKeyForFileCount(uploadId))
568+
fileCountStringOpt <- runningUploadMetadataStore.find(redisKeyForFileCount(uploadId)) ?~> "Could not look up reserved file count"
571569
fileCountString <- fileCountStringOpt.toFox ?~> "dataset.upload.noFiles"
572-
fileCount <- tryo(fileCountString.toLong).toFox
573-
fileNames <- runningUploadMetadataStore.findSet(redisKeyForFileNameSet(uploadId))
570+
fileCount <- tryo(fileCountString.toLong).toFox ?~> "Could not look up reserved file count (toLong)"
571+
fileNames <- runningUploadMetadataStore.findSet(redisKeyForFileNameSet(uploadId)) ?~> "Could not look up reserved file names"
574572
_ <- Fox.fromBool(fileCount == fileNames.size)
575573
list <- Fox.serialCombined(fileNames.toList) { fileName =>
576574
val chunkCount =
@@ -579,7 +577,7 @@ class UploadService @Inject()(dataSourceService: DataSourceService,
579577
.map(s => s.getOrElse("").toLong)
580578
val chunks = runningUploadMetadataStore.findSet(redisKeyForFileChunkSet(uploadId, fileName))
581579
chunks.flatMap(set => chunkCount.map(_ == set.size))
582-
}
580+
} ?~> "Could not look up reserved file sizes"
583581
_ <- Fox.fromBool(list.forall(identity))
584582
} yield ()
585583

@@ -745,6 +743,8 @@ class UploadService @Inject()(dataSourceService: DataSourceService,
745743
for {
746744
deepFileList: List[Path] <- PathUtils.listFilesRecursive(uploadDir, silent = false, maxDepth = 10).toFox
747745
commonPrefixPreliminary = PathUtils.commonPrefix(deepFileList)
746+
_ = logger.info(
747+
s"Detected dataset root during upload of $datasetId from ${deepFileList.length} files in $uploadDir with commonPrefixPreliminary=$commonPrefixPreliminary")
748748
strippedPrefix = PathUtils.cutOffPathAtLastOccurrenceOf(commonPrefixPreliminary, excludeFromPrefix)
749749
commonPrefix = PathUtils.removeSingleFileNameFromPrefix(strippedPrefix,
750750
deepFileList.map(_.getFileName.toString))

0 commit comments

Comments
 (0)