Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
39 changes: 36 additions & 3 deletions app/controllers/DatasetController.scala
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@ import com.scalableminds.util.tools.{Empty, Failure, Fox, Full, TristateOptionJs
import com.scalableminds.webknossos.datastore.datareaders.AxisOrder
import com.scalableminds.webknossos.datastore.helpers.UPath
import com.scalableminds.webknossos.datastore.models.AdditionalCoordinate
import com.scalableminds.webknossos.datastore.models.datasource.LayerAttachmentType.LayerAttachmentType
import com.scalableminds.webknossos.datastore.models.datasource.{
DataSourceId,
DataSourceStatus,
Expand Down Expand Up @@ -52,13 +53,22 @@ case class DatasetUpdateParameters(
metadata: Option[JsArray],
folderId: Option[ObjectId],
dataSource: Option[UsableDataSource],
layerRenamings: Option[Seq[LayerRenaming]]
layerRenamings: Option[Seq[LayerRenaming]],
attachmentRenamings: Option[Seq[AttachmentRenaming]]
)

// Maps an existing data layer name to its requested new name within a dataset update request.
case class LayerRenaming(oldName: String, newName: String)
object LayerRenaming {
// Play-JSON format so LayerRenaming can be (de)serialized in DatasetUpdateParameters payloads.
implicit val jsonFormat: OFormat[LayerRenaming] = Json.format[LayerRenaming]
}
// Describes renaming a single layer attachment, identified by its layer, current name, and attachment type.
case class AttachmentRenaming(
layerName: String, // Note: if a request contains a layer renaming *and* attachment renaming, this must use the *new* layerName.
oldName: String,
attachmentType: LayerAttachmentType,
newName: String)
object AttachmentRenaming {
// Play-JSON format so AttachmentRenaming can be (de)serialized in DatasetUpdateParameters payloads.
implicit val jsonFormat: OFormat[AttachmentRenaming] = Json.format[AttachmentRenaming]
}

object DatasetUpdateParameters extends TristateOptionJsonHelper {
implicit val jsonFormat: OFormat[DatasetUpdateParameters] =
Expand Down Expand Up @@ -465,7 +475,8 @@ class DatasetController @Inject()(userService: UserService,
dataSourceUpdates =>
datasetService.updateDataSourceFromUserChanges(dataset,
dataSourceUpdates,
request.body.layerRenamings.getOrElse(Seq.empty)))
request.body.layerRenamings.getOrElse(Seq.empty),
request.body.attachmentRenamings.getOrElse(Seq.empty)))
Comment on lines 476 to +479
Copy link
Copy Markdown
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

⚠️ Potential issue | 🟠 Major

Normalize attachment renames against layer renames in the same request.

Line 478 forwards attachmentRenamings unchanged even though the request already contains the old→new layer mapping. A payload that renames a layer and still references the pre-rename layerName for an attachment rename will only apply half of the change. Please rewrite or reject those mixed requests here before delegating.

🔧 Suggested normalization in the controller
+        layerRenamingMap = request.body.layerRenamings.getOrElse(Seq.empty).map(r => r.oldName -> r.newName).toMap
+        normalizedAttachmentRenamings = request.body.attachmentRenamings.getOrElse(Seq.empty).map { renaming =>
+          renaming.copy(layerName = layerRenamingMap.getOrElse(renaming.layerName, renaming.layerName))
+        }
         _ <- Fox.runOptional(request.body.dataSource)(
           dataSourceUpdates =>
             datasetService.updateDataSourceFromUserChanges(dataset,
                                                            dataSourceUpdates,
                                                            request.body.layerRenamings.getOrElse(Seq.empty),
-                                                           request.body.attachmentRenamings.getOrElse(Seq.empty)))
+                                                           normalizedAttachmentRenamings))
🤖 Prompt for AI Agents
Verify each finding against the current code and only fix it if needed.

In `@app/controllers/DatasetController.scala` around lines 476 - 479,
DatasetController currently forwards request.body.attachmentRenamings into
datasetService.updateDataSourceFromUserChanges without applying same-request
layer renames; update the controller to normalize attachment renames first:
build a Map from request.body.layerRenamings.getOrElse(Seq.empty) mapping
oldLayerName -> newLayerName, then transform each entry in
request.body.attachmentRenamings.getOrElse(Seq.empty) by replacing its layerName
with the mapped newLayerName when present (preserving other fields), and pass
that normalized sequence instead of the raw attachmentRenamings into
datasetService.updateDataSourceFromUserChanges; use the existing symbols
DatasetController, datasetService.updateDataSourceFromUserChanges,
request.body.layerRenamings and request.body.attachmentRenamings to locate and
change the call.

updated <- datasetDAO.findOne(datasetId)
_ = analyticsService.track(ChangeDatasetSettingsEvent(request.identity, updated))
js <- datasetService.publicWrites(updated, Some(request.identity))
Expand Down Expand Up @@ -679,6 +690,27 @@ class DatasetController @Inject()(userService: UserService,
} yield Ok(Json.obj("newDatasetId" -> newDatasetId))
}

// Adds a layer from another dataset to the (virtual) dataset identified by datasetId.
// Thin authenticated wrapper: validation and the actual work happen in composeService.addLayer.
def composeAddLayer(datasetId: ObjectId): Action[ComposeRequestLayer] =
sil.SecuredAction.async(validateJson[ComposeRequestLayer]) { implicit request =>
for {
_ <- composeService.addLayer(datasetId, request.body) ?~> "dataset.compose.addLayer.failed"
} yield Ok
}

// Adds a magnification from another dataset's layer to a layer of the dataset identified by datasetId.
// Thin authenticated wrapper: validation and the actual work happen in composeService.addMag.
def composeAddMag(datasetId: ObjectId): Action[ComposeAddMagRequest] =
sil.SecuredAction.async(validateJson[ComposeAddMagRequest]) { implicit request =>
for {
_ <- composeService.addMag(datasetId, request.body) ?~> "dataset.compose.addMag.failed"
} yield Ok
}

// Adds an attachment from another dataset's layer to a layer of the dataset identified by datasetId.
// Thin authenticated wrapper: validation and the actual work happen in composeService.addAttachment.
def composeAddAttachment(datasetId: ObjectId): Action[ComposeAddAttachmentRequest] =
sil.SecuredAction.async(validateJson[ComposeAddAttachmentRequest]) { implicit request =>
for {
_ <- composeService.addAttachment(datasetId, request.body) ?~> "dataset.compose.addAttachment.failed"
} yield Ok
}

def reserveMagUploadToPath(datasetId: ObjectId): Action[ReserveMagUploadToPathRequest] =
sil.SecuredAction.async(validateJson[ReserveMagUploadToPathRequest]) { implicit request =>
for {
Expand Down Expand Up @@ -725,13 +757,14 @@ class DatasetController @Inject()(userService: UserService,
request.body.layerName,
request.body.attachmentName,
request.body.attachmentType)
dataStoreClient <- datasetService.clientFor(dataset)
_ <- Fox.runIf(!dataset.isVirtual) {
for {
updatedDataSource <- datasetService.usableDataSourceFor(dataset)
dataStoreClient <- datasetService.clientFor(dataset)
_ <- dataStoreClient.updateDataSourceOnDisk(datasetId, updatedDataSource)
} yield ()
}
_ <- dataStoreClient.invalidateDatasetInDSCache(datasetId)
} yield Ok
}

Expand Down
50 changes: 49 additions & 1 deletion app/controllers/LegacyApiController.scala
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,14 @@ import play.api.libs.json._
import play.api.mvc.{Action, AnyContent, PlayBodyParsers, Result}
import security.WkEnv
import com.scalableminds.util.objectid.ObjectId
import com.scalableminds.webknossos.datastore.models.datasource.{
StaticColorLayer,
StaticSegmentationLayer,
UsableDataSource
}
import models.analytics.{AnalyticsService, ChangeDatasetSettingsEvent}
import play.api.i18n.Messages
import utils.MetadataAssertions

import scala.concurrent.ExecutionContext

Expand Down Expand Up @@ -44,8 +52,48 @@ class LegacyApiController @Inject()(datasetController: DatasetController,
organizationDAO: OrganizationDAO,
datasetService: DatasetService,
datasetDAO: DatasetDAO,
analyticsService: AnalyticsService,
sil: Silhouette[WkEnv])(implicit ec: ExecutionContext, bodyParsers: PlayBodyParsers)
extends Controller {
extends Controller
with MetadataAssertions {

// v12 legacy route: partially updates a dataset from DatasetUpdateParameters while keeping
// attachment configuration untouched (legacy clients must not change attachments here).
// Checks edit permission, applies the DB-side partial update, then (if a data source update
// is present) restores the original attachments of each static layer before delegating to
// datasetService.updateDataSourceFromUserChanges. Returns the updated dataset as JSON.
def updatePartialV12(datasetId: ObjectId): Action[DatasetUpdateParameters] =
  sil.SecuredAction.async(validateJson[DatasetUpdateParameters]) { implicit request =>
    for {
      dataset <- datasetDAO.findOne(datasetId) ?~> Messages("dataset.notFound", datasetId) ~> NOT_FOUND
      _ <- Fox.assertTrue(datasetService.isEditableBy(dataset, Some(request.identity))) ?~> "notAllowed" ~> FORBIDDEN
      _ <- Fox.runOptional(request.body.metadata)(assertNoDuplicateMetadataKeys)
      _ <- datasetDAO.updatePartial(dataset._id, request.body)
      _ <- Fox.runOptional(request.body.dataSource) { dataSourceUpdates =>
        // Finds the attachments of the layer as currently stored, undoing any same-request
        // layer renaming first (the incoming updates reference the *new* layer names).
        def findOriginalAttachments(existingDataSource: UsableDataSource, layerName: String) = {
          val reverseLayerRenamingMap: Map[String, String] = request.body.layerRenamings
            .getOrElse(Seq.empty)
            .map(layerRenaming => (layerRenaming.newName, layerRenaming.oldName))
            .toMap
          val existingLayerName = reverseLayerRenamingMap.getOrElse(layerName, layerName)
          val existingLayer = existingDataSource.dataLayers.find(_.name == existingLayerName)
          existingLayer.flatMap(_.attachments)
        }
        for {
          existingDataSource <- datasetService.usableDataSourceFor(dataset)
          // Undo any attachment changes the legacy client may have sent: keep the stored attachments.
          updatesWithUndoneAttachmentChanges = dataSourceUpdates.copy(
            dataLayers = dataSourceUpdates.dataLayers.map {
              case s: StaticColorLayer => s.copy(attachments = findOriginalAttachments(existingDataSource, s.name))
              case s: StaticSegmentationLayer =>
                s.copy(attachments = findOriginalAttachments(existingDataSource, s.name))
              // Pass through any other layer type unchanged; without this case the match is
              // non-exhaustive and would throw a scala.MatchError at runtime.
              case other => other
            }
          )
          _ <- datasetService.updateDataSourceFromUserChanges(dataset,
                                                              updatesWithUndoneAttachmentChanges,
                                                              request.body.layerRenamings.getOrElse(Seq.empty),
                                                              request.body.attachmentRenamings.getOrElse(Seq.empty))
        } yield ()
      }
      updated <- datasetDAO.findOne(datasetId)
      _ = analyticsService.track(ChangeDatasetSettingsEvent(request.identity, updated))
      js <- datasetService.publicWrites(updated, Some(request.identity))
    } yield Ok(js)
  }

/* provide v8 */

Expand Down
128 changes: 116 additions & 12 deletions app/models/dataset/ComposeService.scala
Original file line number Diff line number Diff line change
@@ -1,10 +1,12 @@
package models.dataset

import com.scalableminds.util.accesscontext.DBAccessContext
import com.scalableminds.util.accesscontext.{DBAccessContext, GlobalAccessContext}
import com.scalableminds.util.geometry.Vec3Int
import com.scalableminds.util.objectid.ObjectId
import com.scalableminds.util.tools.{Fox, FoxImplicits}
import com.scalableminds.webknossos.datastore.explore.ExploreLayerUtils
import com.scalableminds.webknossos.datastore.models.VoxelSize
import com.scalableminds.webknossos.datastore.models.datasource.LayerAttachmentType.LayerAttachmentType
import com.scalableminds.webknossos.datastore.models.datasource._
import models.user.User
import play.api.i18n.MessagesProvider
Expand All @@ -25,16 +27,39 @@ object ComposeRequest {
implicit val composeRequestFormat: OFormat[ComposeRequest] = Json.format[ComposeRequest]
}
// Describes one layer of a compose request: which layer of which source dataset to use,
// the name it should get in the composed dataset, and optional additional coordinate
// transformations to append to the layer's existing ones.
case class ComposeRequestLayer(
    sourceDatasetId: ObjectId,
    sourceLayerName: String,
    targetLayerName: String,
    transformations: Option[Seq[CoordinateTransformation]]
)

object ComposeRequestLayer {
  // Play-JSON format for compose endpoint payloads.
  implicit val composeLayerFormat: OFormat[ComposeRequestLayer] = Json.format[ComposeRequestLayer]
}

// Request to copy a single magnification from a source dataset's layer into a target layer,
// optionally registering it under a different mag (targetMag).
case class ComposeAddMagRequest(sourceDatasetId: ObjectId,
sourceLayerName: String,
targetLayerName: String,
sourceMag: Vec3Int,
targetMag: Option[Vec3Int] // None means use sourceMag
)

object ComposeAddMagRequest {
// Play-JSON format for the composeAddMag endpoint payload.
implicit val jsonFormat: OFormat[ComposeAddMagRequest] = Json.format[ComposeAddMagRequest]
}

// Request to copy a single attachment (identified by type and name) from a source dataset's
// layer into a target layer, optionally registering it under a different name.
case class ComposeAddAttachmentRequest(sourceDatasetId: ObjectId,
sourceLayerName: String,
targetLayerName: String,
attachmentType: LayerAttachmentType,
sourceAttachmentName: String,
targetAttachmentName: Option[String] // None means use sourceAttachmentName
)

object ComposeAddAttachmentRequest {
// Play-JSON format for the composeAddAttachment endpoint payload.
implicit val jsonFormat: OFormat[ComposeAddAttachmentRequest] = Json.format[ComposeAddAttachmentRequest]
}

class ComposeService @Inject()(datasetDAO: DatasetDAO, dataStoreDAO: DataStoreDAO, datasetService: DatasetService)(
implicit ec: ExecutionContext)
extends ExploreLayerUtils
Expand All @@ -60,15 +85,15 @@ class ComposeService @Inject()(datasetDAO: DatasetDAO, dataStoreDAO: DataStoreDA
implicit ctx: DBAccessContext,
mp: MessagesProvider): Fox[(StaticLayer, VoxelSize)] =
for {
dataset <- datasetDAO.findOne(composeLayer.datasetId) ?~> "Dataset not found"
dataset <- datasetDAO.findOne(composeLayer.sourceDatasetId) ?~> "Dataset not found"
usableDataSource <- datasetService.usableDataSourceFor(dataset)
layer <- usableDataSource.dataLayers.find(_.name == composeLayer.sourceName).toFox
applyCoordinateTransformations = (cOpt: Option[List[CoordinateTransformation]]) =>
layer <- usableDataSource.dataLayers.find(_.name == composeLayer.sourceLayerName).toFox
applyCoordinateTransformations = (cOpt: Option[Seq[CoordinateTransformation]]) =>
cOpt match {
case Some(c) => Some(c ++ composeLayer.transformations.toList)
case None => Some(composeLayer.transformations.toList)
case Some(c) => Some(c ++ composeLayer.transformations.getOrElse(Seq.empty))
case None => Some(composeLayer.transformations.getOrElse(Seq.empty))
}
editedLayer = layer.mapped(name = composeLayer.newName,
editedLayer = layer.mapped(name = composeLayer.targetLayerName,
coordinateTransformations =
applyCoordinateTransformations(layer.coordinateTransformations))
} yield (editedLayer, usableDataSource.scale)
Expand All @@ -79,7 +104,7 @@ class ComposeService @Inject()(datasetDAO: DatasetDAO, dataStoreDAO: DataStoreDA
// stores, however, the data store is only stored for each dataset and not per mag.
for {
_ <- Fox.fromBool(composeRequest.layers.nonEmpty) ?~> "Cannot compose dataset with no layers"
datasetIds = composeRequest.layers.map(_.datasetId).distinct
datasetIds = composeRequest.layers.map(_.sourceDatasetId).distinct
datasets <- Fox.serialCombined(datasetIds)(datasetDAO.findOne(_))
dataStores = datasets.map(_._dataStore)
} yield {
Expand All @@ -105,4 +130,83 @@ class ComposeService @Inject()(datasetDAO: DatasetDAO, dataStoreDAO: DataStoreDA
)
} yield dataSource

// Appends a layer copied from another dataset to a virtual target dataset, under the
// requested target name. Both datasets must live on the same datastore. Persists the
// updated data source and invalidates the datastore-side cache so the change is visible.
def addLayer(targetDatasetId: ObjectId, request: ComposeRequestLayer)(implicit ctx: DBAccessContext,
mp: MessagesProvider): Fox[Unit] =
for {
targetDataset <- datasetDAO.findOne(targetDatasetId) ?~> "dataset.notFound"
// Only virtual datasets can be composed in place.
_ <- Fox.fromBool(targetDataset.isVirtual) ?~> "dataset.composeInPlace.mustBeVirtual"
targetDataSource <- datasetService.usableDataSourceFor(targetDataset)
// Reject if the target already has a layer with the requested name.
_ <- Fox.fromBool(!targetDataSource.dataLayers.exists(_.name == request.targetLayerName)) ?~> "dataset.layer.nameAlreadyExists"
sourceDataset <- datasetDAO.findOne(request.sourceDatasetId) ?~> "dataset.notFound"
// The dataset record stores only one datastore, so source and target must share it.
_ <- Fox.fromBool(targetDataset._dataStore == sourceDataset._dataStore) ?~> "compose.differingDataStores"
sourceDataSource <- datasetService.usableDataSourceFor(sourceDataset)
sourceLayer <- sourceDataSource.getDataLayer(request.sourceLayerName).toFox ?~> "layer.notFound"
// Copy the source layer, renamed to the target name.
updatedDataSource = targetDataSource.copy(
dataLayers = targetDataSource.dataLayers :+ sourceLayer.mapped(name = request.targetLayerName))
_ <- datasetDAO.updateDataSource(targetDatasetId,
targetDataset._dataStore,
updatedDataSource.hashCode(),
updatedDataSource,
isUsable = true)(GlobalAccessContext)
// Evict the datastore's cached copy so it reloads the updated data source.
dataStoreClient <- datasetService.clientFor(targetDataset)
_ <- dataStoreClient.invalidateDatasetInDSCache(targetDatasetId)
} yield ()

// Copies a single magnification from a source dataset's layer into a layer of a virtual
// target dataset, optionally registering it as a different mag. Both datasets must share
// a datastore. Persists the updated data source and invalidates the datastore-side cache.
def addMag(targetDatasetId: ObjectId, request: ComposeAddMagRequest)(implicit ctx: DBAccessContext,
mp: MessagesProvider): Fox[Unit] =
for {
targetDataset <- datasetDAO.findOne(targetDatasetId) ?~> "dataset.notFound"
// Only virtual datasets can be composed in place.
_ <- Fox.fromBool(targetDataset.isVirtual) ?~> "dataset.composeInPlace.mustBeVirtual"
targetDataSource <- datasetService.usableDataSourceFor(targetDataset)
targetLayer <- targetDataSource.getDataLayer(request.targetLayerName).toFox ?~> "layer.notFound"
sourceDataset <- datasetDAO.findOne(request.sourceDatasetId) ?~> "dataset.notFound"
_ <- Fox.fromBool(targetDataset._dataStore == sourceDataset._dataStore) ?~> "compose.differingDataStores"
sourceDataSource <- datasetService.usableDataSourceFor(sourceDataset)
sourceLayer <- sourceDataSource.getDataLayer(request.sourceLayerName).toFox ?~> "layer.notFound"
sourceMag <- sourceLayer.mags.find(_.mag == request.sourceMag).toFox ?~> "mag.notFound"
// Register under targetMag if given, otherwise keep the source mag.
adaptedMag = sourceMag.copy(mag = request.targetMag.getOrElse(sourceMag.mag))
updatedLayer = targetLayer.mapped(newMags = Some((targetLayer.mags :+ adaptedMag).sortBy(_.mag.maxDim)))
// NOTE(review): duplicate detection compares mag.maxDim only — two distinct anisotropic
// mags sharing a maxDim would be rejected; confirm this is intended.
_ <- Fox.fromBool(updatedLayer.mags.distinctBy(_.mag.maxDim).length == updatedLayer.mags.length) ?~> "compose.duplicateMag"
updatedLayers = targetDataSource.dataLayers.map(l => if (l.name == request.targetLayerName) updatedLayer else l)
updatedDataSource = targetDataSource.copy(dataLayers = updatedLayers)
_ <- datasetDAO.updateDataSource(targetDatasetId,
targetDataset._dataStore,
updatedDataSource.hashCode(),
updatedDataSource,
isUsable = true)(GlobalAccessContext)
// Evict the datastore's cached copy so it reloads the updated data source.
dataStoreClient <- datasetService.clientFor(targetDataset)
_ <- dataStoreClient.invalidateDatasetInDSCache(targetDatasetId)
} yield ()

// Copies a single attachment (by type and name) from a source dataset's layer into a layer
// of a virtual target dataset, optionally under a new name. Both datasets must share a
// datastore. Persists the updated data source and invalidates the datastore-side cache.
def addAttachment(targetDatasetId: ObjectId, request: ComposeAddAttachmentRequest)(implicit ctx: DBAccessContext,
mp: MessagesProvider): Fox[Unit] =
for {
targetDataset <- datasetDAO.findOne(targetDatasetId) ?~> "dataset.notFound"
// Only virtual datasets can be composed in place.
_ <- Fox.fromBool(targetDataset.isVirtual) ?~> "dataset.composeInPlace.mustBeVirtual"
targetDataSource <- datasetService.usableDataSourceFor(targetDataset)
targetLayer <- targetDataSource.getDataLayer(request.targetLayerName).toFox ?~> "layer.notFound"
sourceDataset <- datasetDAO.findOne(request.sourceDatasetId) ?~> "dataset.notFound"
_ <- Fox.fromBool(targetDataset._dataStore == sourceDataset._dataStore) ?~> "compose.differingDataStores"
sourceDataSource <- datasetService.usableDataSourceFor(sourceDataset)
sourceLayer <- sourceDataSource.getDataLayer(request.sourceLayerName).toFox ?~> "layer.notFound"
targetAttachments = targetLayer.attachments.getOrElse(DataLayerAttachments())
sourceAttachment <- sourceLayer.attachments
.getOrElse(DataLayerAttachments())
.getByTypeAndName(request.attachmentType, request.sourceAttachmentName)
.toFox ?~> "attachment.notFound"
// Keep the source name unless a target name is requested.
adaptedAttachment = sourceAttachment.copy(name = request.targetAttachmentName.getOrElse(sourceAttachment.name))
updatedAttachments = targetAttachments.withAdded(adaptedAttachment, request.attachmentType)
_ <- Fox.fromBool(!updatedAttachments.containsDuplicateNames) ?~> "Attachment renamings create name collisions."
updatedLayer = targetLayer.withAttachments(updatedAttachments)
updatedLayers = targetDataSource.dataLayers.map(l => if (l.name == request.targetLayerName) updatedLayer else l)
updatedDataSource = targetDataSource.copy(dataLayers = updatedLayers)
_ <- datasetDAO.updateDataSource(targetDatasetId,
targetDataset._dataStore,
updatedDataSource.hashCode(),
updatedDataSource,
isUsable = true)(GlobalAccessContext)
// Evict the datastore's cached copy so it reloads the updated data source.
dataStoreClient <- datasetService.clientFor(targetDataset)
_ <- dataStoreClient.invalidateDatasetInDSCache(targetDatasetId)
} yield ()

}
3 changes: 2 additions & 1 deletion app/models/dataset/Dataset.scala
Original file line number Diff line number Diff line change
Expand Up @@ -615,7 +615,8 @@ class DatasetDAO @Inject()(sqlClient: SqlClient, datasetLayerDAO: DatasetLayerDA
metadata = Some(metadata),
folderId = Some(folderId),
dataSource = None,
layerRenamings = None
layerRenamings = None,
attachmentRenamings = None
)
updatePartial(datasetId, updateParameters)
}
Expand Down
Loading
Loading