Skip to content

Commit 114c004

Browse files
committed
Merge branch 'master' of github.com:scalableminds/webknossos into admin-views-use-query
2 parents 14540a7 + 9e3b60a commit 114c004

File tree

61 files changed

+4202
-243
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

61 files changed

+4202
-243
lines changed

app/controllers/DatasetController.scala

Lines changed: 36 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -9,6 +9,7 @@ import com.scalableminds.util.tools.{Empty, Failure, Fox, Full, TristateOptionJs
99
import com.scalableminds.webknossos.datastore.datareaders.AxisOrder
1010
import com.scalableminds.webknossos.datastore.helpers.UPath
1111
import com.scalableminds.webknossos.datastore.models.AdditionalCoordinate
12+
import com.scalableminds.webknossos.datastore.models.datasource.LayerAttachmentType.LayerAttachmentType
1213
import com.scalableminds.webknossos.datastore.models.datasource.{
1314
DataSourceId,
1415
DataSourceStatus,
@@ -52,13 +53,22 @@ case class DatasetUpdateParameters(
5253
metadata: Option[JsArray],
5354
folderId: Option[ObjectId],
5455
dataSource: Option[UsableDataSource],
55-
layerRenamings: Option[Seq[LayerRenaming]]
56+
layerRenamings: Option[Seq[LayerRenaming]],
57+
attachmentRenamings: Option[Seq[AttachmentRenaming]]
5658
)
5759

5860
case class LayerRenaming(oldName: String, newName: String)
5961
object LayerRenaming {
6062
implicit val jsonFormat: OFormat[LayerRenaming] = Json.format[LayerRenaming]
6163
}
64+
case class AttachmentRenaming(
65+
layerName: String, // Note: if a request contains a layer renaming *and* attachment renaming, this must use the *new* layerName.
66+
oldName: String,
67+
attachmentType: LayerAttachmentType,
68+
newName: String)
69+
object AttachmentRenaming {
70+
implicit val jsonFormat: OFormat[AttachmentRenaming] = Json.format[AttachmentRenaming]
71+
}
6272

6373
object DatasetUpdateParameters extends TristateOptionJsonHelper {
6474
implicit val jsonFormat: OFormat[DatasetUpdateParameters] =
@@ -465,7 +475,8 @@ class DatasetController @Inject()(userService: UserService,
465475
dataSourceUpdates =>
466476
datasetService.updateDataSourceFromUserChanges(dataset,
467477
dataSourceUpdates,
468-
request.body.layerRenamings.getOrElse(Seq.empty)))
478+
request.body.layerRenamings.getOrElse(Seq.empty),
479+
request.body.attachmentRenamings.getOrElse(Seq.empty)))
469480
updated <- datasetDAO.findOne(datasetId)
470481
_ = analyticsService.track(ChangeDatasetSettingsEvent(request.identity, updated))
471482
js <- datasetService.publicWrites(updated, Some(request.identity))
@@ -679,6 +690,27 @@ class DatasetController @Inject()(userService: UserService,
679690
} yield Ok(Json.obj("newDatasetId" -> newDatasetId))
680691
}
681692

693+
def composeAddLayer(datasetId: ObjectId): Action[ComposeRequestLayer] =
694+
sil.SecuredAction.async(validateJson[ComposeRequestLayer]) { implicit request =>
695+
for {
696+
_ <- composeService.addLayer(datasetId, request.body) ?~> "dataset.compose.addLayer.failed"
697+
} yield Ok
698+
}
699+
700+
def composeAddMag(datasetId: ObjectId): Action[ComposeAddMagRequest] =
701+
sil.SecuredAction.async(validateJson[ComposeAddMagRequest]) { implicit request =>
702+
for {
703+
_ <- composeService.addMag(datasetId, request.body) ?~> "dataset.compose.addMag.failed"
704+
} yield Ok
705+
}
706+
707+
def composeAddAttachment(datasetId: ObjectId): Action[ComposeAddAttachmentRequest] =
708+
sil.SecuredAction.async(validateJson[ComposeAddAttachmentRequest]) { implicit request =>
709+
for {
710+
_ <- composeService.addAttachment(datasetId, request.body) ?~> "dataset.compose.addAttachment.failed"
711+
} yield Ok
712+
}
713+
682714
def reserveMagUploadToPath(datasetId: ObjectId): Action[ReserveMagUploadToPathRequest] =
683715
sil.SecuredAction.async(validateJson[ReserveMagUploadToPathRequest]) { implicit request =>
684716
for {
@@ -725,13 +757,14 @@ class DatasetController @Inject()(userService: UserService,
725757
request.body.layerName,
726758
request.body.attachmentName,
727759
request.body.attachmentType)
760+
dataStoreClient <- datasetService.clientFor(dataset)
728761
_ <- Fox.runIf(!dataset.isVirtual) {
729762
for {
730763
updatedDataSource <- datasetService.usableDataSourceFor(dataset)
731-
dataStoreClient <- datasetService.clientFor(dataset)
732764
_ <- dataStoreClient.updateDataSourceOnDisk(datasetId, updatedDataSource)
733765
} yield ()
734766
}
767+
_ <- dataStoreClient.invalidateDatasetInDSCache(datasetId)
735768
} yield Ok
736769
}
737770

app/controllers/LegacyApiController.scala

Lines changed: 49 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -17,6 +17,14 @@ import play.api.libs.json._
1717
import play.api.mvc.{Action, AnyContent, PlayBodyParsers, Result}
1818
import security.WkEnv
1919
import com.scalableminds.util.objectid.ObjectId
20+
import com.scalableminds.webknossos.datastore.models.datasource.{
21+
StaticColorLayer,
22+
StaticSegmentationLayer,
23+
UsableDataSource
24+
}
25+
import models.analytics.{AnalyticsService, ChangeDatasetSettingsEvent}
26+
import play.api.i18n.Messages
27+
import utils.MetadataAssertions
2028

2129
import scala.concurrent.ExecutionContext
2230

@@ -44,8 +52,48 @@ class LegacyApiController @Inject()(datasetController: DatasetController,
4452
organizationDAO: OrganizationDAO,
4553
datasetService: DatasetService,
4654
datasetDAO: DatasetDAO,
55+
analyticsService: AnalyticsService,
4756
sil: Silhouette[WkEnv])(implicit ec: ExecutionContext, bodyParsers: PlayBodyParsers)
48-
extends Controller {
57+
extends Controller
58+
with MetadataAssertions {
59+
60+
def updatePartialV12(datasetId: ObjectId): Action[DatasetUpdateParameters] =
61+
sil.SecuredAction.async(validateJson[DatasetUpdateParameters]) { implicit request =>
62+
for {
63+
dataset <- datasetDAO.findOne(datasetId) ?~> Messages("dataset.notFound", datasetId) ~> NOT_FOUND
64+
_ <- Fox.assertTrue(datasetService.isEditableBy(dataset, Some(request.identity))) ?~> "notAllowed" ~> FORBIDDEN
65+
_ <- Fox.runOptional(request.body.metadata)(assertNoDuplicateMetadataKeys)
66+
_ <- datasetDAO.updatePartial(dataset._id, request.body)
67+
_ <- Fox.runOptional(request.body.dataSource) { dataSourceUpdates =>
68+
def findOriginalAttachments(existingDataSource: UsableDataSource, layerName: String) = {
69+
val reverseLayerRenamingMap: Map[String, String] = request.body.layerRenamings
70+
.getOrElse(Seq.empty)
71+
.map(layerRenaming => (layerRenaming.newName, layerRenaming.oldName))
72+
.toMap
73+
val existingLayerName = reverseLayerRenamingMap.getOrElse(layerName, layerName)
74+
val existingLayer = existingDataSource.dataLayers.find(_.name == existingLayerName)
75+
existingLayer.flatMap(_.attachments)
76+
}
77+
for {
78+
existingDataSource <- datasetService.usableDataSourceFor(dataset)
79+
updatesWithUndoneAttachmentChanges = dataSourceUpdates.copy(
80+
dataLayers = dataSourceUpdates.dataLayers.map {
81+
case s: StaticColorLayer => s.copy(attachments = findOriginalAttachments(existingDataSource, s.name))
82+
case s: StaticSegmentationLayer =>
83+
s.copy(attachments = findOriginalAttachments(existingDataSource, s.name))
84+
}
85+
)
86+
_ <- datasetService.updateDataSourceFromUserChanges(dataset,
87+
updatesWithUndoneAttachmentChanges,
88+
request.body.layerRenamings.getOrElse(Seq.empty),
89+
request.body.attachmentRenamings.getOrElse(Seq.empty))
90+
} yield ()
91+
}
92+
updated <- datasetDAO.findOne(datasetId)
93+
_ = analyticsService.track(ChangeDatasetSettingsEvent(request.identity, updated))
94+
js <- datasetService.publicWrites(updated, Some(request.identity))
95+
} yield Ok(js)
96+
}
4997

5098
/* provide v8 */
5199

app/models/annotation/AnnotationUploadService.scala

Lines changed: 22 additions & 20 deletions
Original file line numberDiff line numberDiff line change
@@ -128,32 +128,34 @@ class AnnotationUploadService @Inject()(tempFileService: WkTempFileService, nmlP
128128
if (segmentGroups.isEmpty) 0
129129
else Math.max(segmentGroups.map(_.groupId).max, getMaximumSegmentGroupId(segmentGroups.flatMap(_.children)))
130130

131-
def wrapTreesInGroup(name: String, tracing: SkeletonTracing): SkeletonTracing = {
132-
val unusedGroupId = getMaximumTreeGroupId(tracing.treeGroups) + 1
133-
val newTrees = tracing.trees.map(tree => tree.copy(groupId = Some(tree.groupId.getOrElse(unusedGroupId))))
134-
val newTreeGroups = Seq(TreeGroup(name, unusedGroupId, tracing.treeGroups, isExpanded = Some(true)))
135-
tracing.copy(trees = newTrees, treeGroups = newTreeGroups)
136-
}
131+
def wrapTreesInGroup(name: String, tracing: SkeletonTracing): SkeletonTracing =
132+
if (tracing.trees.isEmpty && tracing.treeGroups.isEmpty) tracing
133+
else {
134+
val unusedGroupId = getMaximumTreeGroupId(tracing.treeGroups) + 1
135+
val newTrees = tracing.trees.map(tree => tree.copy(groupId = Some(tree.groupId.getOrElse(unusedGroupId))))
136+
val newTreeGroups = Seq(TreeGroup(name, unusedGroupId, tracing.treeGroups, isExpanded = Some(true)))
137+
tracing.copy(trees = newTrees, treeGroups = newTreeGroups)
138+
}
137139

138-
def wrapSegmentsInGroup(name: String, tracing: VolumeTracing): VolumeTracing = {
139-
val unusedGroupId = getMaximumSegmentGroupId(tracing.segmentGroups) + 1
140-
val newSegments =
141-
tracing.segments.map(segment => segment.copy(groupId = Some(segment.groupId.getOrElse(unusedGroupId))))
142-
val newSegmentGroups = Seq(SegmentGroup(name, unusedGroupId, tracing.segmentGroups))
143-
tracing.copy(segments = newSegments, segmentGroups = newSegmentGroups)
144-
}
140+
def wrapSegmentsInGroup(name: String, tracing: VolumeTracing): VolumeTracing =
141+
if (tracing.segments.isEmpty && tracing.segmentGroups.isEmpty) tracing
142+
else {
143+
val unusedGroupId = getMaximumSegmentGroupId(tracing.segmentGroups) + 1
144+
val newSegments =
145+
tracing.segments.map(segment => segment.copy(groupId = Some(segment.groupId.getOrElse(unusedGroupId))))
146+
val newSegmentGroups = Seq(SegmentGroup(name, unusedGroupId, tracing.segmentGroups))
147+
tracing.copy(segments = newSegments, segmentGroups = newSegmentGroups)
148+
}
145149

146150
def wrapVolumeLayers(name: String, volumeLayers: List[UploadedVolumeLayer]): List[UploadedVolumeLayer] =
147151
volumeLayers.map(v => v.copy(tracing = wrapSegmentsInGroup(name, v.tracing)))
148152

149153
parseResults.map {
150-
case NmlParseSuccess(name, skeletonTracing, uploadedVolumeLayers, datasetId, description, wkUrl) =>
151-
NmlParseSuccess(name,
152-
wrapTreesInGroup(name, skeletonTracing),
153-
wrapVolumeLayers(name, uploadedVolumeLayers),
154-
datasetId,
155-
description,
156-
wkUrl)
154+
case s: NmlParseSuccess =>
155+
s.copy(
156+
skeletonTracing = wrapTreesInGroup(s.fileNameWithoutExtension, s.skeletonTracing),
157+
volumeLayers = wrapVolumeLayers(s.fileNameWithoutExtension, s.volumeLayers)
158+
)
157159
case r => r
158160
}
159161
}

app/models/annotation/WKRemoteTracingStoreClient.scala

Lines changed: 23 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -12,7 +12,7 @@ import com.scalableminds.webknossos.datastore.SkeletonTracing.{
1212
SkeletonTracings,
1313
SkeletonTracingsWithIds
1414
}
15-
import com.scalableminds.webknossos.datastore.VolumeTracing.{VolumeTracing, VolumeTracings}
15+
import com.scalableminds.webknossos.datastore.VolumeTracing.{VolumeTracing, VolumeTracingOpt, VolumeTracings}
1616
import com.scalableminds.webknossos.datastore.models.VoxelSize
1717
import com.scalableminds.webknossos.datastore.models.annotation.{
1818
AnnotationLayer,
@@ -208,19 +208,39 @@ class WKRemoteTracingStoreClient(
208208
logger.debug(
209209
s"Called to merge ${tracings.tracings.length} VolumeTracings by contents into $newAnnotationId/$newTracingId." + baseInfo)
210210
for {
211-
_ <- rpc(s"${tracingStore.url}/tracings/volume/mergedFromContents")
211+
_ <- rpc(s"${tracingStore.url}/tracings/volume/initializeForMerge")
212212
.addQueryParam("newTracingId", newTracingId)
213213
.addQueryParam("token", RpcTokenHolder.webknossosToken)
214-
.postProto[VolumeTracings](tracings)
214+
.postProto[VolumeTracings](thinOutVolumeTracings(tracings))
215215
packedVolumeDataZips = packVolumeDataZips(initialData.flatten)
216216
_ = annotationDataSourceTemporaryStore.store(newAnnotationId, dataSource, datasetId)
217217
_ <- rpc(s"${tracingStore.url}/tracings/volume/$newTracingId/initialDataMultiple").withLongTimeout
218218
.addQueryParam("token", RpcTokenHolder.webknossosToken)
219219
.addQueryParam("annotationId", newAnnotationId.toString)
220220
.postFile(packedVolumeDataZips)
221+
_ <- rpc(s"${tracingStore.url}/tracings/volume/mergedFromContents")
222+
.addQueryParam("newTracingId", newTracingId)
223+
.addQueryParam("token", RpcTokenHolder.webknossosToken)
224+
.postProto[VolumeTracings](tracings)
221225
} yield ()
222226
}
223227

228+
// The initializeForMerge step expects tracings without segments, groups, bboxes, user states.
229+
// They don’t hurt but create unneeded traffic. So we’ll strip them here.
230+
private def thinOutVolumeTracings(tracings: VolumeTracings): VolumeTracings =
231+
tracings.copy(
232+
tracings = tracings.tracings.map { tracingOpt: VolumeTracingOpt =>
233+
tracingOpt.copy(
234+
tracing = tracingOpt.tracing.map(
235+
_.copy(
236+
segments = Seq.empty,
237+
segmentGroups = Seq.empty,
238+
userBoundingBoxes = Seq.empty,
239+
userStates = Seq.empty
240+
)))
241+
}
242+
)
243+
224244
private def packVolumeDataZips(files: List[File]): File =
225245
ZipIO.zipToTempFile(files)
226246

app/models/annotation/nml/NmlResults.scala

Lines changed: 5 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -75,6 +75,11 @@ object NmlResults extends LazyLogging {
7575
override def wkUrl: Option[String] = _wkUrl
7676

7777
override def withName(name: String): NmlParseResult = this.copy(fileName = name)
78+
79+
def fileNameWithoutExtension: String =
80+
if (fileName.toLowerCase.endsWith(".nml") || fileName.toLowerCase.endsWith(".zip"))
81+
fileName.dropRight(4)
82+
else fileName
7883
}
7984

8085
case class NmlParseFailure(fileName: String, error: String) extends NmlParseResult {

0 commit comments

Comments (0)