Skip to content

Commit f07364a

Browse files
Merge branch 'master' into toolbar-redesign
2 parents a2dd0a9 + 8763cae commit f07364a

27 files changed

+483
-343
lines changed

app/controllers/AiModelController.scala

Lines changed: 78 additions & 44 deletions
Original file line numberDiff line numberDiff line change
@@ -17,6 +17,7 @@ import com.scalableminds.util.objectid.ObjectId
1717
import javax.inject.Inject
1818
import scala.concurrent.ExecutionContext
1919
import com.scalableminds.util.time.Instant
20+
import com.scalableminds.webknossos.datastore.rpc.RPC
2021
import com.scalableminds.webknossos.datastore.helpers.UPath
2122
import models.aimodels.AiModelCategory.AiModelCategory
2223
import models.organization.{OrganizationDAO, OrganizationService}
@@ -52,17 +53,21 @@ object RunInstanceModelTrainingParameters {
5253
implicit val jsonFormat: OFormat[RunInstanceModelTrainingParameters] = Json.format[RunInstanceModelTrainingParameters]
5354
}
5455

55-
case class RunInferenceParameters(annotationId: Option[ObjectId],
56-
aiModelId: ObjectId,
57-
datasetDirectoryName: String,
58-
organizationId: String,
56+
case class RunInferenceParameters(datasetId: ObjectId,
57+
aiModelId: Option[ObjectId],
5958
colorLayerName: String,
60-
boundingBox: String,
61-
newDatasetName: String,
59+
boundingBox: String, // Always in mag1
60+
annotationId: Option[ObjectId],
6261
maskAnnotationLayerName: Option[String],
62+
newDatasetName: String,
6363
workflowYaml: Option[String],
6464
invertColorLayer: Option[Boolean],
65-
seedGeneratorDistanceThreshold: Option[Double])
65+
seedGeneratorDistanceThreshold: Option[Double],
66+
doSplitMergerEvaluation: Boolean = false,
67+
evalUseSparseTracing: Option[Boolean],
68+
evalMaxEdgeLength: Option[Double],
69+
evalSparseTubeThresholdNm: Option[Double],
70+
evalMinMergerPathLengthNm: Option[Double])
6671

6772
object RunInferenceParameters {
6873
implicit val jsonFormat: OFormat[RunInferenceParameters] = Json.format[RunInferenceParameters]
@@ -101,18 +106,26 @@ class AiModelController @Inject()(
101106
jobService: JobService,
102107
datasetDAO: DatasetDAO,
103108
dataStoreDAO: DataStoreDAO,
109+
rpc: RPC,
104110
uploadToPathsService: UploadToPathsService)(implicit ec: ExecutionContext, bodyParsers: PlayBodyParsers)
105111
extends Controller
106112
with FoxImplicits {
107113

108114
def readAiModelInfo(aiModelId: ObjectId): Action[AnyContent] = sil.SecuredAction.async { implicit request =>
109-
{
110-
for {
111-
_ <- organizationService.assertIsSuperUserOrOrganizationHasAiPlan(request.identity)
112-
aiModel <- aiModelDAO.findOne(aiModelId) ?~> "aiModel.notFound" ~> NOT_FOUND
113-
jsResult <- aiModelService.publicWrites(aiModel, request.identity)
114-
} yield Ok(jsResult)
115-
}
115+
for {
116+
_ <- organizationService.assertIsSuperUserOrOrganizationHasAiPlan(request.identity)
117+
aiModel <- aiModelDAO.findOne(aiModelId) ?~> "aiModel.notFound" ~> NOT_FOUND
118+
jsResult <- aiModelService.publicWrites(aiModel, request.identity)
119+
} yield Ok(jsResult)
120+
}
121+
122+
def aiModelVoxelSize(aiModelId: ObjectId): Action[AnyContent] = sil.SecuredAction.async { implicit request =>
123+
for {
124+
_ <- organizationService.assertIsSuperUserOrOrganizationHasAiPlan(request.identity)
125+
aiModel <- aiModelDAO.findOne(aiModelId) ?~> "aiModel.notFound" ~> NOT_FOUND
126+
dataStore <- dataStoreDAO.findOneByName(aiModel._dataStore)
127+
voxelSize <- aiModelService.findModelVoxelSize(Some(aiModel), usePretrainedNeuronModel = false, dataStore)
128+
} yield Ok(Json.toJson(voxelSize))
116129
}
117130

118131
def readAiInferenceInfo(aiInferenceId: ObjectId): Action[AnyContent] = sil.SecuredAction.async { implicit request =>
@@ -260,36 +273,45 @@ class AiModelController @Inject()(
260273
} yield Ok(newAiModelJs)
261274
}
262275

263-
def runCustomInstanceModelInference: Action[RunInferenceParameters] =
276+
// If no model is selected, the pretrained nuclei model is used
277+
def runInstanceModelInference: Action[RunInferenceParameters] =
264278
sil.SecuredAction.async(validateJson[RunInferenceParameters]) { implicit request =>
265279
for {
266-
organization <- organizationDAO.findOne(request.body.organizationId)(GlobalAccessContext) ?~> Messages(
267-
"organization.notFound",
268-
request.body.organizationId)
269-
_ <- Fox.fromBool(request.identity._organization == organization._id) ?~> "job.runInference.notAllowed.organization" ~> FORBIDDEN
270-
dataset <- datasetDAO.findOneByDirectoryNameAndOrganization(request.body.datasetDirectoryName, organization._id)
280+
dataset <- datasetDAO.findOne(request.body.datasetId)
281+
_ <- Fox.fromBool(request.identity._organization == dataset._organization) ?~> "job.runInference.notAllowed.organization" ~> FORBIDDEN
282+
aiModelOpt <- Fox.runOptional(request.body.aiModelId)(aiModelDAO.findOne) ?~> "aiModel.notFound"
283+
_ <- Fox.runOptional(aiModelOpt) { aiModel =>
284+
Fox.fromBool(aiModel._dataStore == dataset._dataStore) ?~> "aiModel.dataStoreMismatch"
285+
}
286+
(dataSource, layer) <- datasetService.getDataSourceAndLayerFor(dataset, request.body.colorLayerName)
271287
dataStore <- dataStoreDAO.findOneByName(dataset._dataStore) ?~> "dataStore.notFound"
272-
aiModel <- aiModelDAO.findOne(request.body.aiModelId) ?~> "aiModel.notFound"
273288
_ <- datasetService.assertValidDatasetName(request.body.newDatasetName)
274289
jobCommand = JobCommand.infer_instances
275-
boundingBox <- BoundingBox.fromLiteral(request.body.boundingBox).toFox
290+
mag1BoundingBox <- BoundingBox.fromLiteral(request.body.boundingBox).toFox
276291
commandArgs = Json.obj(
277292
"dataset_id" -> dataset._id,
278-
"organization_id" -> organization._id,
293+
"organization_id" -> dataset._organization,
279294
"dataset_name" -> dataset.name,
280295
"layer_name" -> request.body.colorLayerName,
281-
"bbox" -> boundingBox.toLiteral,
296+
"bbox" -> mag1BoundingBox.toLiteral,
282297
"model_id" -> request.body.aiModelId,
283-
"model_organization_id" -> aiModel._organization,
284-
"dataset_directory_name" -> request.body.datasetDirectoryName,
298+
"model_organization_id" -> aiModelOpt.map(_._organization),
299+
"dataset_directory_name" -> dataset.directoryName,
285300
"new_dataset_name" -> request.body.newDatasetName,
286301
"custom_workflow_provided_by_user" -> request.body.workflowYaml,
287-
"seed_generator_distance_threshold" -> request.body.seedGeneratorDistanceThreshold
302+
"invert_color_layer" -> request.body.invertColorLayer,
303+
"seed_generator_distance_threshold" -> request.body.seedGeneratorDistanceThreshold,
288304
)
289305
creditTransactionComment = s"AI custom instance segmentation with model ${request.body.aiModelId} for dataset ${dataset.name}"
306+
targetMagBoundingBox <- aiModelService.inferenceBBoxToTargetMag(mag1BoundingBox,
307+
layer,
308+
dataSource.scale,
309+
aiModelOpt,
310+
usePretrainedNeuronModel = false,
311+
dataStore)
290312
newInferenceJob <- jobService.submitPaidJob(jobCommand,
291313
commandArgs,
292-
boundingBox,
314+
targetMagBoundingBox,
293315
creditTransactionComment,
294316
request.identity,
295317
dataStore.name) ?~> "job.couldNotRunInferWithModel"
@@ -299,7 +321,7 @@ class AiModelController @Inject()(
299321
_aiModel = request.body.aiModelId,
300322
_newDataset = None,
301323
_annotation = request.body.annotationId,
302-
boundingBox = boundingBox,
324+
boundingBox = mag1BoundingBox,
303325
_inferenceJob = newInferenceJob._id,
304326
newSegmentationLayerName = "segmentation",
305327
maskAnnotationLayerName = request.body.maskAnnotationLayerName
@@ -309,36 +331,48 @@ class AiModelController @Inject()(
309331
} yield Ok(newAiModelJs)
310332
}
311333

312-
def runCustomNeuronInference: Action[RunInferenceParameters] =
334+
def runNeuronModelInference: Action[RunInferenceParameters] =
313335
sil.SecuredAction.async(validateJson[RunInferenceParameters]) { implicit request =>
314336
for {
315-
organization <- organizationDAO.findOne(request.body.organizationId)(GlobalAccessContext) ?~> Messages(
316-
"organization.notFound",
317-
request.body.organizationId)
318-
_ <- Fox.fromBool(request.identity._organization == organization._id) ?~> "job.runInference.notAllowed.organization" ~> FORBIDDEN
319-
dataset <- datasetDAO.findOneByDirectoryNameAndOrganization(request.body.datasetDirectoryName, organization._id)
337+
dataset <- datasetDAO.findOne(request.body.datasetId)
338+
_ <- Fox.fromBool(request.identity._organization == dataset._organization) ?~> "job.runInference.notAllowed.organization" ~> FORBIDDEN
339+
aiModelOpt <- Fox.runOptional(request.body.aiModelId)(aiModelDAO.findOne) ?~> "aiModel.notFound"
340+
_ <- Fox.runOptional(aiModelOpt) { aiModel =>
341+
Fox.fromBool(aiModel._dataStore == dataset._dataStore) ?~> "aiModel.dataStoreMismatch"
342+
}
343+
(dataSource, layer) <- datasetService.getDataSourceAndLayerFor(dataset, request.body.colorLayerName)
320344
dataStore <- dataStoreDAO.findOneByName(dataset._dataStore) ?~> "dataStore.notFound"
321-
aiModel <- aiModelDAO.findOne(request.body.aiModelId) ?~> "aiModel.notFound"
322345
_ <- datasetService.assertValidDatasetName(request.body.newDatasetName)
323346
jobCommand = JobCommand.infer_neurons
324-
boundingBox <- BoundingBox.fromLiteral(request.body.boundingBox).toFox
347+
mag1BoundingBox <- BoundingBox.fromLiteral(request.body.boundingBox).toFox
348+
targetMagBoundingBox <- aiModelService.inferenceBBoxToTargetMag(mag1BoundingBox,
349+
layer,
350+
dataSource.scale,
351+
aiModelOpt,
352+
usePretrainedNeuronModel = aiModelOpt.isEmpty,
353+
dataStore)
325354
commandArgs = Json.obj(
326355
"dataset_id" -> dataset._id,
327-
"organization_id" -> organization._id,
356+
"organization_id" -> dataset._organization,
328357
"dataset_name" -> dataset.name,
329358
"layer_name" -> request.body.colorLayerName,
330-
"bbox" -> boundingBox.toLiteral,
359+
"bbox" -> mag1BoundingBox.toLiteral,
331360
"model_id" -> request.body.aiModelId,
332-
"model_organization_id" -> aiModel._organization,
333-
"dataset_directory_name" -> request.body.datasetDirectoryName,
361+
"model_organization_id" -> aiModelOpt.map(_._organization),
362+
"dataset_directory_name" -> dataset.directoryName,
334363
"new_dataset_name" -> request.body.newDatasetName,
335364
"custom_workflow_provided_by_user" -> request.body.workflowYaml,
336-
"invert_color_layer" -> request.body.invertColorLayer
365+
"invert_color_layer" -> request.body.invertColorLayer,
366+
"do_split_merger_evaluation" -> request.body.doSplitMergerEvaluation,
367+
"eval_use_sparse_tracing" -> request.body.evalUseSparseTracing,
368+
"eval_max_edge_length" -> request.body.evalMaxEdgeLength,
369+
"eval_sparse_tube_threshold_nm" -> request.body.evalSparseTubeThresholdNm,
370+
"eval_min_merger_path_length_nm" -> request.body.evalMinMergerPathLengthNm,
337371
)
338372
creditTransactionComment = s"AI custom neuron segmentation with model ${request.body.aiModelId} for dataset ${dataset.name}"
339373
newInferenceJob <- jobService.submitPaidJob(jobCommand,
340374
commandArgs,
341-
boundingBox,
375+
targetMagBoundingBox,
342376
creditTransactionComment,
343377
request.identity,
344378
dataStore.name) ?~> "job.couldNotRunInferWithModel"
@@ -348,7 +382,7 @@ class AiModelController @Inject()(
348382
_aiModel = request.body.aiModelId,
349383
_newDataset = None,
350384
_annotation = request.body.annotationId,
351-
boundingBox = boundingBox,
385+
boundingBox = mag1BoundingBox,
352386
_inferenceJob = newInferenceJob._id,
353387
newSegmentationLayerName = "segmentation",
354388
maskAnnotationLayerName = request.body.maskAnnotationLayerName

0 commit comments

Comments
 (0)