Skip to content

Commit d1d7166

Browse files
bodom0015 and lmarini authored
Ability to queue archive / unarchive for full datasets (#234)
* Ability to queue up archive / unarchive for full datasets * Add to CHANGELOG * Call incrementDownloads when unarchiving, add optional flag to incrementDownload to only update the date (don't increment the count) * fix: dateOnly should default to false to preserve current default behavior * Default value changed false -> true Co-authored-by: Luigi Marini <[email protected]>
1 parent e89417b commit d1d7166

14 files changed

+204
-16
lines changed

CHANGELOG.md

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -7,6 +7,9 @@ and this project adheres to [Semantic Versioning](http://semver.org/).
77
## Unreleased
88
- Added folder and folder id to api datasets files list [#34](https://github.com/clowder-framework/clowder/issues/34)
99

10+
### Added
11+
- Ability to queue archive / unarchive for full datasets.
12+
1013
### Fixed
1114
- RabbitMQ will not use connection if None exists
1215
- previews returns 404 if preview is not found

app/api/Datasets.scala

Lines changed: 45 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -3108,6 +3108,51 @@ class Datasets @Inject()(
31083108
}
31093109

31103110
}
3111+
3112+
/**
 * Recursively submit requests to archive the contents of the given dataset.
 *
 * Requires the ArchiveDataset permission on the target dataset. The optional
 * JSON body field "parameters" is forwarded to the archival extractor, with
 * the archive operation flag appended.
 *
 * @param id dataset to archive
 * @return Ok with {"status": "success"} when queued, NotFound if the dataset does not exist
 */
def queueArchival(id: UUID) = PermissionAction(Permission.ArchiveDataset, Some(ResourceRef(ResourceRef.dataset, id)))(parse.json) { implicit request =>
  // Caller-supplied parameters (if any), plus the flag selecting the archive operation.
  val requested = (request.body \ "parameters").asOpt[JsObject]
  val parameters = requested.getOrElse(JsObject(Seq.empty[(String, JsValue)])) + FileService.ARCHIVE_PARAMETER
  datasets.get(id) map { ds =>
    val host = Utils.baseUrl(request)
    // Fan out archival requests to every file and subfolder of the dataset.
    datasets.recursiveArchive(ds, host, parameters, request.apiKey, request.user)
    sinkService.logDatasetArchiveEvent(ds, request.user)
    Ok(toJson(Map("status" -> "success")))
  } getOrElse {
    Logger.error("Error getting dataset " + id)
    NotFound
  }
}
3133+
3134+
3135+
/**
 * Recursively submit requests to unarchive the contents of the given dataset.
 *
 * Requires the ArchiveDataset permission on the target dataset. The optional
 * JSON body field "parameters" is forwarded to the archival extractor, with
 * the unarchive operation flag appended.
 *
 * @param id dataset to unarchive
 * @return Ok with {"status": "success"} when queued, NotFound if the dataset does not exist
 */
def queueUnarchival(id: UUID) = PermissionAction(Permission.ArchiveDataset, Some(ResourceRef(ResourceRef.dataset, id)))(parse.json) { implicit request =>
  // Caller-supplied parameters (if any), plus the flag selecting the unarchive operation.
  val requested = (request.body \ "parameters").asOpt[JsObject]
  val parameters = requested.getOrElse(JsObject(Seq.empty[(String, JsValue)])) + FileService.UNARCHIVE_PARAMETER
  datasets.get(id) map { ds =>
    val host = Utils.baseUrl(request)
    // Fan out unarchival requests to every file and subfolder of the dataset.
    datasets.recursiveArchive(ds, host, parameters, request.apiKey, request.user)
    sinkService.logDatasetUnarchiveEvent(ds, request.user)
    Ok(toJson(Map("status" -> "success")))
  } getOrElse {
    Logger.error("Error getting dataset " + id)
    NotFound
  }
}
31113156
}
31123157

31133158
object ActivityFound extends Exception {}

app/api/Files.scala

Lines changed: 7 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1885,7 +1885,7 @@ class Files @Inject()(
18851885
}
18861886
case None => {
18871887
Logger.error("Error getting file " + id)
1888-
InternalServerError
1888+
NotFound
18891889
}
18901890
}
18911891
}
@@ -1896,11 +1896,14 @@ class Files @Inject()(
18961896
case Some(file) => {
18971897
files.setStatus(id, FileStatus.PROCESSED)
18981898
sinkService.logFileUnarchiveEvent(file, user)
1899+
1900+
// Only increment download date, to avoid immediate auto-archive
1901+
files.incrementDownloads(id, user, true)
18991902
Ok(toJson(Map("status" -> "success")))
19001903
}
19011904
case None => {
19021905
Logger.error("Error getting file " + id)
1903-
InternalServerError
1906+
NotFound
19041907
}
19051908
}
19061909
}
@@ -1916,7 +1919,7 @@ class Files @Inject()(
19161919
}
19171920
case None => {
19181921
Logger.error("Error getting file " + id)
1919-
InternalServerError
1922+
NotFound
19201923
}
19211924
}
19221925
}
@@ -1932,7 +1935,7 @@ class Files @Inject()(
19321935
}
19331936
case None => {
19341937
Logger.error("Error getting file " + id)
1935-
InternalServerError
1938+
NotFound
19361939
}
19371940
}
19381941
}

app/api/Permissions.scala

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -29,6 +29,7 @@ object Permission extends Enumeration {
2929
DeleteDataset,
3030
EditDataset,
3131
PublicDataset,
32+
ArchiveDataset,
3233
AddResourceToDataset,
3334
RemoveResourceFromDataset,
3435
ExecuteOnDataset,
@@ -120,7 +121,8 @@ object Permission extends Enumeration {
120121
CreateSensor, DeleteSensor, AddGeoStream, DeleteGeoStream, AddDatapoints,
121122
CreateRelation, ViewRelation, DeleteRelation,
122123
CreateVocabulary, DeleteVocabulary, EditVocabulary,
123-
CreateVocabularyTerm, DeleteVocabularyTerm, EditVocabularyTerm
124+
CreateVocabularyTerm, DeleteVocabularyTerm, EditVocabularyTerm,
125+
ArchiveFile, ArchiveDataset
124126
)
125127

126128
lazy val files: FileService = DI.injector.getInstance(classOf[FileService])

app/controllers/Application.scala

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -347,6 +347,8 @@ class Application @Inject() (files: FileService, collections: CollectionService,
347347
api.routes.javascript.Datasets.users,
348348
api.routes.javascript.Datasets.restoreDataset,
349349
api.routes.javascript.Datasets.emptyTrash,
350+
api.routes.javascript.Datasets.queueArchival,
351+
api.routes.javascript.Datasets.queueUnarchival,
350352
api.routes.javascript.Extractions.submitFilesToExtractor,
351353
api.routes.javascript.Files.download,
352354
api.routes.javascript.Files.archive,

app/services/DatasetService.scala

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,10 +1,10 @@
11
package services
22

33
import java.util.Date
4-
54
import api.Permission.Permission
65
import com.mongodb.casbah.Imports._
76
import models.{File, _}
7+
import play.api.libs.json.JsObject
88

99
/**
1010
* Generic dataset service.
@@ -386,4 +386,6 @@ trait DatasetService {
386386
def getIterator(space: Option[String], since: Option[String], until: Option[String]): Iterator[Dataset]
387387

388388
def getTrashedIds(): List[UUID]
389+
390+
def recursiveArchive(dataset: Dataset, host: String, parameters: JsObject, apiKey: Option[String], user: Option[User])
389391
}

app/services/EventSinkService.scala

Lines changed: 28 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -289,6 +289,34 @@ class EventSinkService {
289289
"size" -> file.length
290290
))
291291
}
292+
293+
/**
 * Record an event-sink entry for a dataset archival request.
 *
 * Fix: the previous implementation called `archiver.get`, which throws
 * NoSuchElementException when no user is attached to the request. User
 * fields are now included only when an archiver is present.
 *
 * @param ds       the dataset being archived
 * @param archiver the user who requested the archival, if known
 */
def logDatasetArchiveEvent(ds: Dataset, archiver: Option[User]) = {
  val base = Json.obj(
    "category" -> "archive",
    "type" -> "dataset",
    "resource_id" -> ds.id,
    "resource_name" -> ds.name,
    "author_id" -> ds.author.id,
    "author_name" -> ds.author.fullName,
    // NOTE(review): "size" counts direct entries only, not recursive contents.
    "size" -> (ds.files.length + ds.folders.length)
  )
  // Append user attribution only when a user is available (anonymous/API
  // requests may carry no user); previously this crashed on None.
  logEvent(archiver.fold(base) { u =>
    base ++ Json.obj(
      "user_id" -> u.id,
      "user_name" -> u.getMiniUser.fullName
    )
  })
}
306+
307+
/**
 * Record an event-sink entry for a dataset unarchival request.
 *
 * Fix: the previous implementation called `unarchiver.get`, which throws
 * NoSuchElementException when no user is attached to the request. User
 * fields are now included only when an unarchiver is present.
 *
 * @param ds         the dataset being unarchived
 * @param unarchiver the user who requested the unarchival, if known
 */
def logDatasetUnarchiveEvent(ds: Dataset, unarchiver: Option[User]) = {
  val base = Json.obj(
    "category" -> "unarchive",
    "type" -> "dataset",
    "resource_id" -> ds.id,
    "resource_name" -> ds.name,
    "author_id" -> ds.author.id,
    "author_name" -> ds.author.fullName,
    // NOTE(review): "size" counts direct entries only, not recursive contents.
    "size" -> (ds.files.length + ds.folders.length)
  )
  // Append user attribution only when a user is available (anonymous/API
  // requests may carry no user); previously this crashed on None.
  logEvent(unarchiver.fold(base) { u =>
    base ++ Json.obj(
      "user_id" -> u.id,
      "user_name" -> u.getMiniUser.fullName
    )
  })
}
292320
}
293321

294322
//case class EventSinkMessage(created: Long, category: String, metadata: JsValue)

app/services/FileService.scala

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -244,7 +244,7 @@ trait FileService {
244244

245245
def incrementViews(id: UUID, user: Option[User]): (Int, Date)
246246

247-
def incrementDownloads(id: UUID, user: Option[User])
247+
def incrementDownloads(id: UUID, user: Option[User], dateOnly: Boolean = false)
248248

249249
def getIterator(space: Option[String], since: Option[String], until: Option[String]): Iterator[File]
250250

app/services/FolderService.scala

Lines changed: 7 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
package services
22

3-
import models.{DBResult, TypedID, UUID, Folder}
3+
import models.{DBResult, Folder, TypedID, UUID, User}
4+
import play.api.libs.json.JsObject
45
/**
56
* Generic Folder Service
67
*/
@@ -96,4 +97,9 @@ trait FolderService {
9697
* Get all the folders in a list of parent datasettIds. It helps to identify the files that a user has access to.
9798
*/
9899
def findByParentDatasetIds(parentIds: List[UUID]): List[Folder]
100+
101+
/**
102+
* Archive all files and subfolders within this folder.
103+
*/
104+
def recursiveArchive(folder: Folder, host: String, parameters: JsObject, apiKey: Option[String], user: Option[User]): Unit
99105
}

app/services/mongodb/MongoDBDatasetService.scala

Lines changed: 19 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -3,11 +3,10 @@ package services.mongodb
33
import java.io._
44
import java.text.SimpleDateFormat
55
import java.util.{ArrayList, Date}
6-
76
import javax.inject.{Inject, Singleton}
87
import Transformation.LidoToCidocConvertion
98
import util.{Formatters, Parsers}
10-
import api.Permission
9+
import api.{Permission, UserRequest}
1110
import api.Permission.Permission
1211
import com.mongodb.DBObject
1312
import com.mongodb.casbah.Imports._
@@ -16,6 +15,7 @@ import com.mongodb.casbah.commons.MongoDBList
1615
import com.mongodb.casbah.commons.MongoDBObject
1716
import com.mongodb.util.JSON
1817
import com.novus.salat.dao.{ModelCompanion, SalatDAO}
18+
import controllers.Utils
1919
import jsonutils.JsonUtil
2020
import models.{File, _}
2121
import org.apache.commons.io.FileUtils
@@ -24,7 +24,8 @@ import org.json.JSONObject
2424
import play.api.Logger
2525
import play.api.Play._
2626
import play.api.libs.json.Json._
27-
import play.api.libs.json.{JsArray, JsValue, Json}
27+
import play.api.libs.json.{JsArray, JsObject, JsValue, Json}
28+
import play.api.mvc.AnyContent
2829
import services._
2930
import services.mongodb.MongoContext.context
3031

@@ -1658,6 +1659,21 @@ class MongoDBDatasetService @Inject() (
16581659
})
16591660
trashedIds.toList
16601661
}
1662+
1663+
/**
 * Recursively submit requests to archive or unarchive the contents of the
 * given dataset: one archival operation per direct file, then a recursive
 * descent into each subfolder.
 *
 * NOTE: "parameters" includes "operation", which supports both archiving
 * and unarchiving.
 *
 * @param ds         dataset whose contents are processed
 * @param host       base URL of this Clowder instance
 * @param parameters operation parameters forwarded to the archival service
 * @param apiKey     API key used for the submitted requests, if any
 * @param user       user on whose behalf the requests are made, if any
 */
def recursiveArchive(ds: Dataset, host: String, parameters: JsObject, apiKey: Option[String], user: Option[User]) = {
  // Submit one archival operation per file directly attached to the dataset.
  for (fileId <- ds.files) {
    files.get(fileId) match {
      case Some(file) => files.submitArchivalOperation(file, fileId, host, parameters, apiKey, user)
      case None => Logger.error("Error getting file " + fileId)
    }
  }
  // Recurse into each top-level folder; FolderService handles nesting.
  for (folderId <- ds.folders) {
    folders.get(folderId) match {
      case Some(folder) => folders.recursiveArchive(folder, host, parameters, apiKey, user)
      case None => Logger.error("Error getting folder " + folderId)
    }
  }
}
16611677
}
16621678

16631679
object Dataset extends ModelCompanion[Dataset, ObjectId] {

0 commit comments

Comments (0)