|
1 | 1 | package api |
2 | 2 |
|
3 | | -import java.io._ |
4 | | -import java.io.{File => JFile} |
5 | | -import java.net.URL |
6 | | -import java.security.{DigestInputStream, MessageDigest} |
7 | | -import java.text.SimpleDateFormat |
8 | | -import java.util.{Calendar, Date} |
9 | | - |
| 3 | +import _root_.util._ |
10 | 4 | import api.Permission.Permission |
11 | | -import java.util.zip._ |
12 | | - |
13 | | -import javax.inject.{Inject, Singleton} |
| 5 | +import controllers.Utils.https |
14 | 6 | import controllers.{Previewers, Utils} |
15 | 7 | import jsonutils.JsonUtil |
16 | 8 | import models._ |
17 | 9 | import org.apache.commons.codec.binary.Hex |
18 | 10 | import org.json.JSONObject |
19 | 11 | import play.api.Logger |
20 | | -import play.api.Play.{configuration, current, routes} |
| 12 | +import play.api.Play.{configuration, current} |
21 | 13 | import play.api.i18n.Messages |
| 14 | +import play.api.libs.Files |
22 | 15 | import play.api.libs.concurrent.Execution.Implicits._ |
23 | 16 | import play.api.libs.iteratee.Enumerator |
24 | | -import play.api.libs.json._ |
25 | 17 | import play.api.libs.json.Json._ |
26 | | -import play.api.mvc.{Action, AnyContent, MultipartFormData, SimpleResult} |
27 | | -import services._ |
28 | | -import _root_.util._ |
29 | | -import controllers.Utils.https |
30 | | -import org.json.simple.{JSONArray, JSONObject => SimpleJSONObject} |
31 | | -import org.json.simple.parser.JSONParser |
32 | | -import play.api.libs.Files |
33 | | -import play.api.libs.Files.TemporaryFile |
| 18 | +import play.api.libs.json._ |
| 19 | +import play.api.mvc.{AnyContent, MultipartFormData, SimpleResult} |
34 | 20 | import scalax.file.Path.createTempFile |
| 21 | +import services._ |
35 | 22 |
|
36 | | -import scala.concurrent.{ExecutionContext, Future} |
| 23 | +import java.io._ |
| 24 | +import java.net.URL |
| 25 | +import java.security.{DigestInputStream, MessageDigest} |
| 26 | +import java.text.SimpleDateFormat |
| 27 | +import java.util.zip._ |
| 28 | +import java.util.{Calendar, Date} |
| 29 | +import javax.inject.{Inject, Singleton} |
37 | 30 | import scala.collection.mutable.{ListBuffer, Map => MutaMap} |
| 31 | +import scala.concurrent.{ExecutionContext, Future} |
38 | 32 |
|
39 | 33 | /** |
40 | 34 | * Dataset API. |
@@ -933,6 +927,7 @@ class Datasets @Inject()( |
933 | 927 | } |
934 | 928 | } |
935 | 929 |
|
| 930 | + |
936 | 931 | def getMetadataDefinitions(id: UUID, currentSpace: Option[String]) = PermissionAction(Permission.AddMetadata, Some(ResourceRef(ResourceRef.dataset, id))) { implicit request => |
937 | 932 | implicit val user = request.user |
938 | 933 | datasets.get(id) match { |
@@ -1134,47 +1129,84 @@ class Datasets @Inject()( |
1134 | 1129 | private def getFilesWithinFolders(id: UUID, serveradmin: Boolean=false, max: Int = -1): List[JsValue] = { |
1135 | 1130 | val output = new ListBuffer[JsValue]() |
1136 | 1131 | var resultCount = 0 |
| 1132 | + var current_folder : Option[Folder] = None |
1137 | 1133 | datasets.get(id) match { |
1138 | 1134 | case Some(dataset) => { |
1139 | | - folders.findByParentDatasetId(id).map { folder => |
| 1135 | + folders.findByParentDatasetId(id).foreach { folder => |
| 1136 | + current_folder = Some(folder) |
1140 | 1137 | files.get(folder.files).found.foreach(file => { |
1141 | 1138 | if (max < 0 || resultCount < max) { |
1142 | | - output += jsonFile(file, serveradmin) |
| 1139 | + output += jsonFile(file, serveradmin, Some(folder)) |
1143 | 1140 | resultCount += 1 |
1144 | 1141 | } |
1145 | 1142 | }) |
| 1143 | + print("done with folder") |
1146 | 1144 | } |
1147 | 1145 | } |
1148 | 1146 | case None => Logger.error(s"Error getting dataset $id") |
1149 | 1147 | } |
1150 | 1148 | output.toList |
1151 | 1149 | } |
1152 | 1150 |
|
1153 | | - def jsonFile(file: models.File, serverAdmin: Boolean = false): JsValue = { |
1154 | | - val defaultMap = Map( |
1155 | | - "id" -> file.id.toString, |
1156 | | - "filename" -> file.filename, |
1157 | | - "contentType" -> file.contentType, |
1158 | | - "date-created" -> file.uploadDate.toString(), |
1159 | | - "size" -> file.length.toString) |
1160 | 1151 |
|
1161 | | - // Only include filepath if using DiskByte storage and user is serverAdmin |
1162 | | - val jsonMap = file.loader match { |
1163 | | - case "services.filesystem.DiskByteStorageService" => { |
1164 | | - if (serverAdmin) |
1165 | | - Map( |
1166 | | - "id" -> file.id.toString, |
1167 | | - "filename" -> file.filename, |
1168 | | - "filepath" -> file.loader_id, |
1169 | | - "contentType" -> file.contentType, |
1170 | | - "date-created" -> file.uploadDate.toString(), |
1171 | | - "size" -> file.length.toString) |
1172 | | - else |
1173 | | - defaultMap |
| 1152 | + def jsonFile(file: models.File, serverAdmin: Boolean = false, folder : Option[Folder] = None): JsValue = { |
| 1153 | + folder match { |
| 1154 | + case Some(f) => { |
| 1155 | + val folderMap : JsValue = Json.obj("id"->f.id, "name"->f.name) |
| 1156 | + val defaultMap : JsValue = Json.obj( |
| 1157 | + "id" -> file.id.toString, |
| 1158 | + "filename" -> file.filename, |
| 1159 | + "contentType" -> file.contentType, |
| 1160 | + "date-created" -> file.uploadDate.toString(), |
| 1161 | + "folders"->folderMap, |
| 1162 | + "size" -> file.length.toString) |
| 1163 | + |
| 1164 | + // Only include filepath if using DiskByte storage and user is serverAdmin |
| 1165 | + val jsonMap = file.loader match { |
| 1166 | + case "services.filesystem.DiskByteStorageService" => { |
| 1167 | + if (serverAdmin) |
| 1168 | + Json.obj( |
| 1169 | + "id" -> file.id.toString, |
| 1170 | + "filename" -> file.filename, |
| 1171 | + "filepath" -> file.loader_id, |
| 1172 | + "contentType" -> file.contentType, |
| 1173 | + "date-created" -> file.uploadDate.toString(), |
| 1174 | + "folders"->folderMap, |
| 1175 | + "size" -> file.length.toString) |
| 1176 | + else |
| 1177 | + defaultMap |
| 1178 | + } |
| 1179 | + case _ => defaultMap |
| 1180 | + } |
| 1181 | + toJson(jsonMap) |
1174 | 1182 | } |
1175 | | - case _ => defaultMap |
| 1183 | + case None => { |
| 1184 | + val defaultMap = Map( |
| 1185 | + "id" -> file.id.toString, |
| 1186 | + "filename" -> file.filename, |
| 1187 | + "contentType" -> file.contentType, |
| 1188 | + "date-created" -> file.uploadDate.toString(), |
| 1189 | + "size" -> file.length.toString) |
| 1190 | + |
| 1191 | + // Only include filepath if using DiskByte storage and user is serverAdmin |
| 1192 | + val jsonMap = file.loader match { |
| 1193 | + case "services.filesystem.DiskByteStorageService" => { |
| 1194 | + if (serverAdmin) |
| 1195 | + Map( |
| 1196 | + "id" -> file.id.toString, |
| 1197 | + "filename" -> file.filename, |
| 1198 | + "filepath" -> file.loader_id, |
| 1199 | + "contentType" -> file.contentType, |
| 1200 | + "date-created" -> file.uploadDate.toString(), |
| 1201 | + "size" -> file.length.toString) |
| 1202 | + else |
| 1203 | + defaultMap |
| 1204 | + } |
| 1205 | + case _ => defaultMap |
| 1206 | + } |
| 1207 | + toJson(jsonMap) |
| 1208 | + } |
1176 | 1209 | } |
1177 | | - toJson(jsonMap) |
1178 | 1210 | } |
1179 | 1211 |
|
1180 | 1212 | //Update Dataset Information code starts |
@@ -3076,6 +3108,51 @@ class Datasets @Inject()( |
3076 | 3108 | } |
3077 | 3109 |
|
3078 | 3110 | } |
| 3111 | + |
| 3112 | + /** |
| 3113 | + * Recursively submit requests to archive the contents of the given dataset |
| 3114 | + * @param id dataset to archive |
| 3115 | + * @return |
| 3116 | + */ |
| 3117 | + def queueArchival(id: UUID) = PermissionAction(Permission.ArchiveDataset, Some(ResourceRef(ResourceRef.dataset, id)))(parse.json) { implicit request => |
| 3118 | + val reqParams = (request.body \ "parameters").asOpt[JsObject].getOrElse(JsObject(Seq.empty[(String, JsValue)])) |
| 3119 | + val parameters = reqParams + FileService.ARCHIVE_PARAMETER |
| 3120 | + datasets.get(id) match { |
| 3121 | + case Some(ds) => { |
| 3122 | + val host = Utils.baseUrl(request) |
| 3123 | + datasets.recursiveArchive(ds, host, parameters, request.apiKey, request.user) |
| 3124 | + sinkService.logDatasetArchiveEvent(ds, request.user) |
| 3125 | + Ok(toJson(Map("status" -> "success"))) |
| 3126 | + } |
| 3127 | + case None => { |
| 3128 | + Logger.error("Error getting dataset " + id) |
| 3129 | + NotFound |
| 3130 | + } |
| 3131 | + } |
| 3132 | + } |
| 3133 | + |
| 3134 | + |
| 3135 | + /** |
| 3136 | + * Recursively submit requests to unarchive the contents of the given dataset |
| 3137 | + * @param id dataset to unarchive |
| 3138 | + * @return |
| 3139 | + */ |
| 3140 | + def queueUnarchival(id: UUID) = PermissionAction(Permission.ArchiveDataset, Some(ResourceRef(ResourceRef.dataset, id)))(parse.json) { implicit request => |
| 3141 | + val reqParams = (request.body \ "parameters").asOpt[JsObject].getOrElse(JsObject(Seq.empty[(String, JsValue)])) |
| 3142 | + val parameters = reqParams + FileService.UNARCHIVE_PARAMETER |
| 3143 | + datasets.get(id) match { |
| 3144 | + case Some(ds) => { |
| 3145 | + val host = Utils.baseUrl(request) |
| 3146 | + datasets.recursiveArchive(ds, host, parameters, request.apiKey, request.user) |
| 3147 | + sinkService.logDatasetUnarchiveEvent(ds, request.user) |
| 3148 | + Ok(toJson(Map("status" -> "success"))) |
| 3149 | + } |
| 3150 | + case None => { |
| 3151 | + Logger.error("Error getting dataset " + id) |
| 3152 | + NotFound |
| 3153 | + } |
| 3154 | + } |
| 3155 | + } |
3079 | 3156 | } |
3080 | 3157 |
|
/** Singleton exception — NOTE(review): appears to act as a control-flow sentinel thrown to
  * short-circuit a search; confirm against its throw/catch sites elsewhere in the file. */
object ActivityFound extends Exception
0 commit comments