
Commit a8099b2

Merge remote-tracking branch 'ncsa/develop' into docker-minio
2 parents: 103e0e6 + 6548685

20 files changed: 468 additions, 211 deletions


CHANGELOG.md

Lines changed: 20 additions & 1 deletion
@@ -11,17 +11,36 @@ of these changes a reindex of Elasticsearch is required. This can be started by
 ### Added
 - Can now switch storage provider to for example S3 storage. See also env.example for configuration options.
 - Added ability to delete extractor, both from api and GUI.
+- Script to test extractions through the API.
   [CATS-1044](https://opensource.ncsa.illinois.edu/jira/browse/CATS-1044)
 - API add tags endpoint now returns the added tags.
   [CATS-1053](https://opensource.ncsa.illinois.edu/jira/browse/CATS-1053)
 - Ability to search by creator name and email address for all resources.
 - List Spaces/Datasets/Collections created by each user on their User Profile.
   [CATS-1056](https://opensource.ncsa.illinois.edu/jira/browse/CATS-1056)
-
+- Allow user to easily flip through the files in a dataset.
+  [CATS-1058](https://opensource.ncsa.illinois.edu/jira/browse/CATS-1058)
+- Ability to filter the files and folders in a dataset when sorting is enabled.
+- Visualize existing relations between datasets on the dataset page. This can be extended to other resource types.
+  [CATS-1000](https://opensource.ncsa.illinois.edu/jira/browse/CATS-1000)
+
 ### Fixed
 - When adding tags to a section of an image, show the new tag without having to refresh the page.
   [CATS-1053](https://opensource.ncsa.illinois.edu/jira/browse/CATS-1053)

+### Changed
+- Removed buttons to remove datasets from spaces and collections from certain pages where it was creating confusion for
+  the user. Users thought it was a dataset delete button instead. Also moved Remove button for subcollections to right
+  side of page to be consistent with other widgets.
+  [CATS-1055](https://opensource.ncsa.illinois.edu/jira/browse/CATS-1055)
+
+## 1.8.4 - 2020-05-15
+**_Warning:_ This update modifies how information is stored in Elasticsearch for text based searching. To take advantage
+of these changes a reindex of Elasticsearch is required. This can be started by an admin either from GUI or through the API.**
+
+### Fixed
+- Fixed a bug related to improper indexing of files in nested subfolders, which could also affect searching by parent dataset.
+
 ## 1.8.3 - 2020-04-28
 **_Warning:_ This update modifies how information is stored in Elasticsearch for text based searching. To take advantage
 of these changes a reindex of Elasticsearch is required. This can be started by an admin either from GUI or through the API.**

app/controllers/Datasets.scala

Lines changed: 49 additions & 49 deletions
@@ -483,8 +483,7 @@ class Datasets @Inject() (
   /**
    * Dataset.
    */
-  def dataset(id: UUID, currentSpace: Option[String], limit: Int) = PermissionAction(Permission.ViewDataset, Some(ResourceRef(ResourceRef.dataset, id))) { implicit request =>
-
+  def dataset(id: UUID, currentSpace: Option[String], limit: Int, filter: Option[String]) = PermissionAction(Permission.ViewDataset, Some(ResourceRef(ResourceRef.dataset, id))) { implicit request =>
     implicit val user = request.user
     Previewers.findDatasetPreviewers().foreach(p => Logger.debug("Previewer found " + p.id))
     datasets.get(id) match {
@@ -612,11 +611,16 @@ class Datasets @Inject() (
         // increment view count for dataset
         val view_data = datasets.incrementViews(id, user)

+        // related datasets
+        val relatedThings = relations.findRelationships(dataset.id.stringify, ResourceType.dataset, ResourceType.dataset)
+        val relatedDatasets = for(r <- relatedThings) yield NodeDataset(datasets.get(UUID(r.target.id)).get, r.rdfType)
+
+
         // view_data is passed as tuple in dataset case only, because template is at limit of 22 parameters
         Ok(views.html.dataset(dataset, commentsByDataset, filteredPreviewers.toList, m,
           decodedCollectionsInside.toList, sensors, Some(decodedSpaces_canRemove), toPublish, curPubObjects,
           currentSpace, limit, showDownload, accessData, canAddDatasetToCollection,
-          stagingAreaDefined, view_data, extractionGroups))
+          stagingAreaDefined, view_data, extractionGroups, relatedDatasets, filter))
       }
       case None => {
         Logger.error("Error getting dataset" + id)
@@ -625,78 +629,74 @@ class Datasets @Inject() (
     }
   }

-  def getUpdatedFilesAndFolders(datasetId: UUID, limit: Int, pageIndex: Int, space: Option[String]) = PermissionAction(Permission.ViewDataset, Some(ResourceRef(ResourceRef.dataset, datasetId)))(parse.json) { implicit request =>
+  def getUpdatedFilesAndFolders(datasetId: UUID, limit: Int, pageIndex: Int, space: Option[String], filter: Option[String]) = PermissionAction(Permission.ViewDataset, Some(ResourceRef(ResourceRef.dataset, datasetId)))(parse.json) { implicit request =>
     implicit val user = request.user
     val filepageUpdate = if (pageIndex < 0) 0 else pageIndex
     val sortOrder: String =
       request.cookies.get("sort-order") match {
         case Some(cookie) => cookie.value
         case None => "dateN" //If there is no cookie, and an order was not passed in, the view will choose its default
       }
+
     datasets.get(datasetId) match {
       case Some(dataset) => {
+        val folderHierarchy = new ListBuffer[Folder]()
+
         val folderId = (request.body \ "folderId").asOpt[String]
-        folderId match {
-          case Some(fId) => {
-            folders.get(UUID(fId)) match {
+        val (childFolders: List[UUID], childFiles: List[UUID]) =
+          folderId match {
+            case Some(fId) => folders.get(UUID(fId)) match {
              case Some(folder) => {
-                val (foldersList: List[Folder], limitFileList: List[File]) =
-                  if(play.Play.application().configuration().getBoolean("sortInMemory")) {
-                    (SortingUtils.sortFolders(folder.folders.flatMap(f => folders.get(f)), sortOrder).slice(limit * filepageUpdate, limit * (filepageUpdate + 1)),
-                      SortingUtils.sortFiles(files.get(folder.files).found, sortOrder).slice(limit * filepageUpdate - folder.folders.length, limit * (filepageUpdate + 1) - folder.folders.length))
-                  } else {
-                    (folder.folders.reverse.slice(limit * filepageUpdate, limit * (filepageUpdate+1)).flatMap(f => folders.get(f)),
-                      folder.files.reverse.slice(limit * filepageUpdate - folder.folders.length, limit * (filepageUpdate+1) - folder.folders.length).flatMap(f => files.get(f)))
-                  }
-                var folderHierarchy = new ListBuffer[Folder]()
                folderHierarchy += folder
-                var f1: Folder = folder
+                var f1 = folder
                while (f1.parentType == "folder") {
                  folders.get(f1.parentId) match {
                    case Some(fparent) => {
                      folderHierarchy += fparent
                      f1 = fparent
                    }
-                    case None =>
+                    case None => Logger.error("Parent folder " + f1.parentId.toString + " not found.")
                  }
                }
-                val fileComments = limitFileList.map { file =>
-                  var allComments = comments.findCommentsByFileId(file.id)
-                  sections.findByFileId(file.id).map { section =>
-                    allComments ++= comments.findCommentsBySectionId(section.id)
-                  }
-                  file.id -> allComments.size
-                }.toMap
-                val next = folder.files.length + folder.folders.length > limit * (filepageUpdate + 1)
-
-                Ok(views.html.datasets.filesAndFolders(dataset, Some(folder.id.stringify), foldersList, folderHierarchy.reverse.toList, pageIndex, next, limitFileList.toList, fileComments, space)(request.user))
-
+                (folder.folders, folder.files)
              }
-              case None => InternalServerError(s"No folder with id $fId found")
+              case None => Logger.error("Folder " + fId + " not found.")
            }
+            case None => (dataset.folders, dataset.files)
          }
-          case None => {
-            val (foldersList: List[Folder], limitFileList: List[File]) = if(play.Play.application().configuration().getBoolean("sortInMemory")) {
-              (SortingUtils.sortFolders(dataset.folders.flatMap(f => folders.get(f)), sortOrder).slice(limit * filepageUpdate, limit * (filepageUpdate + 1)),
-                SortingUtils.sortFiles(files.get(dataset.files).found, sortOrder).slice(limit * filepageUpdate - dataset.folders.length, limit * (filepageUpdate + 1) - dataset.folders.length))
-            } else {
-              (dataset.folders.reverse.slice(limit * filepageUpdate, limit * (filepageUpdate+1)).flatMap(f => folders.get(f)),
-                dataset.files.reverse.slice(limit * filepageUpdate - dataset.folders.length, limit * (filepageUpdate+1) - dataset.folders.length).flatMap(f => files.get(f)))
-            }

-            val fileComments = limitFileList.map { file =>
-              var allComments = comments.findCommentsByFileId(file.id)
-              sections.findByFileId(file.id).map { section =>
-                allComments ++= comments.findCommentsBySectionId(section.id)
-              }
-              file.id -> allComments.size
-            }.toMap
+        val filteredFiles = filter match {
+          case Some(filt) => files.get(childFiles).found.filter(f => f.filename.toLowerCase.contains(filt.toLowerCase))
+          case None => files.get(childFiles).found
+        }

-            val folderHierarchy = new ListBuffer[Folder]()
-            val next = dataset.files.length + dataset.folders.length > limit * (filepageUpdate + 1)
-            Ok(views.html.datasets.filesAndFolders(dataset, None, foldersList, folderHierarchy.reverse.toList, pageIndex, next, limitFileList.toList, fileComments, space)(request.user))
-          }
+        val filteredFolders = filter match {
+          case Some(filt) => folders.get(childFolders).found.filter(f => f.name.toLowerCase.contains(filt.toLowerCase))
+          case None => folders.get(childFolders).found
        }
+
+        val (foldersList: List[Folder], limitFileList: List[File]) =
+          if (play.Play.application().configuration().getBoolean("sortInMemory")) {
+            (SortingUtils.sortFolders(filteredFolders, sortOrder).slice(limit * filepageUpdate, limit * (filepageUpdate + 1)),
+              SortingUtils.sortFiles(filteredFiles, sortOrder).slice(limit * filepageUpdate - filteredFolders.length, limit * (filepageUpdate + 1) - filteredFolders.length))
+          } else {
+            (folders.get(childFolders.reverse.slice(limit * filepageUpdate, limit * (filepageUpdate + 1))).found,
+              files.get(childFiles.reverse.slice(limit * filepageUpdate - childFolders.length, limit * (filepageUpdate + 1) - childFolders.length)).found)
+          }
+
+        // Get comment counts per file
+        val fileComments = limitFileList.map { file =>
+          var allComments = comments.findCommentsByFileId(file.id)
+          sections.findByFileId(file.id).map { section =>
+            allComments ++= comments.findCommentsBySectionId(section.id)
+          }
+          file.id -> allComments.size
+        }.toMap
+
+        // Pagination
+        val next = childFiles.length + childFolders.length > limit * (filepageUpdate + 1)
+
+        Ok(views.html.datasets.filesAndFolders(dataset, folderId, foldersList, folderHierarchy.reverse.toList, pageIndex, next, limitFileList.toList, fileComments, space, filter)(request.user))
      }
      case None => InternalServerError(s"Dataset with id $datasetId not Found")
    }
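The new filter parameter narrows a dataset's children to names containing the query, case-insensitively, before sorting and paging are applied. A minimal standalone sketch of that matching pattern (the Entry type and matchesFilter name are illustrative, not part of this commit):

    // Illustrative sketch of the optional, case-insensitive name filter used above.
    // `Entry` and `matchesFilter` are made-up names for demonstration only.
    case class Entry(name: String)

    def matchesFilter(entries: List[Entry], filter: Option[String]): List[Entry] =
      filter match {
        case Some(f) => entries.filter(e => e.name.toLowerCase.contains(f.toLowerCase))
        case None    => entries
      }

    // matchesFilter(List(Entry("Report.pdf"), Entry("data.csv")), Some("report"))
    // returns List(Entry("Report.pdf"))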

app/controllers/Files.scala

Lines changed: 30 additions & 2 deletions
@@ -220,6 +220,34 @@ class Files @Inject() (
       }
     }

+    val pager: models.Pager = dataset match {
+      case None => Pager(None, None)
+      case Some(dsId) => {
+        datasets.get(new UUID(dsId)) match {
+          case None => Pager(None, None)
+          case Some(ds) => {
+            val lastIndex = ds.files.length - 1
+            val index = ds.files.indexOf(id)
+
+            // Set prevFile / nextFile, if applicable
+            if (index > 0 && index < lastIndex) {
+              // Yields UUID of prevFile and nextFile respectively
+              Pager(Some(ds.files(index + 1)), Some(ds.files(index - 1)))
+            } else if (index == 0 && index < lastIndex) {
+              // This is the first file in the list, but not the last
+              Pager(Some(ds.files(index + 1)), None)
+            } else if (index > 0 && index == lastIndex) {
+              // This is the last file in the list, but not the first
+              Pager(None, Some(ds.files(index - 1)))
+            } else {
+              // There is one item on the list, disable paging
+              Pager(None, None)
+            }
+          }
+        }
+      }
+    }
+
     //call Polyglot to get all possible output formats for this file's content type
     current.plugin[PolyglotPlugin] match {
       case Some(plugin) => {
@@ -238,7 +266,7 @@ class Files @Inject() (
           plugin.getOutputFormats(contentTypeEnding).map(outputFormats =>
             Ok(views.html.file(file, id.stringify, commentsByFile, previewsWithPreviewer, sectionsWithPreviews,
               extractorsActive, decodedDatasetsContaining.toList, foldersContainingFile,
-              mds, isRDFExportEnabled, extractionGroups, outputFormats, space, access, folderHierarchy.reverse.toList, decodedSpacesContaining.toList, allDecodedDatasets.toList, view_count, view_date)))
+              mds, isRDFExportEnabled, extractionGroups, outputFormats, space, access, folderHierarchy.reverse.toList, decodedSpacesContaining.toList, allDecodedDatasets.toList, view_count, view_date, pager)))
         }
         case None =>
           Logger.debug("Polyglot plugin not found")
@@ -249,7 +277,7 @@ class Files @Inject() (
           //passing None as the last parameter (list of output formats)
           Future(Ok(views.html.file(file, id.stringify, commentsByFile, previewsWithPreviewer, sectionsWithPreviews,
             extractorsActive, decodedDatasetsContaining.toList, foldersContainingFile,
-            mds, isRDFExportEnabled, extractionGroups, None, space, access, folderHierarchy.reverse.toList, decodedSpacesContaining.toList, allDecodedDatasets.toList, view_count, view_date)))
+            mds, isRDFExportEnabled, extractionGroups, None, space, access, folderHierarchy.reverse.toList, decodedSpacesContaining.toList, allDecodedDatasets.toList, view_count, view_date, pager)))
         }
       }
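The prev/next selection above reduces to index arithmetic over the dataset's file list. A hedged, generic sketch of the same branching, where (as in the controller) "previous" is the neighbouring element at index + 1 and "next" the one at index - 1 (pageLinks is an illustrative helper, not part of the commit):

    // Generic sketch of the prev/next branching used to build the Pager above.
    // Returns (prev, next); `pageLinks` is an illustrative name only.
    def pageLinks[A](items: List[A], current: A): (Option[A], Option[A]) = {
      val lastIndex = items.length - 1
      val index = items.indexOf(current)
      if (index > 0 && index < lastIndex)       (Some(items(index + 1)), Some(items(index - 1)))  // middle of the list
      else if (index == 0 && index < lastIndex) (Some(items(index + 1)), None)                    // first item
      else if (index > 0 && index == lastIndex) (None, Some(items(index - 1)))                    // last item
      else                                      (None, None)                                      // single item, or not found
    }

    // pageLinks(List("a", "b", "c"), "b")  ->  (Some("c"), Some("a"))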

app/models/Pager.scala

Lines changed: 22 additions & 0 deletions
@@ -0,0 +1,22 @@
+package models
+
+/**
+ * A simple structure to assist in handling paging through a list of resources.
+ * `prev` and `next` contain the ID of the previous and next resources (respectively).
+ *
+ * Currently this class is used for paging through files in a dataset, but could
+ * be used to handle paging for any list of arbitrary UUIDs / resources in other contexts.
+ *
+ * See:
+ * {@link views.files},
+ * {@link controllers.Files#file}
+ *
+ * @param prev UUID of the previous resource in the list
+ * @param next UUID of the next resource in the list
+ *
+ * @author lambert8
+ */
+case class Pager(
+  prev: Option[models.UUID],
+  next: Option[models.UUID]
+)
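Because both fields are Options, a caller can render a navigation link only when a neighbour actually exists. A small illustrative use (the UUID string is a made-up placeholder):

    // Illustrative only: the UUID value below is a placeholder.
    val pager = Pager(prev = Some(UUID("5ecfd3c5e4b0a5d6b3f0c001")), next = None)
    pager.prev.foreach(prevId => println(s"previous file: $prevId"))
    if (pager.next.isEmpty) println("no next file to page to")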

app/models/Relation.scala

Lines changed: 13 additions & 0 deletions
@@ -1,5 +1,7 @@
 package models

+import java.net.URL
+
 /**
  * Track relations between two arbitrary resources.
  */
@@ -10,10 +12,21 @@ case class Relation (
   rdfType: Option[String] = None // rdfType: Option[URI]
 )

+/**
+ * Source or sink node of a relationship.
+ *
+ * @param id a string so that it can be either a local UUID or an external URL
+ * @param resourceType internal resource type (dataset, file, etc.)
+ */
 case class Node (
   id: String,
   resourceType: ResourceType.Value
 )

+case class NodeDataset(dataset: Dataset, rdfType: Option[String])
+
+case class NodeFile(file: File, rdfType: Option[String])
+
+case class NodeURL(url: URL, rdfType: Option[String])

app/services/ElasticsearchPlugin.scala

Lines changed: 2 additions & 8 deletions
@@ -441,14 +441,8 @@ class ElasticsearchPlugin(application: Application) extends Plugin {
     // Perform recursion first if necessary
     if (recursive) {
       files.get(dataset.files).found.foreach(f => index(f, idx))
-      for (folderid <- dataset.folders) {
-        folders.get(folderid) match {
-          case Some(f) => {
-            files.get(f.files).found.foreach(fi => index(fi, idx))
-          }
-          case None => Logger.error(s"Error getting file $folderid for recursive indexing")
-        }
-      }
+      for (f <- folders.findByParentDatasetId(dataset.id))
+        files.get(f.files).found.foreach(fi => index(fi, idx))
     }
     index(SearchUtils.getElasticsearchObject(dataset), idx.getOrElse(nameOfIndex))
   }

app/util/SearchUtils.scala

Lines changed: 4 additions & 8 deletions
@@ -26,20 +26,16 @@ object SearchUtils {

     // Get child_of relationships for File
     var child_of: ListBuffer[String] = ListBuffer()
-    datasets.findByFileIdDirectlyContain(id).map(ds => {
+    // ...first, the dataset which contains the file or its folder
+    datasets.findByFileIdAllContain(id).map(ds => {
       child_of += ds.id.toString
       ds.spaces.map(spid => child_of += spid.toString)
       ds.collections.map(collid => child_of += collid.toString)
     })
-    val folderlist = folders.findByFileId(id).map(fld => {
+    // ...second, the immediate parent folder ID (and the folder's parent) itself
+    folders.findByFileId(id).map(fld => {
       child_of += fld.id.toString
       child_of += fld.parentDatasetId.toString
-      fld.id
-    })
-    datasets.get(folderlist).found.foreach(ds => {
-      child_of += ds.id.toString
-      ds.spaces.map(spid => child_of += spid.toString)
-      ds.collections.map(collid => child_of += collid.toString)
     })
     val child_of_distinct = child_of.toList.distinct
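The rewritten block accumulates every candidate parent ID (containing datasets plus their spaces and collections, then the immediate parent folders and their parent datasets) and deduplicates at the end. A small illustrative sketch of that accumulate-then-distinct pattern, with made-up ID strings:

    import scala.collection.mutable.ListBuffer

    // Illustrative only: collect candidate parent IDs, then collapse duplicates.
    val child_of: ListBuffer[String] = ListBuffer()
    child_of += "dataset-1"                          // containing dataset
    child_of ++= List("space-a", "collection-x")     // its spaces and collections
    child_of ++= List("folder-7", "dataset-1")       // parent folder and its parent dataset
    val child_of_distinct = child_of.toList.distinct // "dataset-1" appears only once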

app/views/collections/listchildcollection.scala.html

Lines changed: 0 additions & 10 deletions
@@ -53,16 +53,6 @@ <h3><a href="@(routes.Collections.collection(collection.id))">@Html(collection.n
         <span class='glyphicon glyphicon-star-empty'></span> Unfollow</a>
       </div>
     }
-    <!-- If user can delete, the button is enabled, otherwise the button is present but disabled to provide consistent UE. -->
-    @if( user.get.id.equals(collection.author.id) || Permission.checkPermission(Permission.RemoveResourceFromCollection, ResourceRef(ResourceRef.collection, collection.id))){
-      <button onclick="confirmRemoveResourceFromResource('collection','collection','@(parent.id)','collection','@(collection.id)','@(collection.name)',true,'@(redirect)')"
-              class="btn btn-link" title="Remove the child collection from the collection @parent.name">
-        <span class="glyphicon glyphicon-remove"></span> Remove</button>
-    } else {
-      <div class="inline" title="No permission to delete the child collection">
-        <button class="btn btn-link btn-sm disabled"><span class="glyphicon glyphicon-remove"></span> Remove</button>
-      </div>
-    }
   }
 </div>
 </div>
