Skip to content
Open
Show file tree
Hide file tree
Changes from 11 commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 2 additions & 1 deletion app/controllers/DatasetController.scala
Original file line number Diff line number Diff line change
Expand Up @@ -283,7 +283,8 @@ class DatasetController @Inject()(userService: UserService,
searchQuery,
request.identity.map(_._id),
recursive.getOrElse(false),
limitOpt = limit
limitOpt = limit,
requestingUserOrga = request.identity.map(_._organization)
)
} yield Json.toJson(datasetInfos)
} else {
Expand Down
14 changes: 7 additions & 7 deletions app/controllers/WKRemoteDataStoreController.scala
Original file line number Diff line number Diff line change
Expand Up @@ -110,13 +110,13 @@ class WKRemoteDataStoreController @Inject()(
teamIdsPerDataset <- Fox.combined(datasets.map(dataset => teamDAO.findAllowedTeamIdsForDataset(dataset.id)))
unfinishedUploads = datasets.zip(teamIdsPerDataset).map {
case (d, teamIds) =>
new UnfinishedUpload("<filled-in by datastore>",
d.dataSourceId,
d.name,
d.folderId.toString,
d.created,
None, // Filled by datastore.
teamIds.map(_.toString))
UnfinishedUpload("<filled-in by datastore>",
d.dataSourceId,
d.name,
d.folderId.toString,
d.created,
None, // Filled by datastore.
teamIds.map(_.toString))
}
} yield Ok(Json.toJson(unfinishedUploads))
}
Expand Down
40 changes: 25 additions & 15 deletions app/models/dataset/Dataset.scala
Original file line number Diff line number Diff line change
Expand Up @@ -89,6 +89,7 @@ case class DatasetCompactInfo(
isUnreported: Boolean,
colorLayerNames: List[String],
segmentationLayerNames: List[String],
usedStorageBytes: Option[Long],
) {
def dataSourceId = new DataSourceId(directoryName, owningOrganization)
}
Expand Down Expand Up @@ -230,18 +231,19 @@ class DatasetDAO @Inject()(sqlClient: SqlClient, datasetLayerDAO: DatasetLayerDA
parsed <- parseAll(r)
} yield parsed

def findAllCompactWithSearch(isActiveOpt: Option[Boolean] = None,
isUnreported: Option[Boolean] = None,
organizationIdOpt: Option[String] = None,
folderIdOpt: Option[ObjectId] = None,
uploaderIdOpt: Option[ObjectId] = None,
searchQuery: Option[String] = None,
requestingUserIdOpt: Option[ObjectId] = None,
includeSubfolders: Boolean = false,
statusOpt: Option[String] = None,
createdSinceOpt: Option[Instant] = None,
limitOpt: Option[Int] = None,
)(implicit ctx: DBAccessContext): Fox[List[DatasetCompactInfo]] =
def findAllCompactWithSearch(
isActiveOpt: Option[Boolean] = None,
isUnreported: Option[Boolean] = None,
organizationIdOpt: Option[String] = None,
folderIdOpt: Option[ObjectId] = None,
uploaderIdOpt: Option[ObjectId] = None,
searchQuery: Option[String] = None,
requestingUserIdOpt: Option[ObjectId] = None,
includeSubfolders: Boolean = false,
statusOpt: Option[String] = None,
createdSinceOpt: Option[Instant] = None,
limitOpt: Option[Int] = None,
requestingUserOrga: Option[String] = None)(implicit ctx: DBAccessContext): Fox[List[DatasetCompactInfo]] =
for {
selectionPredicates <- buildSelectionPredicates(isActiveOpt,
isUnreported,
Expand Down Expand Up @@ -288,7 +290,8 @@ class DatasetDAO @Inject()(sqlClient: SqlClient, datasetLayerDAO: DatasetLayerDA
d.status,
d.tags,
cl.names AS colorLayerNames,
sl.names AS segmentationLayerNames
sl.names AS segmentationLayerNames,
COALESCE(magStorage.storage, 0) + COALESCE(attachmentStorage.storage, 0) AS usedStorageBytes
FROM
(SELECT $columns FROM $existingCollectionName WHERE $selectionPredicates $limitQuery) d
JOIN webknossos.organizations o
Expand All @@ -301,6 +304,10 @@ class DatasetDAO @Inject()(sqlClient: SqlClient, datasetLayerDAO: DatasetLayerDA
ON d._id = cl._dataset
LEFT JOIN (SELECT _dataset, ARRAY_AGG(name ORDER BY name) AS names FROM webknossos.dataset_layers WHERE category = 'segmentation' GROUP BY _dataset) sl
ON d._id = sl._dataset
LEFT JOIN (SELECT _dataset, COALESCE(SUM(usedStorageBytes), 0) AS storage FROM webknossos.organization_usedStorage_mags GROUP BY _dataset) magStorage
ON d._id = magStorage._dataset
LEFT JOIN (SELECT _dataset, COALESCE(SUM(usedStorageBytes), 0) AS storage FROM webknossos.organization_usedStorage_attachments GROUP BY _dataset) attachmentStorage
ON d._id = attachmentStorage._dataset
"""
rows <- run(
query.as[
Expand All @@ -316,7 +323,8 @@ class DatasetDAO @Inject()(sqlClient: SqlClient, datasetLayerDAO: DatasetLayerDA
String,
String,
String,
String)])
String,
Long)])
} yield
rows.toList.map(
row =>
Expand All @@ -334,7 +342,9 @@ class DatasetDAO @Inject()(sqlClient: SqlClient, datasetLayerDAO: DatasetLayerDA
tags = parseArrayLiteral(row._11),
isUnreported = DataSourceStatus.unreportedStatusList.contains(row._10),
colorLayerNames = parseArrayLiteral(row._12),
segmentationLayerNames = parseArrayLiteral(row._13)
segmentationLayerNames = parseArrayLiteral(row._13),
// Only include usedStorage for datasets of your own organization.
usedStorageBytes = if (requestingUserOrga.contains(row._3) && row._14 > 0) Some(row._14) else None,
))

private def buildSelectionPredicates(isActiveOpt: Option[Boolean],
Expand Down
3 changes: 2 additions & 1 deletion app/models/dataset/DatasetService.scala
Original file line number Diff line number Diff line change
Expand Up @@ -94,7 +94,8 @@ class DatasetService @Inject()(organizationDAO: OrganizationDAO,
includeSubfolders = true,
statusOpt = Some(DataSourceStatus.notYetUploaded),
// Only list pending uploads since the two last weeks.
createdSinceOpt = Some(Instant.now - (14 days))
createdSinceOpt = Some(Instant.now - (14 days)),
requestingUserOrga = Some(organizationId)
) ?~> "dataset.list.fetchFailed"

def createAndSetUpDataset(datasetName: String,
Expand Down
76 changes: 51 additions & 25 deletions frontend/javascripts/dashboard/advanced_dataset/dataset_table.tsx
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@ import type {
TablePaginationConfig,
} from "antd/lib/table/interface";
import classNames from "classnames";
import FastTooltip from "components/fast_tooltip";
import FixedExpandableTable from "components/fixed_expandable_table";
import FormattedDate from "components/formatted_date";
import DatasetActionView, {
Expand All @@ -22,7 +23,7 @@ import {
useDatasetDrop,
} from "dashboard/folders/folder_tree";
import { diceCoefficient as dice } from "dice-coefficient";
import { stringToColor } from "libs/format_utils";
import { formatCountToDataAmountUnit, stringToColor } from "libs/format_utils";
import { useWkSelector } from "libs/react_hooks";
import Shortcut from "libs/shortcut_component";
import * as Utils from "libs/utils";
Expand Down Expand Up @@ -287,6 +288,13 @@ class DatasetRenderer {
return DatasetRenderer.getRowKey(this.data);
}

renderStorageColumn(): React.ReactNode {
return this.data.usedStorageBytes != null ?
(<FastTooltip title={"Note that linked and remote layers aren’t measured."}>
? formatCountToDataAmountUnit(this.data.usedStorageBytes, true)
</FastTooltip>
) : null;
}
// Datasets are represented by a generic file icon in the type column
// (folders render their own icon elsewhere).
renderTypeColumn(): React.ReactNode {
  return <FileOutlined style={{ fontSize: "18px" }} />;
}
Expand Down Expand Up @@ -388,6 +396,9 @@ class FolderRenderer {
</>
);
}
// Folders have no storage usage of their own, so their "Size" cell stays empty.
renderStorageColumn(): React.ReactNode {
  return null;
}
renderCreationDateColumn(): React.ReactNode {
return null;
}
Expand All @@ -411,17 +422,18 @@ class DatasetTable extends React.PureComponent<Props, State> {
// rendering). That's why it's not included in this.state (also it
// would lead to infinite loops, too).
currentPageData: RowRenderer[] = [];
isUserAdminOrDatasetManager: boolean = this.props.isUserAdmin || this.props.isUserDatasetManager;

static getDerivedStateFromProps(nextProps: Props, prevState: State): Partial<State> {
const maybeSortedInfo: { sortedInfo: SorterResult<string> } | EmptyObject = // Clear the sorting exactly when the search box is initially filled
// (searchQuery changes from empty string to non-empty string)
nextProps.searchQuery !== "" && prevState.prevSearchQuery === ""
? {
sortedInfo: {
columnKey: "",
order: "ascend",
},
}
sortedInfo: {
columnKey: "",
order: "ascend",
},
}
: {};
return {
prevSearchQuery: nextProps.searchQuery,
Expand Down Expand Up @@ -463,9 +475,7 @@ class DatasetTable extends React.PureComponent<Props, State> {
});

const filterByHasLayers = (datasets: APIDatasetCompact[]) =>
this.props.isUserAdmin || this.props.isUserDatasetManager
? datasets
: datasets.filter((dataset) => dataset.isActive);
this.isUserAdminOrDatasetManager ? datasets : datasets.filter((dataset) => dataset.isActive);

return filteredByTags(filterByMode(filterByHasLayers(this.props.datasets)));
}
Expand Down Expand Up @@ -556,20 +566,20 @@ class DatasetTable extends React.PureComponent<Props, State> {
// and if the query is at least 3 characters long to avoid sorting *all* datasets
isSearchQueryLongEnough && sortedInfo.columnKey == null
? _.chain([...filteredDataSource, ...activeSubfolders])
.map((datasetOrFolder) => {
const diceCoefficient = dice(datasetOrFolder.name, this.props.searchQuery);
const rank = useLruRank ? datasetToRankMap.get(datasetOrFolder) || 0 : 0;
const rankCoefficient = 1 - rank / filteredDataSource.length;
const coefficient = (diceCoefficient + rankCoefficient) / 2;
return {
datasetOrFolder,
coefficient,
};
})
.sortBy("coefficient")
.map(({ datasetOrFolder }) => datasetOrFolder)
.reverse()
.value()
.map((datasetOrFolder) => {
const diceCoefficient = dice(datasetOrFolder.name, this.props.searchQuery);
const rank = useLruRank ? datasetToRankMap.get(datasetOrFolder) || 0 : 0;
const rankCoefficient = 1 - rank / filteredDataSource.length;
const coefficient = (diceCoefficient + rankCoefficient) / 2;
return {
datasetOrFolder,
coefficient,
};
})
.sortBy("coefficient")
.map(({ datasetOrFolder }) => datasetOrFolder)
.reverse()
.value()
: dataSourceSortedByRank;
const sortedDataSourceRenderers: RowRenderer[] = sortedDataSource.map((record) =>
isRecordADataset(record)
Expand Down Expand Up @@ -604,7 +614,6 @@ class DatasetTable extends React.PureComponent<Props, State> {
sortOrder: sortedInfo.columnKey === "created" ? sortedInfo.order : undefined,
render: (_created, rowRenderer: RowRenderer) => rowRenderer.renderCreationDateColumn(),
},

{
width: 200,
title: "Actions",
Expand All @@ -613,6 +622,23 @@ class DatasetTable extends React.PureComponent<Props, State> {
render: (__, rowRenderer: RowRenderer) => rowRenderer.renderActionsColumn(),
},
];
// Only admins and dataset managers get the sortable "Size" column; regular
// users never receive usedStorageBytes from the backend, so the column would
// always be empty for them. The column is spliced in at index 2 so it appears
// between the fixed leading columns and the creation-date/actions columns.
if (this.isUserAdminOrDatasetManager && context) {
const datasetStorageSizeColumn = {
title: "Size",
key: "size",
width: 120,
// Folders have no size; only dataset rows render a storage value.
render: (_: any, rowRenderer: RowRenderer) => {
return isRecordADataset(rowRenderer.data) ? rowRenderer.renderStorageColumn() : null;
},
// Sort datasets by byte count; folders and datasets without a reported
// size sort as 0 so they group together at one end.
sorter: Utils.compareBy<RowRenderer>((rowRenderer) =>
isRecordADataset(rowRenderer.data) && rowRenderer.data.usedStorageBytes
? rowRenderer.data.usedStorageBytes
: 0,
),
sortOrder: sortedInfo.columnKey === "size" ? sortedInfo.order : undefined,
};
columns.splice(2, 0, datasetStorageSizeColumn);
}

return (
<DndProvider backend={HTML5Backend}>
Expand All @@ -626,7 +652,7 @@ class DatasetTable extends React.PureComponent<Props, State> {
contextMenuPosition={contextMenuPosition}
datasetCollectionContext={context}
editFolder={
folderForContextMenu != null ? () => this.editFolder(folderForContextMenu) : () => {}
folderForContextMenu != null ? () => this.editFolder(folderForContextMenu) : () => { }
}
/>
<FixedExpandableTable
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import { useIsMutating } from "@tanstack/react-query";
import { type DatasetUpdater, getDatastores, triggerDatasetCheck } from "admin/rest_api";
import { useEffectOnlyOnce, usePrevious } from "libs/react_hooks";
import { useEffectOnlyOnce, usePrevious, useWkSelector } from "libs/react_hooks";
import UserLocalStorage from "libs/user_local_storage";
import _ from "lodash";
import type React from "react";
Expand Down Expand Up @@ -55,6 +55,7 @@ export type DatasetCollectionContextValue = {
deleteFolderMutation: ReturnType<typeof useDeleteFolderMutation>;
updateDatasetMutation: ReturnType<typeof useUpdateDatasetMutation>;
};
usedStorageInOrga: number | undefined;
};

export const DatasetCollectionContext = createContext<DatasetCollectionContextValue | undefined>(
Expand Down Expand Up @@ -84,6 +85,7 @@ export default function DatasetCollectionContextProvider({
const [isChecking, setIsChecking] = useState(false);
const isMutating = useIsMutating() > 0;
const { data: folder, isError: didFolderLoadingError } = useFolderQuery(activeFolderId);
const usedStorageInOrga = useWkSelector((state) => state.activeOrganization?.usedStorageBytes);

const [selectedDatasets, setSelectedDatasets] = useState<APIDatasetCompact[]>([]);
const [selectedFolder, setSelectedFolder] = useState<FolderItem | null>(null);
Expand Down Expand Up @@ -254,6 +256,7 @@ export default function DatasetCollectionContextProvider({
moveFolderMutation,
updateDatasetMutation,
},
usedStorageInOrga,
}),
[
isChecking,
Expand All @@ -279,6 +282,7 @@ export default function DatasetCollectionContextProvider({
getBreadcrumbs,
selectedFolder,
setGlobalSearchQuery,
usedStorageInOrga,
],
);

Expand Down
2 changes: 2 additions & 0 deletions frontend/javascripts/types/api_types.ts
Original file line number Diff line number Diff line change
Expand Up @@ -266,6 +266,7 @@ export type APIDatasetCompactWithoutStatusAndLayerNames = Pick<
| "lastUsedByUser"
| "tags"
| "isUnreported"
| "usedStorageBytes"
>;
export type APIDatasetCompact = APIDatasetCompactWithoutStatusAndLayerNames & {
id: string;
Expand Down Expand Up @@ -295,6 +296,7 @@ export function convertDatasetToCompact(dataset: APIDataset): APIDatasetCompact
isUnreported: dataset.isUnreported,
colorLayerNames: colorLayerNames,
segmentationLayerNames: segmentationLayerNames,
usedStorageBytes: dataset.usedStorageBytes,
};
}

Expand Down
1 change: 1 addition & 0 deletions package.json
Original file line number Diff line number Diff line change
Expand Up @@ -104,6 +104,7 @@
"licenses-backend": "sbt dumpLicenseReport",
"docs": "node_modules/.bin/documentation build --shallow frontend/javascripts/viewer/api/api_loader.ts frontend/javascripts/viewer/api/api_latest.ts --github --project-name \"WEBKNOSSOS Frontend API\" --format html --output public/docs/frontend-api",
"refresh-schema": "./tools/postgres/dbtool.js refresh-schema && rm -f target/scala-2.13/src_managed/schema/com/scalableminds/webknossos/schema/Tables.scala",
"enable-storage-scan": "./tools/postgres/dbtool.js enable-storage-scan",
"enable-jobs": "sed -i -e 's/jobsEnabled = false/jobsEnabled = true/g' ./conf/application.conf; sed -i -e 's/voxelyticsEnabled = false/voxelyticsEnabled = true/g' ./conf/application.conf; ./tools/postgres/dbtool.js enable-jobs",
"disable-jobs": "sed -i -e 's/jobsEnabled = true/jobsEnabled = false/g' ./conf/application.conf; sed -i -e 's/voxelyticsEnabled = true/voxelyticsEnabled = false/g' ./conf/application.conf; ./tools/postgres/dbtool.js disable-jobs",
"insert-local-datastore": "./tools/postgres/dbtool.js insert-local-datastore",
Expand Down
18 changes: 18 additions & 0 deletions tools/postgres/dbtool.js
Original file line number Diff line number Diff line change
Expand Up @@ -420,6 +420,24 @@ program
console.log("✨✨ Done");
});

// Registers the `enable-storage-scan` CLI subcommand: flips on used-storage
// reporting for the local datastore and resets the sample organization's last
// scan time to the epoch so the next scan runs immediately. Each psql result
// is echoed to the console, in the same order as the statements below.
program
  .command("enable-storage-scan")
  .description("Activates dataset storage scan in WEBKNOSSOS for the default datastore.")
  .action(() => {
    console.log("Activating dataset storage scan in WEBKNOSSOS for the default datastore...");
    const updateStatements = [
      `UPDATE webknossos.datastores SET reportUsedStorageEnabled = TRUE WHERE name = 'localhost'`,
      `UPDATE webknossos.organizations SET lastStorageScanTime = '1970-01-01T00:00:00.000Z' WHERE _id = 'sample_organization'`,
    ];
    for (const statement of updateStatements) {
      console.log(callPsql(statement));
    }
    console.log("✨✨ Done");
  });

program
.command("dump-schema <schemaDir>")
.description("Dumps current schema into a folder")
Expand Down
2 changes: 2 additions & 0 deletions unreleased_changes/9025.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,2 @@
### Added
- Display used storage for each dataset in the dashboard's dataset table.
Loading