diff --git a/app/controllers/DatasetController.scala b/app/controllers/DatasetController.scala index edb062c2086..42463293c8c 100755 --- a/app/controllers/DatasetController.scala +++ b/app/controllers/DatasetController.scala @@ -283,7 +283,8 @@ class DatasetController @Inject()(userService: UserService, searchQuery, request.identity.map(_._id), recursive.getOrElse(false), - limitOpt = limit + limitOpt = limit, + requestingUserOrga = request.identity.map(_._organization) ) } yield Json.toJson(datasetInfos) } else { diff --git a/app/controllers/WKRemoteDataStoreController.scala b/app/controllers/WKRemoteDataStoreController.scala index 9564ac496c2..dae9325e3bf 100644 --- a/app/controllers/WKRemoteDataStoreController.scala +++ b/app/controllers/WKRemoteDataStoreController.scala @@ -110,13 +110,13 @@ class WKRemoteDataStoreController @Inject()( teamIdsPerDataset <- Fox.combined(datasets.map(dataset => teamDAO.findAllowedTeamIdsForDataset(dataset.id))) unfinishedUploads = datasets.zip(teamIdsPerDataset).map { case (d, teamIds) => - new UnfinishedUpload("", - d.dataSourceId, - d.name, - d.folderId.toString, - d.created, - None, // Filled by datastore. - teamIds.map(_.toString)) + UnfinishedUpload("", + d.dataSourceId, + d.name, + d.folderId.toString, + d.created, + None, // Filled by datastore. 
+ teamIds.map(_.toString)) } } yield Ok(Json.toJson(unfinishedUploads)) } diff --git a/app/models/dataset/Dataset.scala b/app/models/dataset/Dataset.scala index a10dc03c31a..f4916ce5151 100755 --- a/app/models/dataset/Dataset.scala +++ b/app/models/dataset/Dataset.scala @@ -89,6 +89,7 @@ case class DatasetCompactInfo( isUnreported: Boolean, colorLayerNames: List[String], segmentationLayerNames: List[String], + usedStorageBytes: Option[Long], ) { def dataSourceId = new DataSourceId(directoryName, owningOrganization) } @@ -230,18 +231,19 @@ class DatasetDAO @Inject()(sqlClient: SqlClient, datasetLayerDAO: DatasetLayerDA parsed <- parseAll(r) } yield parsed - def findAllCompactWithSearch(isActiveOpt: Option[Boolean] = None, - isUnreported: Option[Boolean] = None, - organizationIdOpt: Option[String] = None, - folderIdOpt: Option[ObjectId] = None, - uploaderIdOpt: Option[ObjectId] = None, - searchQuery: Option[String] = None, - requestingUserIdOpt: Option[ObjectId] = None, - includeSubfolders: Boolean = false, - statusOpt: Option[String] = None, - createdSinceOpt: Option[Instant] = None, - limitOpt: Option[Int] = None, - )(implicit ctx: DBAccessContext): Fox[List[DatasetCompactInfo]] = + def findAllCompactWithSearch( + isActiveOpt: Option[Boolean] = None, + isUnreported: Option[Boolean] = None, + organizationIdOpt: Option[String] = None, + folderIdOpt: Option[ObjectId] = None, + uploaderIdOpt: Option[ObjectId] = None, + searchQuery: Option[String] = None, + requestingUserIdOpt: Option[ObjectId] = None, + includeSubfolders: Boolean = false, + statusOpt: Option[String] = None, + createdSinceOpt: Option[Instant] = None, + limitOpt: Option[Int] = None, + requestingUserOrga: Option[String] = None)(implicit ctx: DBAccessContext): Fox[List[DatasetCompactInfo]] = for { selectionPredicates <- buildSelectionPredicates(isActiveOpt, isUnreported, @@ -288,7 +290,8 @@ class DatasetDAO @Inject()(sqlClient: SqlClient, datasetLayerDAO: DatasetLayerDA d.status, d.tags, cl.names AS 
colorLayerNames, - sl.names AS segmentationLayerNames + sl.names AS segmentationLayerNames, + COALESCE(magStorage.storage, 0) + COALESCE(attachmentStorage.storage, 0) AS usedStorageBytes FROM (SELECT $columns FROM $existingCollectionName WHERE $selectionPredicates $limitQuery) d JOIN webknossos.organizations o @@ -301,6 +304,10 @@ class DatasetDAO @Inject()(sqlClient: SqlClient, datasetLayerDAO: DatasetLayerDA ON d._id = cl._dataset LEFT JOIN (SELECT _dataset, ARRAY_AGG(name ORDER BY name) AS names FROM webknossos.dataset_layers WHERE category = 'segmentation' GROUP BY _dataset) sl ON d._id = sl._dataset + LEFT JOIN (SELECT _dataset, COALESCE(SUM(usedStorageBytes), 0) AS storage FROM webknossos.organization_usedStorage_mags GROUP BY _dataset) magStorage + ON d._id = magStorage._dataset + LEFT JOIN (SELECT _dataset, COALESCE(SUM(usedStorageBytes), 0) AS storage FROM webknossos.organization_usedStorage_attachments GROUP BY _dataset) attachmentStorage + ON d._id = attachmentStorage._dataset """ rows <- run( query.as[ @@ -316,7 +323,8 @@ class DatasetDAO @Inject()(sqlClient: SqlClient, datasetLayerDAO: DatasetLayerDA String, String, String, - String)]) + String, + Long)]) } yield rows.toList.map( row => @@ -334,7 +342,9 @@ class DatasetDAO @Inject()(sqlClient: SqlClient, datasetLayerDAO: DatasetLayerDA tags = parseArrayLiteral(row._11), isUnreported = DataSourceStatus.unreportedStatusList.contains(row._10), colorLayerNames = parseArrayLiteral(row._12), - segmentationLayerNames = parseArrayLiteral(row._13) + segmentationLayerNames = parseArrayLiteral(row._13), + // Only include usedStorage for datasets of your own organization. 
+ usedStorageBytes = if (requestingUserOrga.contains(row._3) && row._14 > 0) Some(row._14) else None, )) private def buildSelectionPredicates(isActiveOpt: Option[Boolean], diff --git a/app/models/dataset/DatasetService.scala b/app/models/dataset/DatasetService.scala index f028ea1119e..54588e4b90f 100644 --- a/app/models/dataset/DatasetService.scala +++ b/app/models/dataset/DatasetService.scala @@ -94,7 +94,8 @@ class DatasetService @Inject()(organizationDAO: OrganizationDAO, includeSubfolders = true, statusOpt = Some(DataSourceStatus.notYetUploaded), // Only list pending uploads since the two last weeks. - createdSinceOpt = Some(Instant.now - (14 days)) + createdSinceOpt = Some(Instant.now - (14 days)), + requestingUserOrga = Some(organizationId) ) ?~> "dataset.list.fetchFailed" def createAndSetUpDataset(datasetName: String, diff --git a/frontend/javascripts/dashboard/advanced_dataset/dataset_table.tsx b/frontend/javascripts/dashboard/advanced_dataset/dataset_table.tsx index 252ea6fba47..8b9d2049600 100644 --- a/frontend/javascripts/dashboard/advanced_dataset/dataset_table.tsx +++ b/frontend/javascripts/dashboard/advanced_dataset/dataset_table.tsx @@ -1,6 +1,13 @@ -import { FileOutlined, FolderOpenOutlined, PlusOutlined, WarningOutlined } from "@ant-design/icons"; +import { + FileOutlined, + FolderOpenOutlined, + InfoCircleOutlined, + PlusOutlined, + WarningOutlined, +} from "@ant-design/icons"; import type { DatasetUpdater } from "admin/rest_api"; import { Dropdown, type MenuProps, Tag, Tooltip } from "antd"; +import { Space } from "antd/lib"; import type { ColumnType, FilterValue, @@ -8,6 +15,7 @@ import type { TablePaginationConfig, } from "antd/lib/table/interface"; import classNames from "classnames"; +import FastTooltip from "components/fast_tooltip"; import FixedExpandableTable from "components/fixed_expandable_table"; import FormattedDate from "components/formatted_date"; import DatasetActionView, { @@ -22,7 +30,7 @@ import { useDatasetDrop, } from 
"dashboard/folders/folder_tree"; import { diceCoefficient as dice } from "dice-coefficient"; -import { stringToColor } from "libs/format_utils"; +import { formatCountToDataAmountUnit, stringToColor } from "libs/format_utils"; import { useWkSelector } from "libs/react_hooks"; import Shortcut from "libs/shortcut_component"; import * as Utils from "libs/utils"; @@ -287,6 +295,13 @@ class DatasetRenderer { return DatasetRenderer.getRowKey(this.data); } + renderStorageColumn(): React.ReactNode { + return this.data.usedStorageBytes != null ? ( + + {formatCountToDataAmountUnit(this.data.usedStorageBytes, true)} + + ) : null; + } renderTypeColumn(): React.ReactNode { return ; } @@ -388,6 +403,9 @@ class FolderRenderer { ); } + renderStorageColumn(): React.ReactNode { + return null; + } renderCreationDateColumn(): React.ReactNode { return null; } @@ -411,6 +429,9 @@ class DatasetTable extends React.PureComponent { // rendering). That's why it's not included in this.state (also it // would lead to infinite loops, too). currentPageData: RowRenderer[] = []; + getIsUserAdminOrDatasetManager(): boolean { + return this.props.isUserAdmin || this.props.isUserDatasetManager; + } static getDerivedStateFromProps(nextProps: Props, prevState: State): Partial { const maybeSortedInfo: { sortedInfo: SorterResult } | EmptyObject = // Clear the sorting exactly when the search box is initially filled @@ -463,7 +484,7 @@ class DatasetTable extends React.PureComponent { }); const filterByHasLayers = (datasets: APIDatasetCompact[]) => - this.props.isUserAdmin || this.props.isUserDatasetManager + this.getIsUserAdminOrDatasetManager() ? datasets : datasets.filter((dataset) => dataset.isActive); @@ -604,7 +625,6 @@ class DatasetTable extends React.PureComponent { sortOrder: sortedInfo.columnKey === "created" ? 
sortedInfo.order : undefined, render: (_created, rowRenderer: RowRenderer) => rowRenderer.renderCreationDateColumn(), }, - { width: 200, title: "Actions", @@ -613,6 +633,46 @@ class DatasetTable extends React.PureComponent { render: (__, rowRenderer: RowRenderer) => rowRenderer.renderActionsColumn(), }, ]; + if ( + this.getIsUserAdminOrDatasetManager() && + context.usedStorageInOrga != null && + context.usedStorageInOrga > 0 + ) { + const datasetStorageSizeColumn = { + title: ( + + Used Storage{" "} + + Storage used by this dataset within your organization. It may be zero because: +
    +
<br />• The storage hasn't been scanned yet.
<br />• The data is streamed from outside sources.
<br />• It's counted in other datasets.
<br />• The dataset belongs to another organization.
+ + } + > + +
{" "} +
+ ), + key: "storage", + width: 200, + render: (_: any, rowRenderer: RowRenderer) => { + return isRecordADataset(rowRenderer.data) ? rowRenderer.renderStorageColumn() : null; + }, + sorter: Utils.compareBy((rowRenderer) => + isRecordADataset(rowRenderer.data) && rowRenderer.data.usedStorageBytes + ? rowRenderer.data.usedStorageBytes + : 0, + ), + sortOrder: sortedInfo.columnKey === "storage" ? sortedInfo.order : undefined, + }; + columns.splice(2, 0, datasetStorageSizeColumn); + } return ( diff --git a/frontend/javascripts/dashboard/dataset/dataset_collection_context.tsx b/frontend/javascripts/dashboard/dataset/dataset_collection_context.tsx index 1442276b46d..353a9589980 100644 --- a/frontend/javascripts/dashboard/dataset/dataset_collection_context.tsx +++ b/frontend/javascripts/dashboard/dataset/dataset_collection_context.tsx @@ -1,6 +1,6 @@ import { useIsMutating } from "@tanstack/react-query"; import { type DatasetUpdater, getDatastores, triggerDatasetCheck } from "admin/rest_api"; -import { useEffectOnlyOnce, usePrevious } from "libs/react_hooks"; +import { useEffectOnlyOnce, usePrevious, useWkSelector } from "libs/react_hooks"; import UserLocalStorage from "libs/user_local_storage"; import _ from "lodash"; import type React from "react"; @@ -55,6 +55,7 @@ export type DatasetCollectionContextValue = { deleteFolderMutation: ReturnType; updateDatasetMutation: ReturnType; }; + usedStorageInOrga: number | undefined; }; export const DatasetCollectionContext = createContext( @@ -84,6 +85,7 @@ export default function DatasetCollectionContextProvider({ const [isChecking, setIsChecking] = useState(false); const isMutating = useIsMutating() > 0; const { data: folder, isError: didFolderLoadingError } = useFolderQuery(activeFolderId); + const usedStorageInOrga = useWkSelector((state) => state.activeOrganization?.usedStorageBytes); const [selectedDatasets, setSelectedDatasets] = useState([]); const [selectedFolder, setSelectedFolder] = useState(null); @@ -254,6 +256,7 @@ 
export default function DatasetCollectionContextProvider({ moveFolderMutation, updateDatasetMutation, }, + usedStorageInOrga, }), [ isChecking, @@ -279,6 +282,7 @@ export default function DatasetCollectionContextProvider({ getBreadcrumbs, selectedFolder, setGlobalSearchQuery, + usedStorageInOrga, ], ); diff --git a/frontend/javascripts/dashboard/folders/details_sidebar.tsx b/frontend/javascripts/dashboard/folders/details_sidebar.tsx index 31b445261be..5efa0fa32a9 100644 --- a/frontend/javascripts/dashboard/folders/details_sidebar.tsx +++ b/frontend/javascripts/dashboard/folders/details_sidebar.tsx @@ -204,7 +204,10 @@ function DatasetDetails({ selectedDataset }: { selectedDataset: APIDatasetCompac {fullDataset?.usedStorageBytes && fullDataset.usedStorageBytes > 10000 ? (
Used Storage
- +
{formatCountToDataAmountUnit(fullDataset.usedStorageBytes, true)}
diff --git a/frontend/javascripts/types/api_types.ts b/frontend/javascripts/types/api_types.ts index 20f958467c8..253c21e804b 100644 --- a/frontend/javascripts/types/api_types.ts +++ b/frontend/javascripts/types/api_types.ts @@ -272,6 +272,7 @@ export type APIDatasetCompact = APIDatasetCompactWithoutStatusAndLayerNames & { status: MutableAPIDataSourceBase["status"]; colorLayerNames: Array; segmentationLayerNames: Array; + usedStorageBytes?: number | null; }; export function convertDatasetToCompact(dataset: APIDataset): APIDatasetCompact { @@ -295,6 +296,7 @@ export function convertDatasetToCompact(dataset: APIDataset): APIDatasetCompact isUnreported: dataset.isUnreported, colorLayerNames: colorLayerNames, segmentationLayerNames: segmentationLayerNames, + usedStorageBytes: dataset.usedStorageBytes, }; } diff --git a/package.json b/package.json index a1c5670d965..01baf383ce4 100644 --- a/package.json +++ b/package.json @@ -104,6 +104,7 @@ "licenses-backend": "sbt dumpLicenseReport", "docs": "node_modules/.bin/documentation build --shallow frontend/javascripts/viewer/api/api_loader.ts frontend/javascripts/viewer/api/api_latest.ts --github --project-name \"WEBKNOSSOS Frontend API\" --format html --output public/docs/frontend-api", "refresh-schema": "./tools/postgres/dbtool.js refresh-schema && rm -f target/scala-2.13/src_managed/schema/com/scalableminds/webknossos/schema/Tables.scala", + "enable-storage-scan": "./tools/postgres/dbtool.js enable-storage-scan", "enable-jobs": "sed -i -e 's/jobsEnabled = false/jobsEnabled = true/g' ./conf/application.conf; sed -i -e 's/voxelyticsEnabled = false/voxelyticsEnabled = true/g' ./conf/application.conf; ./tools/postgres/dbtool.js enable-jobs", "disable-jobs": "sed -i -e 's/jobsEnabled = true/jobsEnabled = false/g' ./conf/application.conf; sed -i -e 's/voxelyticsEnabled = true/voxelyticsEnabled = false/g' ./conf/application.conf; ./tools/postgres/dbtool.js disable-jobs", "insert-local-datastore": "./tools/postgres/dbtool.js 
insert-local-datastore", diff --git a/tools/postgres/dbtool.js b/tools/postgres/dbtool.js index a8a125295b5..cfb7651afe7 100755 --- a/tools/postgres/dbtool.js +++ b/tools/postgres/dbtool.js @@ -420,6 +420,24 @@ program console.log("✨✨ Done"); }); +program + .command("enable-storage-scan") + .description("Activates dataset storage scan in WEBKNOSSOS for the default datastore.") + .action(() => { + console.log("Activating dataset storage scan in WEBKNOSSOS for the default datastore..."); + console.log( + callPsql( + `UPDATE webknossos.datastores SET reportUsedStorageEnabled = TRUE WHERE name = 'localhost'`, + ), + ); + console.log( + callPsql( + `UPDATE webknossos.organizations SET lastStorageScanTime = '1970-01-01T00:00:00.000Z' WHERE _id = 'sample_organization'`, + ), + ); + console.log("✨✨ Done"); + }); + program .command("dump-schema ") .description("Dumps current schema into a folder") diff --git a/unreleased_changes/9025.md b/unreleased_changes/9025.md new file mode 100644 index 00000000000..f9d8849603c --- /dev/null +++ b/unreleased_changes/9025.md @@ -0,0 +1,2 @@ +### Added +- Display used storage for each dataset in the dashboard's dataset table.