diff --git a/.gitignore b/.gitignore index 07243e00f7..d83cb0a983 100644 --- a/.gitignore +++ b/.gitignore @@ -26,3 +26,6 @@ tsconfig.tsbuildinfo -/docs/python/api/index.rst /docs/python/api/*.rst /.vite +/.idea +/.local +/.env diff --git a/package.json b/package.json index 2c7b4c2cf8..2e0e26be03 100644 --- a/package.json +++ b/package.json @@ -485,6 +485,37 @@ "neuroglancer/kvstore/s3:disabled": "./src/util/false.ts", "default": "./src/kvstore/s3/register_backend.ts" }, + "#kvstore/opfs/register_frontend": { + "neuroglancer/kvstore/opfs:enabled": "./src/kvstore/opfs/register_frontend.ts", + "neuroglancer/kvstore:none_by_default": "./src/util/false.ts", + "neuroglancer/kvstore/opfs:disabled": "./src/util/false.ts", + "default": "./src/kvstore/opfs/register_frontend.ts" + }, + "#kvstore/opfs/register_backend": { + "neuroglancer/kvstore/opfs:enabled": "./src/kvstore/opfs/register_backend.ts", + "neuroglancer/kvstore:none_by_default": "./src/util/false.ts", + "neuroglancer/kvstore/opfs:disabled": "./src/util/false.ts", + "default": "./src/kvstore/opfs/register_backend.ts" + }, + "#kvstore/ssa_s3/register_credentials_provider": { + "neuroglancer/python": "./src/util/false.ts", + "neuroglancer/kvstore/ssa_s3:enabled": "./src/kvstore/ssa_s3/register_credentials_provider.ts", + "neuroglancer/kvstore:none_by_default": "./src/util/false.ts", + "neuroglancer/kvstore/ssa_s3:disabled": "./src/util/false.ts", + "default": "./src/kvstore/ssa_s3/register_credentials_provider.ts" + }, + "#kvstore/ssa_s3/register_frontend": { + "neuroglancer/kvstore/ssa_s3:enabled": "./src/kvstore/ssa_s3/register_frontend.ts", + "neuroglancer/kvstore:none_by_default": "./src/util/false.ts", + "neuroglancer/kvstore/ssa_s3:disabled": "./src/util/false.ts", + "default": "./src/kvstore/ssa_s3/register_frontend.ts" + }, + "#kvstore/ssa_s3/register_backend": { + "neuroglancer/kvstore/ssa_s3:enabled": "./src/kvstore/ssa_s3/register_backend.ts", + "neuroglancer/kvstore:none_by_default": "./src/util/false.ts", + "neuroglancer/kvstore/ssa_s3:disabled": "./src/util/false.ts", + "default": "./src/kvstore/ssa_s3/register_backend.ts" + }, "#kvstore/zip/register_frontend": { "neuroglancer/kvstore/zip:enabled": "./src/kvstore/zip/register_frontend.ts", "neuroglancer/kvstore:none_by_default": "./src/util/false.ts", diff --git a/src/async_computation/encode_blosc.ts b/src/async_computation/encode_blosc.ts new file mode 100644 index 0000000000..d48ddc1526 --- /dev/null +++ b/src/async_computation/encode_blosc.ts @@ -0,0 +1,9 @@ +import { encodeBlosc } from "#src/async_computation/encode_blosc_request.js"; +import { registerAsyncComputation } from "#src/async_computation/handler.js"; + +registerAsyncComputation(encodeBlosc, async (data, config) => { + const { default: Blosc } = await import("numcodecs/blosc"); + const codec = Blosc.fromConfig({ id: "blosc", ...config }); + const result = await codec.encode(data); + return { value: result, transfer: [result.buffer] }; +}); diff --git a/src/async_computation/encode_blosc_request.ts b/src/async_computation/encode_blosc_request.ts new file mode 100644 index 0000000000..8ac946ebc3 --- /dev/null +++ b/src/async_computation/encode_blosc_request.ts @@ -0,0 +1,6 @@ +import { asyncComputation } from "#src/async_computation/index.js"; + +export const encodeBlosc = + asyncComputation<(data: Uint8Array, config: any) => Uint8Array>( + "encodeBlosc", + ); diff --git a/src/chunk_manager/backend.ts b/src/chunk_manager/backend.ts index df56bb02bb..e5c4c5d68c 100644 --- a/src/chunk_manager/backend.ts +++ 
b/src/chunk_manager/backend.ts @@ -20,6 +20,7 @@ import type { LayerChunkProgressInfo, } from "#src/chunk_manager/base.js"; import { + CHUNK_SOURCE_INVALIDATE_CHUNKS_RPC_ID, CHUNK_LAYER_STATISTICS_RPC_ID, CHUNK_MANAGER_RPC_ID, CHUNK_QUEUE_MANAGER_RPC_ID, @@ -1110,8 +1111,10 @@ export class ChunkQueueManager extends SharedObjectCounterpart { } } - invalidateSourceCache(source: ChunkSource) { - for (const chunk of source.chunks.values()) { + invalidateCachedChunks(source: ChunkSource, keys: string[]) { + for (const key of keys) { + const chunk = source.chunks.get(key); + if (!chunk) continue; switch (chunk.state) { case ChunkState.DOWNLOADING: cancelChunkDownload(chunk); @@ -1123,6 +1126,10 @@ export class ChunkQueueManager extends SharedObjectCounterpart { // Note: After calling this, chunk may no longer be valid. this.updateChunkState(chunk, ChunkState.QUEUED); } + } + + invalidateSourceCache(source: ChunkSource) { + this.invalidateCachedChunks(source, [...source.chunks.keys()]); this.rpc!.invoke("Chunk.update", { source: source.rpcId }); this.scheduleUpdate(); } @@ -1378,6 +1385,12 @@ registerRPC(CHUNK_SOURCE_INVALIDATE_RPC_ID, function (x) { source.chunkManager.queueManager.invalidateSourceCache(source); }); +registerRPC(CHUNK_SOURCE_INVALIDATE_CHUNKS_RPC_ID, function (x) { + const source = this.get(x.id); + source.chunkManager.queueManager.invalidateCachedChunks(source, x.keys); + source.chunkManager.queueManager.scheduleUpdate(); +}); + registerPromiseRPC( REQUEST_CHUNK_STATISTICS_RPC_ID, function (x: { queue: number }) { diff --git a/src/chunk_manager/base.ts b/src/chunk_manager/base.ts index 81b3c912ab..f897fe7d56 100644 --- a/src/chunk_manager/base.ts +++ b/src/chunk_manager/base.ts @@ -97,6 +97,8 @@ export const PREFETCH_PRIORITY_MULTIPLIER = 1e13; export const CHUNK_QUEUE_MANAGER_RPC_ID = "ChunkQueueManager"; export const CHUNK_MANAGER_RPC_ID = "ChunkManager"; export const CHUNK_SOURCE_INVALIDATE_RPC_ID = "ChunkSource.invalidate"; +export const CHUNK_SOURCE_INVALIDATE_CHUNKS_RPC_ID = + "ChunkSource.invalidateChunks"; export const REQUEST_CHUNK_STATISTICS_RPC_ID = "ChunkQueueManager.requestChunkStatistics"; diff --git a/src/chunk_manager/frontend.ts b/src/chunk_manager/frontend.ts index 3df76ce075..890475efbc 100644 --- a/src/chunk_manager/frontend.ts +++ b/src/chunk_manager/frontend.ts @@ -19,6 +19,7 @@ import type { LayerChunkProgressInfo, } from "#src/chunk_manager/base.js"; import { + CHUNK_SOURCE_INVALIDATE_CHUNKS_RPC_ID, CHUNK_LAYER_STATISTICS_RPC_ID, CHUNK_MANAGER_RPC_ID, CHUNK_QUEUE_MANAGER_RPC_ID, @@ -463,6 +464,26 @@ export class ChunkSource extends SharedObject { this.chunks.delete(key); } + invalidateChunks(keys: string[]): void { + const validKeys: string[] = []; + for (const key of keys) { + const chunk = this.chunks.get(key); + if (chunk) { + validKeys.push(key); + this.deleteChunk(key); + } + } + + if (validKeys.length > 0) { + this.rpc!.invoke(CHUNK_SOURCE_INVALIDATE_CHUNKS_RPC_ID, { + id: this.rpcId, + keys: validKeys, + }); + + this.chunkManager.chunkQueueManager.visibleChunksChanged.dispatch(); + } + } + addChunk(key: string, chunk: Chunk) { this.chunks.set(key, chunk); } diff --git a/src/chunk_worker.bundle.js b/src/chunk_worker.bundle.js index a463171535..4c1fa3e86c 100644 --- a/src/chunk_worker.bundle.js +++ b/src/chunk_worker.bundle.js @@ -12,3 +12,4 @@ import "#src/annotation/backend.js"; import "#src/datasource/enabled_backend_modules.js"; import "#src/kvstore/enabled_backend_modules.js"; import "#src/worker_rpc_context.js"; +import 
"#src/voxel_annotation/edit_backend.js"; diff --git a/src/datasource/index.ts b/src/datasource/index.ts index 38d2332bc5..fb43634634 100644 --- a/src/datasource/index.ts +++ b/src/datasource/index.ts @@ -54,10 +54,12 @@ import { emptyCompletionResult, getPrefixMatchesWithDescriptions, } from "#src/util/completion.js"; +import type { DataType } from "#src/util/data_type.js"; import { RefCounted } from "#src/util/disposable.js"; import type { vec3 } from "#src/util/geom.js"; import { type ProgressOptions } from "#src/util/progress_listener.js"; import type { Trackable } from "#src/util/trackable.js"; +import { CompoundTrackable } from "#src/util/trackable.js"; export type CompletionResult = BasicCompletionResult; @@ -132,6 +134,7 @@ export interface DataSubsource { singleMesh?: SingleMeshSource; segmentPropertyMap?: SegmentPropertyMap; segmentationGraph?: SegmentationGraphSource; + isPotentiallyWritable?: boolean; } export interface CompleteUrlOptionsBase extends Partial { @@ -216,6 +219,7 @@ export interface DataSourceWithRedirectInfo extends DataSource { export interface DataSubsourceSpecification { enabled?: boolean; + writable?: boolean; } export interface DataSourceSpecification { @@ -238,6 +242,25 @@ export function makeEmptyDataSourceSpecification(): DataSourceSpecification { }; } +export interface CommonCreationMetadata { + shape: number[]; + dataType: DataType; + voxelSize: number[]; + voxelUnit: string; + numScales: number; + downsamplingFactor: number[]; + name: string; +} +export abstract class DataSourceCreationState extends CompoundTrackable {} +export interface CreateDataSourceOptions { + kvStoreUrl: string; + registry: DataSourceRegistry; + metadata: { + common: CommonCreationMetadata; + sourceRelated?: DataSourceCreationState; + }; +} + export interface DataSourceProvider { scheme: string; description?: string; @@ -262,6 +285,8 @@ export interface KvStoreBasedDataSourceProvider { completeUrl?: ( options: GetKvStoreBasedDataSourceOptions, ) => Promise; + create?(options: CreateDataSourceOptions): Promise; + creationState?: CompoundTrackable; } export interface GetKvStoreBasedDataSourceOptions @@ -296,6 +321,11 @@ export class DataSourceRegistry extends RefCounted { registerKvStoreBasedProvider(provider: KvStoreBasedDataSourceProvider) { this.kvStoreBasedDataSources.set(provider.scheme, provider); } + getKvStoreBasedProvider( + scheme: string, + ): KvStoreBasedDataSourceProvider | undefined { + return this.kvStoreBasedDataSources.get(scheme); + } getProvider(url: string): [DataSourceProvider, string, string] { const m = url.match(schemePattern); diff --git a/src/datasource/zarr/async_computation.ts b/src/datasource/zarr/async_computation.ts index db56b860ff..678e1d6bfc 100644 --- a/src/datasource/zarr/async_computation.ts +++ b/src/datasource/zarr/async_computation.ts @@ -1,2 +1,3 @@ import "#src/async_computation/decode_blosc.js"; import "#src/async_computation/decode_zstd.js"; +import "#src/async_computation/encode_blosc.js"; diff --git a/src/datasource/zarr/backend.ts b/src/datasource/zarr/backend.ts index 7370f7af0e..ddb3985dc7 100644 --- a/src/datasource/zarr/backend.ts +++ b/src/datasource/zarr/backend.ts @@ -19,6 +19,10 @@ import "#src/datasource/zarr/codec/zstd/decode.js"; import "#src/datasource/zarr/codec/bytes/decode.js"; import "#src/datasource/zarr/codec/crc32c/decode.js"; +import "#src/datasource/zarr/codec/bytes/encode.js"; +import "#src/datasource/zarr/codec/gzip/encode.js"; +import "#src/datasource/zarr/codec/blosc/encode.js"; + import { WithParameters } from 
"#src/chunk_manager/backend.js"; import { VolumeChunkSourceParameters } from "#src/datasource/zarr/base.js"; import { @@ -28,11 +32,15 @@ import { import "#src/datasource/zarr/codec/gzip/decode.js"; import "#src/datasource/zarr/codec/sharding_indexed/decode.js"; import "#src/datasource/zarr/codec/transpose/decode.js"; +import { encodeArray } from "#src/datasource/zarr/codec/encode.js"; import { ChunkKeyEncoding } from "#src/datasource/zarr/metadata/index.js"; import { WithSharedKvStoreContextCounterpart } from "#src/kvstore/backend.js"; import { postProcessRawData } from "#src/sliceview/backend_chunk_decoders/postprocess.js"; +import { decodeChannel as decodeChannelUint32 } from "#src/sliceview/compressed_segmentation/decode_uint32.js"; +import { decodeChannel as decodeChannelUint64 } from "#src/sliceview/compressed_segmentation/decode_uint64.js"; import type { VolumeChunk } from "#src/sliceview/volume/backend.js"; import { VolumeChunkSource } from "#src/sliceview/volume/backend.js"; +import { DataType } from "#src/util/data_type.js"; import { registerSharedObject } from "#src/worker_rpc.js"; @registerSharedObject() @@ -97,4 +105,102 @@ export class ZarrVolumeChunkSource extends WithParameters( await postProcessRawData(chunk, signal, decoded); } } + + async writeChunk(chunk: VolumeChunk): Promise { + const { kvStore, getChunkKey, decodeCodecs } = this.chunkKvStore; + if (!kvStore.write) { + throw new Error( + "ZarrVolumeChunkSource.writeChunk: underlying kvStore is not writable", + ); + } + if (!chunk.data) { + throw new Error("ZarrVolumeChunkSource.writeChunk: missing chunk.data"); + } + let dataToWrite = chunk.data; + + const { compressedSegmentationBlockSize } = this.spec; + if (compressedSegmentationBlockSize !== undefined) { + const compressedData = chunk.data as Uint32Array; + const { chunkDataSize } = chunk; + if (!chunkDataSize) { + throw new Error("Cannot write chunk with unknown size."); + } + const numElements = + chunkDataSize[0] * chunkDataSize[1] * chunkDataSize[2]; + const { dataType } = this.spec; + const baseOffset = compressedData.length > 0 ? 
compressedData[0] : 0; + + if (dataType === DataType.UINT32) { + const uncompressedData = new Uint32Array(numElements); + if (baseOffset !== 0) { + decodeChannelUint32( + uncompressedData, + compressedData, + baseOffset, + chunkDataSize, + compressedSegmentationBlockSize, + ); + } + dataToWrite = uncompressedData; + } else { + const uncompressedData = new BigUint64Array(numElements); + if (baseOffset !== 0) { + decodeChannelUint64( + uncompressedData, + compressedData, + baseOffset, + chunkDataSize, + compressedSegmentationBlockSize, + ); + } + dataToWrite = uncompressedData; + } + } + + const encoded = await encodeArray( + decodeCodecs, + dataToWrite as ArrayBufferView, + new AbortController().signal, + ); + + const { parameters } = this; + const { chunkGridPosition } = chunk; + const { metadata } = parameters; + let baseKey = ""; + const rank = this.spec.rank; + const { physicalToLogicalDimension } = metadata.codecs.layoutInfo[0]; + let sep: string; + if (metadata.chunkKeyEncoding === ChunkKeyEncoding.DEFAULT) { + baseKey += "c"; + sep = metadata.dimensionSeparator; + } else { + sep = ""; + if (rank === 0) { + baseKey += "0"; + } + } + const keyCoords = new Array(rank); + const { readChunkShape } = metadata.codecs.layoutInfo[0]; + const { chunkShape } = metadata; + for ( + let fOrderPhysicalDim = 0; + fOrderPhysicalDim < rank; + ++fOrderPhysicalDim + ) { + const decodedDim = + physicalToLogicalDimension[rank - 1 - fOrderPhysicalDim]; + keyCoords[decodedDim] = Math.floor( + (chunkGridPosition[fOrderPhysicalDim] * readChunkShape[decodedDim]) / + chunkShape[decodedDim], + ); + } + for (let i = 0; i < rank; ++i) { + baseKey += `${sep}${keyCoords[i]}`; + sep = metadata.dimensionSeparator; + } + + const key = getChunkKey(chunkGridPosition, baseKey); + const arrayBuffer = new Uint8Array(encoded).buffer; + await kvStore.write!(key, arrayBuffer); + } } diff --git a/src/datasource/zarr/codec/blosc/encode.ts b/src/datasource/zarr/codec/blosc/encode.ts new file mode 100644 index 0000000000..97ed9f2791 --- /dev/null +++ b/src/datasource/zarr/codec/blosc/encode.ts @@ -0,0 +1,23 @@ +import { encodeBlosc } from "#src/async_computation/encode_blosc_request.js"; +import { requestAsyncComputation } from "#src/async_computation/request.js"; +import type { Configuration } from "#src/datasource/zarr/codec/blosc/resolve.js"; +import { registerCodec } from "#src/datasource/zarr/codec/encode.js"; +import { CodecKind } from "#src/datasource/zarr/codec/index.js"; + +registerCodec({ + name: "blosc", + kind: CodecKind.bytesToBytes, + encode( + configuration: Configuration, + decoded: Uint8Array, + signal: AbortSignal, + ): Promise { + return requestAsyncComputation( + encodeBlosc, + signal, + [decoded.buffer], + decoded, + configuration, + ); + }, +}); diff --git a/src/datasource/zarr/codec/bytes/encode.ts b/src/datasource/zarr/codec/bytes/encode.ts new file mode 100644 index 0000000000..dca2801f98 --- /dev/null +++ b/src/datasource/zarr/codec/bytes/encode.ts @@ -0,0 +1,26 @@ +import type { Configuration } from "#src/datasource/zarr/codec/bytes/resolve.js"; +import { registerCodec } from "#src/datasource/zarr/codec/encode.js"; +import { + type CodecArrayInfo, + CodecKind, +} from "#src/datasource/zarr/codec/index.js"; +import { DATA_TYPE_BYTES } from "#src/sliceview/base.js"; +import { convertEndian } from "#src/util/endian.js"; + +registerCodec({ + name: "bytes", + kind: CodecKind.arrayToBytes, + async encode( + configuration: Configuration, + encodedArrayInfo: CodecArrayInfo, + decoded: ArrayBufferView, + ): 
Promise { + const bytesPerElement = DATA_TYPE_BYTES[encodedArrayInfo.dataType]; + convertEndian(decoded, configuration.endian, bytesPerElement); + return new Uint8Array( + decoded.buffer, + decoded.byteOffset, + decoded.byteLength, + ); + }, +}); diff --git a/src/datasource/zarr/codec/decode.ts b/src/datasource/zarr/codec/decode.ts index 2cc3ea9184..519be93a15 100644 --- a/src/datasource/zarr/codec/decode.ts +++ b/src/datasource/zarr/codec/decode.ts @@ -18,16 +18,16 @@ import type { ChunkManager } from "#src/chunk_manager/backend.js"; import type { CodecArrayInfo, CodecChainSpec, + Codec, } from "#src/datasource/zarr/codec/index.js"; import { CodecKind } from "#src/datasource/zarr/codec/index.js"; -import type { KvStoreWithPath, ReadableKvStore } from "#src/kvstore/index.js"; +import type { + KvStore, + KvStoreWithPath, + ReadableKvStore, +} from "#src/kvstore/index.js"; import type { RefCounted } from "#src/util/disposable.js"; -export interface Codec { - name: string; - kind: CodecKind; -} - export interface ArrayToArrayCodec extends Codec { kind: CodecKind.arrayToArray; decode( @@ -145,14 +145,14 @@ export function applySharding( codecs: CodecChainSpec, baseKvStore: KvStoreWithPath, ): { - kvStore: ReadableKvStore; + kvStore: KvStore; getChunkKey: ( chunkGridPosition: ArrayLike, baseKey: string, ) => unknown; decodeCodecs: CodecChainSpec; } { - let kvStore: ReadableKvStore = baseKvStore.store; + let kvStore: KvStore = baseKvStore.store; let curCodecs = codecs; while (true) { const { shardingInfo } = curCodecs; diff --git a/src/datasource/zarr/codec/encode.ts b/src/datasource/zarr/codec/encode.ts new file mode 100644 index 0000000000..f2ec602c87 --- /dev/null +++ b/src/datasource/zarr/codec/encode.ts @@ -0,0 +1,77 @@ +import type { + CodecChainSpec, + Codec, + CodecArrayInfo, +} from "#src/datasource/zarr/codec/index.js"; +import { CodecKind } from "#src/datasource/zarr/codec/index.js"; + +interface ArrayToBytesCodec extends Codec { + kind: CodecKind.arrayToBytes; + encode( + configuration: Configuration, + encodedArrayInfo: CodecArrayInfo, + decoded: ArrayBufferView, + signal: AbortSignal, + ): Promise; +} + +interface BytesToBytesCodec extends Codec { + kind: CodecKind.bytesToBytes; + encode( + configuration: Configuration, + decoded: Uint8Array, + signal: AbortSignal, + ): Promise; +} + +const codecRegistry = { + [CodecKind.arrayToBytes]: new Map(), + [CodecKind.bytesToBytes]: new Map(), +}; + +export function registerCodec( + codec: ArrayToBytesCodec | BytesToBytesCodec, +) { + codecRegistry[codec.kind].set(codec.name, codec as any); +} + +export async function encodeArray( + codecs: CodecChainSpec, + decoded: ArrayBufferView, + signal: AbortSignal, +): Promise { + if (codecs[CodecKind.arrayToArray].length > 0) { + throw new Error("array -> array codecs are not supported for writing."); + } + + const arrayToBytesCodecSpec = codecs[CodecKind.arrayToBytes]; + const arrayToBytesImpl = codecRegistry[CodecKind.arrayToBytes].get( + arrayToBytesCodecSpec.name, + ); + if (!arrayToBytesImpl) { + throw new Error( + `Unsupported array -> bytes codec for writing: ${arrayToBytesCodecSpec.name}`, + ); + } + const arrayInfo = codecs.arrayInfo[codecs.arrayInfo.length - 1]; + let data = await arrayToBytesImpl.encode( + arrayToBytesCodecSpec.configuration, + arrayInfo, + decoded, + signal, + ); + + for (const codecSpec of codecs[CodecKind.bytesToBytes]) { + const bytesToBytesImpl = codecRegistry[CodecKind.bytesToBytes].get( + codecSpec.name, + ); + if (!bytesToBytesImpl) { + throw new Error( + 
`Unsupported bytes -> bytes codec for writing: ${codecSpec.name}`, + ); + } + data = await bytesToBytesImpl.encode(codecSpec.configuration, data, signal); + } + + return data; +} diff --git a/src/datasource/zarr/codec/gzip/encode.ts b/src/datasource/zarr/codec/gzip/encode.ts new file mode 100644 index 0000000000..aa21ea4d86 --- /dev/null +++ b/src/datasource/zarr/codec/gzip/encode.ts @@ -0,0 +1,22 @@ +import { registerCodec } from "#src/datasource/zarr/codec/encode.js"; +import type { Configuration } from "#src/datasource/zarr/codec/gzip/resolve.js"; +import { CodecKind } from "#src/datasource/zarr/codec/index.js"; +import { encodeGzip } from "#src/util/gzip.js"; + +for (const [name, compressionFormat] of [ + ["gzip", "gzip"], + ["zlib", "deflate"], +] as const) { + registerCodec({ + name, + kind: CodecKind.bytesToBytes, + async encode( + configuration: Configuration, + decoded: Uint8Array, + ): Promise { + configuration; + const result = await encodeGzip(decoded, compressionFormat); + return new Uint8Array(result); + }, + }); +} diff --git a/src/datasource/zarr/codec/index.ts b/src/datasource/zarr/codec/index.ts index bb99f48ebb..999a95bef1 100644 --- a/src/datasource/zarr/codec/index.ts +++ b/src/datasource/zarr/codec/index.ts @@ -16,6 +16,11 @@ import type { DataType } from "#src/util/data_type.js"; +export interface Codec { + name: string; + kind: CodecKind; +} + export enum CodecKind { arrayToArray = 0, arrayToBytes = 1, diff --git a/src/datasource/zarr/frontend.ts b/src/datasource/zarr/frontend.ts index c501eebec5..cce22ee5e4 100644 --- a/src/datasource/zarr/frontend.ts +++ b/src/datasource/zarr/frontend.ts @@ -27,6 +27,7 @@ import { } from "#src/coordinate_transform.js"; import type { ChannelMetadata, + CreateDataSourceOptions, DataSource, GetKvStoreBasedDataSourceOptions, KvStoreBasedDataSourceProvider, @@ -38,6 +39,10 @@ import "#src/datasource/zarr/codec/crc32c/resolve.js"; import "#src/datasource/zarr/codec/gzip/resolve.js"; import "#src/datasource/zarr/codec/sharding_indexed/resolve.js"; import "#src/datasource/zarr/codec/transpose/resolve.js"; +import { + getZarrCreator, + ZarrCreationState, +} from "#src/datasource/zarr/metadata/creation.js"; import type { ArrayMetadata, DimensionSeparator, @@ -485,6 +490,11 @@ export class ZarrDataSource implements KvStoreBasedDataSourceProvider { this.zarrVersion === undefined ? "" : ` v${this.zarrVersion}`; return `Zarr${versionStr} data source`; } + + get creationState() { + return this.zarrVersion ? new ZarrCreationState() : undefined; + } + get(options: GetKvStoreBasedDataSourceOptions): Promise { let { kvStoreUrl, additionalPath, fragment } = resolveUrl(options); kvStoreUrl = kvstoreEnsureDirectoryPipelineUrl( @@ -556,7 +566,7 @@ export class ZarrDataSource implements KvStoreBasedDataSourceProvider { id: "default", default: true, url: undefined, - subsource: { volume }, + subsource: { volume, isPotentiallyWritable: true }, }, { id: "bounds", @@ -597,6 +607,10 @@ export class ZarrDataSource implements KvStoreBasedDataSourceProvider { ), ); } + async create(options: CreateDataSourceOptions): Promise { + const creator = getZarrCreator(this.zarrVersion); + await creator.create(options); + } } export function registerAutoDetectV2(registry: AutoDetectRegistry) { diff --git a/src/datasource/zarr/metadata/creation.ts b/src/datasource/zarr/metadata/creation.ts new file mode 100644 index 0000000000..5c7fe041de --- /dev/null +++ b/src/datasource/zarr/metadata/creation.ts @@ -0,0 +1,373 @@ +/** + * @license + * Copyright 2025 Google Inc. 
* Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import type { + CreateDataSourceOptions, + CommonCreationMetadata, +} from "#src/datasource/index.js"; +import { DataSourceCreationState } from "#src/datasource/index.js"; +import { proxyWrite } from "#src/kvstore/proxy.js"; +import { joinPath } from "#src/kvstore/url.js"; +import { DataType } from "#src/util/data_type.js"; + +import { TrackableEnum } from "#src/util/trackable_enum.js"; + +export enum ZarrCompression { + RAW = 0, + GZIP = 1, + BLOSC = 2, +} + +export class ZarrCreationState extends DataSourceCreationState { + compression = new TrackableEnum( + ZarrCompression, + ZarrCompression.RAW, + ); + + constructor() { + super(); + this.add("compression", this.compression); + } +} + +const zarrUnitMapping: { [key: string]: string } = { + nm: "nanometer", + um: "micrometer", + mm: "millimeter", + cm: "centimeter", + m: "meter", + s: "second", + ms: "millisecond", + us: "microsecond", + ns: "nanosecond", +}; + +const dataTypeToZarrV2Dtype: { [key in DataType]?: string } = { + [DataType.UINT8]: "|u1", + [DataType.UINT16]: "<u2", + [DataType.UINT32]: "<u4", + [DataType.UINT64]: "<u8", + [DataType.INT8]: "|i1", + [DataType.INT16]: "<i2", + [DataType.INT32]: "<i4", + [DataType.FLOAT32]: "<f4", +}; + +interface ZarrCreator { + create(options: CreateDataSourceOptions): Promise<void>; +} + +class ZarrV2Creator implements ZarrCreator { + async create(options: CreateDataSourceOptions): Promise<void> { + const { kvStoreUrl, registry, metadata } = options; + const { sharedKvStoreContext } = registry; + const kvStore = sharedKvStoreContext.kvStoreContext.getKvStore(kvStoreUrl); + + const zgroupContent = JSON.stringify({ zarr_format: 2 }); + const writeZgroupPromise = proxyWrite( + sharedKvStoreContext, + kvStore.store.getUrl(joinPath(kvStore.path, ".zgroup")), + new TextEncoder().encode(zgroupContent).buffer as ArrayBuffer, + ); + + const commonMetadata = metadata.common as CommonCreationMetadata; + const zarrMetadata = metadata.sourceRelated as ZarrCreationState; + + const scales = []; + for (let i = 0; i < commonMetadata.numScales; ++i) { + const downsampleCoeffs = commonMetadata.downsamplingFactor.map( + (f: number) => Math.pow(f, i), + ); + scales.push({ + shape: commonMetadata.shape.map((dim: number, j: number) => + Math.ceil(dim / downsampleCoeffs[j]), + ), + chunks: [64, 64, 64], + dtype: dataTypeToZarrV2Dtype[commonMetadata.dataType], + compressor: this._buildV2ZarrayCompressorMetadata(zarrMetadata), + transform: commonMetadata.voxelSize.map( + (v: number, j: number) => v * downsampleCoeffs[j], + ), + }); + } + + const zattrsContent = this._buildV2OmeZattrs(commonMetadata, scales); + const writeZattrsPromise = proxyWrite( + sharedKvStoreContext, + kvStore.store.getUrl(joinPath(kvStore.path, ".zattrs")), + new TextEncoder().encode(zattrsContent).buffer as ArrayBuffer, + ); + + const writeZarrayPromises = scales.map((scale: any, i: number) => { + const zarrayUrl = kvStore.store.getUrl( + joinPath(kvStore.path, `s${i}`, ".zarray"), + ); + const zarrayContent = this._buildV2Zarray(scale); + return proxyWrite( + sharedKvStoreContext, + zarrayUrl, + new TextEncoder().encode(zarrayContent).buffer as ArrayBuffer, + ); + }); + + await Promise.all([ + writeZgroupPromise, + writeZattrsPromise, + 
...writeZarrayPromises, + ]); + } + + private _buildV2OmeZattrs( + common: CommonCreationMetadata, + scales: any[], + ): string { + const fullVoxelUnit = zarrUnitMapping[common.voxelUnit] ?? common.voxelUnit; + const rank = common.shape.length; + const defaultAxes = ["x", "y", "z", "c", "t"]; + const axes = Array.from({ length: rank }, (_, i) => ({ + name: defaultAxes[i] || `dim_${i}`, + type: "space", + unit: fullVoxelUnit, + })); + + const datasets = scales.map((scale, i) => ({ + path: `s${i}`, + coordinateTransformations: [ + { + type: "scale", + scale: scale.transform, + }, + ], + })); + + const omeMetadata = { + multiscales: [ + { + version: "0.4", + axes, + datasets, + name: common.name || "default", + type: "unknown", + metadata: null, + }, + ], + }; + return JSON.stringify(omeMetadata, null, 2); + } + + private _buildV2ZarrayCompressorMetadata( + zarrState: ZarrCreationState, + ): object | null { + switch (zarrState.compression.value) { + case ZarrCompression.BLOSC: + return { + id: "blosc", + cname: "lz4", + clevel: 5, + shuffle: 1, + }; + case ZarrCompression.GZIP: + return { id: "gzip", level: 1 }; + case ZarrCompression.RAW: + default: + return null; + } + } + + private _buildV2Zarray(scaleMetadata: any): string { + const { shape, chunks, dtype, compressor } = scaleMetadata; + const zarrMetadata = { + zarr_format: 2, + shape: shape, + chunks: chunks, + dtype: dtype, + compressor: compressor, + fill_value: 0, + order: "C", + filters: null, + }; + return JSON.stringify(zarrMetadata, null, 2); + } +} + +const dataTypeToZarrV3Dtype: { [key in DataType]?: string } = { + [DataType.UINT8]: "uint8", + [DataType.UINT16]: "uint16", + [DataType.UINT32]: "uint32", + [DataType.UINT64]: "uint64", + [DataType.INT8]: "int8", + [DataType.INT16]: "int16", + [DataType.INT32]: "int32", + [DataType.FLOAT32]: "float32", +}; + +class ZarrV3Creator implements ZarrCreator { + async create(options: CreateDataSourceOptions): Promise { + const { kvStoreUrl, registry, metadata } = options; + const { sharedKvStoreContext } = registry; + const kvStore = sharedKvStoreContext.kvStoreContext.getKvStore(kvStoreUrl); + + const rootGroupContent = this._buildV3RootGroupMetadata(metadata.common); + const writeRootPromise = proxyWrite( + sharedKvStoreContext, + kvStore.store.getUrl(joinPath(kvStore.path, "zarr.json")), + new TextEncoder().encode(rootGroupContent).buffer as ArrayBuffer, + ); + + const commonMetadata = metadata.common as CommonCreationMetadata; + const zarrMetadata = metadata.sourceRelated as ZarrCreationState; + + const scales = []; + for (let i = 0; i < commonMetadata.numScales; ++i) { + const downsampleCoeffs = commonMetadata.downsamplingFactor.map( + (f: number) => Math.pow(f, i), + ); + scales.push({ + shape: commonMetadata.shape.map((dim: number, j: number) => + Math.ceil(dim / downsampleCoeffs[j]), + ), + chunks: [64, 64, 64], + dataType: dataTypeToZarrV3Dtype[commonMetadata.dataType], + transform: commonMetadata.voxelSize.map( + (v: number, j: number) => v * downsampleCoeffs[j], + ), + }); + } + + const writeArrayPromises = scales.map((scale: any, i: number) => { + const arrayMetaUrl = kvStore.store.getUrl( + joinPath(kvStore.path, `s${i}`, "zarr.json"), + ); + const arrayMetaContent = this._buildV3ArrayMetadata(scale, zarrMetadata); + return proxyWrite( + sharedKvStoreContext, + arrayMetaUrl, + new TextEncoder().encode(arrayMetaContent).buffer as ArrayBuffer, + ); + }); + + await Promise.all([writeRootPromise, ...writeArrayPromises]); + } + + private _buildV3RootGroupMetadata(common: 
CommonCreationMetadata): string { + const rank = common.shape.length; + const defaultAxes = ["x", "y", "z", "c", "t"]; + const axes = Array.from({ length: rank }, (_, i) => ({ + name: defaultAxes[i] || `dim_${i}`, + type: "space", + unit: zarrUnitMapping[common.voxelUnit], + })); + + const datasets = Array.from({ length: common.numScales }, (_, i) => ({ + path: `s${i}`, + coordinateTransformations: [ + { + type: "scale", + scale: common.downsamplingFactor.map( + (f, j) => common.voxelSize[j] * Math.pow(f, i), + ), + }, + ], + })); + + const omeMetadata = { + multiscales: [ + { + version: "0.5", // OME-NGFF version compatible with Zarr v3 + axes, + datasets, + name: common.name || "default", + }, + ], + }; + + return JSON.stringify( + { + zarr_format: 3, + node_type: "group", + attributes: omeMetadata, + }, + null, + 2, + ); + } + + private _buildV3ArrayMetadata( + scaleMetadata: any, + zarrState: ZarrCreationState, + ): string { + const { shape, chunks, dataType } = scaleMetadata; + + const codecs: { name: string; configuration?: any }[] = [ + { + name: "bytes", + configuration: { + endian: "little", + }, + }, + ]; + + switch (zarrState.compression.value) { + case ZarrCompression.GZIP: + codecs.push({ name: "gzip", configuration: { level: 1 } }); + break; + case ZarrCompression.BLOSC: + codecs.push({ + name: "blosc", + configuration: { + cname: "lz4", + clevel: 5, + shuffle: "bit", + }, + }); + break; + } + + const zarrV3Array = { + zarr_format: 3, + node_type: "array", + shape: shape, + data_type: dataType, + chunk_grid: { + name: "regular", + configuration: { + chunk_shape: chunks, + }, + }, + chunk_key_encoding: { + name: "default", + configuration: { + separator: "/", + }, + }, + codecs: codecs, + fill_value: 0, + attributes: {}, + }; + return JSON.stringify(zarrV3Array, null, 2); + } +} + +export function getZarrCreator(version: number | undefined): ZarrCreator { + switch (version) { + case 2: + return new ZarrV2Creator(); + case 3: + return new ZarrV3Creator(); + default: + throw new Error(`Unsupported Zarr version: ${version}`); + } +} diff --git a/src/kvstore/enabled_backend_modules.ts b/src/kvstore/enabled_backend_modules.ts index 335031dc7d..42333d904d 100644 --- a/src/kvstore/enabled_backend_modules.ts +++ b/src/kvstore/enabled_backend_modules.ts @@ -9,3 +9,5 @@ import "#kvstore/ngauth/register"; import "#kvstore/ocdbt/register_backend"; import "#kvstore/s3/register_backend"; import "#kvstore/zip/register_backend"; +import "#kvstore/ssa_s3/register_backend"; +import "#kvstore/opfs/register_backend"; diff --git a/src/kvstore/enabled_frontend_modules.ts b/src/kvstore/enabled_frontend_modules.ts index 476e2f6d1d..8af8e40cfb 100644 --- a/src/kvstore/enabled_frontend_modules.ts +++ b/src/kvstore/enabled_frontend_modules.ts @@ -10,4 +10,7 @@ import "#kvstore/ngauth/register"; import "#kvstore/ngauth/register_credentials_provider"; import "#kvstore/ocdbt/register_frontend"; import "#kvstore/s3/register_frontend"; +import "#kvstore/ssa_s3/register_credentials_provider"; +import "#kvstore/ssa_s3/register_frontend"; import "#kvstore/zip/register_frontend"; +import "#kvstore/opfs/register_frontend"; diff --git a/src/kvstore/index.ts b/src/kvstore/index.ts index e94f870efb..159e6caa7c 100644 --- a/src/kvstore/index.ts +++ b/src/kvstore/index.ts @@ -91,7 +91,15 @@ export interface ListableKvStore { list?: (prefix: string, options: DriverListOptions) => Promise; } -export interface KvStore extends ReadableKvStore, ListableKvStore { +export interface WritableKvStore { + write?: (key: Key, 
value: ArrayBuffer) => Promise; + delete?: (key: Key) => Promise; +} + +export interface KvStore + extends ReadableKvStore, + ListableKvStore, + WritableKvStore { // Indicates that the only valid key is the empty string. singleKey?: boolean; } diff --git a/src/kvstore/opfs/backend.ts b/src/kvstore/opfs/backend.ts new file mode 100644 index 0000000000..54e12ce9ec --- /dev/null +++ b/src/kvstore/opfs/backend.ts @@ -0,0 +1,263 @@ +/** + * @license + * Copyright 2025 Google Inc. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import type { SharedKvStoreContextCounterpart } from "#src/kvstore/backend.js"; +import type { + DriverListOptions, + DriverReadOptions, + KvStore, + ListResponse, + ReadResponse, + StatOptions, + StatResponse, +} from "#src/kvstore/index.js"; +import { + encodePathForUrl, + kvstoreEnsureDirectoryPipelineUrl, +} from "#src/kvstore/url.js"; + +function ensureOpfsAvailable(context: string): void { + if ( + typeof navigator === "undefined" || + (navigator as any).storage === undefined + ) { + throw new Error( + `${context}: OPFS (navigator.storage) is not available in this environment`, + ); + } +} + +async function getRootDirectoryHandle(): Promise { + ensureOpfsAvailable("opfs"); + return await (navigator as any).storage.getDirectory(); +} + +function splitPath(path: string): string[] { + const normalized = path.replace(/\\+/g, "/").replace(/^\/+|\/+$/g, ""); + return normalized === "" ? 
[] : normalized.split("/"); +} + +async function getDirectoryHandleForPath( + baseDir: FileSystemDirectoryHandle, + pathSegments: string[], + create: boolean, +): Promise { + let current: FileSystemDirectoryHandle = baseDir; + for (const segment of pathSegments) { + if (segment === "") continue; + current = await current.getDirectoryHandle(segment, { create }); + } + return current; +} + +async function getFileHandleForPath( + baseDir: FileSystemDirectoryHandle, + pathSegments: string[], + create: boolean, +): Promise { + if (pathSegments.length === 0) { + throw new Error("getFileHandleForPath: empty path provided"); + } + const dirSegments = pathSegments.slice(0, -1); + const fileName = pathSegments[pathSegments.length - 1]; + const parent = await getDirectoryHandleForPath(baseDir, dirSegments, create); + return await parent.getFileHandle(fileName, { create }); +} + +export class OpfsKvStore implements KvStore { + private readonly basePathSegments: string[]; + private rootDirectoryPromise: Promise | undefined; + + constructor( + public sharedKvStoreContext: SharedKvStoreContextCounterpart, + basePath: string, + ) { + this.basePathSegments = splitPath(basePath); + } + + private getRoot(): Promise { + if (this.rootDirectoryPromise !== undefined) + return this.rootDirectoryPromise; + this.rootDirectoryPromise = getRootDirectoryHandle(); + return this.rootDirectoryPromise; + } + + private async getBaseDirectory(): Promise { + const root = await this.getRoot(); + return await getDirectoryHandleForPath( + root, + this.basePathSegments, + /*create=*/ true, + ); + } + + async stat( + key: string, + _options: StatOptions, + ): Promise { + const base = await this.getBaseDirectory(); + const pathSegments = splitPath(key); + try { + const fileHandle = await getFileHandleForPath( + base, + pathSegments, + /*create=*/ false, + ); + const file = await fileHandle.getFile(); + return { totalSize: file.size }; + } catch (e) { + if ( + e instanceof DOMException && + (e.name === "NotFoundError" || e.name === "NotAllowedError") + ) { + return undefined; + } + throw new Error( + `stat(${key}) failed for ${this.getUrl(key)}: ${String((e as Error).message ?? e)}`, + ); + } + } + + async read( + key: string, + _options: DriverReadOptions, + ): Promise { + const base = await this.getBaseDirectory(); + const pathSegments = splitPath(key); + try { + const fileHandle = await getFileHandleForPath( + base, + pathSegments, + /*create=*/ false, + ); + const file = await fileHandle.getFile(); + const buffer = await file.arrayBuffer(); + const response = new Response(buffer); + return { + response, + offset: 0, + length: buffer.byteLength, + totalSize: buffer.byteLength, + }; + } catch (e) { + if ( + e instanceof DOMException && + (e.name === "NotFoundError" || e.name === "NotAllowedError") + ) { + return undefined; + } + throw new Error( + `read(${key}) failed for ${this.getUrl(key)}: ${String((e as Error).message ?? 
e)}`, + ); + } + } + + async write(key: string, value: ArrayBuffer): Promise { + const base = await this.getBaseDirectory(); + const pathSegments = splitPath(key); + const fh = await getFileHandleForPath(base, pathSegments, /*create=*/ true); + const writable = await (fh as any).createWritable({ + keepExistingData: false, + }); + try { + await writable.write(new Uint8Array(value)); + } finally { + await writable.close(); + } + } + + async delete(key: string): Promise { + const base = await this.getBaseDirectory(); + const parts = splitPath(key); + if (parts.length === 0) throw new Error("delete: empty key"); + const parent = await getDirectoryHandleForPath( + base, + parts.slice(0, -1), + /*create=*/ false, + ); + await (parent as any).removeEntry(parts[parts.length - 1], { + recursive: false, + }); + } + + async list( + prefix: string, + _options: DriverListOptions, + ): Promise { + const base = await this.getBaseDirectory(); + const prefixSegments = splitPath(prefix); + + const dirForPrefix = await (async () => { + try { + return await getDirectoryHandleForPath( + base, + prefixSegments, + /*create=*/ false, + ); + } catch (e) { + if (e instanceof DOMException && e.name === "NotFoundError") { + return undefined; + } + throw e; + } + })(); + + if (dirForPrefix === undefined) { + return { entries: [], directories: [] }; + } + + const entries: Array<{ key: string }> = []; + const directories = new Set(); + + for await (const [name, handle] of ( + dirForPrefix as any + ).entries() as AsyncIterable<[string, FileSystemHandle]>) { + const fullKey = + prefix === "" + ? name + : `${prefix}${prefix.endsWith("/") ? "" : "/"}${name}`; + if ((handle as FileSystemDirectoryHandle).kind === "directory") { + directories.add(fullKey); + } else { + entries.push({ key: fullKey }); + } + } + + const sortedEntries = entries.sort((a, b) => + a.key < b.key ? -1 : a.key > b.key ? 1 : 0, + ); + const sortedDirectories = Array.from(directories).sort((a, b) => + a < b ? -1 : a > b ? 1 : 0, + ); + + return { entries: sortedEntries, directories: sortedDirectories }; + } + + getUrl(key: string): string { + const base = this.basePathSegments.join("/"); + const baseUrl = + base === "" ? "opfs://" : `opfs://${encodePathForUrl(base)}/`; + const ensured = kvstoreEnsureDirectoryPipelineUrl(baseUrl); + return ensured + (key === "" ? "" : encodePathForUrl(key)); + } + + get supportsOffsetReads(): boolean { + return false; + } + get supportsSuffixReads(): boolean { + return false; + } +} diff --git a/src/kvstore/opfs/common.ts b/src/kvstore/opfs/common.ts new file mode 100644 index 0000000000..5b78682adb --- /dev/null +++ b/src/kvstore/opfs/common.ts @@ -0,0 +1,64 @@ +/** + * @license + * Copyright 2025 Google Inc. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import type { BaseKvStoreProvider } from "#src/kvstore/context.js"; +import type { KvStore } from "#src/kvstore/index.js"; +import type { + KvStoreProviderRegistry, + SharedKvStoreContextBase, +} from "#src/kvstore/register.js"; +import type { UrlWithParsedScheme } from "#src/kvstore/url.js"; + +function parseOpfsUrlSuffix(suffix: string | undefined): { + basePath: string; + path: string; +} { + // Accept opfs://, opfs:/, or opfs: + const s = suffix ?? ""; + const m = s.match(/^\/?\/?(.*)$/); + if (m === null) { + throw new Error( + `Invalid opfs URL suffix ${JSON.stringify(s)}; expected opfs://`, + ); + } + const decoded = decodeURIComponent(m[1] ?? ""); + // Choose to have basePath be empty and return full path as initial kv path. + return { basePath: "", path: decoded }; +} + +export function registerProviders< + SharedKvStoreContext extends SharedKvStoreContextBase, +>( + registry: KvStoreProviderRegistry, + OpfsKvStoreClass: { + new (sharedKvStoreContext: SharedKvStoreContext, basePath: string): KvStore; + }, +) { + const provider: (context: SharedKvStoreContext) => BaseKvStoreProvider = ( + sharedKvStoreContext: SharedKvStoreContext, + ) => ({ + scheme: "opfs", + description: "Origin Private File System (browser)", + getKvStore(url: UrlWithParsedScheme) { + const { basePath, path } = parseOpfsUrlSuffix(url.suffix); + return { + store: new OpfsKvStoreClass(sharedKvStoreContext, basePath), + path, + }; + }, + }); + registry.registerBaseKvStoreProvider(provider); +} diff --git a/src/kvstore/opfs/frontend.ts b/src/kvstore/opfs/frontend.ts new file mode 100644 index 0000000000..40d57b9a14 --- /dev/null +++ b/src/kvstore/opfs/frontend.ts @@ -0,0 +1,47 @@ +/** + * @license + * Copyright 2025 + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import type { SharedKvStoreContext } from "#src/kvstore/frontend.js"; +import { ProxyKvStore } from "#src/kvstore/proxy.js"; +import { + encodePathForUrl, + kvstoreEnsureDirectoryPipelineUrl, +} from "#src/kvstore/url.js"; + +export class OpfsKvStore extends ProxyKvStore { + constructor( + public override sharedKvStoreContext: SharedKvStoreContext, + private readonly basePath: string, + ) { + super(sharedKvStoreContext); + } + + getUrl(key: string): string { + const base = + this.basePath === "" + ? "opfs://" + : `opfs://${encodePathForUrl(this.basePath)}/`; + const ensured = kvstoreEnsureDirectoryPipelineUrl(base); + return ensured + (key === "" ? "" : encodePathForUrl(key)); + } + + get supportsOffsetReads(): boolean { + return false; + } + get supportsSuffixReads(): boolean { + return false; + } +} diff --git a/src/kvstore/opfs/register_backend.ts b/src/kvstore/opfs/register_backend.ts new file mode 100644 index 0000000000..0caced1a1c --- /dev/null +++ b/src/kvstore/opfs/register_backend.ts @@ -0,0 +1,21 @@ +/** + * @license + * Copyright 2025 Google Inc. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import { backendOnlyKvStoreProviderRegistry } from "#src/kvstore/backend.js"; +import { OpfsKvStore } from "#src/kvstore/opfs/backend.js"; +import { registerProviders } from "#src/kvstore/opfs/common.js"; + +registerProviders(backendOnlyKvStoreProviderRegistry, OpfsKvStore); diff --git a/src/kvstore/opfs/register_frontend.ts b/src/kvstore/opfs/register_frontend.ts new file mode 100644 index 0000000000..4a946cc90e --- /dev/null +++ b/src/kvstore/opfs/register_frontend.ts @@ -0,0 +1,21 @@ +/** + * @license + * Copyright 2025 Google Inc. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import { frontendOnlyKvStoreProviderRegistry } from "#src/kvstore/frontend.js"; +import { registerProviders } from "#src/kvstore/opfs/common.js"; +import { OpfsKvStore } from "#src/kvstore/opfs/frontend.js"; + +registerProviders(frontendOnlyKvStoreProviderRegistry, OpfsKvStore); diff --git a/src/kvstore/proxy.ts b/src/kvstore/proxy.ts index f1a2f4bda3..9774ffd96f 100644 --- a/src/kvstore/proxy.ts +++ b/src/kvstore/proxy.ts @@ -30,6 +30,7 @@ import { READ_RPC_ID, STAT_RPC_ID, COMPLETE_URL_RPC_ID, + WRITE_RPC_ID, } from "#src/kvstore/shared_common.js"; import { finalPipelineUrlComponent, @@ -227,6 +228,48 @@ registerPromiseRPC( }, ); +export async function proxyWrite( + sharedKvStoreContext: SharedKvStoreContextBase, + url: string, + data: ArrayBuffer, +): Promise { + await sharedKvStoreContext.rpc!.promiseInvoke( + WRITE_RPC_ID, + { + sharedKvStoreContext: sharedKvStoreContext.rpcId, + url, + data, + }, + { transfers: [data] }, + ); +} + +registerPromiseRPC( + WRITE_RPC_ID, + async function ( + this: RPC, + options: { + sharedKvStoreContext: number; + url: string; + data: ArrayBuffer; + }, + ) { + const sharedKvStoreContext: SharedKvStoreContextBase = this.get( + options.sharedKvStoreContext, + ); + const { store, path } = sharedKvStoreContext.kvStoreContext.getKvStore( + options.url, + ); + if (store.write === undefined) { + throw new Error( + `The specified storage location is not writable: ${options.url}`, + ); + } + await store.write(path, options.data); + return { value: undefined }; + }, +); + export abstract class ProxyReadableKvStore { constructor(public sharedKvStoreContext: SharedKvStoreContextBase) {} diff --git a/src/kvstore/s3/backend.ts b/src/kvstore/s3/backend.ts index 7577a34618..971c3c9dc7 100644 --- a/src/kvstore/s3/backend.ts +++ b/src/kvstore/s3/backend.ts @@ -17,9 +17,9 @@ import type { SharedKvStoreContextCounterpart } from "#src/kvstore/backend.js"; import type { DriverListOptions, ListResponse } from "#src/kvstore/index.js"; import { proxyList } from 
"#src/kvstore/proxy.js"; -import { ReadableS3KvStore } from "#src/kvstore/s3/common.js"; +import { S3KvStoreBase } from "#src/kvstore/s3/common.js"; -export class S3KvStore extends ReadableS3KvStore { +export class S3KvStore extends S3KvStoreBase { list(prefix: string, options: DriverListOptions): Promise { return proxyList(this.sharedKvStoreContext, this.getUrl(prefix), options); } diff --git a/src/kvstore/s3/common.ts b/src/kvstore/s3/common.ts index e1e19ca06f..1b37bf67bc 100644 --- a/src/kvstore/s3/common.ts +++ b/src/kvstore/s3/common.ts @@ -35,10 +35,10 @@ import { } from "#src/kvstore/s3/list.js"; import { joinBaseUrlAndPath } from "#src/kvstore/url.js"; import type { FetchOk } from "#src/util/http_request.js"; -import { fetchOk } from "#src/util/http_request.js"; +import { HttpError, fetchOk } from "#src/util/http_request.js"; import { ProgressSpan } from "#src/util/progress_listener.js"; -export class ReadableS3KvStore< +export class S3KvStoreBase< SharedKvStoreContext extends SharedKvStoreContextBase, > implements KvStore { @@ -88,6 +88,32 @@ export class ReadableS3KvStore< ); } + async write(key: string, value: ArrayBuffer): Promise { + const url = joinBaseUrlAndPath(this.baseUrl, key); + try { + await this.fetchOkImpl(url, { + method: "PUT", + body: value, + }); + } catch (e) { + throw new Error(`Failed to write to ${url}.`, { cause: e }); + } + } + + async delete(key: string): Promise { + const url = joinBaseUrlAndPath(this.baseUrl, key); + try { + await this.fetchOkImpl(url, { + method: "DELETE", + }); + } catch (e) { + if (e instanceof HttpError && e.status === 404) { + return; + } + throw new Error(`Failed to delete ${url}.`, { cause: e }); + } + } + getUrl(path: string) { return joinBaseUrlAndPath(this.baseUrlForDisplay, path); } @@ -104,7 +130,7 @@ function amazonS3Provider< SharedKvStoreContext extends SharedKvStoreContextBase, >( sharedKvStoreContext: SharedKvStoreContext, - s3KvStoreClass: typeof ReadableS3KvStore, + s3KvStoreClass: typeof S3KvStoreBase, ): BaseKvStoreProvider { return { scheme: "s3", @@ -131,7 +157,7 @@ function amazonS3Provider< function s3Provider( sharedKvStoreContext: SharedKvStoreContext, httpScheme: "http" | "https", - s3KvStoreClass: typeof ReadableS3KvStore, + s3KvStoreClass: typeof S3KvStoreBase, ): BaseKvStoreProvider { return { scheme: `s3+${httpScheme}`, @@ -161,7 +187,7 @@ export function registerProviders< SharedKvStoreContext extends SharedKvStoreContextBase, >( registry: KvStoreProviderRegistry, - s3KvStoreClass: typeof ReadableS3KvStore, + s3KvStoreClass: typeof S3KvStoreBase, ) { registry.registerBaseKvStoreProvider((context) => amazonS3Provider(context, s3KvStoreClass), diff --git a/src/kvstore/s3/frontend.ts b/src/kvstore/s3/frontend.ts index b9c4fee91d..ef0ed81111 100644 --- a/src/kvstore/s3/frontend.ts +++ b/src/kvstore/s3/frontend.ts @@ -16,7 +16,7 @@ import type { SharedKvStoreContext } from "#src/kvstore/frontend.js"; import type { DriverListOptions, ListResponse } from "#src/kvstore/index.js"; -import { ReadableS3KvStore } from "#src/kvstore/s3/common.js"; +import { S3KvStoreBase } from "#src/kvstore/s3/common.js"; import { getS3BucketListing, listS3CompatibleUrl, @@ -24,7 +24,7 @@ import { import { joinBaseUrlAndPath } from "#src/kvstore/url.js"; import { ProgressSpan } from "#src/util/progress_listener.js"; -export class S3KvStore extends ReadableS3KvStore { +export class S3KvStore extends S3KvStoreBase { list(prefix: string, options: DriverListOptions): Promise { const { progressListener } = options; using _span = diff 
--git a/src/kvstore/shared_common.ts b/src/kvstore/shared_common.ts index fdf31f3480..ae781510d1 100644 --- a/src/kvstore/shared_common.ts +++ b/src/kvstore/shared_common.ts @@ -19,4 +19,5 @@ export const SHARED_KVSTORE_CONTEXT_RPC_ID = "SharedKvStoreContext"; export const STAT_RPC_ID = "SharedKvStoreContext.stat"; export const READ_RPC_ID = "SharedKvStoreContext.read"; export const LIST_RPC_ID = "SharedKvStoreContext.list"; +export const WRITE_RPC_ID = "SharedKvStoreContext.write"; export const COMPLETE_URL_RPC_ID = "SharedKvStoreContext.completeUrl"; diff --git a/src/kvstore/ssa_s3/README.md b/src/kvstore/ssa_s3/README.md new file mode 100644 index 0000000000..c00ea880fe --- /dev/null +++ b/src/kvstore/ssa_s3/README.md @@ -0,0 +1,2 @@ +The Stateless S3 Authenticator (SSA) is an authentication service that uses an OIDC portal to verify user identity. It then generates secure, temporary, pre-signed URLs that allow Neuroglancer to directly read from and write to private S3 buckets. +See [TODO: link the github here after it is created...] for more details. diff --git a/src/kvstore/ssa_s3/credentials_provider.ts b/src/kvstore/ssa_s3/credentials_provider.ts new file mode 100644 index 0000000000..8031250677 --- /dev/null +++ b/src/kvstore/ssa_s3/credentials_provider.ts @@ -0,0 +1,427 @@ +/** + * @license + * Copyright 2025 Google Inc. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import { + CredentialsProvider, + makeCredentialsGetter, +} from "#src/credentials_provider/index.js"; +import { + getCredentialsWithStatus, + monitorAuthPopupWindow, +} from "#src/credentials_provider/interactive_credentials_provider.js"; +import type { OAuth2Credentials } from "#src/credentials_provider/oauth2.js"; +import { raceWithAbort } from "#src/util/abort.js"; +import { fetchOk } from "#src/util/http_request.js"; +import { + verifyObject, + verifyObjectProperty, + verifyOptionalObjectProperty, + verifyString, +} from "#src/util/json.js"; +import type { ProgressOptions } from "#src/util/progress_listener.js"; +import { ProgressSpan } from "#src/util/progress_listener.js"; + +interface SsaConfiguration { + // OIDC issuer for the SSA deployment. 
+ issuer: string; +} + +interface OidcConfiguration { + authorization_endpoint: string; + token_endpoint: string; +} + +function parseSsaConfiguration(json: unknown): SsaConfiguration { + const obj = verifyObject(json); + const issuer = verifyObjectProperty(obj, "issuer", verifyString); + return { issuer }; +} + +async function discoverSsaConfiguration( + workerOrigin: string, +): Promise { + const response = await fetchOk( + `${workerOrigin}/.well-known/ssa-configuration`, + ); + const config = parseSsaConfiguration(await response.json()); + return config; +} + +async function discoverOpenIdConfiguration( + issuer: string, +): Promise { + const response = await fetchOk(`${issuer}/.well-known/openid-configuration`); + const json = verifyObject(await response.json()); + const authorization_endpoint = verifyObjectProperty( + json, + "authorization_endpoint", + verifyString, + ); + const token_endpoint = verifyObjectProperty( + json, + "token_endpoint", + verifyString, + ); + return { authorization_endpoint, token_endpoint }; +} + +interface OidcCodeMessage { + type: "oidc_code"; + code: string; + state: string; +} + +async function waitForOidcCodeMessage( + expectedOrigin: string, + source: Window, + signal: AbortSignal, +): Promise { + return new Promise((resolve, reject) => { + window.addEventListener( + "message", + (event: MessageEvent) => { + if (event.source !== source) return; + if (event.origin !== expectedOrigin) return; + try { + const data = verifyObject(event.data); + const type = verifyObjectProperty(data, "type", verifyString); + if (type !== "oidc_code") return; + const code = verifyObjectProperty(data, "code", verifyString); + const state = verifyObjectProperty(data, "state", verifyString); + resolve({ type: "oidc_code", code, state }); + } catch (e) { + reject( + new Error( + `Received unexpected OIDC authorization response: ${(e as Error).message}`, + ), + ); + } + }, + { signal }, + ); + }); +} + +function openPopupCentered(url: string, width: number, height: number) { + const top = + window.outerHeight - + window.innerHeight + + window.innerHeight / 2 - + height / 2; + const left = window.innerWidth / 2 - width / 2; + const popup = window.open( + url, + undefined, + `toolbar=no, menubar=no, width=${width}, height=${height}, top=${top}, left=${left}`, + ); + if (popup === null) { + throw new Error("Failed to create authentication popup window"); + } + return popup; +} + +function base64UrlEncode(bytes: Uint8Array): string { + const s = btoa(String.fromCharCode(...bytes)); + return s.replace(/\+/g, "-").replace(/\//g, "_").replace(/=+$/, ""); +} + +async function sha256Bytes(input: Uint8Array): Promise { + const digest = await crypto.subtle.digest("SHA-256", input); + return new Uint8Array(digest); +} + +function generateRandomAscii(length: number): string { + const charset = + "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789-._~"; + const random = new Uint8Array(length); + crypto.getRandomValues(random); + let s = ""; + for (let i = 0; i < length; ++i) { + s += charset[random[i] % charset.length]; + } + return s; +} + +async function createPkcePair(): Promise<{ + verifier: string; + challenge: string; +}> { + const verifier = generateRandomAscii(128); + const challenge = base64UrlEncode( + await sha256Bytes(new TextEncoder().encode(verifier)), + ); + return { verifier, challenge }; +} + +interface StoredSsaToken { + accessToken: string; + refreshToken: string; + tokenType: string; + expiresAt: number; + email?: string; +} + +function 
getLocalStorageKeyForWorker(workerOrigin: string): string { + return `ssa_oidc_token_${workerOrigin}`; +} + +function loadStoredSsaToken(workerOrigin: string): StoredSsaToken | null { + const key = getLocalStorageKeyForWorker(workerOrigin); + const raw = localStorage.getItem(key); + if (raw === null) return null; + const parsed = JSON.parse(raw); + const obj = verifyObject(parsed); + const accessToken = verifyObjectProperty(obj, "accessToken", verifyString); + const refreshToken = verifyObjectProperty(obj, "refreshToken", verifyString); + const tokenType = verifyObjectProperty(obj, "tokenType", verifyString); + const expiresAt = Number( + verifyObjectProperty(obj, "expiresAt", (v) => { + if (typeof v !== "number") throw new Error("expiresAt must be a number"); + return v; + }), + ); + const email = verifyOptionalObjectProperty(obj, "email", verifyString); + return { accessToken, refreshToken, tokenType, expiresAt, email }; +} + +function saveStoredSsaToken(workerOrigin: string, value: StoredSsaToken): void { + const key = getLocalStorageKeyForWorker(workerOrigin); + localStorage.setItem(key, JSON.stringify(value)); +} + +function clearStoredSsaToken(workerOrigin: string): void { + const key = getLocalStorageKeyForWorker(workerOrigin); + localStorage.removeItem(key); +} + +export class SsaCredentialsProvider extends CredentialsProvider { + constructor(public readonly workerOrigin: string) { + super(); + try { + // Throws if invalid URL. + const parsed = new URL(workerOrigin); + if (parsed.origin !== workerOrigin) { + throw new Error("workerOrigin must be an origin like https://host"); + } + } catch (e) { + throw new Error(`Invalid worker origin ${JSON.stringify(workerOrigin)}`, { + cause: e, + }); + } + } + + private async performInteractiveLogin( + options: ProgressOptions, + ): Promise { + using _span = new ProgressSpan(options.progressListener, { + message: `Requesting SSA login via ${this.workerOrigin}`, + }); + + const { issuer } = await discoverSsaConfiguration(this.workerOrigin); + const { authorization_endpoint, token_endpoint } = + await discoverOpenIdConfiguration(issuer); + + const clientId = "neuroglancer"; + const redirectUri = `${location.origin}/`; + const scope = "openid profile email"; + const state = generateRandomAscii(32); + const { verifier: codeVerifier, challenge: codeChallenge } = + await createPkcePair(); + + const authParams = new URLSearchParams({ + response_type: "code", + client_id: clientId, + redirect_uri: redirectUri, + scope, + state, + code_challenge: codeChallenge, + code_challenge_method: "S256", + }); + const popupUrl = `${authorization_endpoint}?${authParams.toString()}`; + + await getCredentialsWithStatus( + { + description: `SSA at ${this.workerOrigin}`, + requestDescription: "login", + get: async (innerSignal) => { + const abortController = new AbortController(); + const combined = AbortSignal.any([ + abortController.signal, + innerSignal, + options.signal, + ]); + try { + const popup = openPopupCentered(popupUrl, 450, 700); + monitorAuthPopupWindow(popup, abortController); + const appOrigin = new URL(redirectUri).origin; + const { code, state: returnedState } = await raceWithAbort( + waitForOidcCodeMessage(appOrigin, popup, abortController.signal), + combined, + ); + if (returnedState !== state) { + throw new Error("OIDC state mismatch detected"); + } + const tokenResp = await fetchOk(token_endpoint, { + method: "POST", + headers: { "content-type": "application/x-www-form-urlencoded" }, + body: new URLSearchParams({ + grant_type: 
"authorization_code", + code, + redirect_uri: redirectUri, + client_id: clientId, + code_verifier: codeVerifier, + }), + signal: combined, + }); + const tokenJson = verifyObject(await tokenResp.json()); + const access_token = verifyObjectProperty( + tokenJson, + "access_token", + verifyString, + ); + const token_type = verifyObjectProperty( + tokenJson, + "token_type", + verifyString, + ); + const refresh_token = verifyObjectProperty( + tokenJson, + "refresh_token", + verifyString, + ); + const expires_in = Number( + verifyObjectProperty(tokenJson, "expires_in", (v) => { + if (typeof v !== "number") + throw new Error("expires_in must be a number"); + return v; + }), + ); + const email = verifyOptionalObjectProperty( + tokenJson, + "email", + verifyString, + ); + const stored: StoredSsaToken = { + accessToken: access_token, + refreshToken: refresh_token, + tokenType: token_type, + expiresAt: Date.now() + expires_in * 1000, + email, + }; + saveStoredSsaToken(this.workerOrigin, stored); + return { tokenType: token_type, accessToken: access_token, email }; + } finally { + abortController.abort(); + } + }, + }, + options.signal, + ); + + // The above getCredentialsWithStatus returns OAuth2Credentials. We already saved full token. + const stored = loadStoredSsaToken(this.workerOrigin); + if (stored === null) { + throw new Error( + "Failed to persist SSA token to localStorage after interactive login", + ); + } + return stored; + } + + private async refreshTokenSilently( + refreshToken: string, + signal: AbortSignal, + ): Promise { + const { issuer } = await discoverSsaConfiguration(this.workerOrigin); + const { token_endpoint } = await discoverOpenIdConfiguration(issuer); + const clientId = "neuroglancer"; + + const resp = await fetchOk(token_endpoint, { + method: "POST", + headers: { "content-type": "application/x-www-form-urlencoded" }, + body: new URLSearchParams({ + grant_type: "refresh_token", + refresh_token: refreshToken, + client_id: clientId, + }), + signal, + }); + const json = verifyObject(await resp.json()); + const access_token = verifyObjectProperty( + json, + "access_token", + verifyString, + ); + const token_type = verifyObjectProperty(json, "token_type", verifyString); + const new_refresh = + verifyOptionalObjectProperty(json, "refresh_token", verifyString) ?? 
+ refreshToken; + const expires_in = Number( + verifyObjectProperty(json, "expires_in", (v) => { + if (typeof v !== "number") + throw new Error("expires_in must be a number"); + return v; + }), + ); + const email = verifyOptionalObjectProperty(json, "email", verifyString); + const stored: StoredSsaToken = { + accessToken: access_token, + refreshToken: new_refresh, + tokenType: token_type, + expiresAt: Date.now() + expires_in * 1000, + email, + }; + saveStoredSsaToken(this.workerOrigin, stored); + return stored; + } + + get = makeCredentialsGetter(async (options) => { + // 1) Try localStorage + const existing = loadStoredSsaToken(this.workerOrigin); + if (existing !== null) { + if (Date.now() < existing.expiresAt) { + return { + tokenType: existing.tokenType, + accessToken: existing.accessToken, + email: existing.email, + }; + } + // Try silent refresh + try { + const refreshed = await this.refreshTokenSilently( + existing.refreshToken, + options.signal, + ); + return { + tokenType: refreshed.tokenType, + accessToken: refreshed.accessToken, + email: refreshed.email, + }; + } catch { + clearStoredSsaToken(this.workerOrigin); + // Fall through to interactive login + } + } + + // 4) Interactive login + const stored = await this.performInteractiveLogin(options); + return { + tokenType: stored.tokenType, + accessToken: stored.accessToken, + email: stored.email, + }; + }); +} diff --git a/src/kvstore/ssa_s3/register_backend.ts b/src/kvstore/ssa_s3/register_backend.ts new file mode 100644 index 0000000000..cc9e1fbefa --- /dev/null +++ b/src/kvstore/ssa_s3/register_backend.ts @@ -0,0 +1,48 @@ +/** + * @license + * Copyright 2025 Google Inc. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import type { BaseKvStoreProvider } from "#src/kvstore/context.js"; +import type { SharedKvStoreContextBase } from "#src/kvstore/register.js"; +import { frontendBackendIsomorphicKvStoreProviderRegistry } from "#src/kvstore/register.js"; +import { SsaS3KvStore } from "#src/kvstore/ssa_s3/ssa_s3_kvstore.js"; +import { + ensureSsaHttpsUrl, + getWorkerOriginAndDatasetPrefix, + getDisplayBase, +} from "#src/kvstore/ssa_s3/url_utils.js"; + +function ssaIsomorphicProvider( + context: SharedKvStoreContextBase, +): BaseKvStoreProvider { + return { + scheme: "ssa+https", + description: "Stateless S3 Authenticator (SSA) over HTTPS", + getKvStore(parsedUrl) { + const parsed = ensureSsaHttpsUrl(parsedUrl.url); + const { workerOrigin, datasetBasePrefix } = + getWorkerOriginAndDatasetPrefix(parsed); + const displayBase = getDisplayBase(parsedUrl.url); + return { + store: new SsaS3KvStore(context, workerOrigin, "", displayBase), + path: datasetBasePrefix, + }; + }, + }; +} + +frontendBackendIsomorphicKvStoreProviderRegistry.registerBaseKvStoreProvider( + ssaIsomorphicProvider, +); diff --git a/src/kvstore/ssa_s3/register_credentials_provider.ts b/src/kvstore/ssa_s3/register_credentials_provider.ts new file mode 100644 index 0000000000..2924b0ced4 --- /dev/null +++ b/src/kvstore/ssa_s3/register_credentials_provider.ts @@ -0,0 +1,22 @@ +/** + * @license + * Copyright 2025 Google Inc. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import { registerDefaultCredentialsProvider } from "#src/credentials_provider/default_manager.js"; +import { SsaCredentialsProvider } from "#src/kvstore/ssa_s3/credentials_provider.js"; + +registerDefaultCredentialsProvider("ssa", (workerOrigin: string) => { + return new SsaCredentialsProvider(workerOrigin); +}); diff --git a/src/kvstore/ssa_s3/register_frontend.ts b/src/kvstore/ssa_s3/register_frontend.ts new file mode 100644 index 0000000000..ccb2d3cdc8 --- /dev/null +++ b/src/kvstore/ssa_s3/register_frontend.ts @@ -0,0 +1,191 @@ +/** + * @license + * Copyright 2025 Google Inc. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import type { OAuth2Credentials } from "#src/credentials_provider/oauth2.js"; +import { fetchOkWithOAuth2CredentialsAdapter } from "#src/credentials_provider/oauth2.js"; +import type { + BaseKvStoreProvider, + BaseKvStoreCompleteUrlOptions, + CompletionResult, +} from "#src/kvstore/context.js"; +import type { SharedKvStoreContext } from "#src/kvstore/frontend.js"; +import { frontendOnlyKvStoreProviderRegistry } from "#src/kvstore/frontend.js"; +import { SsaS3KvStore } from "#src/kvstore/ssa_s3/ssa_s3_kvstore.js"; +import { + ensureSsaHttpsUrl, + getWorkerOriginAndDatasetPrefix, + getDisplayBase, +} from "#src/kvstore/ssa_s3/url_utils.js"; +import { + verifyObject, + verifyObjectProperty, + verifyString, + verifyStringArray, +} from "#src/util/json.js"; + +interface SsaAuthenticateResponseLite { + permissions: { read: string[]; write: string[] }; + endpoints: { signRequests: string; listFiles: string }; +} + +function parseAuthenticateResponseLite( + json: unknown, +): SsaAuthenticateResponseLite { + const obj = verifyObject(json); + const endpointsObj = verifyObjectProperty(obj, "endpoints", verifyObject); + const permissionsObj = verifyObjectProperty(obj, "permissions", verifyObject); + return { + permissions: { + read: verifyObjectProperty(permissionsObj, "read", verifyStringArray), + write: verifyObjectProperty(permissionsObj, "write", verifyStringArray), + }, + endpoints: { + signRequests: verifyObjectProperty( + endpointsObj, + "signRequests", + verifyString, + ), + listFiles: verifyObjectProperty(endpointsObj, "listFiles", verifyString), + }, + }; +} + +function dirnameAndBasename(path: string): { dir: string; base: string } { + const idx = path.lastIndexOf("/"); + if (idx === -1) return { dir: "", base: path }; + return { dir: path.substring(0, idx + 1), base: path.substring(idx + 1) }; +} + +function joinPath(base: string, suffix: string) { + if (base === "") return suffix; + if (base.endsWith("/")) return base + suffix; + return base + "/" + suffix; +} + +async function completeSsaUrl( + sharedContext: SharedKvStoreContext, + options: BaseKvStoreCompleteUrlOptions, +): Promise { + const { url } = options; + const parsed = ensureSsaHttpsUrl(url.url); + const { workerOrigin, datasetBasePrefix } = + getWorkerOriginAndDatasetPrefix(parsed); + + const credentialsProvider = + sharedContext.credentialsManager.getCredentialsProvider( + "ssa", + workerOrigin, + ); + const fetchOkToWorker = + fetchOkWithOAuth2CredentialsAdapter(credentialsProvider); + + const authenticateResponse = parseAuthenticateResponseLite( + await ( + await fetchOkToWorker(`${workerOrigin}/authenticate`, { + method: "POST", + headers: { "content-type": "application/json" }, + body: "{}", + signal: options.signal, + }) + ).json(), + ); + + // Determine context for completion. + const { dir, base } = dirnameAndBasename(datasetBasePrefix); + + // Root-level completion: suggest directories from read permissions. + if (dir === "") { + const candidates = authenticateResponse.permissions.read.map((p) => + p.endsWith("/") ? p : p + "/", + ); + const matches = candidates + .filter((p) => p.startsWith(base)) + .map((p) => ({ value: p })); + const offset = url.url.length - base.length; + return { offset, completions: matches }; + } + + // Within a directory: use list-files for current dir prefix. 
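+ // Illustrative exchange (hypothetical keys): POSTing { "prefix": "datasets/" }
+ // to the listFiles endpoint is expected to return something like
+ //   { "objects": [ { "key": "datasets/raw/info" }, { "key": "datasets/raw/0/0_0_0" } ] }
+ // from which only the first path component below `dir` ("raw/") is offered
+ // as a completion.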
+ const listResponse = verifyObject( + await ( + await fetchOkToWorker( + `${workerOrigin}${authenticateResponse.endpoints.listFiles}`, + { + method: "POST", + headers: { "content-type": "application/json" }, + body: JSON.stringify({ prefix: dir }), + signal: options.signal, + }, + ) + ).json(), + ); + const objects = verifyObjectProperty( + listResponse, + "objects", + (x) => x as unknown as any[], + ); + const childDirs = new Set(); + const childFiles = new Set(); + for (const entry of objects) { + const obj = verifyObject(entry); + const key = verifyObjectProperty(obj, "key", verifyString); + if (!key.startsWith(dir)) continue; + const remainder = key.substring(dir.length); + const slash = remainder.indexOf("/"); + if (slash === -1) { + if (remainder !== "") childFiles.add(remainder); + } else { + const first = remainder.substring(0, slash + 1); + childDirs.add(first); + } + } + const candidates = [ + ...Array.from(childDirs).map((d) => (d.endsWith("/") ? d : d + "/")), + ...Array.from(childFiles), + ]; + const matches = candidates + .filter((p) => p.startsWith(base)) + .map((p) => ({ value: joinPath(dir, p) })); + const offset = url.url.length - base.length; + return { offset, completions: matches }; +} + +function ssaFrontendProvider( + sharedContext: SharedKvStoreContext, +): BaseKvStoreProvider { + return { + scheme: "ssa+https", + description: "Stateless S3 Authenticator (SSA) over HTTPS", + getKvStore(parsedUrl) { + // parsedUrl.url is full string like ssa+https://host/path + const parsed = ensureSsaHttpsUrl(parsedUrl.url); + const { workerOrigin, datasetBasePrefix } = + getWorkerOriginAndDatasetPrefix(parsed); + const displayBase = getDisplayBase(parsedUrl.url); + return { + store: new SsaS3KvStore(sharedContext, workerOrigin, "", displayBase), + path: datasetBasePrefix, + }; + }, + async completeUrl(options) { + return await completeSsaUrl(sharedContext, options); + }, + }; +} + +frontendOnlyKvStoreProviderRegistry.registerBaseKvStoreProvider( + ssaFrontendProvider, +); diff --git a/src/kvstore/ssa_s3/ssa_s3_kvstore.ts b/src/kvstore/ssa_s3/ssa_s3_kvstore.ts new file mode 100644 index 0000000000..95986c75f7 --- /dev/null +++ b/src/kvstore/ssa_s3/ssa_s3_kvstore.ts @@ -0,0 +1,550 @@ +/** + * @license + * Copyright 2025 Google Inc. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import type { OAuth2Credentials } from "#src/credentials_provider/oauth2.js"; +import { fetchOkWithOAuth2CredentialsAdapter } from "#src/credentials_provider/oauth2.js"; +import type { + DriverReadOptions, + KvStore, + ListResponse, + StatOptions, + StatResponse, + ReadResponse, +} from "#src/kvstore/index.js"; +import type { SharedKvStoreContextBase } from "#src/kvstore/register.js"; +import type { SsaCredentialsProvider } from "#src/kvstore/ssa_s3/credentials_provider.js"; +import { pipelineUrlJoin } from "#src/kvstore/url.js"; +import type { FetchOk } from "#src/util/http_request.js"; +import { fetchOk, HttpError } from "#src/util/http_request.js"; +import { + verifyObject, + verifyObjectProperty, + verifyString, + verifyStringArray, +} from "#src/util/json.js"; +import { + MultiConsumerProgressListener, + ProgressSpan, +} from "#src/util/progress_listener.js"; + +function joinPath(base: string, suffix: string) { + if (base === "") return suffix; + if (base.endsWith("/")) return base + suffix; + return base + "/" + suffix; +} + +interface SsaAuthenticateResponse { + bucket: string; + endpoints: { + signRequests: string; // path relative to worker origin, e.g. "/sign-requests" + listFiles: string; // path relative to worker origin, e.g. "/list-files" + }; + permissions: { + read: string[]; + write: string[]; + }; +} + +function parseAuthenticateResponse(json: unknown): SsaAuthenticateResponse { + const obj = verifyObject(json); + const bucket = verifyObjectProperty(obj, "bucket", verifyString); + const endpointsObj = verifyObjectProperty(obj, "endpoints", verifyObject); + const permissionsObj = verifyObjectProperty(obj, "permissions", verifyObject); + return { + bucket, + endpoints: { + signRequests: verifyObjectProperty( + endpointsObj, + "signRequests", + verifyString, + ), + listFiles: verifyObjectProperty(endpointsObj, "listFiles", verifyString), + }, + permissions: { + read: verifyObjectProperty(permissionsObj, "read", verifyStringArray), + write: verifyObjectProperty(permissionsObj, "write", verifyStringArray), + }, + }; +} + +interface SsaSignRequestBody { + requests: Array<{ + action: "GET" | "PUT" | "HEAD" | "DELETE"; + key: string; // key within the SSA-managed bucket + }>; +} + +interface SsaSignRequestsResponseItem { + key: string; + url: string; +} +interface SsaSignRequestsResponse { + signedRequests: SsaSignRequestsResponseItem[]; +} + +function parseSignRequestsResponse(json: unknown): SsaSignRequestsResponse { + const obj = verifyObject(json); + const signedRequestsArrayUnknown = verifyObjectProperty( + obj, + "signedRequests", + (v) => { + if (!Array.isArray(v)) { + throw new Error("signedRequests must be an array"); + } + return v as unknown[]; + }, + ); + const signedRequests: SsaSignRequestsResponseItem[] = + signedRequestsArrayUnknown.map((entry) => { + const entryObj = verifyObject(entry); + const key = verifyObjectProperty(entryObj, "key", verifyString); + const url = verifyObjectProperty(entryObj, "url", verifyString); + return { key, url }; + }); + return { signedRequests }; +} + +interface SsaListFilesObject { + key: string; + size: number; + lastModified: string; +} +interface SsaListFilesResponse { + prefix: string; + objects: SsaListFilesObject[]; +} + +function parseListFilesResponse(json: unknown): SsaListFilesResponse { + const obj = verifyObject(json); + const prefix = verifyObjectProperty(obj, "prefix", verifyString); + const objectsArray = verifyObjectProperty( + obj, + "objects", + (x) => x as unknown as any[], + ); + const objects: 
SsaListFilesObject[] = objectsArray.map((entry) => { + const e = verifyObject(entry); + const key = verifyObjectProperty(e, "key", verifyString); + const sizeStr = verifyObjectProperty(e, "size", (v) => { + if (typeof v !== "number") throw new Error("Expected number"); + return v; + }); + const lastModified = verifyObjectProperty(e, "lastModified", verifyString); + return { key, size: sizeStr, lastModified }; + }); + return { prefix, objects }; +} + +export class SsaS3KvStore implements KvStore { + private readonly fetchOkToWorker: FetchOk; + private readonly credentialsProvider: SsaCredentialsProvider; + private readonly workerOrigin: string; + private readonly datasetBasePrefix: string; + private readonly displayBaseUrl: string; + + private authenticatePromise: Promise | undefined; + + constructor( + public readonly sharedKvStoreContext: SharedKvStoreContextBase, + workerOrigin: string, + datasetBasePrefix: string, + displayBaseUrl: string, + ) { + this.workerOrigin = workerOrigin; + this.datasetBasePrefix = datasetBasePrefix; + this.displayBaseUrl = displayBaseUrl; + this.credentialsProvider = + sharedKvStoreContext.credentialsManager.getCredentialsProvider( + "ssa", + workerOrigin, + ) as unknown as SsaCredentialsProvider; + this.fetchOkToWorker = fetchOkWithOAuth2CredentialsAdapter( + this.credentialsProvider, + ); + } + + getUrl(path: string): string { + return pipelineUrlJoin(this.displayBaseUrl, path); + } + + get supportsOffsetReads() { + return true; + } + + get supportsSuffixReads() { + return true; + } + + private async ensureAuthenticated( + signal?: AbortSignal, + ): Promise { + if (this.authenticatePromise === undefined) { + this.authenticatePromise = this.performAuthenticate(signal).catch((e) => { + // Clear cached promise on failure to allow retry. 
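+ // A successful /authenticate result stays memoized on this store, so the
+ // worker is only re-contacted here after a failed attempt.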
+ this.authenticatePromise = undefined; + throw e; + }); + } + return this.authenticatePromise; + } + + private async performAuthenticate( + signal?: AbortSignal, + ): Promise { + using _span = new ProgressSpan(new MultiConsumerProgressListener(), { + message: `Connecting to SSA worker at ${this.workerOrigin}`, + }); + try { + const response = await this.fetchOkToWorker( + `${this.workerOrigin}/authenticate`, + { + method: "POST", + signal, + headers: { "content-type": "application/json" }, + body: "{}", + }, + ); + const result = parseAuthenticateResponse(await response.json()); + return result; + } catch (e) { + if (e instanceof HttpError) { + if (e.status === 401 || e.status === 403) { + throw new Error( + `Failed to authenticate with SSA service at ${this.workerOrigin}: access denied (${e.status}).`, + ); + } + } + throw new Error( + `Failed to connect to SSA service at ${this.workerOrigin}: ${(e as Error).message}`, + { cause: e }, + ); + } + } + + private async signSingleUrl( + fullKey: string, + type: "GET" | "PUT" | "HEAD" | "DELETE", + signal?: AbortSignal, + ): Promise { + const { endpoints } = await this.ensureAuthenticated(signal); + try { + const response = await this.fetchOkToWorker( + `${this.workerOrigin}${endpoints.signRequests}`, + { + method: "POST", + signal, + headers: { "content-type": "application/json" }, + body: JSON.stringify({ + requests: [{ action: type, key: fullKey }], + } satisfies SsaSignRequestBody), + }, + ); + const { signedRequests } = parseSignRequestsResponse( + await response.json(), + ); + if (signedRequests.length !== 1) { + throw new Error( + `SSA /sign-requests returned ${signedRequests.length} entries, expected 1 for key ${JSON.stringify(fullKey)}`, + ); + } + return signedRequests[0].url; + } catch (e) { + if (e instanceof HttpError && (e.status === 401 || e.status === 403)) { + throw new Error( + `Permission denied by SSA while signing ${JSON.stringify(fullKey)} (HTTP ${e.status}).`, + { cause: e }, + ); + } + throw new Error( + `Failed to sign request for ${JSON.stringify(fullKey)} via SSA: ${(e as Error).message}`, + { cause: e }, + ); + } + } + + async write(key: string, value: ArrayBuffer): Promise { + const fullKey = joinPath(this.datasetBasePrefix, key); + const url = await this.signSingleUrl(fullKey, "PUT"); + try { + await fetchOk(url, { + method: "PUT", + body: value, + }); + } catch (e) { + if (e instanceof HttpError && (e.status === 401 || e.status === 403)) { + throw new Error( + `Permission denied by SSA while writing ${this.getUrl(key)} (HTTP ${e.status}).`, + { cause: e }, + ); + } + throw new Error( + `Failed to write ${this.getUrl(key)} via SSA-signed URL: ${(e as Error).message}`, + { cause: e }, + ); + } + } + + async delete(key: string): Promise { + const fullKey = joinPath(this.datasetBasePrefix, key); + const url = await this.signSingleUrl(fullKey, "DELETE"); + try { + await fetchOk(url, { + method: "DELETE", + }); + } catch (e) { + if (e instanceof HttpError) { + if (e.status === 404) { + // Deleting a non-existent object is not considered an error in S3. 
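+ // (A 404 from the signed URL is therefore treated the same as a
+ // successful delete here.)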
+ return; + } + if (e.status === 401 || e.status === 403) { + throw new Error( + `Permission denied by SSA while deleting ${this.getUrl(key)} (HTTP ${e.status}).`, + { cause: e }, + ); + } + } + throw new Error( + `Failed to delete ${this.getUrl(key)} via SSA-signed URL: ${(e as Error).message}`, + { cause: e }, + ); + } + } + + async stat( + key: string, + options: StatOptions, + ): Promise { + const fullKey = joinPath(this.datasetBasePrefix, key); + const url = await this.signSingleUrl(fullKey, "HEAD", options.signal); + try { + const response = await fetchOk(url, { + method: "HEAD", + signal: options.signal, + progressListener: options.progressListener, + }); + const contentLength = response.headers.get("content-length"); + let totalSize: number | undefined; + if (contentLength !== null) { + const n = Number(contentLength); + if (!Number.isFinite(n) || n < 0) { + throw new Error( + `Invalid content-length returned by S3 for ${JSON.stringify(fullKey)}: ${JSON.stringify(contentLength)}`, + ); + } + totalSize = n; + } + return { totalSize }; + } catch (e) { + if (e instanceof HttpError && e.status === 404) { + if (options.throwIfMissing === true) { + throw new Error(`${this.getUrl(key)} not found`, { cause: e }); + } + return undefined; + } + throw new Error( + `Failed to stat ${this.getUrl(key)} via SSA-signed URL: ${(e as Error).message}`, + { cause: e }, + ); + } + } + + async read( + key: string, + options: DriverReadOptions, + ): Promise { + const fullKey = joinPath(this.datasetBasePrefix, key); + const url = await this.signSingleUrl(fullKey, "GET", options.signal); + + // Construct Range header based on options.byteRange for efficient reads. + let rangeHeader: string | undefined; + const { byteRange } = options; + if (byteRange !== undefined) { + if ("suffixLength" in byteRange) { + // For suffix reads we must know total size; issue HEAD first then compute exact range. + const statResponse = await this.stat(key, { signal: options.signal }); + if ( + statResponse === undefined || + statResponse.totalSize === undefined + ) { + throw new Error( + `Failed to determine total size of ${this.getUrl(key)} in order to fetch suffix bytes`, + ); + } + const total = statResponse.totalSize; + const len = Math.min(byteRange.suffixLength, total); + const start = total - len; + rangeHeader = `bytes=${start}-${total - 1}`; + } else { + if (byteRange.length === 0) { + // Request 1 byte and discard per HTTP semantics for 0-length workaround. + const start = Math.max(byteRange.offset - 1, 0); + rangeHeader = `bytes=${start}-${start}`; + } else { + rangeHeader = `bytes=${byteRange.offset}-${byteRange.offset + byteRange.length - 1}`; + } + } + } + + try { + const response = await fetchOk(url, { + method: "GET", + signal: options.signal, + progressListener: options.progressListener, + headers: rangeHeader ? { range: rangeHeader } : undefined, + cache: rangeHeader + ? navigator.userAgent.indexOf("Chrome") !== -1 + ? "no-store" + : "default" + : undefined, + }); + + // Interpret response similar to http/read.ts logic. 
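+ // Worked example (illustrative numbers): a request sent with
+ // `range: bytes=100-199` should come back as a 206 whose
+ // `content-range: bytes 100-199/4096` header is parsed below into
+ // offset=100, length=100, totalSize=4096.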
+ let offset: number | undefined; + let length: number | undefined; + let totalSize: number | undefined; + if (response.status === 206) { + const contentRange = response.headers.get("content-range"); + if (contentRange !== null) { + const m = contentRange.match(/bytes ([0-9]+)-([0-9]+)\/(\*|[0-9]+)/); + if (m === null) { + throw new Error( + `Invalid content-range header from S3 for ${this.getUrl(key)}: ${JSON.stringify(contentRange)}`, + ); + } + offset = Number(m[1]); + const endPos = Number(m[2]); + length = endPos - offset + 1; + if (m[3] !== "*") totalSize = Number(m[3]); + } else if (byteRange !== undefined) { + // Some servers omit content-range; use requested range info where possible. + if ("suffixLength" in byteRange) { + // Already computed via HEAD. + const statResponse = await this.stat(key, { + signal: options.signal, + }); + totalSize = statResponse?.totalSize; + if (totalSize === undefined) { + throw new Error("Missing total size for suffix read"); + } + const len = Math.min(byteRange.suffixLength, totalSize); + offset = totalSize - len; + length = len; + } else { + if (byteRange.length === 0) { + offset = byteRange.offset; + length = 0; + // Return empty body for zero-length reads. + return { + response: new Response(new Uint8Array(0)), + offset, + length, + totalSize, + }; + } else { + offset = byteRange.offset; + length = byteRange.length; + } + } + } + } else { + const cl = response.headers.get("content-length"); + if (cl !== null) { + const n = Number(cl); + if (!Number.isFinite(n) || n < 0) { + throw new Error( + `Invalid content-length header for ${this.getUrl(key)}: ${JSON.stringify(cl)}`, + ); + } + length = n; + totalSize = n; + offset = 0; + } + } + if (offset === undefined) offset = 0; + return { response, offset, length, totalSize }; + } catch (e) { + if (e instanceof HttpError) { + if (e.status === 404) { + if (options.throwIfMissing === true) { + throw new Error(`${this.getUrl(key)} not found`, { cause: e }); + } + return undefined; + } + if (e.status === 401 || e.status === 403) { + throw new Error( + `Permission denied while reading ${this.getUrl(key)} (HTTP ${e.status}).`, + { cause: e }, + ); + } + } + throw new Error( + `Failed to read ${this.getUrl(key)} via SSA-signed URL: ${(e as Error).message}`, + { cause: e }, + ); + } + } + + async list( + prefix: string, + options: { signal?: AbortSignal } = {}, + ): Promise { + const fullPrefix = joinPath(this.datasetBasePrefix, prefix); + const { endpoints } = await this.ensureAuthenticated(options.signal); + try { + const response = await this.fetchOkToWorker( + `${this.workerOrigin}${endpoints.listFiles}`, + { + method: "POST", + signal: options.signal, + headers: { "content-type": "application/json" }, + body: JSON.stringify({ prefix: fullPrefix }), + }, + ); + const parsed = parseListFilesResponse(await response.json()); + // Compute immediate children relative to the requested prefix. 
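+ // E.g. with a hypothetical prefix "scans/", the key "scans/0/chunk" adds the
+ // directory "0/" and "scans/info" adds the file entry "info"; anything deeper
+ // is collapsed into its first path component.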
+ const childDirSet = new Set(); + const childFileSet = new Set(); + for (const obj of parsed.objects) { + const key = obj.key; + if (!key.startsWith(parsed.prefix)) continue; + const remainder = key.substring(parsed.prefix.length); + if (remainder === "") continue; + const slash = remainder.indexOf("/"); + if (slash === -1) { + childFileSet.add(remainder); + } else { + childDirSet.add(remainder.substring(0, slash + 1)); + } + } + return { + directories: Array.from(childDirSet), + entries: Array.from(childFileSet).map((k) => ({ key: k })), + }; + } catch (e) { + if (e instanceof HttpError && (e.status === 401 || e.status === 403)) { + throw new Error( + `Permission denied by SSA while listing ${this.getUrl(prefix)} (HTTP ${e.status}).`, + { cause: e }, + ); + } + throw new Error( + `Failed to list files for ${this.getUrl(prefix)} via SSA: ${(e as Error).message}`, + { cause: e }, + ); + } + } +} diff --git a/src/kvstore/ssa_s3/url_utils.ts b/src/kvstore/ssa_s3/url_utils.ts new file mode 100644 index 0000000000..ef1d68012e --- /dev/null +++ b/src/kvstore/ssa_s3/url_utils.ts @@ -0,0 +1,49 @@ +/** + * @license + * Copyright 2025 Google Inc. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +export const SSA_SCHEME_PREFIX = "ssa+"; + +export function ensureSsaHttpsUrl(url: string): URL { + if (!url.startsWith("ssa+https://")) { + throw new Error( + `Invalid URL ${JSON.stringify(url)}: expected ssa+https scheme`, + ); + } + const httpUrl = url.substring(SSA_SCHEME_PREFIX.length); + const parsed = new URL(httpUrl); + if (parsed.hash) throw new Error("Fragment not supported in ssa+https URLs"); + if (parsed.username || parsed.password) + throw new Error( + "Basic auth credentials are not supported in ssa+https URLs", + ); + return parsed; +} + +export function getWorkerOriginAndDatasetPrefix(parsed: URL): { + workerOrigin: string; + datasetBasePrefix: string; +} { + const workerOrigin = parsed.origin; + const datasetBasePrefix = decodeURIComponent( + parsed.pathname.replace(/^\//, ""), + ); + return { workerOrigin, datasetBasePrefix }; +} + +export function getDisplayBase(url: string): string { + const parsed = ensureSsaHttpsUrl(url); + return `${SSA_SCHEME_PREFIX}${parsed.origin}/`; +} diff --git a/src/layer/image/index.ts b/src/layer/image/index.ts index 241b69853e..c44cb70ddb 100644 --- a/src/layer/image/index.ts +++ b/src/layer/image/index.ts @@ -34,7 +34,10 @@ import { UserLayer, } from "#src/layer/index.js"; import type { LoadedDataSubsource } from "#src/layer/layer_data_source.js"; +import { registerVoxelLayerControls } from "#src/layer/vox/controls.js"; +import { UserLayerWithVoxelEditingMixin } from "#src/layer/vox/index.js"; import { Overlay } from "#src/overlay.js"; +import type { RenderLayerTransformOrError } from "#src/render_coordinate_transform.js"; import { getChannelSpace } from "#src/render_coordinate_transform.js"; import { RenderScaleHistogram, @@ -48,21 +51,25 @@ import { ImageRenderLayer, } from "#src/sliceview/volume/image_renderlayer.js"; import { 
trackableAlphaValue } from "#src/trackable_alpha.js"; -import { trackableBlendModeValue } from "#src/trackable_blend.js"; +import { BLEND_MODES, trackableBlendModeValue } from "#src/trackable_blend.js"; import { TrackableBoolean } from "#src/trackable_boolean.js"; import { trackableFiniteFloat } from "#src/trackable_finite_float.js"; import type { WatchableValueInterface } from "#src/trackable_value.js"; import { makeCachedDerivedWatchableValue, makeCachedLazyDerivedWatchableValue, + makeDerivedWatchableValue, registerNested, + TrackableValue, WatchableValue, } from "#src/trackable_value.js"; import { UserLayerWithAnnotationsMixin } from "#src/ui/annotations.js"; +import { registerVoxelTools } from "#src/ui/voxel_annotations.js"; import { setClipboard } from "#src/util/clipboard.js"; import type { Borrowed } from "#src/util/disposable.js"; import { makeValueOrError } from "#src/util/error.js"; -import { verifyOptionalObjectProperty } from "#src/util/json.js"; +import { verifyFloat01, verifyOptionalObjectProperty } from "#src/util/json.js"; +import { TrackableEnum } from "#src/util/trackable_enum.js"; import { trackableShaderModeValue, VolumeRenderingModes, @@ -119,7 +126,9 @@ export interface ImageLayerSelectionState extends UserLayerSelectionState { value: any; } -const Base = UserLayerWithAnnotationsMixin(UserLayer); +const Base = UserLayerWithVoxelEditingMixin( + UserLayerWithAnnotationsMixin(UserLayer), +); const [ volumeRenderingDepthSamplesOriginLogScale, volumeRenderingDepthSamplesMaxLogScale, @@ -188,6 +197,48 @@ export class ImageUserLayer extends Base { }; } + _createVoxelRenderLayer( + source: MultiscaleVolumeChunkSource, + transform: WatchableValueInterface, + ): ImageRenderLayer { + const wrappedFragmentMain = makeDerivedWatchableValue( + (originalShader: string) => ` +#define main userMain +${originalShader} +#undef main + +void main() { + if (toRaw(getDataValue()) == 0n) { + emitTransparent(); + return; + } + userMain(); +} +`, + this.fragmentMain, + ); + this.registerDisposer(wrappedFragmentMain); + + const shaderControlState = new ShaderControlState( + wrappedFragmentMain, + this.shaderControlState.dataContext, + this.channelCoordinateSpaceCombiner, + ); + this.registerDisposer(shaderControlState); + + return new ImageRenderLayer(source, { + opacity: new TrackableValue(1.0, verifyFloat01), + blendMode: new TrackableEnum(BLEND_MODES, BLEND_MODES.ADDITIVE), + shaderControlState: shaderControlState, + shaderError: this.shaderError, + transform: transform, + renderScaleTarget: this.sliceViewRenderScaleTarget, + renderScaleHistogram: this.sliceViewRenderScaleHistogram, + localPosition: this.localPosition, + channelCoordinateSpace: this.channelCoordinateSpace, + }); + } + addCoordinateSpace( coordinateSpace: WatchableValueInterface, ) { @@ -246,21 +297,20 @@ export class ImageUserLayer extends Base { } dataType = volume.dataType; loadedSubsource.activate((context) => { - loadedSubsource.addRenderLayer( - new ImageRenderLayer(volume, { - opacity: this.opacity, - blendMode: this.blendMode, - shaderControlState: this.shaderControlState, - shaderError: this.shaderError, - transform: loadedSubsource.getRenderLayerTransform( - this.channelCoordinateSpace, - ), - renderScaleTarget: this.sliceViewRenderScaleTarget, - renderScaleHistogram: this.sliceViewRenderScaleHistogram, - localPosition: this.localPosition, - channelCoordinateSpace: this.channelCoordinateSpace, - }), - ); + const imageRenderLayer = new ImageRenderLayer(volume, { + opacity: this.opacity, + blendMode: this.blendMode, + 
shaderControlState: this.shaderControlState, + shaderError: this.shaderError, + transform: loadedSubsource.getRenderLayerTransform( + this.channelCoordinateSpace, + ), + renderScaleTarget: this.sliceViewRenderScaleTarget, + renderScaleHistogram: this.sliceViewRenderScaleHistogram, + localPosition: this.localPosition, + channelCoordinateSpace: this.channelCoordinateSpace, + }); + loadedSubsource.addRenderLayer(imageRenderLayer); const volumeRenderLayer = context.registerDisposer( new VolumeRenderingRenderLayer({ gain: this.volumeRenderingGain, @@ -290,6 +340,18 @@ export class ImageUserLayer extends Base { }, this.volumeRenderingMode), ); this.shaderError.changed.dispatch(); + context.registerDisposer( + registerNested((context, isWritable) => { + this.initializeVoxelEditingForSubsource( + loadedSubsource, + imageRenderLayer, + isWritable, + ); + context.registerDisposer(() => { + this.deinitializeVoxelEditingForSubsource(loadedSubsource); + }); + }, loadedSubsource.writable), + ); }); } this.dataType.value = dataType; @@ -588,6 +650,8 @@ class ShaderCodeOverlay extends Overlay { } registerLayerType(ImageUserLayer); +registerVoxelTools(ImageUserLayer); +registerVoxelLayerControls(ImageUserLayer); registerVolumeLayerType(VolumeType.IMAGE, ImageUserLayer); // Use ImageUserLayer as a fallback layer type if there is a `volume` subsource. registerLayerTypeDetector((subsource) => { diff --git a/src/layer/index.ts b/src/layer/index.ts index f4ea769103..3a1c6b61c4 100644 --- a/src/layer/index.ts +++ b/src/layer/index.ts @@ -32,6 +32,7 @@ import { TrackableCoordinateSpace, } from "#src/coordinate_transform.js"; import type { + CommonCreationMetadata, DataSourceRegistry, DataSourceSpecification, DataSubsource, @@ -64,6 +65,7 @@ import type { VisibilityTrackedRenderLayer, } from "#src/renderlayer.js"; import type { VolumeType } from "#src/sliceview/volume/base.js"; +import { MultiscaleVolumeChunkSource } from "#src/sliceview/volume/frontend.js"; import { StatusMessage } from "#src/status.js"; import { TrackableBoolean } from "#src/trackable_boolean.js"; import type { @@ -85,6 +87,7 @@ import { LayerToolBinder, SelectedLegacyTool } from "#src/ui/tool.js"; import { gatherUpdate } from "#src/util/array.js"; import type { Borrowed, Owned } from "#src/util/disposable.js"; import { invokeDisposers, RefCounted } from "#src/util/disposable.js"; +import { vec3 } from "#src/util/geom.js"; import { emptyToUndefined, parseArray, @@ -846,6 +849,74 @@ export class ManagedUserLayer extends RefCounted { this.layerChanged.dispatch(); } + getCreationMetadata(): CommonCreationMetadata | undefined { + const userLayer = this.layer; + if (userLayer === null) return undefined; + + for (const dataSource of userLayer.dataSources) { + const loadState = dataSource.loadState; + if (loadState === undefined || loadState.error !== undefined) continue; + + for (const subsource of loadState.subsources) { + if (!subsource.enabled) continue; + const { volume } = subsource.subsourceEntry.subsource; + + if (volume instanceof MultiscaleVolumeChunkSource) { + const { modelTransform } = loadState.dataSource; + const modelSpace = modelTransform.outputSpace; + const { rank } = modelSpace; + + const identityOptions = { + displayRank: rank, + multiscaleToViewTransform: new Float32Array(rank * rank).fill(0), + modelChannelDimensionIndices: [], + }; + for (let i = 0; i < rank; ++i) + identityOptions.multiscaleToViewTransform[i * rank + i] = 1; + + const scales = volume.getSources(identityOptions)[0]; + if (!scales || scales.length === 0) 
continue; + + const highResSource = scales[0]; + const shape = Array.from( + highResSource.chunkSource.spec.upperVoxelBound, + ); + const highResTransform = highResSource.chunkToMultiscaleTransform; + const voxelSize = new Array(rank); + for (let i = 0; i < rank; ++i) { + voxelSize[i] = highResTransform[i * (rank + 1) + i]; + } + + const numScales = scales.length; + + const downsamplingFactor = vec3.fromValues(1, 1, 1); + if (scales.length > 1) { + const lowResSource = scales[1]; + const lowResTransform = lowResSource.chunkToMultiscaleTransform; + for (let i = 0; i < rank; ++i) { + const highResScale = highResTransform[i * (rank + 1) + i]; + const lowResScale = lowResTransform[i * (rank + 1) + i]; + if (highResScale !== 0) { + downsamplingFactor[i] = Math.round(lowResScale / highResScale); + } + } + } + + return { + shape, + dataType: volume.dataType, + voxelSize, + voxelUnit: modelSpace.units[0] || "", + numScales, + downsamplingFactor: Array.from(downsamplingFactor), + name: `${this.name}_copy`, + }; + } + } + } + return undefined; + } + disposed() { this.layer = null; super.disposed(); @@ -1137,6 +1208,7 @@ export class MouseSelectionState implements PickState { unsnappedPosition: Float32Array = kEmptyFloat32Vec; active = false; displayDimensions: DisplayDimensions | undefined = undefined; + planeNormal: vec3 | undefined = undefined; pickedRenderLayer: RenderLayer | null = null; pickedValue = 0n; pickedOffset = 0; diff --git a/src/layer/layer_data_source.ts b/src/layer/layer_data_source.ts index 63d07a7746..0663e33cf2 100644 --- a/src/layer/layer_data_source.ts +++ b/src/layer/layer_data_source.ts @@ -36,7 +36,10 @@ import { makeEmptyDataSourceSpecification } from "#src/datasource/index.js"; import type { UserLayer } from "#src/layer/index.js"; import { getWatchableRenderLayerTransform } from "#src/render_coordinate_transform.js"; import type { RenderLayer } from "#src/renderlayer.js"; +import { StatusMessage } from "#src/status.js"; +import { TrackableBoolean } from "#src/trackable_boolean.js"; import type { WatchableValueInterface } from "#src/trackable_value.js"; +import { DatasetCreationDialog } from "#src/ui/dataset_creation.js"; import { arraysEqual } from "#src/util/array.js"; import type { Borrowed, Owned } from "#src/util/disposable.js"; import { disposableOnce, RefCounted } from "#src/util/disposable.js"; @@ -63,6 +66,7 @@ export function parseDataSubsourceSpecificationFromJson( verifyObject(json); return { enabled: verifyOptionalObjectProperty(json, "enabled", verifyBoolean), + writable: verifyOptionalObjectProperty(json, "writable", verifyBoolean), }; } @@ -108,7 +112,8 @@ export function layerDataSourceSpecificationFromJson( } function dataSubsourceSpecificationToJson(spec: DataSubsourceSpecification) { - return spec.enabled; + const { enabled, writable } = spec; + return { enabled, writable }; } export function layerDataSourceSpecificationToJson( @@ -146,6 +151,7 @@ export class LoadedDataSubsource { subsourceToModelSubspaceTransform: Float32Array; modelSubspaceDimensionIndices: number[]; enabled: boolean; + writable: TrackableBoolean; activated: RefCounted | undefined = undefined; guardValues: any[] = []; messages = new MessageList(); @@ -178,6 +184,13 @@ export class LoadedDataSubsource { ), } = subsourceEntry; this.enabled = enabled; + this.writable = new TrackableBoolean( + subsourceSpec?.writable ?? 
false, + false, + ); + this.writable.changed.add( + loadedDataSource.layer.dataSourcesChanged.dispatch, + ); this.subsourceToModelSubspaceTransform = subsourceToModelSubspaceTransform; this.modelSubspaceDimensionIndices = modelSubspaceDimensionIndices; this.isActiveChanged.add( @@ -451,6 +464,23 @@ export class LayerDataSource extends RefCounted { if (refCounted.wasDisposed) return; this.loadState_ = { error }; this.messages.clearMessages(); + + const status = new StatusMessage(/*delay=*/ false, /*modal=*/ true); + status.element.innerHTML = `Dataset not found at ${this.spec_.url}. `; + const createButton = document.createElement("button"); + createButton.textContent = "Create Dataset"; + createButton.addEventListener("click", () => { + status.dispose(); + new DatasetCreationDialog(this.layer.manager, this.spec_.url); + }); + const cancelButton = document.createElement("button"); + cancelButton.textContent = "Cancel"; + cancelButton.addEventListener("click", () => { + status.dispose(); + }); + status.element.appendChild(createButton); + status.element.appendChild(cancelButton); + this.messages.addMessage({ severity: MessageSeverity.error, message: formatErrorMessage(error), @@ -491,6 +521,7 @@ export class LayerDataSource extends RefCounted { loadedSubsource.enabled !== defaultEnabledValue ? loadedSubsource.enabled : undefined, + writable: loadedSubsource.writable.value ? true : undefined, }, ]; }), diff --git a/src/layer/segmentation/index.ts b/src/layer/segmentation/index.ts index 43a21b05cd..6bf727292c 100644 --- a/src/layer/segmentation/index.ts +++ b/src/layer/segmentation/index.ts @@ -35,12 +35,15 @@ import type { LoadedDataSubsource } from "#src/layer/layer_data_source.js"; import { layerDataSourceSpecificationFromJson } from "#src/layer/layer_data_source.js"; import * as json_keys from "#src/layer/segmentation/json_keys.js"; import { registerLayerControls } from "#src/layer/segmentation/layer_controls.js"; +import { registerVoxelLayerControls } from "#src/layer/vox/controls.js"; +import { UserLayerWithVoxelEditingMixin } from "#src/layer/vox/index.js"; import { MeshLayer, MeshSource, MultiscaleMeshLayer, MultiscaleMeshSource, } from "#src/mesh/frontend.js"; +import type { RenderLayerTransformOrError } from "#src/render_coordinate_transform.js"; import { RenderScaleHistogram, trackableRenderScaleTarget, @@ -92,6 +95,7 @@ import type { WatchableValueInterface, } from "#src/trackable_value.js"; import { + registerNested, IndirectTrackableValue, IndirectWatchableValue, makeCachedDerivedWatchableValue, @@ -106,6 +110,7 @@ import { SegmentDisplayTab } from "#src/ui/segment_list.js"; import { registerSegmentSelectTools } from "#src/ui/segment_select_tools.js"; import { registerSegmentSplitMergeTools } from "#src/ui/segment_split_merge_tools.js"; import { DisplayOptionsTab } from "#src/ui/segmentation_display_options_tab.js"; +import { registerVoxelTools } from "#src/ui/voxel_annotations.js"; import { Uint64Map } from "#src/uint64_map.js"; import { Uint64OrderedSet } from "#src/uint64_ordered_set.js"; import { Uint64Set } from "#src/uint64_set.js"; @@ -579,7 +584,9 @@ interface SegmentationActionContext extends LayerActionContext { segmentationToggleSegmentState?: boolean | undefined; } -const Base = UserLayerWithAnnotationsMixin(UserLayer); +const Base = UserLayerWithVoxelEditingMixin( + UserLayerWithAnnotationsMixin(UserLayer), +); export class SegmentationUserLayer extends Base { sliceViewRenderScaleHistogram = new RenderScaleHistogram(); sliceViewRenderScaleTarget = 
trackableRenderScaleTarget(1); @@ -606,6 +613,19 @@ export class SegmentationUserLayer extends Base { ); }; + _createVoxelRenderLayer( + source: MultiscaleVolumeChunkSource, + transform: WatchableValueInterface, + ): SegmentationRenderLayer { + return new SegmentationRenderLayer(source, { + ...this.displayState, + transform: transform, + renderScaleTarget: this.sliceViewRenderScaleTarget, + renderScaleHistogram: this.sliceViewRenderScaleHistogram, + localPosition: this.localPosition, + }); + } + filterBySegmentLabel = (id: bigint) => { const augmented = augmentSegmentId(this.displayState, id); const { label } = augmented; @@ -774,19 +794,28 @@ export class SegmentationUserLayer extends Base { continue; } hasVolume = true; - loadedSubsource.activate( - () => - loadedSubsource.addRenderLayer( - new SegmentationRenderLayer(volume, { - ...this.displayState, - transform: loadedSubsource.getRenderLayerTransform(), - renderScaleTarget: this.sliceViewRenderScaleTarget, - renderScaleHistogram: this.sliceViewRenderScaleHistogram, - localPosition: this.localPosition, - }), - ), - this.displayState.segmentationGroupState.value, - ); + loadedSubsource.activate((context) => { + const segmentationRenderLayer = new SegmentationRenderLayer(volume, { + ...this.displayState, + transform: loadedSubsource.getRenderLayerTransform(), + renderScaleTarget: this.sliceViewRenderScaleTarget, + renderScaleHistogram: this.sliceViewRenderScaleHistogram, + localPosition: this.localPosition, + }); + loadedSubsource.addRenderLayer(segmentationRenderLayer); + context.registerDisposer( + registerNested((context, isWritable) => { + this.initializeVoxelEditingForSubsource( + loadedSubsource, + segmentationRenderLayer, + isWritable, + ); + context.registerDisposer(() => { + this.deinitializeVoxelEditingForSubsource(loadedSubsource); + }); + }, loadedSubsource.writable), + ); + }, this.displayState.segmentationGroupState.value); } else if (mesh !== undefined) { loadedSubsource.activate(() => { const displayState = { @@ -1391,7 +1420,8 @@ export class SegmentationUserLayer extends Base { } registerLayerControls(SegmentationUserLayer); - +registerVoxelTools(SegmentationUserLayer); +registerVoxelLayerControls(SegmentationUserLayer); registerLayerType(SegmentationUserLayer); registerVolumeLayerType(VolumeType.SEGMENTATION, SegmentationUserLayer); registerLayerTypeDetector((subsource) => { diff --git a/src/layer/vox/controls.ts b/src/layer/vox/controls.ts new file mode 100644 index 0000000000..e06d515de0 --- /dev/null +++ b/src/layer/vox/controls.ts @@ -0,0 +1,120 @@ +/** + * @license + * Copyright 2025 Google Inc. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import type { UserLayerConstructor } from "#src/layer/index.js"; +import { LayerActionContext } from "#src/layer/index.js"; +import type { UserLayerWithVoxelEditing } from "#src/layer/vox/index.js"; +import { observeWatchable } from "#src/trackable_value.js"; +import type { LayerControlDefinition } from "#src/widget/layer_control.js"; +import { registerLayerControl } from "#src/widget/layer_control.js"; +import { buttonLayerControl } from "#src/widget/layer_control_button.js"; +import { checkboxLayerControl } from "#src/widget/layer_control_checkbox.js"; +import { enumLayerControl } from "#src/widget/layer_control_enum.js"; +import { rangeLayerControl } from "#src/widget/layer_control_range.js"; + +export const VOXEL_LAYER_CONTROLS: LayerControlDefinition[] = + [ + { + label: "Brush size", + toolJson: { type: "vox-brush-size" }, + ...rangeLayerControl((layer) => ({ + value: layer.voxBrushRadius, + options: { min: 1, max: 64, step: 1 }, + })), + }, + { + label: "Eraser", + toolJson: { type: "vox-erase-mode" }, + ...checkboxLayerControl((layer) => layer.voxEraseMode), + }, + { + label: "Brush shape", + toolJson: { type: "vox-brush-shape" }, + ...enumLayerControl( + (layer: UserLayerWithVoxelEditing) => layer.voxBrushShape, + ), + }, + { + label: "Max fill voxels", + toolJson: { type: "vox-flood-max-voxels" }, + ...rangeLayerControl((layer) => ({ + value: layer.voxFloodMaxVoxels, + options: { min: 1, max: 1000000, step: 1000 }, + })), + }, + { + label: "Undo", + toolJson: { type: "vox-undo" }, + ...buttonLayerControl({ + text: "Undo", + onClick: (layer) => + layer.handleVoxAction("undo", new LayerActionContext()), + }), + }, + { + label: "Redo", + toolJson: { type: "vox-redo" }, + ...buttonLayerControl({ + text: "Redo", + onClick: (layer) => + layer.handleVoxAction("redo", new LayerActionContext()), + }), + }, + { + label: "Paint Value", + toolJson: { type: "vox-paint-value" }, + makeControl: (layer, context) => { + const control = document.createElement("input"); + control.type = "text"; + control.title = "Specify segment ID or intensity value to paint"; + control.addEventListener("change", () => { + try { + layer.setVoxelPaintValue(control.value); + } catch { + control.value = layer.paintValue.value.toString(); + } + }); + context.registerDisposer( + observeWatchable((value) => { + control.value = value.toString(); + }, layer.paintValue), + ); + control.value = layer.paintValue.value.toString(); + return { control, controlElement: control, parent: context }; + }, + activateTool: () => {}, + }, + { + label: "New Random Value", + toolJson: { type: "vox-random-value" }, + ...buttonLayerControl({ + text: "Random", + onClick: (layer) => + layer.handleVoxAction( + "randomize-paint-value", + new LayerActionContext(), + ), + }), + }, + ]; + +export function registerVoxelLayerControls( + layerType: UserLayerConstructor, +) { + for (const control of VOXEL_LAYER_CONTROLS) { + registerLayerControl(layerType, control); + } +} diff --git a/src/layer/vox/index.ts b/src/layer/vox/index.ts new file mode 100644 index 0000000000..026b2f372e --- /dev/null +++ b/src/layer/vox/index.ts @@ -0,0 +1,434 @@ +/** + * @license + * Copyright 2025 Google Inc. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import type { + LayerActionContext, + MouseSelectionState, +} from "#src/layer/index.js"; +import { UserLayer } from "#src/layer/index.js"; +import type { LoadedDataSubsource } from "#src/layer/layer_data_source.js"; +import { VoxToolTab } from "#src/layer/vox/tabs/tools.js"; +import type { + ChunkTransformParameters, + RenderLayerTransformOrError, +} from "#src/render_coordinate_transform.js"; +import { + getChunkPositionFromCombinedGlobalLocalPositions, + getChunkTransformParameters, +} from "#src/render_coordinate_transform.js"; +import type { + SliceViewBase, + SliceViewSourceOptions, + TransformedSource, +} from "#src/sliceview/base.js"; +import { DataType } from "#src/sliceview/base.js"; +import type { SliceViewRenderLayer } from "#src/sliceview/renderlayer.js"; +import type { MultiscaleVolumeChunkSource } from "#src/sliceview/volume/frontend.js"; +import type { ImageRenderLayer } from "#src/sliceview/volume/image_renderlayer.js"; +import type { SegmentationRenderLayer } from "#src/sliceview/volume/segmentation_renderlayer.js"; +import { TrackableBoolean } from "#src/trackable_boolean.js"; +import type { WatchableValueInterface } from "#src/trackable_value.js"; +import { + makeDerivedWatchableValue, + TrackableValue, + WatchableValue, +} from "#src/trackable_value.js"; +import type { UserLayerWithAnnotations } from "#src/ui/annotations.js"; +import { randomUint64 } from "#src/util/bigint.js"; +import { RefCounted } from "#src/util/disposable.js"; +import { vec3 } from "#src/util/geom.js"; +import { + parseUint64, + verifyFiniteFloat, + verifyInt, + verifyOptionalObjectProperty, +} from "#src/util/json.js"; +import { TrackableEnum } from "#src/util/trackable_enum.js"; +import { VoxelPreviewMultiscaleSource } from "#src/voxel_annotation/PreviewMultiscaleChunkSource.js"; +import type { VoxelEditControllerHost } from "#src/voxel_annotation/base.js"; +import { BrushShape } from "#src/voxel_annotation/base.js"; +import { VoxelEditController } from "#src/voxel_annotation/edit_controller.js"; + +const BRUSH_SIZE_JSON_KEY = "brushSize"; +const ERASE_MODE_JSON_KEY = "eraseMode"; +const BRUSH_SHAPE_JSON_KEY = "brushShape"; +const FLOOD_FILL_MAX_VOXELS_JSON_KEY = "floodFillMaxVoxels"; +const PAINT_VALUE_JSON_KEY = "paintValue"; + +const DATA_TYPE_BIT_INFO = { + [DataType.UINT8]: { bits: 8, signed: false }, + [DataType.INT8]: { bits: 8, signed: true }, + [DataType.UINT16]: { bits: 16, signed: false }, + [DataType.INT16]: { bits: 16, signed: true }, + [DataType.UINT32]: { bits: 32, signed: false }, + [DataType.INT32]: { bits: 32, signed: true }, + [DataType.UINT64]: { bits: 64, signed: false }, +}; + +export class VoxelEditingContext + extends RefCounted + implements VoxelEditControllerHost +{ + controller: VoxelEditController | undefined = undefined; + + private cachedChunkTransform: ChunkTransformParameters | undefined; + private cachedTransformGeneration: number = -1; + private cachedVoxelPosition: Float32Array = new Float32Array(3); + optimisticRenderLayer: + | ImageRenderLayer + | SegmentationRenderLayer + | undefined = undefined; + previewSource: 
VoxelPreviewMultiscaleSource | undefined = undefined; + + constructor( + public hostLayer: UserLayerWithVoxelEditing, + public primarySource: MultiscaleVolumeChunkSource, + public primaryRenderLayer: ImageRenderLayer | SegmentationRenderLayer, + public writable: boolean, + ) { + super(); + + if (!writable) return; + + this.previewSource = new VoxelPreviewMultiscaleSource( + this.hostLayer.manager.chunkManager, + primarySource, + ); + + const transform = primaryRenderLayer.transform; + + this.optimisticRenderLayer = this.hostLayer._createVoxelRenderLayer( + this.previewSource, + transform, + ); + + // since we only allow drawing at max res, we can lock the optimistic render layer to it + this.optimisticRenderLayer.filterVisibleSources = function* ( + this: SliceViewRenderLayer, + _sliceView: SliceViewBase, + sources: readonly TransformedSource[], + ): Iterable { + if (sources.length > 0) { + yield sources[0]; + } + }; + + this.hostLayer.addRenderLayer(this.optimisticRenderLayer); + + this.controller = new VoxelEditController(this); + } + + get rpc() { + return this.hostLayer.manager.chunkManager.rpc!; + } + + disposed() { + if (this.controller) this.controller.dispose(); + if (this.optimisticRenderLayer) + this.hostLayer.removeRenderLayer(this.optimisticRenderLayer); + super.disposed(); + } + + getVoxelPositionFromMouse( + mouseState: MouseSelectionState, + ): Float32Array | undefined { + const renderLayer = this.primaryRenderLayer; + const renderLayerTransform = renderLayer.transform.value; + if (renderLayerTransform.error !== undefined) { + return undefined; + } + + const transformGeneration = renderLayer.transform.changed.count; + if (this.cachedTransformGeneration !== transformGeneration) { + this.cachedChunkTransform = undefined; + try { + this.cachedChunkTransform = getChunkTransformParameters( + renderLayerTransform, + this.primarySource.getSources( + this.hostLayer.getIdentitySliceViewSourceOptions(), + )[0][0]!.chunkToMultiscaleTransform, + ); + this.cachedTransformGeneration = transformGeneration; + } catch (e) { + this.cachedTransformGeneration = -1; + console.error("Error computing chunk transform parameters:", e); + return undefined; + } + } + + const chunkTransform = this.cachedChunkTransform; + if (chunkTransform === undefined) return undefined; + + if ( + this.cachedVoxelPosition.length !== + chunkTransform.modelTransform.unpaddedRank + ) { + this.cachedVoxelPosition = new Float32Array( + chunkTransform.modelTransform.unpaddedRank, + ); + } + + const ok = getChunkPositionFromCombinedGlobalLocalPositions( + this.cachedVoxelPosition, + mouseState.unsnappedPosition, + this.hostLayer.localPosition.value, + chunkTransform.layerRank, + chunkTransform.combinedGlobalLocalToChunkTransform, + ); + if (!ok) return undefined; + return this.cachedVoxelPosition; + } + + transformGlobalToVoxelNormal(globalNormal: vec3): vec3 { + const chunkTransform = this.cachedChunkTransform; + if (chunkTransform === undefined) + throw new Error("Chunk transform not computed"); + const { modelTransform, layerToChunkTransform, layerRank } = chunkTransform; + const { globalToRenderLayerDimensions } = modelTransform; + const globalRank = globalToRenderLayerDimensions.length; + const voxelNormal = vec3.create(); + + for (let chunkDim = 0; chunkDim < 3; ++chunkDim) { + let sum = 0; + for ( + let globalDim = 0; + globalDim < Math.min(globalRank, 3); + ++globalDim + ) { + const layerDim = globalToRenderLayerDimensions[globalDim]; + if (layerDim !== -1) { + sum += + layerToChunkTransform[chunkDim + layerDim * 
(layerRank + 1)] * + globalNormal[globalDim]; + } + } + voxelNormal[chunkDim] = sum; + } + vec3.normalize(voxelNormal, voxelNormal); + return voxelNormal; + } +} + +export declare abstract class UserLayerWithVoxelEditing extends UserLayer { + isEditable: WatchableValue; + + voxBrushRadius: TrackableValue; + voxEraseMode: TrackableBoolean; + voxBrushShape: TrackableEnum; + voxFloodMaxVoxels: TrackableValue; + paintValue: TrackableValue; + + editingContexts: Map; + + abstract _createVoxelRenderLayer( + source: MultiscaleVolumeChunkSource, + transform: WatchableValueInterface, + ): ImageRenderLayer | SegmentationRenderLayer; + abstract getVoxelPaintValue(erase: boolean): bigint; + abstract setVoxelPaintValue(value: any): bigint; + + initializeVoxelEditingForSubsource( + loadedSubsource: LoadedDataSubsource, + renderlayer: SegmentationRenderLayer | ImageRenderLayer, + ): void; + deinitializeVoxelEditingForSubsource( + loadedSubsource: LoadedDataSubsource, + ): void; + + getIdentitySliceViewSourceOptions(): SliceViewSourceOptions; + handleVoxAction(action: string, context: LayerActionContext): void; +} + +export function UserLayerWithVoxelEditingMixin< + TBase extends { new (...args: any[]): UserLayerWithAnnotations }, +>(Base: TBase) { + abstract class C extends Base implements UserLayerWithVoxelEditing { + editingContexts = new Map(); + isEditable = new WatchableValue(false); + paintValue = new TrackableValue(1n, (x) => parseUint64(x)); + + // Brush properties + voxBrushRadius = new TrackableValue(3, verifyInt); + voxEraseMode = new TrackableBoolean(false); + voxBrushShape = new TrackableEnum(BrushShape, BrushShape.DISK); + voxFloodMaxVoxels = new TrackableValue(10000, verifyFiniteFloat); + + constructor(...args: any[]) { + super(...args); + this.registerDisposer(() => { + for (const context of this.editingContexts.values()) { + context.dispose(); + } + this.editingContexts.clear(); + }); + this.voxBrushRadius.changed.add(this.specificationChanged.dispatch); + this.voxEraseMode.changed.add(this.specificationChanged.dispatch); + this.voxBrushShape.changed.add(this.specificationChanged.dispatch); + this.voxFloodMaxVoxels.changed.add(this.specificationChanged.dispatch); + this.paintValue.changed.add(this.specificationChanged.dispatch); + this.tabs.add("Draw", { + label: "Draw", + order: 20, + hidden: makeDerivedWatchableValue( + (editable) => !editable, + this.isEditable, + ), + getter: () => new VoxToolTab(this), + }); + } + + toJSON() { + const json = super.toJSON(); + json[BRUSH_SIZE_JSON_KEY] = this.voxBrushRadius.toJSON(); + json[ERASE_MODE_JSON_KEY] = this.voxEraseMode.toJSON(); + json[BRUSH_SHAPE_JSON_KEY] = this.voxBrushShape.toJSON(); + json[FLOOD_FILL_MAX_VOXELS_JSON_KEY] = this.voxFloodMaxVoxels.toJSON(); + json[PAINT_VALUE_JSON_KEY] = this.paintValue.toJSON(); + return json; + } + + restoreState(specification: any) { + super.restoreState(specification); + verifyOptionalObjectProperty(specification, BRUSH_SIZE_JSON_KEY, (v) => + this.voxBrushRadius.restoreState(v), + ); + verifyOptionalObjectProperty(specification, ERASE_MODE_JSON_KEY, (v) => + this.voxEraseMode.restoreState(v), + ); + verifyOptionalObjectProperty(specification, BRUSH_SHAPE_JSON_KEY, (v) => + this.voxBrushShape.restoreState(v), + ); + verifyOptionalObjectProperty( + specification, + FLOOD_FILL_MAX_VOXELS_JSON_KEY, + (v) => this.voxFloodMaxVoxels.restoreState(v), + ); + verifyOptionalObjectProperty(specification, PAINT_VALUE_JSON_KEY, (v) => + this.paintValue.restoreState(v), + ); + } + + getVoxelPaintValue(erase: 
boolean): bigint { + if (erase) return 0n; + return this.paintValue.value; + } + + setVoxelPaintValue(x: any) { + const editContext = this.editingContexts.values().next().value; + const dataType = editContext.primarySource.dataType; + let value: bigint; + + if (dataType === DataType.FLOAT32) { + const floatValue = parseFloat(String(x)); + value = BigInt(Math.round(floatValue)); + } else { + value = BigInt(x); + } + + const info = + DATA_TYPE_BIT_INFO[dataType as keyof typeof DATA_TYPE_BIT_INFO]; + if (!info) { + this.paintValue.value = value; + return value; + } + + const { bits, signed } = info; + const mask = (1n << BigInt(bits)) - 1n; + let truncated = value & mask; + + if (signed) { + const signBit = 1n << BigInt(bits - 1); + if ((truncated & signBit) !== 0n) { + truncated -= 1n << BigInt(bits); + } + } + + this.paintValue.value = truncated; + return truncated; + } + + abstract _createVoxelRenderLayer( + source: MultiscaleVolumeChunkSource, + transform: WatchableValueInterface, + ): ImageRenderLayer | SegmentationRenderLayer; + + initializeVoxelEditingForSubsource( + loadedSubsource: LoadedDataSubsource, + renderlayer: SegmentationRenderLayer | ImageRenderLayer, + writable: boolean = true, + ): void { + if (this.editingContexts.has(loadedSubsource)) return; + + const primarySource = loadedSubsource.subsourceEntry.subsource + .volume as MultiscaleVolumeChunkSource; + + const context = new VoxelEditingContext( + this, + primarySource, + renderlayer, + writable, + ); + this.editingContexts.set(loadedSubsource, context); + this.isEditable.value = writable; + } + + deinitializeVoxelEditingForSubsource(loadedSubsource: LoadedDataSubsource) { + const context = this.editingContexts.get(loadedSubsource); + if (context) { + context.dispose(); + this.editingContexts.delete(loadedSubsource); + } + if (this.editingContexts.size === 0 && this.isEditable.value) { + this.isEditable.value = false; + } + } + + getIdentitySliceViewSourceOptions(): SliceViewSourceOptions { + const rank = this.localCoordinateSpace.value.rank; + const displayRank = rank; + const multiscaleToViewTransform = new Float32Array(displayRank * rank); + for (let chunkDim = 0; chunkDim < rank; ++chunkDim) { + for (let displayDim = 0; displayDim < displayRank; ++displayDim) { + multiscaleToViewTransform[displayRank * chunkDim + displayDim] = + chunkDim === displayDim ? 1 : 0; + } + } + return { + displayRank, + multiscaleToViewTransform, + modelChannelDimensionIndices: [], + }; + } + + handleVoxAction(action: string, _context: LayerActionContext): void { + const firstContext = this.editingContexts.values().next().value; + if (!firstContext) return; + const controller = firstContext.controller; + switch (action) { + case "undo": + controller.undo(); + break; + case "redo": + controller.redo(); + break; + case "randomize-paint-value": + this.setVoxelPaintValue(randomUint64()); + break; + } + } + } + return C; +} diff --git a/src/layer/vox/style.css b/src/layer/vox/style.css new file mode 100644 index 0000000000..81d2ca9b14 --- /dev/null +++ b/src/layer/vox/style.css @@ -0,0 +1,44 @@ +/** + * @license + * Copyright 2025 Google Inc. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
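The paint value handling above clamps whatever the user types (or a random value) to the width of the volume's data type: the value is masked to the type's bit width and, for signed types, re-interpreted through the sign bit. A standalone sketch of the same truncation with a few worked values (the helper name is illustrative):

```ts
// Illustrative re-statement of the truncation performed by setVoxelPaintValue.
function truncateToDataType(value: bigint, bits: number, signed: boolean): bigint {
  const mask = (1n << BigInt(bits)) - 1n; // e.g. 0xffn for 8-bit types
  let truncated = value & mask;           // keep only the low `bits` bits
  if (signed) {
    const signBit = 1n << BigInt(bits - 1);
    if ((truncated & signBit) !== 0n) {
      truncated -= 1n << BigInt(bits);    // re-interpret as a negative value
    }
  }
  return truncated;
}

truncateToDataType(300n, 8, false);    // 44n   (300 mod 256) for a UINT8 volume
truncateToDataType(255n, 8, true);     // -1n   for an INT8 volume
truncateToDataType(70000n, 16, false); // 4464n (70000 mod 65536) for a UINT16 volume
```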
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +.neuroglancer-vox-row label { + flex: 0 0 140px; + min-width: 0; + font-weight: 500; +} + +.neuroglancer-vox-status { + display: block; + flex: 1 1 100%; + min-width: 100%; + padding-top: 4px; +} + +.neuroglancer-vox-settings-tab button:hover, +.neuroglancer-vox-tools-tab button:hover { + filter: brightness(1.06); +} + +.neuroglancer-vox-settings-tab button:active, +.neuroglancer-vox-tools-tab button:active { + transform: translateY(1px); +} + +.neuroglancer-vox-toolbox { + display: flex; + flex-direction: column; + gap: 8px; +} diff --git a/src/layer/vox/tabs/tools.ts b/src/layer/vox/tabs/tools.ts new file mode 100644 index 0000000000..8cb2ed2692 --- /dev/null +++ b/src/layer/vox/tabs/tools.ts @@ -0,0 +1,124 @@ +/** + * @license + * Copyright 2025 Google Inc. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import { VOXEL_LAYER_CONTROLS } from "#src/layer/vox/controls.js"; +import type { UserLayerWithVoxelEditing } from "#src/layer/vox/index.js"; +import { observeWatchable } from "#src/trackable_value.js"; +import { makeToolButton } from "#src/ui/tool.js"; +import { + ADOPT_VOXEL_LABEL_TOOL_ID, + BRUSH_TOOL_ID, + FLOODFILL_TOOL_ID, +} from "#src/ui/voxel_annotations.js"; +import type { VoxelEditController } from "#src/voxel_annotation/edit_controller.js"; +import { DependentViewWidget } from "#src/widget/dependent_view_widget.js"; +import { addLayerControlToOptionsTab } from "#src/widget/layer_control.js"; +import { Tab } from "#src/widget/tab_view.js"; + +export class VoxToolTab extends Tab { + constructor(public layer: UserLayerWithVoxelEditing) { + super(); + const { element } = this; + element.classList.add("neuroglancer-vox-tools-tab"); + + const toolbox = document.createElement("div"); + toolbox.className = "neuroglancer-vox-toolbox"; + + const toolsRow = document.createElement("div"); + toolsRow.className = "neuroglancer-vox-row"; + const toolsTitle = document.createElement("div"); + toolsTitle.textContent = "Tools"; + toolsTitle.style.fontWeight = "600"; + toolsRow.appendChild(toolsTitle); + + const toolButtonsContainer = document.createElement("div"); + toolButtonsContainer.style.display = "flex"; + toolButtonsContainer.style.gap = "8px"; + + const brushButton = makeToolButton(this, layer.toolBinder, { + toolJson: { type: BRUSH_TOOL_ID }, + label: "Brush", + }); + + const floodFillButton = makeToolButton(this, layer.toolBinder, { + toolJson: { type: FLOODFILL_TOOL_ID }, + label: "Flood Fill", + }); + + const pickButton = makeToolButton(this, layer.toolBinder, { + toolJson: { type: ADOPT_VOXEL_LABEL_TOOL_ID }, + label: "Seg Picker", + }); + + 
toolButtonsContainer.appendChild(brushButton); + toolButtonsContainer.appendChild(floodFillButton); + toolButtonsContainer.appendChild(pickButton); + toolsRow.appendChild(toolButtonsContainer); + toolbox.appendChild(toolsRow); + + for (const controlDef of VOXEL_LAYER_CONTROLS) { + const controlElement = addLayerControlToOptionsTab( + this, + this.layer, + this.visibility, + controlDef, + ); + + if ( + controlDef.toolJson.type === "vox-undo" || + controlDef.toolJson.type === "vox-redo" + ) { + const button = controlElement.querySelector("button"); + if (button) { + this.registerDisposer( + new DependentViewWidget( + { + changed: this.layer.layersChanged, + get value() { + return layer.editingContexts.values().next().value?.controller; + }, + }, + ( + controller: VoxelEditController | undefined, + _parent, + context, + ) => { + if (!controller) { + button.disabled = true; + return; + } + const watchable = + controlDef.toolJson.type === "vox-undo" + ? controller.undoCount + : controller.redoCount; + context.registerDisposer( + observeWatchable((count) => { + button.disabled = count === 0; + }, watchable), + ); + }, + this.visibility, + ), + ); + } + } + + toolbox.appendChild(controlElement); + } + + element.appendChild(toolbox); + } +} diff --git a/src/main.ts b/src/main.ts index 8a06431aed..d440a0bb21 100644 --- a/src/main.ts +++ b/src/main.ts @@ -20,4 +20,21 @@ import { setupDefaultViewer } from "#src/ui/default_viewer_setup.js"; import "#src/util/google_tag_manager.js"; +(function maybeHandleOidcCallback() { + try { + // Only handle when running in a popup opened by our app and when code/state are present. + if (window.opener === null) return; + const params = new URLSearchParams(window.location.search); + const code = params.get("code"); + const state = params.get("state"); + if (code === null || state === null) return; + // Post message back to opener; opener will validate origin and state. + window.opener.postMessage({ type: "oidc_code", code, state }, "*"); + // Close this popup window. + window.close(); + } catch { + // Swallow errors; fall through to normal app startup.
+ } +})(); + setupDefaultViewer(); diff --git a/src/rendered_data_panel.ts b/src/rendered_data_panel.ts index fd8a82ff01..28ad4e6b9e 100644 --- a/src/rendered_data_panel.ts +++ b/src/rendered_data_panel.ts @@ -165,6 +165,9 @@ export abstract class RenderedDataPanel extends RenderedPanel { */ pickRequestPending = false; + private overlay_canvas: HTMLCanvasElement; + private overlay_context: CanvasRenderingContext2D; + private mouseStateForcer = () => this.blockOnPickRequest(); protected isMovingToMousePosition: boolean = false; @@ -812,6 +815,51 @@ export abstract class RenderedDataPanel extends RenderedPanel { } }, ); + + this.overlay_canvas = document.createElement("canvas"); + this.overlay_canvas.style.position = "absolute"; + this.overlay_canvas.style.top = "0"; + this.overlay_canvas.style.left = "0"; + this.overlay_canvas.style.width = "100%"; + this.overlay_canvas.style.height = "100%"; + this.overlay_canvas.style.pointerEvents = "none"; + this.overlay_canvas.style.zIndex = "10"; + this.element.appendChild(this.overlay_canvas); + this.overlay_context = this.overlay_canvas.getContext("2d")!; + + this.boundsUpdated.add(() => { + this.overlay_canvas.width = this.renderViewport.logicalWidth; + this.overlay_canvas.height = this.renderViewport.logicalHeight; + }); + } + + drawBrushCursor(x: number, y: number, radius: number) { + const ctx = this.overlay_context; + const { logicalWidth, logicalHeight } = this.renderViewport; + + ctx.clearRect(0, 0, logicalWidth, logicalHeight); + + if (radius > 0) { + ctx.beginPath(); + ctx.arc(x, y, radius, 0, 2 * Math.PI); + ctx.fillStyle = "rgba(255, 255, 255, 0.2)"; + ctx.fill(); + ctx.strokeStyle = "rgba(255, 255, 255, 1)"; + ctx.lineWidth = 3; + ctx.stroke(); + ctx.strokeStyle = "rgba(0, 0, 0, 1)"; + ctx.lineWidth = 1.5; + ctx.stroke(); + } + } + + clearOverlay() { + this.overlay_context.clearRect( + 0, + 0, + this.overlay_canvas.width, + this.overlay_canvas.height, + ); } abstract translateDataPointByViewportPixels( diff --git a/src/sliceview/README.md b/src/sliceview/README.md index 8851e64279..8939962e17 100644 --- a/src/sliceview/README.md +++ b/src/sliceview/README.md @@ -2,7 +2,7 @@ This directory contains the code for `SliceView`, which provides the cross-secti # Architecture -A volume is divided into a regular grid of 3-d chunks. Each chunk has voxel dimensions `chunkDataSize` (a 3-d vector of positive integers). All chunks have the same dimensions, except at the the upper bound of the volume in each dimension, where the chunks are allowed to be truncated to fit within the volume dimensions. +A volume is divided into a regular grid of 3-d chunks. Each chunk has voxel dimensions `chunkDataSize` (a 3-d vector of positive integers). All chunks have the same dimensions, except at the upper bound of the volume in each dimension, where the chunks are allowed to be truncated to fit within the volume dimensions. Chunks are the unit at which portions of the volume are queued, retrieved, transcoded (if necessary), copied to the GPU, and rendered: diff --git a/src/sliceview/base.ts b/src/sliceview/base.ts index b6d061cc07..667cd99013 100644 --- a/src/sliceview/base.ts +++ b/src/sliceview/base.ts @@ -169,6 +169,14 @@ export interface SliceViewRenderLayer { localPosition: WatchableValueInterface; renderScaleTarget: WatchableValueInterface; + /** + * If implemented by a render layer, return a non-negative integer scale index to override + * automatic multiscale selection. 
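The popup-side handler added to `main.ts` above only forwards `code` and `state`; per its comment, the opener is expected to validate the message origin and the `state` value. A sketch of what that opener-side listener might look like (this counterpart is not part of the diff, so the details are assumptions):

```ts
// Assumed opener-side counterpart: the window that opened the OIDC popup waits for
// the posted authorization code and checks both origin and state before using it.
function waitForOidcCode(expectedState: string): Promise<string> {
  return new Promise((resolve, reject) => {
    const onMessage = (event: MessageEvent) => {
      // The popup posts with targetOrigin "*", so the receiver must filter by origin.
      if (event.origin !== window.location.origin) return;
      const data = event.data;
      if (!data || data.type !== "oidc_code") return;
      window.removeEventListener("message", onMessage);
      if (data.state !== expectedState) {
        reject(new Error("OIDC state mismatch"));
        return;
      }
      resolve(data.code);
    };
    window.addEventListener("message", onMessage);
  });
}
```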
When defined, the sliceview must use only the specified + * scale from the current orientation. Implementations must ensure the index is valid for + * their multiscale source; this function should return undefined when no override is desired. + */ + getForcedSourceIndexOverride?(): number | undefined; + filterVisibleSources( sliceView: SliceViewBase, sources: readonly TransformedSource[], @@ -686,6 +694,22 @@ export function* filterVisibleSources( renderLayer: SliceViewRenderLayer, sources: readonly TransformedSource[], ): Iterable { + // First: allow a render layer to force a specific multiscale index for safety-critical flows. + const forcedIndex = renderLayer.getForcedSourceIndexOverride?.(); + if (forcedIndex !== undefined) { + if ( + !Number.isInteger(forcedIndex) || + forcedIndex < 0 || + forcedIndex >= sources.length + ) { + throw new Error( + `filterVisibleSources: forced source index ${forcedIndex} is out of range [0, ${sources.length - 1}]`, + ); + } + yield sources[forcedIndex]; + return; + } + // Increase pixel size by a small margin. const pixelSize = sliceView.projectionParameters.value.pixelSize * 1.1; // At the smallest scale, all alternative sources must have the same voxel size, which is diff --git a/src/sliceview/chunk_base.ts b/src/sliceview/chunk_base.ts new file mode 100644 index 0000000000..27146c992a --- /dev/null +++ b/src/sliceview/chunk_base.ts @@ -0,0 +1,31 @@ +/** + * @license + * Copyright 2025 Google Inc. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
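`filterVisibleSources` now consults the optional `getForcedSourceIndexOverride` hook before the usual pixel-size based scale selection and rejects out-of-range indices. A minimal sketch of a render layer opting in (illustrative only; the optimistic preview layer earlier in this diff achieves a similar effect by overriding `filterVisibleSources` directly):

```ts
// Illustrative: pin rendering to the finest scale (index 0) while editing is active.
class PinnedScaleRenderLayerSketch {
  constructor(private editingActive: () => boolean) {}

  // undefined -> fall back to normal multiscale selection; 0 -> use only sources[0].
  getForcedSourceIndexOverride(): number | undefined {
    return this.editingActive() ? 0 : undefined;
  }
}
```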
+ */ + +import { ChunkState } from "#src/chunk_manager/base.js"; +import { Chunk } from "#src/chunk_manager/frontend.js"; +import type { SliceViewChunkSource } from "#src/sliceview/frontend.js"; +import type { vec3 } from "#src/util/geom.js"; + +export class SliceViewChunk extends Chunk { + chunkGridPosition: vec3; + declare source: SliceViewChunkSource; + + constructor(source: SliceViewChunkSource, x: any) { + super(source); + this.chunkGridPosition = x.chunkGridPosition; + this.state = ChunkState.SYSTEM_MEMORY; + } +} diff --git a/src/sliceview/compressed_segmentation/chunk_format.ts b/src/sliceview/compressed_segmentation/chunk_format.ts index b025c49720..db2fc6ac05 100644 --- a/src/sliceview/compressed_segmentation/chunk_format.ts +++ b/src/sliceview/compressed_segmentation/chunk_format.ts @@ -26,7 +26,7 @@ import type { ChunkFormatHandler, VolumeChunkSource, } from "#src/sliceview/volume/frontend.js"; -import { registerChunkFormatHandler } from "#src/sliceview/volume/frontend.js"; +import { registerChunkFormatHandler } from "#src/sliceview/volume/registry.js"; import { RefCounted } from "#src/util/disposable.js"; import { vec3, vec3Key } from "#src/util/geom.js"; import type { GL } from "#src/webgl/context.js"; diff --git a/src/sliceview/frontend.ts b/src/sliceview/frontend.ts index b87475efb6..99697b202b 100644 --- a/src/sliceview/frontend.ts +++ b/src/sliceview/frontend.ts @@ -19,8 +19,9 @@ import { ChunkState } from "#src/chunk_manager/base.js"; import type { ChunkManager, ChunkRequesterState, + Chunk, } from "#src/chunk_manager/frontend.js"; -import { Chunk, ChunkSource } from "#src/chunk_manager/frontend.js"; +import { ChunkSource } from "#src/chunk_manager/frontend.js"; import { applyRenderViewportToProjectionMatrix } from "#src/display_context.js"; import type { LayerManager } from "#src/layer/index.js"; import type { @@ -60,6 +61,7 @@ import { SliceViewBase, SliceViewProjectionParameters, } from "#src/sliceview/base.js"; +import type { SliceViewChunk } from "#src/sliceview/chunk_base.js"; import { ChunkLayout } from "#src/sliceview/chunk_layout.js"; import type { SliceViewerState } from "#src/sliceview/panel.js"; import { SliceViewRenderLayer } from "#src/sliceview/renderlayer.js"; @@ -89,6 +91,8 @@ import { getSquareCornersBuffer } from "#src/webgl/square_corners_buffer.js"; import type { RPC } from "#src/worker_rpc.js"; import { registerSharedObjectOwner } from "#src/worker_rpc.js"; +export { SliceViewChunk } from "#src/sliceview/chunk_base.js"; + export type GenericChunkKey = string; class FrontendSliceViewBase extends SliceViewBase< @@ -708,16 +712,11 @@ export interface SliceViewChunkSource { getChunk(x: any): any; } +/* export class SliceViewChunk extends Chunk { - chunkGridPosition: vec3; - declare source: SliceViewChunkSource; - - constructor(source: SliceViewChunkSource, x: any) { - super(source); - this.chunkGridPosition = x.chunkGridPosition; - this.state = ChunkState.SYSTEM_MEMORY; - } + // MOVED to chunk_base.ts to avoid import loop } +*/ /** * Helper for rendering a SliceView that has been pre-rendered to a texture. 
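Since `frontend.ts` re-exports `SliceViewChunk` from its new module, existing call sites keep compiling unchanged; only code that must not pull in the frontend module (to avoid the import loop this move fixes) needs the direct path:

```ts
// Both paths resolve to the same class after the move:
import { SliceViewChunk } from "#src/sliceview/frontend.js"; // legacy path, via re-export
// import { SliceViewChunk } from "#src/sliceview/chunk_base.js"; // direct path, avoids the cycle
```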
@@ -1073,7 +1072,6 @@ export function getVolumetricTransformedSources( effectiveVoxelSize[i] * globalScales[i], ); } - effectiveVoxelSize.fill(1, displayRank); return { layerRank, lowerClipBound, diff --git a/src/sliceview/panel.ts b/src/sliceview/panel.ts index adb9c8cc41..751e83b7d5 100644 --- a/src/sliceview/panel.ts +++ b/src/sliceview/panel.ts @@ -279,6 +279,12 @@ export class SliceViewPanel extends RenderedDataPanel { ); } + handleMouseMove(clientX: number, clientY: number) { + super.handleMouseMove(clientX, clientY); + this.viewer.mouseState.planeNormal = + this.sliceView.projectionParameters.value.viewportNormalInCanonicalCoordinates; + } + translateByViewportPixels(deltaX: number, deltaY: number): void { const { pose } = this.viewer.navigationState; pose.updateDisplayPosition((pos) => { diff --git a/src/sliceview/single_texture_chunk_format.ts b/src/sliceview/single_texture_chunk_format.ts index 53468aedfd..4a281e2545 100644 --- a/src/sliceview/single_texture_chunk_format.ts +++ b/src/sliceview/single_texture_chunk_format.ts @@ -14,11 +14,11 @@ * limitations under the License. */ +import { VolumeChunk } from "#src/sliceview/volume/chunk.js"; import type { VolumeChunkSource, ChunkFormat, } from "#src/sliceview/volume/frontend.js"; -import { VolumeChunk } from "#src/sliceview/volume/frontend.js"; import type { TypedArray } from "#src/util/array.js"; import type { DataType } from "#src/util/data_type.js"; import type { Disposable } from "#src/util/disposable.js"; @@ -146,6 +146,31 @@ export abstract class SingleTextureVolumeChunk< gl.bindTexture(textureTarget, null); } + updateFromCpuData( + gl: GL, + _region?: { offset: Uint32Array; size: Uint32Array }, + ) { + if (this.data == null) return; + + if (this.texture == null) { + this.copyToGPU(gl); + return; + } + + const textureTarget = + textureTargetForSamplerType[this.chunkFormat.shaderSamplerType]; + gl.bindTexture(textureTarget, this.texture); + try { + this.chunkFormat.setTextureData( + gl, + this.textureLayout!, + this.data as unknown as TypedArray, + ); + } finally { + gl.bindTexture(textureTarget, null); + } + } + freeGPUMemory(gl: GL) { super.freeGPUMemory(gl); if (this.data === null) return; diff --git a/src/sliceview/uncompressed_chunk_format.ts b/src/sliceview/uncompressed_chunk_format.ts index 2f1eadbd71..09195806e1 100644 --- a/src/sliceview/uncompressed_chunk_format.ts +++ b/src/sliceview/uncompressed_chunk_format.ts @@ -26,7 +26,7 @@ import type { ChunkFormatHandler, VolumeChunkSource, } from "#src/sliceview/volume/frontend.js"; -import { registerChunkFormatHandler } from "#src/sliceview/volume/frontend.js"; +import { registerChunkFormatHandler } from "#src/sliceview/volume/registry.js"; import type { TypedArray, TypedNumberArrayConstructor, diff --git a/src/sliceview/volume/backend.ts b/src/sliceview/volume/backend.ts index 4c7c39b8de..c17748244d 100644 --- a/src/sliceview/volume/backend.ts +++ b/src/sliceview/volume/backend.ts @@ -15,21 +15,31 @@ */ import type { Chunk } from "#src/chunk_manager/backend.js"; +import { ChunkState } from "#src/chunk_manager/base.js"; import { SliceViewChunk, SliceViewChunkSourceBackend, } from "#src/sliceview/backend.js"; -import type { - DataType, - SliceViewChunkSpecification, -} from "#src/sliceview/base.js"; +import type { SliceViewChunkSpecification } from "#src/sliceview/base.js"; +import { DataType } from "#src/sliceview/base.js"; +import { decodeChannel as decodeChannelUint32 } from "#src/sliceview/compressed_segmentation/decode_uint32.js"; +import { decodeChannel as 
decodeChannelUint64 } from "#src/sliceview/compressed_segmentation/decode_uint64.js"; +import { encodeChannel as encodeChannelUint32 } from "#src/sliceview/compressed_segmentation/encode_uint32.js"; +import { encodeChannel as encodeChannelUint64 } from "#src/sliceview/compressed_segmentation/encode_uint64.js"; import type { VolumeChunkSource as VolumeChunkSourceInterface, VolumeChunkSpecification, } from "#src/sliceview/volume/base.js"; +import { IN_MEMORY_VOLUME_CHUNK_SOURCE_RPC_ID } from "#src/sliceview/volume/base.js"; +import type { TypedArray } from "#src/util/array.js"; +import { TypedArrayBuilder } from "#src/util/array.js"; +import { DATA_TYPE_ARRAY_CONSTRUCTOR } from "#src/util/data_type.js"; import type { vec3 } from "#src/util/geom.js"; +import { HttpError } from "#src/util/http_request.js"; import * as vector from "#src/util/vector.js"; +import type { VoxelChange } from "#src/voxel_annotation/base.js"; import type { RPC } from "#src/worker_rpc.js"; +import { registerSharedObject } from "#src/worker_rpc.js"; export class VolumeChunk extends SliceViewChunk { source: VolumeChunkSource | null = null; @@ -155,5 +165,178 @@ export class VolumeChunkSource computeChunkBounds(chunk: VolumeChunk) { return computeChunkBounds(this, chunk); } + + // Override in data source backends to actually persist the chunk. + // Default throws to ensure write capability is explicitly implemented. + async writeChunk(_chunk: VolumeChunk): Promise { + throw new Error( + "VolumeChunkSource.writeChunk not implemented for this datasource", + ); + } + + async applyEdits( + chunkKey: string, + indices: ArrayLike, + values: ArrayLike, + ): Promise { + if (indices.length !== values.length) { + throw new Error("applyEdits: indices and values length mismatch"); + } + const chunkGridPosition = new Float32Array(chunkKey.split(",").map(Number)); + if ( + chunkGridPosition.length !== this.spec.rank || + chunkGridPosition.some((v) => !Number.isFinite(v)) + ) { + throw new Error(`applyEdits: invalid chunk key ${chunkKey}`); + } + + const chunk = this.getChunk(chunkGridPosition) as VolumeChunk; + if (chunk.state > ChunkState.SYSTEM_MEMORY_WORKER || !chunk.data) { + const ac = new AbortController(); + await this.download(chunk, ac.signal); + } + + if (!chunk.chunkDataSize) { + this.computeChunkBounds(chunk); + } + if (!chunk.chunkDataSize) { + throw new Error( + `applyEdits: Cannot create new chunk ${chunkKey} because its size is unknown.`, + ); + } + + if (!chunk.data) { + const numElements = chunk.chunkDataSize.reduce((a, b) => a * b, 1); + const Ctor = DATA_TYPE_ARRAY_CONSTRUCTOR[this.spec.dataType]; + chunk.data = new (Ctor as any)(numElements) as TypedArray; + } + + const ArrayCtor = DATA_TYPE_ARRAY_CONSTRUCTOR[this.spec.dataType] as any; + const indicesCopy = new Uint32Array(indices); + const newValuesArray = new ArrayCtor(values.length); + for (let i = 0; i < values.length; ++i) { + newValuesArray[i] = + this.spec.dataType === DataType.UINT64 + ? values[i]! + : Number(values[i]!); + } + const oldValuesArray = new ArrayCtor(indices.length); + + if (this.spec.compressedSegmentationBlockSize !== undefined) { + const compressedData = chunk.data as Uint32Array; + const { chunkDataSize } = chunk; + const numElements = + chunkDataSize[0] * chunkDataSize[1] * chunkDataSize[2]; + const { dataType, compressedSegmentationBlockSize: subchunkSize } = + this.spec; + const baseOffset = compressedData.length > 0 ? 
compressedData[0] : 0; + + let uncompressedData: Uint32Array | BigUint64Array; + if (dataType === DataType.UINT32) { + uncompressedData = new Uint32Array(numElements); + if (baseOffset !== 0) { + decodeChannelUint32( + uncompressedData, + compressedData, + baseOffset, + chunkDataSize, + subchunkSize!, + ); + } + } else { + uncompressedData = new BigUint64Array(numElements); + if (baseOffset !== 0) { + decodeChannelUint64( + uncompressedData, + compressedData, + baseOffset, + chunkDataSize, + subchunkSize!, + ); + } + } + + for (let i = 0; i < indices.length; ++i) { + const idx = indices[i]!; + oldValuesArray[i] = uncompressedData[idx]; + if (dataType === DataType.UINT32) { + (uncompressedData as Uint32Array)[idx] = Number(values[i]!); + newValuesArray[i] = Number(values[i]!); + } else { + (uncompressedData as BigUint64Array)[idx] = values[i]! as bigint; + newValuesArray[i] = values[i]! as bigint; + } + } + + const outputBuilder = new TypedArrayBuilder(Uint32Array); + outputBuilder.resize(1); + outputBuilder.data[0] = 1; + + if (dataType === DataType.UINT32) { + encodeChannelUint32( + outputBuilder, + subchunkSize!, + uncompressedData as Uint32Array, + chunkDataSize, + ); + } else { + encodeChannelUint64( + outputBuilder, + subchunkSize!, + uncompressedData as BigUint64Array, + chunkDataSize, + ); + } + + chunk.data = outputBuilder.view; + } else { + const data = chunk.data as TypedArray; + for (let i = 0; i < indices.length; ++i) { + const idx = indices[i]!; + if (idx < 0 || idx >= data.byteLength) { + throw new Error( + `applyEdits: index ${idx} out of bounds for chunk ${chunkKey}`, + ); + } + oldValuesArray[i] = data[idx]; + data[idx] = newValuesArray[i]; + } + } + + const maxRetries = 3; + let lastError: Error | undefined; + + for (let i = 0; i < maxRetries; i++) { + try { + await this.writeChunk(chunk); + return { + indices: indicesCopy, + oldValues: oldValuesArray, + newValues: newValuesArray, + }; + } catch (e) { + lastError = e as Error; + if (e instanceof HttpError && e.status < 500 && e.status !== 429) { + break; + } + await new Promise((resolve) => + setTimeout(resolve, 250 * Math.pow(2, i)), + ); + } + } + throw new Error( + `Failed to write chunk ${chunkKey} after ${maxRetries} attempts.`, + { cause: lastError }, + ); + } } + +@registerSharedObject(IN_MEMORY_VOLUME_CHUNK_SOURCE_RPC_ID) +export class InMemoryVolumeChunkSourceBackend extends VolumeChunkSource { + async download(chunk: VolumeChunk, _signal: AbortSignal): Promise { + chunk.data = null; + return new Promise((_resolve) => {}); + } +} + VolumeChunkSource.prototype.chunkConstructor = VolumeChunk; diff --git a/src/sliceview/volume/base.ts b/src/sliceview/volume/base.ts index 583d8653e7..f5c5e49b74 100644 --- a/src/sliceview/volume/base.ts +++ b/src/sliceview/volume/base.ts @@ -310,3 +310,5 @@ export interface VolumeChunkSource extends SliceViewChunkSource { } export const VOLUME_RPC_ID = "volume"; +export const IN_MEMORY_VOLUME_CHUNK_SOURCE_RPC_ID = + "sliceview/volume/InMemoryChunkSource"; diff --git a/src/sliceview/volume/chunk.ts b/src/sliceview/volume/chunk.ts new file mode 100644 index 0000000000..b764cdfabd --- /dev/null +++ b/src/sliceview/volume/chunk.ts @@ -0,0 +1,39 @@ +/** + * @license + * Copyright 2025 Google Inc. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
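`applyEdits` funnels all persistence through `writeChunk`, which the base class deliberately leaves unimplemented so that only datasources that explicitly support writes can be edited; failed writes are retried with exponential backoff unless the HTTP error is non-retryable. A rough sketch of a backend override, with the encode helper and store handle marked as assumptions (they are not existing neuroglancer APIs):

```ts
import type { VolumeChunk } from "#src/sliceview/volume/backend.js";
import { VolumeChunkSource } from "#src/sliceview/volume/backend.js";

// Assumed stand-ins for this sketch only:
declare function encodeRawChunk(data: ArrayBufferView, size: Uint32Array): Uint8Array;
declare const store: { write(key: string, value: Uint8Array): Promise<void> };

class WritableVolumeChunkSourceSketch extends VolumeChunkSource {
  async download(_chunk: VolumeChunk, _signal: AbortSignal): Promise<void> {
    // Existing fetch/decode logic for this datasource (omitted in this sketch).
  }

  async writeChunk(chunk: VolumeChunk): Promise<void> {
    // Re-encode the edited CPU copy into the stored representation and persist it;
    // applyEdits() retries this call with exponential backoff on transient failures.
    const encoded = encodeRawChunk(chunk.data!, chunk.chunkDataSize!);
    await store.write(chunk.chunkGridPosition.join(","), encoded);
  }
}
```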
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import { SliceViewChunk } from "#src/sliceview/chunk_base.js"; +import type { + ChunkFormat, + VolumeChunkSource, +} from "#src/sliceview/volume/frontend.js"; +import type { GL } from "#src/webgl/context.js"; + +export abstract class VolumeChunk extends SliceViewChunk { + declare source: VolumeChunkSource; + chunkDataSize: Uint32Array; + declare CHUNK_FORMAT_TYPE: ChunkFormat; + + get chunkFormat(): this["CHUNK_FORMAT_TYPE"] { + return this.source.chunkFormat; + } + + constructor(source: VolumeChunkSource, x: any) { + super(source, x); + this.chunkDataSize = x.chunkDataSize || source.spec.chunkDataSize; + } + abstract getValueAt(dataPosition: Uint32Array): any; + abstract updateFromCpuData(gl: GL): void; +} diff --git a/src/sliceview/volume/frontend.ts b/src/sliceview/volume/frontend.ts index a373b5644a..c1d531fd29 100644 --- a/src/sliceview/volume/frontend.ts +++ b/src/sliceview/volume/frontend.ts @@ -16,27 +16,33 @@ import type { ChunkManager } from "#src/chunk_manager/frontend.js"; import type { ChunkChannelAccessParameters } from "#src/render_coordinate_transform.js"; -import type { +import type { SliceViewChunkSpecification } from "#src/sliceview/base.js"; +import { DataType, - SliceViewChunkSpecification, + SLICEVIEW_REQUEST_CHUNK_RPC_ID, } from "#src/sliceview/base.js"; +import type { SliceViewChunk } from "#src/sliceview/frontend.js"; import { MultiscaleSliceViewChunkSource, - SliceViewChunk, SliceViewChunkSource, } from "#src/sliceview/frontend.js"; +import type { UncompressedVolumeChunk } from "#src/sliceview/uncompressed_chunk_format.js"; import type { VolumeChunkSource as VolumeChunkSourceInterface, VolumeChunkSpecification, VolumeSourceOptions, VolumeType, } from "#src/sliceview/volume/base.js"; +import { IN_MEMORY_VOLUME_CHUNK_SOURCE_RPC_ID } from "#src/sliceview/volume/base.js"; +import { VolumeChunk } from "#src/sliceview/volume/chunk.js"; +import { getChunkFormatHandler } from "#src/sliceview/volume/registry.js"; +import type { TypedArray } from "#src/util/array.js"; +import { DATA_TYPE_ARRAY_CONSTRUCTOR } from "#src/util/data_type.js"; import type { Disposable } from "#src/util/disposable.js"; import type { GL } from "#src/webgl/context.js"; import type { ShaderBuilder, ShaderProgram } from "#src/webgl/shader.js"; import { getShaderType, glsl_mixLinear } from "#src/webgl/shader_lib.js"; - -export type VolumeChunkKey = string; +import { registerSharedObjectOwner } from "#src/worker_rpc.js"; export interface ChunkFormat { shaderKey: string; @@ -154,27 +160,6 @@ export interface ChunkFormatHandler extends Disposable { getChunk(source: SliceViewChunkSource, x: any): SliceViewChunk; } -export type ChunkFormatHandlerFactory = ( - gl: GL, - spec: VolumeChunkSpecification, -) => ChunkFormatHandler | null; - -const chunkFormatHandlers = new Array(); - -export function registerChunkFormatHandler(factory: ChunkFormatHandlerFactory) { - chunkFormatHandlers.push(factory); -} - -export function getChunkFormatHandler(gl: GL, spec: VolumeChunkSpecification) { - for (const handler of chunkFormatHandlers) { - const result = handler(gl, spec); - if (result != null) 
{ - return result; - } - } - throw new Error("No chunk format handler found."); -} - export class VolumeChunkSource extends SliceViewChunkSource implements VolumeChunkSourceInterface @@ -212,6 +197,62 @@ export class VolumeChunkSource return this.chunkFormatHandler.chunkFormat; } + async getEnsuredValueAt( + chunkPosition: Float32Array, + channelAccess: ChunkChannelAccessParameters, + ): Promise { + const initialValue = this.getValueAt(chunkPosition, channelAccess); + if (initialValue != null) { + return initialValue; + } + + const { spec } = this; + const { rank, chunkDataSize } = spec; + const chunkGridPosition = this.tempChunkGridPosition; + + for (let chunkDim = 0; chunkDim < rank; ++chunkDim) { + const voxel = chunkPosition[chunkDim]; + const chunkSize = chunkDataSize[chunkDim]; + chunkGridPosition[chunkDim] = Math.floor(voxel / chunkSize); + } + + try { + await this.rpc!.promiseInvoke(SLICEVIEW_REQUEST_CHUNK_RPC_ID, { + source: this.rpcId, + chunkGridPosition: chunkGridPosition, + }); + } catch (e) { + console.error( + `Failed to fetch chunk for position ${chunkPosition.join()}:`, + e, + ); + return null; + } + + return this.getValueAt(chunkPosition, channelAccess); + } + + computeChunkIndices(voxelCoord: Float32Array): { + chunkGridPosition: Float32Array; + positionWithinChunk: Uint32Array; + } { + const { spec } = this; + const { rank, chunkDataSize } = spec; + const chunkGridPosition = this.tempChunkGridPosition; + const positionWithinChunk = this.tempPositionWithinChunk; + + for (let chunkDim = 0; chunkDim < rank; ++chunkDim) { + const voxel = voxelCoord[chunkDim]; + const chunkSize = chunkDataSize[chunkDim]; + const chunkIndex = Math.floor(voxel / chunkSize); + chunkGridPosition[chunkDim] = chunkIndex; + positionWithinChunk[chunkDim] = Math.floor( + voxel - chunkSize * chunkIndex, + ); + } + return { chunkGridPosition, positionWithinChunk }; + } + getValueAt( chunkPosition: Float32Array, channelAccess: ChunkChannelAccessParameters, @@ -267,20 +308,95 @@ export class VolumeChunkSource } } -export abstract class VolumeChunk extends SliceViewChunk { - declare source: VolumeChunkSource; - chunkDataSize: Uint32Array; - declare CHUNK_FORMAT_TYPE: ChunkFormat; +@registerSharedObjectOwner(IN_MEMORY_VOLUME_CHUNK_SOURCE_RPC_ID) +export class InMemoryVolumeChunkSource extends VolumeChunkSource { + constructor( + chunkManager: ChunkManager, + options: { spec: VolumeChunkSpecification }, + ) { + super(chunkManager, options); + this.initializeCounterpart(this.chunkManager.rpc!, {}); + } + + private invalidateGpuData(chunks: Set): void { + if (chunks.size === 0) return; + for (const chunk of chunks) { + chunk.updateFromCpuData(this.chunkManager.chunkQueueManager.gl); + } + this.chunkManager.chunkQueueManager.visibleChunksChanged.dispatch(); + } + + invalidateChunks(keys: string[]): void { + const update = () => { + const validKeys: string[] = []; + for (const key of keys) { + const chunk = this.chunks.get(key); + if (chunk) { + validKeys.push(key); + this.deleteChunk(key); + } + } - get chunkFormat(): this["CHUNK_FORMAT_TYPE"] { - return this.source.chunkFormat; + if (validKeys.length > 0) { + this.chunkManager.chunkQueueManager.visibleChunksChanged.dispatch(); + } + }; + // adding a small delay to avoid flickering since the base source will take some time to download the new data + setTimeout(update, 100); } - constructor(source: VolumeChunkSource, x: any) { - super(source, x); - this.chunkDataSize = x.chunkDataSize || source.spec.chunkDataSize; + applyLocalEdits( + edits: Map, + ): void { + 
const chunksToUpdate = new Set(); + const { dataType } = this.spec; + + for (const [key, edit] of edits.entries()) { + const chunkGridPosition = new Float32Array(key.split(",").map(Number)); + + let chunk = this.chunks.get(key) as UncompressedVolumeChunk | undefined; + if (chunk === undefined) { + chunk = this.getChunk({ + chunkGridPosition: chunkGridPosition, + }) as UncompressedVolumeChunk; + this.addChunk(key, chunk); + } + + if (chunk.data == undefined) { + const numElements = chunk.chunkDataSize.reduce((a, b) => a * b, 1); + const Ctor = DATA_TYPE_ARRAY_CONSTRUCTOR[dataType]; + chunk.data = new (Ctor as any)(numElements) as TypedArray; + } + chunksToUpdate.add(chunk); + + const cpuArray = chunk.data!; + + for (const index of edit.indices) { + const value = edit.value; + switch (dataType) { + case DataType.UINT8: + case DataType.INT8: + case DataType.UINT16: + case DataType.INT16: + case DataType.UINT32: + case DataType.INT32: + case DataType.FLOAT32: + cpuArray[index] = Number(value); + break; + case DataType.UINT64: + (cpuArray as BigUint64Array)[index] = value; + break; + default: + console.warn( + `Unsupported data type for editing: ${DataType[dataType]}`, + ); + break; + } + } + } + + this.invalidateGpuData(chunksToUpdate); } - abstract getValueAt(dataPosition: Uint32Array): any; } export abstract class MultiscaleVolumeChunkSource extends MultiscaleSliceViewChunkSource< @@ -290,3 +406,5 @@ export abstract class MultiscaleVolumeChunkSource extends MultiscaleSliceViewChu abstract dataType: DataType; abstract volumeType: VolumeType; } + +export { VolumeChunk }; diff --git a/src/sliceview/volume/registry.ts b/src/sliceview/volume/registry.ts new file mode 100644 index 0000000000..59a8ae6e64 --- /dev/null +++ b/src/sliceview/volume/registry.ts @@ -0,0 +1,40 @@ +/** + * @license + * Copyright 2025 Google Inc. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
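`applyLocalEdits` takes a map keyed by chunk grid position ("x,y,z") whose entries list the flat voxel indices that changed and the single value they were painted with; missing preview chunks are allocated on demand, the CPU copies are patched, and the touched chunks are re-uploaded via `updateFromCpuData`. A hedged sketch of a call site (the entry type is inferred from how the method reads the map; the surrounding controller code is not part of this diff):

```ts
import type { InMemoryVolumeChunkSource } from "#src/sliceview/volume/frontend.js";

declare const previewSource: InMemoryVolumeChunkSource; // assumed to be in scope

// Optimistically paint a stroke that touches two chunks with value 42.
const edits = new Map<string, { indices: number[]; value: bigint }>();
edits.set("12,7,3", { indices: [0, 1, 64, 65], value: 42n });
edits.set("12,8,3", { indices: [4032, 4033], value: 42n });

previewSource.applyLocalEdits(edits);
```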
+ */ + +import type { VolumeChunkSpecification } from "#src/sliceview/volume/base.js"; +import type { ChunkFormatHandler } from "#src/sliceview/volume/frontend.js"; +import type { GL } from "#src/webgl/context.js"; + +export type ChunkFormatHandlerFactory = ( + gl: GL, + spec: VolumeChunkSpecification, +) => ChunkFormatHandler | null; + +const chunkFormatHandlers = new Array<ChunkFormatHandlerFactory>(); + +export function registerChunkFormatHandler(factory: ChunkFormatHandlerFactory) { + chunkFormatHandlers.push(factory); +} + +export function getChunkFormatHandler(gl: GL, spec: VolumeChunkSpecification) { + for (const handler of chunkFormatHandlers) { + const result = handler(gl, spec); + if (result != null) { + return result; + } + } + throw new Error("No chunk format handler found."); +} diff --git a/src/sliceview/volume/renderlayer.ts b/src/sliceview/volume/renderlayer.ts index e189483934..3ec256939e 100644 --- a/src/sliceview/volume/renderlayer.ts +++ b/src/sliceview/volume/renderlayer.ts @@ -265,6 +265,14 @@ function drawChunk( chunkPosition: vec3, wireFrame: boolean, ) { + if (chunkPosition.some(isNaN)) { + throw new Error( + `Attempted to draw chunk with NaN position: [${chunkPosition.join( + ",", + )}]. This indicates a problem with the layer's coordinate transforms.`, + ); + } + gl.uniform3fv(shader.uniform("uTranslation"), chunkPosition); if (wireFrame) { drawLines(shader.gl, 6, 1); diff --git a/src/ui/dataset_creation.ts b/src/ui/dataset_creation.ts new file mode 100644 index 0000000000..8b4bd6df9f --- /dev/null +++ b/src/ui/dataset_creation.ts @@ -0,0 +1,356 @@ +/** + * @license + * Copyright 2025 Google Inc. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
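The new registry module keeps the first-match factory list that used to live in `frontend.ts`: each factory inspects the chunk specification and either returns a handler or `null` so the next factory can try, and `getChunkFormatHandler` throws if nothing matches. A minimal registration sketch (the predicate and handler class are placeholders; the real registrations are the updated imports in `uncompressed_chunk_format.ts` and `compressed_segmentation/chunk_format.ts`):

```ts
import { registerChunkFormatHandler } from "#src/sliceview/volume/registry.js";
import type { ChunkFormatHandler } from "#src/sliceview/volume/frontend.js";
import type { VolumeChunkSpecification } from "#src/sliceview/volume/base.js";
import type { GL } from "#src/webgl/context.js";

// Placeholders for illustration only:
declare function specIsSupported(spec: VolumeChunkSpecification): boolean;
declare const MyChunkFormatHandler: new (
  gl: GL,
  spec: VolumeChunkSpecification,
) => ChunkFormatHandler;

registerChunkFormatHandler((gl, spec) =>
  specIsSupported(spec) ? new MyChunkFormatHandler(gl, spec) : null,
);
```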
+ */ + +import type { + CreateDataSourceOptions, + CommonCreationMetadata, + DataSourceCreationState, +} from "#src/datasource/index.js"; +import type { LayerListSpecification } from "#src/layer/index.js"; +import { Overlay } from "#src/overlay.js"; +import { StatusMessage } from "#src/status.js"; +import { TrackableValue } from "#src/trackable_value.js"; +import { TrackableVec3 } from "#src/trackable_vec3.js"; +import { DataType } from "#src/util/data_type.js"; +import { removeChildren } from "#src/util/dom.js"; +import { vec3 } from "#src/util/geom.js"; +import { verifyInt, verifyString } from "#src/util/json.js"; +import { CompoundTrackable, type Trackable } from "#src/util/trackable.js"; +import { TrackableEnum } from "#src/util/trackable_enum.js"; +import { DependentViewWidget } from "#src/widget/dependent_view_widget.js"; +import { EnumSelectWidget } from "#src/widget/enum_widget.js"; +import { NumberInputWidget } from "#src/widget/number_input_widget.js"; +import { TextInputWidget } from "#src/widget/text_input.js"; +import { Vec3Widget } from "#src/widget/vec3_entry_widget.js"; + +function createControlForTrackable(trackable: Trackable): HTMLElement { + if (trackable instanceof TrackableVec3) { + return new Vec3Widget(trackable).element; + } + if (trackable instanceof TrackableEnum) { + return new EnumSelectWidget(trackable).element; + } + if (trackable instanceof TrackableValue) { + const value = trackable.value; + if (typeof value === "number") { + return new NumberInputWidget(trackable as TrackableValue).element; + } + if (typeof value === "string") { + return new TextInputWidget(trackable as TrackableValue).element; + } + } + const unsupportedElement = document.createElement("div"); + unsupportedElement.textContent = `Unsupported control type`; + return unsupportedElement; +} + +class CommonMetadataState extends CompoundTrackable { + shape = new TrackableVec3( + vec3.fromValues(42000, 42000, 42000), + vec3.fromValues(42000, 42000, 42000), + ); + dataType = new TrackableEnum(DataType, DataType.UINT32); + voxelSize = new TrackableVec3( + vec3.fromValues(8, 8, 8), + vec3.fromValues(8, 8, 8), + ); + voxelUnit = new TrackableValue("nm", verifyString); + numScales = new TrackableValue(6, verifyInt); + downsamplingFactor = new TrackableVec3( + vec3.fromValues(2, 2, 2), + vec3.fromValues(2, 2, 2), + ); + name = new TrackableValue("new-dataset", verifyString); + + constructor() { + super(); + this.add("shape", this.shape); + this.add("dataType", this.dataType); + this.add("voxelSize", this.voxelSize); + this.add("voxelUnit", this.voxelUnit); + this.add("numScales", this.numScales); + this.add("downsamplingFactor", this.downsamplingFactor); + this.add("name", this.name); + } + + toJSON(): CommonCreationMetadata { + return { + shape: Array.from(this.shape.value), + dataType: this.dataType.value, + voxelSize: Array.from(this.voxelSize.value), + voxelUnit: this.voxelUnit.value, + numScales: this.numScales.value, + downsamplingFactor: Array.from(this.downsamplingFactor.value), + name: this.name.value, + }; + } + + restoreState(_obj: any) {} + reset() {} +} + +export class DatasetCreationDialog extends Overlay { + state = new CommonMetadataState(); + dataSourceType = new TrackableValue("", verifyString); + private dataSourceOptions: DataSourceCreationState | undefined; + + addControl = (trackable: Trackable, label: string, parent: HTMLElement) => { + const container = document.createElement("div"); + container.style.display = "flex"; + const labelElement = document.createElement("label"); + 
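`CommonMetadataState.toJSON()` flattens the trackables into the plain `CommonCreationMetadata` object that is later passed to the provider's `create()` call. With the defaults above it would look roughly like this (a sketch; the dataType field carries the enum value):

```ts
import { DataType } from "#src/util/data_type.js";

// Approximate shape of `metadata.common` built from the default trackable values above.
const exampleCommonMetadata = {
  shape: [42000, 42000, 42000],
  dataType: DataType.UINT32,
  voxelSize: [8, 8, 8],
  voxelUnit: "nm",
  numScales: 6,
  downsamplingFactor: [2, 2, 2],
  name: "new-dataset",
};
```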
labelElement.textContent = label + ": "; + container.appendChild(labelElement); + const ctrl = createControlForTrackable(trackable); + const ctrlContainer = document.createElement("div"); + ctrlContainer.style.display = "flex"; + ctrlContainer.style.flexGrow = "1"; + ctrlContainer.style.justifyContent = "flex-end"; + ctrlContainer.appendChild(ctrl); + container.appendChild(ctrlContainer); + parent.appendChild(container); + }; + + constructor( + public manager: LayerListSpecification, + public url: string, + ) { + super(); + + const { content } = this; + + const titleElement = document.createElement("h2"); + titleElement.textContent = "Create New Dataset"; + content.appendChild(titleElement); + + const topControls = document.createElement("div"); + topControls.style.display = "flex"; + topControls.style.flexDirection = "column"; + + content.appendChild(topControls); + + const dataSourceSelect = document.createElement("select"); + const creatableProviders = Array.from( + this.manager.dataSourceProviderRegistry.kvStoreBasedDataSources.values(), + ).filter((p) => p.creationState !== undefined); + + creatableProviders.forEach((p) => { + const option = document.createElement("option"); + option.value = p.scheme; + option.textContent = p.description || p.scheme; + dataSourceSelect.appendChild(option); + }); + + if (creatableProviders.length > 0) { + this.dataSourceType.value = creatableProviders[0].scheme; + } else { + const noProviderMessage = document.createElement("div"); + noProviderMessage.textContent = + "No creatable data source types are configured."; + content.appendChild(noProviderMessage); + } + + const dsLabel = document.createElement("label"); + dsLabel.textContent = "Data Source Type: "; + topControls.appendChild(dsLabel); + topControls.appendChild(dataSourceSelect); + + this.registerEventListener(dataSourceSelect, "change", () => { + this.dataSourceType.value = dataSourceSelect.value; + }); + + topControls.appendChild( + this.registerDisposer( + new DependentViewWidget( + { + changed: this.manager.rootLayers.layersChanged, + get value() { + return null; + }, + }, + (_value, parentElement) => { + const compatibleLayers = + this.manager.rootLayers.managedLayers.filter( + (layer) => layer.getCreationMetadata() !== undefined, + ); + if (compatibleLayers.length === 0) return; + + const label = document.createElement("label"); + label.textContent = "Copy settings from layer: "; + parentElement.appendChild(label); + + const select = document.createElement("select"); + const defaultOption = document.createElement("option"); + defaultOption.textContent = "None"; + defaultOption.value = ""; + select.appendChild(defaultOption); + + compatibleLayers.forEach((layer) => { + const option = document.createElement("option"); + option.textContent = layer.name; + option.value = layer.name; + select.appendChild(option); + }); + + this.registerEventListener(select, "change", () => { + if (!select.value) return; + const layer = this.manager.rootLayers.getLayerByName( + select.value, + ); + if (layer) { + const metadata = layer.getCreationMetadata(); + if (metadata) { + this.state.shape.value = vec3.fromValues( + metadata.shape[0], + metadata.shape[1], + metadata.shape[2], + ); + (this.state.dataType as TrackableEnum).value = + metadata.dataType; + this.state.voxelSize.value = vec3.fromValues( + metadata.voxelSize[0], + metadata.voxelSize[1], + metadata.voxelSize[2], + ); + this.state.voxelUnit.value = metadata.voxelUnit; + this.state.name.value = metadata.name; + } + } + }); + 
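The dialog only offers providers whose `creationState` is defined and later invokes `provider.create(options)`, so a datasource that wants to appear here needs a scheme, an optional description, provider-specific creation trackables, and a `create` implementation. An illustrative sketch of that contract (the names and the scheme are placeholders; the authoritative interface lives in `#src/datasource/index.js` and is not reproduced in this diff):

```ts
import type {
  CreateDataSourceOptions,
  DataSourceCreationState,
} from "#src/datasource/index.js";

// Provider-specific trackables rendered as extra form controls (assumed to exist):
declare const myCreationState: DataSourceCreationState;

const exampleCreatableProvider = {
  scheme: "my-format",                 // shown in the "Data Source Type" dropdown
  description: "My format (writable)", // optional human-readable label
  creationState: myCreationState,
  async create(options: CreateDataSourceOptions) {
    // Write options.metadata.common and options.metadata.sourceRelated to
    // options.kvStoreUrl, creating the dataset layout for this format.
  },
};
```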
parentElement.appendChild(select); + }, + ), + ).element, + ); + + const commonFields = document.createElement("fieldset"); + const commonLegend = document.createElement("legend"); + commonLegend.textContent = "Common Metadata"; + commonFields.appendChild(commonLegend); + content.appendChild(commonFields); + + this.addControl(this.state.name, "Name", commonFields); + this.addControl(this.state.shape, "Shape", commonFields); + this.addControl(this.state.dataType, "Data Type", commonFields); + this.addControl(this.state.voxelSize, "Voxel Size", commonFields); + this.addControl(this.state.voxelUnit, "Voxel Unit", commonFields); + this.addControl(this.state.numScales, "Number of Scales", commonFields); + this.addControl( + this.state.downsamplingFactor, + "Downsampling Factor", + commonFields, + ); + + const optionsContainer = document.createElement("fieldset"); + const optionsLegend = document.createElement("legend"); + optionsContainer.appendChild(optionsLegend); + const optionsGrid = document.createElement("div"); + optionsContainer.appendChild(optionsGrid); + content.appendChild(optionsContainer); + + this.registerDisposer( + this.dataSourceType.changed.add(() => { + this.updateDataSourceOptions(optionsGrid, optionsLegend); + }), + ); + this.updateDataSourceOptions(optionsGrid, optionsLegend); + + const actions = document.createElement("div"); + const createButton = document.createElement("button"); + createButton.textContent = "Create"; + this.registerEventListener(createButton, "click", () => + this.createDataset(), + ); + actions.appendChild(createButton); + content.appendChild(actions); + } + + private updateDataSourceOptions( + container: HTMLElement, + legend: HTMLLegendElement, + ) { + if (this.dataSourceOptions) { + this.dataSourceOptions.dispose(); + this.dataSourceOptions = undefined; + } + removeChildren(container); + const provider = + this.manager.dataSourceProviderRegistry.getKvStoreBasedProvider( + this.dataSourceType.value, + ); + legend.textContent = `${provider?.description || this.dataSourceType.value} Metadata`; + const creationState = provider?.creationState as + | DataSourceCreationState + | undefined; + if (creationState) { + this.dataSourceOptions = creationState; + for (const key of Object.keys(creationState)) { + if ( + key === "changed" || + key === "toJSON" || + key === "restoreState" || + key === "reset" + ) + continue; + const trackable = (creationState as any)[key]; + if (trackable && typeof trackable.changed?.add === "function") { + this.addControl(trackable, key, container); + } + } + } + } + + private async createDataset() { + const provider = + this.manager.dataSourceProviderRegistry.getKvStoreBasedProvider( + this.dataSourceType.value, + ); + if (!provider?.create) { + StatusMessage.showTemporaryMessage( + `Data source '${this.dataSourceType.value}' does not support creation.`, + 5000, + ); + return; + } + + const options: CreateDataSourceOptions = { + kvStoreUrl: this.url, + registry: this.manager.dataSourceProviderRegistry, + metadata: { + common: this.state.toJSON(), + sourceRelated: this.dataSourceOptions, + }, + }; + + StatusMessage.forPromise(provider.create(options), { + initialMessage: `Creating dataset at ${this.url}...`, + delay: true, + errorPrefix: "Creation failed: ", + }).then(() => { + StatusMessage.showTemporaryMessage("Dataset created successfully.", 3000); + for (const layer of this.manager.rootLayers.managedLayers) { + if (layer.layer) { + for (const ds of layer.layer.dataSources) { + if (ds.spec.url === this.url) { + ds.spec = { 
...ds.spec }; + this.dispose(); + return; + } + } + } + } + }); + } +} diff --git a/src/ui/layer_data_sources_tab.css b/src/ui/layer_data_sources_tab.css index a18860239f..e4df9dcaa3 100644 --- a/src/ui/layer_data_sources_tab.css +++ b/src/ui/layer_data_sources_tab.css @@ -19,6 +19,10 @@ flex-direction: column; } +.neuroglancer-layer-data-source-writable-label { + margin-right: 5px; +} + .neuroglancer-layer-data-sources-container { overflow-y: auto; display: flex; diff --git a/src/ui/layer_data_sources_tab.ts b/src/ui/layer_data_sources_tab.ts index 7b53214290..cc92773039 100644 --- a/src/ui/layer_data_sources_tab.ts +++ b/src/ui/layer_data_sources_tab.ts @@ -37,9 +37,15 @@ import { createImageLayerAsMultiChannel } from "#src/layer/multi_channel_setup.j import { MeshSource, MultiscaleMeshSource } from "#src/mesh/frontend.js"; import { SkeletonSource } from "#src/skeleton/frontend.js"; import { MultiscaleVolumeChunkSource } from "#src/sliceview/volume/frontend.js"; -import { TrackableBooleanCheckbox } from "#src/trackable_boolean.js"; +import { + ElementVisibilityFromTrackableBoolean, + TrackableBooleanCheckbox, +} from "#src/trackable_boolean.js"; import type { WatchableValueInterface } from "#src/trackable_value.js"; -import { WatchableValue } from "#src/trackable_value.js"; +import { + makeCachedDerivedWatchableValue, + WatchableValue, +} from "#src/trackable_value.js"; import type { DebouncedFunction } from "#src/util/animation_frame_debounce.js"; import { animationFrameDebounce } from "#src/util/animation_frame_debounce.js"; import { DataType } from "#src/util/data_type.js"; @@ -193,22 +199,59 @@ export class DataSourceSubsourceView extends RefCounted { this.registerDisposer( loadedSource.enabledSubsourcesChanged.add(updateActiveAttribute), ); + const enabledState: WatchableValueInterface & { + set value(v: boolean); + } = { + get value() { + return loadedSubsource.enabled; + }, + set value(value: boolean) { + if (loadedSubsource.enabled === value) return; + loadedSubsource.enabled = value; + loadedSource.enableDefaultSubsources = false; + loadedSource.enabledSubsourcesChanged.dispatch(); + }, + changed: loadedSource.enabledSubsourcesChanged, + }; const enabledCheckbox = this.registerDisposer( - new TrackableBooleanCheckbox({ - get value() { - return loadedSubsource.enabled; - }, - set value(value: boolean) { - loadedSubsource.enabled = value; - loadedSource.enableDefaultSubsources = false; - loadedSource.enabledSubsourcesChanged.dispatch(); - }, - changed: loadedSource.enabledSubsourcesChanged, - }), + new TrackableBooleanCheckbox(enabledState), ); sourceInfoLine.classList.add("neuroglancer-layer-data-sources-info-line"); sourceInfoLine.appendChild(enabledCheckbox.element); + if ( + loadedSubsource.subsourceEntry.subsource.volume instanceof + MultiscaleVolumeChunkSource + ) { + const writableCheckbox = this.registerDisposer( + new TrackableBooleanCheckbox(loadedSubsource.writable), + ); + writableCheckbox.element.title = "Enable voxel editing for this source"; + const writableLabel = document.createElement("label"); + writableLabel.className = "neuroglancer-layer-data-source-writable-label"; + writableLabel.appendChild(writableCheckbox.element); + writableLabel.appendChild(document.createTextNode("[Writable?]")); + + this.registerDisposer( + new ElementVisibilityFromTrackableBoolean( + makeCachedDerivedWatchableValue( + (enabled, isPotentiallyWritable) => + enabled && isPotentiallyWritable, + [ + enabledState, + new WatchableValue( + loadedSubsource.subsourceEntry.subsource + 
.isPotentiallyWritable ?? false, + ), + ], + ), + writableLabel, + ), + ); + + sourceInfoLine.appendChild(writableLabel); + } + const sourceId = document.createElement("span"); sourceId.classList.add("neuroglancer-layer-data-sources-source-id"); const { id } = loadedSubsource.subsourceEntry; diff --git a/src/ui/voxel_annotations.ts b/src/ui/voxel_annotations.ts new file mode 100644 index 0000000000..e974f643f1 --- /dev/null +++ b/src/ui/voxel_annotations.ts @@ -0,0 +1,548 @@ +/** + * @license + * Copyright 2025. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import type { MouseSelectionState } from "#src/layer/index.js"; +import type { + UserLayerWithVoxelEditing, + VoxelEditingContext, +} from "#src/layer/vox/index.js"; +import type { ChunkChannelAccessParameters } from "#src/render_coordinate_transform.js"; +import { RenderedDataPanel } from "#src/rendered_data_panel.js"; +import { SliceViewPanel } from "#src/sliceview/panel.js"; +import { StatusMessage } from "#src/status.js"; +import { LayerTool, registerTool, type ToolActivation } from "#src/ui/tool.js"; +import { vec3 } from "#src/util/geom.js"; +import { EventActionMap } from "#src/util/mouse_bindings.js"; +import { startRelativeMouseDrag } from "#src/util/mouse_drag.js"; +import { BrushShape } from "#src/voxel_annotation/base.js"; + +export const BRUSH_TOOL_ID = "vox-brush"; +export const FLOODFILL_TOOL_ID = "vox-flood-fill"; +export const ADOPT_VOXEL_LABEL_TOOL_ID = "vox-pick-label"; + +const VOX_TOOL_INPUT_MAP = EventActionMap.fromObject({ + ["at:control+mousedown0"]: "paint-voxels", +}); + +abstract class BaseVoxelTool extends LayerTool { + protected latestMouseState: MouseSelectionState | null = null; + + protected getEditingContext(): VoxelEditingContext | undefined { + const it = this.layer.editingContexts.values(); + let ctx: VoxelEditingContext; + while ((ctx = it.next().value) !== undefined) { + if (ctx.writable) return ctx; + } + return undefined; + } + + protected getPoint(mouseState: MouseSelectionState): Int32Array | undefined { + // TODO: maybe getVoxelPositionFromMouse() would best fit in the userLayer + const editContext = this.getEditingContext(); + if (editContext === undefined) return undefined; + const vox = editContext.getVoxelPositionFromMouse(mouseState) as + | Float32Array + | undefined; + if (!mouseState?.active || !vox) return undefined; + if (!mouseState.planeNormal) return; + const planeNormal = editContext.transformGlobalToVoxelNormal( + mouseState.planeNormal, + ); + if (!mouseState?.active || !vox || !planeNormal) return undefined; + const CHUNK_POSITION_EPSILON = 1e-3; + const shiftedVox = new Float32Array(3); + for (let i = 0; i < 3; ++i) { + shiftedVox[i] = + vox[i] + CHUNK_POSITION_EPSILON * Math.abs(planeNormal[i]); + } + return new Int32Array([ + Math.floor(shiftedVox[0]), + Math.floor(shiftedVox[1]), + Math.floor(shiftedVox[2]), + ]); + } + + protected linePoints(a: Int32Array, b: Int32Array): Float32Array[] { + const dx = b[0] - a[0]; + const dy = b[1] - a[1]; + const dz = b[2] - 
a[2]; + const steps = Math.max(Math.abs(dx), Math.abs(dy), Math.abs(dz)); + const out: Float32Array[] = []; + if (steps <= 0) return out; + let lastX = a[0], + lastY = a[1], + lastZ = a[2]; + for (let s = 1; s <= steps; ++s) { + const x = Math.round(a[0] + (dx * s) / steps); + const y = Math.round(a[1] + (dy * s) / steps); + const z = Math.round(a[2] + (dz * s) / steps); + if (x !== lastX || y !== lastY || z !== lastZ) { + out.push(new Float32Array([x, y, z])); + lastX = x; + lastY = y; + lastZ = z; + } + } + return out; + } + + activate(activation: ToolActivation): void { + activation.bindInputEventMap(VOX_TOOL_INPUT_MAP); + + activation.bindAction("paint-voxels", (event) => { + event.stopPropagation(); + this.activationCallback(activation); + startRelativeMouseDrag( + event.detail as MouseEvent, + () => { + this.latestMouseState = this.mouseState; + }, + () => { + this.deactivationCallback(activation); + }, + ); + + return true; + }); + } + + abstract activationCallback(activation: ToolActivation): void; + abstract deactivationCallback(activation: ToolActivation): void; + + protected setCursor(cursor: string) { + for (const panel of this.layer.manager.root.display.panels) { + panel.element.style.setProperty("cursor", cursor, "important"); + } + } + + protected resetCursor() { + for (const panel of this.layer.manager.root.display.panels) { + panel.element.style.removeProperty("cursor"); + } + } +} + +export class VoxelBrushTool extends BaseVoxelTool { + private isDrawing = false; + private lastPoint: Int32Array | undefined; + private mouseDisposer: (() => void) | undefined; + private currentMouseState: MouseSelectionState | undefined; + private animationFrameHandle: number | null = null; + + activate(activation: ToolActivation) { + super.activate(activation); + this.updateBrushOutline(); + + activation.registerDisposer(() => { + this.getActivePanel()?.clearOverlay(); + this.resetCursor(); + }); + activation.registerDisposer( + this.mouseState.changed.add(() => { + this.updateBrushOutline(); + }), + ); + } + + private getActivePanel(): RenderedDataPanel | undefined { + let activePanel: RenderedDataPanel | undefined; + for (const panel of this.layer.manager.root.display.panels) { + if (panel instanceof RenderedDataPanel) { + if (panel.mouseX !== -1 && panel instanceof SliceViewPanel) { + activePanel = panel; + } else { + panel.clearOverlay(); + } + } + } + return activePanel; + } + + private updateBrushOutline() { + const panel = this.getActivePanel(); + if (!panel) return; + + const zoom = panel.navigationState.zoomFactor.value; + const radiusInVoxels = this.layer.voxBrushRadius.value; + const radiusInPixels = radiusInVoxels / zoom; + + panel.drawBrushCursor(panel.mouseX, panel.mouseY, radiusInPixels); + } + + activationCallback(_activation: ToolActivation): void { + if (this.getEditingContext() === undefined) { + StatusMessage.showTemporaryMessage( + 'Voxel editing is not available. 
Please select a writable volume source in the "Source" tab.', + 5000, + ); + this.stopDrawing(); + return; + } + this.startDrawing(this.mouseState); + } + + deactivationCallback(_activation: ToolActivation): void { + this.stopDrawing(); + } + + constructor(layer: UserLayerWithVoxelEditing) { + super(layer, /*toggle=*/ true); + } + + toJSON() { + return BRUSH_TOOL_ID; + } + + get description() { + return "Brush tool"; + } + + private drawLoop = (): void => { + if (!this.isDrawing) { + this.animationFrameHandle = null; + return; + } + if (this.latestMouseState === null) { + this.animationFrameHandle = requestAnimationFrame(this.drawLoop); + return; + } + const cur = this.getPoint(this.latestMouseState); + this.latestMouseState = null; + if (cur) { + const last = this.lastPoint; + if ( + last && + (cur[0] !== last[0] || cur[1] !== last[1] || cur[2] !== last[2]) + ) { + const points = this.linePoints(last, cur); + if (points.length > 0) { + const value = this.layer.getVoxelPaintValue( + this.layer.voxEraseMode.value, + ); + this.paintPoints(points, value); + } + } + this.lastPoint = cur; + } + this.animationFrameHandle = requestAnimationFrame(this.drawLoop); + }; + + private startDrawing(mouseState: MouseSelectionState) { + if (this.isDrawing) return; + this.isDrawing = true; + this.currentMouseState = mouseState; + + const start = this.getPoint(mouseState); + if (!start) { + throw new Error( + "startDrawing: could not compute a starting voxel position from mouse", + ); + } + + const value = this.layer.getVoxelPaintValue(this.layer.voxEraseMode.value); + + this.paintPoints([new Float32Array([start[0], start[1], start[2]])], value); + this.lastPoint = start; + this.latestMouseState = mouseState; + + this.mouseDisposer = mouseState.changed.add(() => { + this.latestMouseState = mouseState; + this.currentMouseState = mouseState; + }); + + if (this.animationFrameHandle === null) { + this.animationFrameHandle = requestAnimationFrame(this.drawLoop); + } + } + + private stopDrawing() { + if (!this.isDrawing) return; + this.isDrawing = false; + this.lastPoint = undefined; + if (this.animationFrameHandle !== null) { + cancelAnimationFrame(this.animationFrameHandle); + this.animationFrameHandle = null; + } + if (this.mouseDisposer) { + this.mouseDisposer(); + this.mouseDisposer = undefined; + } + } + + private paintPoints(points: Float32Array[], value: bigint) { + const radius = Math.max( + 1, + Math.floor(this.layer.voxBrushRadius.value ?? 3), + ); + const editContext = this.getEditingContext(); + if (editContext === undefined) { + throw new Error("editContext is undefined"); + } + const shapeEnum = this.layer.voxBrushShape.value; + let basis = undefined as undefined | { u: Float32Array; v: Float32Array }; + if (shapeEnum === BrushShape.DISK && this.currentMouseState?.planeNormal) { + const n = editContext.transformGlobalToVoxelNormal( + this.currentMouseState.planeNormal, + ); + const u = vec3.create(); + const tempVec = + Math.abs(vec3.dot(n, vec3.fromValues(1, 0, 0))) < 0.9 + ? 
vec3.fromValues(1, 0, 0) + : vec3.fromValues(0, 1, 0); + vec3.cross(u, tempVec, n); + vec3.normalize(u, u); + const v = vec3.cross(vec3.create(), n, u); + vec3.normalize(v, v); + basis = { u, v }; + } + + for (const p of points) + editContext.controller?.paintBrushWithShape( + p, + radius, + value, + shapeEnum, + basis, + ); + } +} + +const floodFillSVG = + ` + + + + + + + + +`.replace(/\s\s+/g, " "); + +const floodFillCursor = `url('data:image/svg+xml;utf8,${encodeURIComponent(floodFillSVG)}') 4 19, crosshair`; + +export class VoxelFloodFillTool extends BaseVoxelTool { + activate(activation: ToolActivation) { + super.activate(activation); + this.setCursor(floodFillCursor); + activation.registerDisposer(() => { + this.resetCursor(); + }); + } + + activationCallback(_activation: ToolActivation): void { + const editContext = this.getEditingContext(); + if (editContext === undefined) { + StatusMessage.showTemporaryMessage( + 'Voxel editing is not available. Please select a writable volume source in the "Source" tab.', + 5000, + ); + return; + } + const seed = this.getPoint(this.mouseState); + if (!this.mouseState.planeNormal) return; + const planeNormal = editContext.transformGlobalToVoxelNormal( + this.mouseState.planeNormal, + ); + if (!seed || !planeNormal) return; + try { + const value = this.layer.getVoxelPaintValue( + this.layer.voxEraseMode.value, + ); + const max = Number(this.layer.voxFloodMaxVoxels.value); + if (!Number.isFinite(max) || max <= 0) { + throw new Error("Invalid max fill voxels setting"); + } + if (!editContext.controller) + throw new Error("Error: No controller available"); + editContext.controller + .floodFillPlane2D( + new Float32Array(seed), + value, + Math.floor(max), + planeNormal, + ) + .catch((e: any) => + StatusMessage.showTemporaryMessage(String(e?.message ?? e)), + ); + } catch (e: any) { + StatusMessage.showTemporaryMessage(String(e?.message ?? 
e)); + } + } + + deactivationCallback(_activation: ToolActivation): void { + return; + } + + constructor(layer: UserLayerWithVoxelEditing) { + super(layer, /*toggle=*/ true); + } + + toJSON() { + return FLOODFILL_TOOL_ID; + } + + get description() { + return "Flood fill tool"; + } +} + +const pickerSVG = ` + + + + + +`; + +const pickerCursor = `url('data:image/svg+xml;utf8,${encodeURIComponent(pickerSVG)}') 4 19, crosshair`; + +export class AdoptVoxelValueTool extends LayerTool { + private lastPickPosition: Float32Array | undefined; + private lastCheckedSourceIndex = -1; + + readonly singleChannelAccess: ChunkChannelAccessParameters = { + numChannels: 1, + channelSpaceShape: new Uint32Array([]), + chunkChannelDimensionIndices: [], + chunkChannelCoordinates: new Uint32Array([0]), + }; + + constructor(layer: UserLayerWithVoxelEditing) { + super(layer, /*toggle=*/ false); + } + + protected setCursor(cursor: string) { + for (const panel of this.layer.manager.root.display.panels) { + panel.element.style.setProperty("cursor", cursor, "important"); + } + } + + protected resetCursor() { + for (const panel of this.layer.manager.root.display.panels) { + panel.element.style.removeProperty("cursor"); + } + } + + toJSON() { + return ADOPT_VOXEL_LABEL_TOOL_ID; + } + + get description() { + return "Picking tool"; + } + + activate(activation: ToolActivation): void { + if (!this.mouseState?.active) return; + this.setCursor(pickerCursor); + activation.registerDisposer(() => { + this.resetCursor(); + }); + + const currentPosition = this.mouseState.position.slice() as vec3; + + if ( + this.lastPickPosition === undefined || + !vec3.equals(this.lastPickPosition as vec3, currentPosition) + ) { + this.lastPickPosition = currentPosition; + this.lastCheckedSourceIndex = -1; + } + + const allContexts = Array.from(this.layer.editingContexts.values()); + + if (allContexts.length === 0) { + StatusMessage.showTemporaryMessage( + "No volume sources found in this layer.", + 3000, + ); + return; + } + + const numSources = allContexts.length; + const startIndex = this.lastCheckedSourceIndex + 1; + + const checkNextSource = async () => { + for (let i = 0; i < numSources; ++i) { + const sourceIndex = (startIndex + i) % numSources; + const context = allContexts[sourceIndex]!; + + const voxelCoord = context.getVoxelPositionFromMouse(this.mouseState); + if (voxelCoord === undefined) continue; + + const source = context.primarySource.getSources( + this.layer.getIdentitySliceViewSourceOptions(), + )[0][0]!.chunkSource; + + const valueResult = await source.getEnsuredValueAt( + voxelCoord, + this.singleChannelAccess, + ); + const value = Array.isArray(valueResult) ? 
valueResult[0] : valueResult; + const bigValue = BigInt(value || 0); + + if (bigValue !== 0n) { + this.layer.setVoxelPaintValue(bigValue); + this.lastCheckedSourceIndex = sourceIndex; + StatusMessage.showTemporaryMessage( + `Adopted value: ${bigValue} (from source ${sourceIndex + 1}/${numSources})`, + 3000, + ); + return; + } + } + + this.lastCheckedSourceIndex = -1; + StatusMessage.showTemporaryMessage( + "No further segments found at this position.", + 3000, + ); + }; + + StatusMessage.forPromise(checkNextSource(), { + initialMessage: "Picking voxel value...", + delay: true, + errorPrefix: "Error picking value: ", + }); + } +} + +export function registerVoxelTools(LayerCtor: any) { + registerTool( + LayerCtor, + BRUSH_TOOL_ID, + (layer: UserLayerWithVoxelEditing) => new VoxelBrushTool(layer), + ); + registerTool( + LayerCtor, + FLOODFILL_TOOL_ID, + (layer: UserLayerWithVoxelEditing) => new VoxelFloodFillTool(layer), + ); + registerTool( + LayerCtor, + ADOPT_VOXEL_LABEL_TOOL_ID, + (layer: UserLayerWithVoxelEditing) => new AdoptVoxelValueTool(layer), + ); +} diff --git a/src/util/gzip.ts b/src/util/gzip.ts index 4b412bf5ca..c62d86a9e7 100644 --- a/src/util/gzip.ts +++ b/src/util/gzip.ts @@ -67,3 +67,18 @@ export async function maybeDecompressGzip(data: ArrayBuffer | ArrayBufferView) { } return byteView; } + +export async function encodeGzip( + data: Uint8Array | ArrayBuffer, + format: CompressionFormat, +): Promise { + const readableStream = new ReadableStream({ + start(controller) { + controller.enqueue(data); + controller.close(); + }, + }); + const compressionStream = new CompressionStream(format); + const compressedStream = readableStream.pipeThrough(compressionStream); + return await new Response(compressedStream).arrayBuffer(); +} diff --git a/src/voxel_annotation/PreviewMultiscaleChunkSource.ts b/src/voxel_annotation/PreviewMultiscaleChunkSource.ts new file mode 100644 index 0000000000..43f3b63153 --- /dev/null +++ b/src/voxel_annotation/PreviewMultiscaleChunkSource.ts @@ -0,0 +1,73 @@ +/** + * @license + * Copyright 2025 Google Inc. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import type { ChunkManager } from "#src/chunk_manager/frontend.js"; +import type { SliceViewSingleResolutionSource } from "#src/sliceview/frontend.js"; +import type { + VolumeChunkSpecification, + VolumeSourceOptions, + DataType, + VolumeType, +} from "#src/sliceview/volume/base.js"; +import { + InMemoryVolumeChunkSource, + MultiscaleVolumeChunkSource, + type VolumeChunkSource, +} from "#src/sliceview/volume/frontend.js"; + +export class VoxelPreviewMultiscaleSource extends MultiscaleVolumeChunkSource { + dataType: DataType; + volumeType: VolumeType; + rank: number; + + constructor( + chunkManager: ChunkManager, + public primarySource: MultiscaleVolumeChunkSource, + ) { + super(chunkManager); + this.dataType = primarySource.dataType; + this.volumeType = primarySource.volumeType; + this.rank = primarySource.rank; + } + + getSources( + options: VolumeSourceOptions, + ): SliceViewSingleResolutionSource<VolumeChunkSource>[][] { + const sourcesByScale = this.primarySource.getSources(options); + + return sourcesByScale.map((orientation) => { + return orientation.map((primaryResSource) => { + const spec = primaryResSource.chunkSource.spec; + + const previewSpec: VolumeChunkSpecification = { + ...spec, + compressedSegmentationBlockSize: undefined, + }; + + const previewSource = this.chunkManager.getChunkSource( + InMemoryVolumeChunkSource, + { spec: previewSpec }, + ); + + return { + chunkSource: previewSource, + chunkToMultiscaleTransform: + primaryResSource.chunkToMultiscaleTransform, + }; + }); + }); + } +} diff --git a/src/voxel_annotation/TODOs.md b/src/voxel_annotation/TODOs.md new file mode 100644 index 0000000000..9caa8a2c7f --- /dev/null +++ b/src/voxel_annotation/TODOs.md @@ -0,0 +1,38 @@ +## TODOs + +### priority + +- preview colors are wrong with signed datasets +- writable float32 datasets are not working (expected); either block their usage or fix them +- the brush circle is only correct in Euclidean space (expected, since it cannot be an ellipse) +- blosc encoding is wrong + +- Dataset creation: + - copying settings from an existing layer does not appear to apply all settings correctly + - after creating a uint64 dataset, painting updates the preview correctly, but the write pipeline fails with the following error: + +``` +decode_common.ts:56 Uncaught TypeError: Cannot mix BigInt and other types, use explicit conversions + at decodeValueOffset (decode_common.ts:56:32) + at readSingleChannelValueUint64 (decode_common.ts:120:5) + at CompressedSegmentationVolumeChunk.getValueAt (chunk_format.ts:322:42) + at ZarrVolumeChunkSource.getValueAt (frontend.ts:286:20) + at SegmentationRenderLayer.getValueAt (renderlayer.ts:491:29) + at SegmentationUserLayer.getValueAt (index.ts:602:22) + at SegmentationUserLayer.captureSelectionState (index.ts:332:24) + at SegmentationUserLayer.captureSelectionState (annotations.ts:1943:13) + at LayerSelectedValues.update (index.ts:1313:21) + at LayerSelectedValues.get (index.ts:1320:10) +``` + +after a page reload, painting works again with no issues, but the previously painted voxels are no longer present.
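Note: the `TypeError` in the trace above is the standard JavaScript restriction that `bigint` and `number` operands cannot be mixed in arithmetic; one side of the expression in the decode path is presumably still a plain `number`. A minimal sketch of the failure mode and the explicit conversion the error asks for (illustrative values only, not the actual decode_common.ts code):

```
// Illustrative only: arithmetic that mixes bigint and number fails.
const segmentValue = 10n; // bigint, e.g. a uint64 label value
const offset = 5; // number, e.g. an index read from a Uint32Array
// const broken = segmentValue + offset; // compile error in TS; at runtime: TypeError: Cannot mix BigInt and other types
const ok = segmentValue + BigInt(offset); // explicit conversion is required
console.log(ok); // 15n
```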
+ + ### later + +- add preview for the undo/redo +- url completion for the ssa+https source + +### questionable + +- write a test suite for the downsampler and ensure it works correctly on exotic LOD levels +- adapt the brush size to the zoom level linearly diff --git a/src/voxel_annotation/base.ts b/src/voxel_annotation/base.ts new file mode 100644 index 0000000000..6aef84109e --- /dev/null +++ b/src/voxel_annotation/base.ts @@ -0,0 +1,85 @@ +/** + * @license + * Copyright 2025 Google Inc. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import type { MultiscaleVolumeChunkSource } from "#src/sliceview/volume/frontend.js"; +import type { VoxelPreviewMultiscaleSource } from "#src/voxel_annotation/PreviewMultiscaleChunkSource.js"; +import type { RPC } from "#src/worker_rpc.js"; + +export const VOX_RELOAD_CHUNKS_RPC_ID = "vox.chunk.reload"; +export const VOX_EDIT_BACKEND_RPC_ID = "vox.EditBackend"; +export const VOX_EDIT_COMMIT_VOXELS_RPC_ID = "vox.edit.commitVoxels"; +export const VOX_EDIT_FAILURE_RPC_ID = "vox.edit.failure"; +export const VOX_EDIT_UNDO_RPC_ID = "vox.edit.undo"; +export const VOX_EDIT_REDO_RPC_ID = "vox.edit.redo"; +export const VOX_EDIT_HISTORY_UPDATE_RPC_ID = "vox.edit.historyUpdate"; + +export interface VoxelLayerResolution { + lodIndex: number; + transform: number[]; + chunkSize: number[]; + sourceRpc: number; +} + +export type VoxelChangeValues = Uint32Array | BigUint64Array; + +export interface VoxelChange { + indices: Uint32Array; + oldValues: VoxelChangeValues; + newValues: VoxelChangeValues; +} + +export interface EditAction { + changes: Map<string, VoxelChange>; + timestamp: number; + description: string; +} + +export function makeVoxChunkKey(chunkKey: string, lodIndex: number) { + return `lod${lodIndex}#${chunkKey}`; +} + +export function makeChunkKey(x: number, y: number, z: number) { + return `${x},${y},${z}`; +} + +export function parseVoxChunkKey(key: string) { + const parts = [ + Number(key.split("#")[0].substring(3)), + ...key.split("#")[1].split(",").map(Number), + ]; + if (parts.length !== 4 || parts.some(isNaN)) { + console.warn(`Invalid chunk key format: ${key}`); + return null; + } + return { + lodIndex: parts[0], + x: parts[1], + y: parts[2], + z: parts[3], + chunkKey: key.split("#")[1], + }; +} + +export enum BrushShape { + DISK = 0, + SPHERE = 1, +} + +export interface VoxelEditControllerHost { + primarySource: MultiscaleVolumeChunkSource; + previewSource?: VoxelPreviewMultiscaleSource; + rpc: RPC; +} diff --git a/src/voxel_annotation/edit_backend.ts b/src/voxel_annotation/edit_backend.ts new file mode 100644 index 0000000000..1bf030bfcc --- /dev/null +++ b/src/voxel_annotation/edit_backend.ts @@ -0,0 +1,764 @@ +/** + * @license + * Copyright 2025 Google Inc. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import { DataType } from "#src/sliceview/base.js"; +import { decodeChannel as decodeChannelUint32 } from "#src/sliceview/compressed_segmentation/decode_uint32.js"; +import { decodeChannel as decodeChannelUint64 } from "#src/sliceview/compressed_segmentation/decode_uint64.js"; +import type { VolumeChunkSource } from "#src/sliceview/volume/backend.js"; +import { mat4, vec3 } from "#src/util/geom.js"; +import * as matrix from "#src/util/matrix.js"; +import type { + VoxelLayerResolution, + EditAction, + VoxelChange, +} from "#src/voxel_annotation/base.js"; +import { + VOX_EDIT_BACKEND_RPC_ID, + VOX_EDIT_COMMIT_VOXELS_RPC_ID, + VOX_RELOAD_CHUNKS_RPC_ID, + VOX_EDIT_FAILURE_RPC_ID, + VOX_EDIT_UNDO_RPC_ID, + VOX_EDIT_REDO_RPC_ID, + VOX_EDIT_HISTORY_UPDATE_RPC_ID, + makeVoxChunkKey, + parseVoxChunkKey, + makeChunkKey, +} from "#src/voxel_annotation/base.js"; +import type { RPC } from "#src/worker_rpc.js"; +import { + registerPromiseRPC, + SharedObject, + registerRPC, + registerSharedObject, + initializeSharedObjectCounterpart, +} from "#src/worker_rpc.js"; + +@registerSharedObject(VOX_EDIT_BACKEND_RPC_ID) +export class VoxelEditController extends SharedObject { + private sources = new Map(); + private resolutions = new Map< + number, + VoxelLayerResolution & { invTransform: mat4 } + >(); + + private pendingEdits: { + key: string; + indices: number[] | Uint32Array; + value?: bigint; + values?: ArrayLike; + size?: number[]; + }[] = []; + private commitDebounceTimer: number | undefined; + private readonly commitDebounceDelayMs: number = 300; + + // Undo/redo history + private undoStack: EditAction[] = []; + private redoStack: EditAction[] = []; + private readonly MAX_HISTORY_SIZE: number = 100; + + private downsampleQueue: string[] = []; + private downsampleQueueSet: Set = new Set(); + private isProcessingDownsampleQueue: boolean = false; + + constructor(rpc: RPC, options: any) { + super(); + initializeSharedObjectCounterpart(this, rpc, options); + + const passedResolutions = options?.resolutions as + | VoxelLayerResolution[] + | undefined; + if (passedResolutions === undefined || !Array.isArray(passedResolutions)) { + throw new Error( + "VoxelEditBackend: missing required 'resolutions' array during initialization", + ); + } + + for (const res of passedResolutions) { + const rank = res.chunkSize.length; + const invTransform = new Float32Array((rank + 1) ** 2); + matrix.inverse( + invTransform, + rank + 1, + new Float32Array(res.transform), + rank + 1, + rank + 1, + ); + this.resolutions.set(res.lodIndex, { + ...res, + invTransform: invTransform as mat4, + }); + const resolved = rpc.get(res.sourceRpc) as VolumeChunkSource | undefined; + if (!resolved) { + throw new Error( + `VoxelEditBackend: failed to resolve VolumeChunkSource for LOD ${res.lodIndex}`, + ); + } + this.sources.set(res.lodIndex, resolved); + } + + this.notifyHistoryChanged(); + } + + private async flushPending(): Promise { + const edits = this.pendingEdits; + this.pendingEdits = []; + this.commitDebounceTimer = undefined; + if (edits.length === 0) { + // Even if nothing to flush, history sizes may not have 
changed. + this.notifyHistoryChanged(); + return; + } + + const editsByVoxKey = new Map<string, Map<number, bigint>>(); + + for (const edit of edits) { + let chunkMap = editsByVoxKey.get(edit.key); + if (!chunkMap) { + chunkMap = new Map(); + editsByVoxKey.set(edit.key, chunkMap); + } + + const inds = edit.indices as ArrayLike<number>; + if (edit.values) { + // Handle array of values + const vals = Array.from(edit.values); + if (vals.length !== inds.length) { + throw new Error("flushPending: values length mismatch with indices"); + } + for (let i = 0; i < inds.length; ++i) { + chunkMap.set(inds[i]!, vals[i]!); + } + } else if (edit.value !== undefined) { + // Handle single value for all indices + for (let i = 0; i < inds.length; ++i) { + chunkMap.set(inds[i]!, edit.value); + } + } else { + throw new Error("flushPending: edit missing value(s)"); + } + } + + const failedVoxChunkKeys: string[] = []; + let firstErrorMessage: string | undefined = undefined; + + const newAction: EditAction = { + changes: new Map(), + timestamp: Date.now(), + description: "Voxel Edit", + }; + + for (const [voxKey, chunkEdits] of editsByVoxKey.entries()) { + try { + const parsedKey = parseVoxChunkKey(voxKey); + if (!parsedKey) { + const msg = `flushPending: Failed to parse vox chunk key: ${voxKey}`; + console.error(msg); + failedVoxChunkKeys.push(voxKey); + if (firstErrorMessage === undefined) firstErrorMessage = msg; + continue; + } + const source = this.sources.get(parsedKey.lodIndex); + if (!source) { + const msg = `flushPending: No source found for LOD index ${parsedKey.lodIndex}`; + console.error(msg); + failedVoxChunkKeys.push(voxKey); + if (firstErrorMessage === undefined) firstErrorMessage = msg; + continue; + } + + const indices = Array.from(chunkEdits.keys()); + const values = Array.from(chunkEdits.values()); + + const change = await source.applyEdits( + parsedKey.chunkKey, + indices, + values, + ); + newAction.changes.set(voxKey, change); + } catch (e) { + const msg = e instanceof Error ? e.message : String(e); + console.error(`Failed to write chunk ${voxKey}:`, e); + failedVoxChunkKeys.push(voxKey); + if (firstErrorMessage === undefined) firstErrorMessage = msg; + } + } + + this.callChunkReload(editsByVoxKey.keys().toArray()); + + if (newAction.changes.size > 0) { + this.undoStack.push(newAction); + if (this.undoStack.length > this.MAX_HISTORY_SIZE) { + this.undoStack.shift(); + } + this.redoStack.length = 0; + } + + // Notify frontend of history changes after any commit attempt + this.notifyHistoryChanged(); + + if (failedVoxChunkKeys.length > 0) { + this.rpc?.invoke(VOX_EDIT_FAILURE_RPC_ID, { + rpcId: this.rpcId, + voxChunkKeys: failedVoxChunkKeys, + message: firstErrorMessage ??
"Voxel edit commit failed.", + }); + } + + const touched = new Set(); + for (const e of edits) touched.add(e.key); + for (const key of touched) { + this.enqueueDownsample(key); + } + } + + async commitVoxels( + edits: { + key: string; + indices: number[] | Uint32Array; + value?: bigint; + values?: ArrayLike; + size?: number[]; + }[], + ) { + for (const e of edits) { + if (!e || !e.key || !e.indices) { + throw new Error( + "VoxelEditController.commitVoxels: invalid edit payload", + ); + } + this.pendingEdits.push(e); + } + if (this.commitDebounceTimer !== undefined) + clearTimeout(this.commitDebounceTimer); + this.commitDebounceTimer = setTimeout(() => { + void this.flushPending(); + }, this.commitDebounceDelayMs) as unknown as number; + } + + callChunkReload(voxChunkKeys: string[], isForPreviewChunks = false) { + this.rpc?.invoke(VOX_RELOAD_CHUNKS_RPC_ID, { + rpcId: this.rpcId, + voxChunkKeys: voxChunkKeys, + isForPreviewChunks, + }); + } + + // --- Start of Downsampling Logic --- + + private enqueueDownsample(key: string): void { + if (key.length === 0) return; + if (!this.downsampleQueueSet.has(key)) { + this.downsampleQueueSet.add(key); + this.downsampleQueue.push(key); + } + if (!this.isProcessingDownsampleQueue) { + this.isProcessingDownsampleQueue = true; + Promise.resolve().then(() => this.processDownsampleQueue()); + } + } + + private async processDownsampleQueue(): Promise { + try { + while (this.downsampleQueue.length > 0) { + const key = this.downsampleQueue.shift() as string; + this.downsampleQueueSet.delete(key); + const allModifiedKeys = new Array(); + let currentKey: string | null = key; + while (currentKey !== null) { + allModifiedKeys.push(currentKey); + currentKey = await this.downsampleStep(currentKey); + } + this.callChunkReload(allModifiedKeys, true); + } + } finally { + this.isProcessingDownsampleQueue = false; + if ( + this.downsampleQueue.length > 0 && + !this.isProcessingDownsampleQueue + ) { + this.isProcessingDownsampleQueue = true; + Promise.resolve().then(() => this.processDownsampleQueue()); + } + } + } + + /** + * Performs a single downsampling step from a child chunk to its parent. + * @returns The key of the parent chunk that was updated, or null if the cascade should stop. + */ + private async downsampleStep(childKey: string): Promise { + // 1. Get child chunk and ensure its data is loaded. + const childInfo = parseVoxChunkKey(childKey); + if (childInfo === null) { + console.error(`[Downsample] Invalid child key format: ${childKey}`); + return null; + } + const childSource = this.sources.get(childInfo.lodIndex); + if (!childSource) { + console.error( + `[Downsample] No source found for child LOD: ${childInfo.lodIndex}`, + ); + return null; + } + + const childChunk = childSource.getChunk( + new Float32Array([childInfo.x, childInfo.y, childInfo.z]), + ) as any; + if (!childChunk.data) { + try { + await childSource.download(childChunk, new AbortController().signal); + } catch (e) { + console.warn( + `[Downsample] Failed to download source chunk ${childKey}:`, + e, + ); + return null; + } + } + const childChunkData = childChunk.data as Uint32Array | BigUint64Array; + const childRes = this.resolutions.get(childInfo.lodIndex)!; + + // 2. Determine the parent chunk that corresponds to this child chunk. + const parentInfo = this._getParentChunkInfo(childKey, childRes); + if (parentInfo === null) { + // Reached the coarsest LOD, stop the cascade. 
+ return null; + } + const { parentKey, parentSource, parentRes } = parentInfo; + + let dataToProcess = childChunkData; + const { compressedSegmentationBlockSize, dataType, chunkDataSize } = + childSource.spec; + if (compressedSegmentationBlockSize !== undefined) { + const numElements = + chunkDataSize[0] * chunkDataSize[1] * chunkDataSize[2]; + const compressedData = childChunkData as Uint32Array; + const baseOffset = compressedData.length > 0 ? compressedData[0] : 0; + if (dataType === DataType.UINT32) { + const uncompressedData = new Uint32Array(numElements); + if (baseOffset !== 0) { + decodeChannelUint32( + uncompressedData, + compressedData, + baseOffset, + chunkDataSize, + compressedSegmentationBlockSize, + ); + } + dataToProcess = uncompressedData; + } else { + // Assumes UINT64 + const uncompressedData = new BigUint64Array(numElements); + if (baseOffset !== 0) { + decodeChannelUint64( + uncompressedData, + compressedData, + baseOffset, + chunkDataSize, + compressedSegmentationBlockSize, + ); + } + dataToProcess = uncompressedData; + } + } + + // 3. Calculate the update for the parent chunk based on the child chunk's data. + const update = this._calculateParentUpdate( + dataToProcess, + childRes, + parentRes, + childInfo, + ); + if (update.indices.length === 0) { + return parentKey; + } + + // 4. Commit the update to the parent chunk and notify the frontend. + try { + await parentSource.applyEdits( + parentInfo.chunkKey, + update.indices, + update.values, + ); + this.callChunkReload([parentKey]); + } catch (e) { + console.error( + `[Downsample] Failed to apply edits to parent chunk ${parentKey}:`, + e, + ); + this.rpc?.invoke(VOX_EDIT_FAILURE_RPC_ID, { + rpcId: this.rpcId, + voxChunkKeys: [parentKey], + message: `Downsampling to ${parentKey} failed.`, + }); + return null; // Stop cascade on failure. + } + + return parentKey; + } + + /** + * Helper to find and describe the parent chunk. 
+ */ + private _getParentChunkInfo( + childKey: string, + childRes: VoxelLayerResolution, + ) { + const childInfo = parseVoxChunkKey(childKey)!; + const parentLodIndex = childInfo.lodIndex + 1; + const parentRes = this.resolutions.get(parentLodIndex); + if (parentRes === undefined) return null; // No parent LOD exists + + const parentSource = this.sources.get(parentLodIndex)!; + const rank = childRes.chunkSize.length; + + // Find the world coordinate of the child chunk's origin + const childVoxelOrigin = new Float32Array(rank); + childVoxelOrigin.set([ + childInfo.x * childRes.chunkSize[0], + childInfo.y * childRes.chunkSize[1], + childInfo.z * childRes.chunkSize[2], + ]); + const childPhysOrigin = new Float32Array(rank); + matrix.transformPoint( + childPhysOrigin, + new Float32Array(childRes.transform), + rank + 1, + childVoxelOrigin, + rank, + ); + + // Transform that world coordinate into the parent's voxel space + const parentVoxelCoordOfChildOrigin = new Float32Array(rank); + matrix.transformPoint( + parentVoxelCoordOfChildOrigin, + parentRes.invTransform, + rank + 1, + childPhysOrigin, + rank, + ); + + // Determine the parent chunk's grid position + const parentX = Math.floor( + parentVoxelCoordOfChildOrigin[0] / parentRes.chunkSize[0], + ); + const parentY = Math.floor( + parentVoxelCoordOfChildOrigin[1] / parentRes.chunkSize[1], + ); + const parentZ = Math.floor( + parentVoxelCoordOfChildOrigin[2] / parentRes.chunkSize[2], + ); + + const parentChunkKey = makeChunkKey(parentX, parentY, parentZ); + const parentKey = makeVoxChunkKey(parentChunkKey, parentLodIndex); + return { parentKey, chunkKey: parentChunkKey, parentRes, parentSource }; + } + + /** + * Calculates the downsampled voxel values for a region of a parent chunk. + * This is the core aggregation logic. + */ + private _calculateParentUpdate( + childChunkData: Uint32Array | BigUint64Array, + childRes: VoxelLayerResolution & { invTransform: mat4 }, + parentRes: VoxelLayerResolution & { invTransform: mat4 }, + childInfo: { x: number; y: number; z: number }, + ) { + const indices: number[] = []; + const values: bigint[] = []; + const rank = childRes.chunkSize.length; + const childChunkSize = childRes.chunkSize; + const parentChunkSize = parentRes.chunkSize; + + // Transform to map a point in parent-voxel-space to a point in child-voxel-space. 
+ const parentVoxelToChildVoxelTransform = mat4.multiply( + mat4.create(), + childRes.invTransform, + new Float32Array(parentRes.transform) as mat4, + ); + + // Calculate the child chunk's origin and extent in absolute child-voxel-space + const childChunkOrigin = new Float32Array([ + childInfo.x * childChunkSize[0], + childInfo.y * childChunkSize[1], + childInfo.z * childChunkSize[2], + ]); + const childChunkMax = new Float32Array([ + (childInfo.x + 1) * childChunkSize[0], + (childInfo.y + 1) * childChunkSize[1], + (childInfo.z + 1) * childChunkSize[2], + ]); + + // Transform child chunk bounds to physical space + const childPhysOrigin = new Float32Array(rank); + matrix.transformPoint( + childPhysOrigin, + new Float32Array(childRes.transform), + rank + 1, + childChunkOrigin, + rank, + ); + const childPhysMax = new Float32Array(rank); + matrix.transformPoint( + childPhysMax, + new Float32Array(childRes.transform), + rank + 1, + childChunkMax, + rank, + ); + + // Transform to parent-voxel-space to find the affected region + const parentVoxelMin = new Float32Array(rank); + matrix.transformPoint( + parentVoxelMin, + parentRes.invTransform, + rank + 1, + childPhysOrigin, + rank, + ); + const parentVoxelMax = new Float32Array(rank); + matrix.transformPoint( + parentVoxelMax, + parentRes.invTransform, + rank + 1, + childPhysMax, + rank, + ); + + // Determine which parent chunk this corresponds to (should match _getParentChunkInfo) + const parentChunkGridX = Math.floor(parentVoxelMin[0] / parentChunkSize[0]); + const parentChunkGridY = Math.floor(parentVoxelMin[1] / parentChunkSize[1]); + const parentChunkGridZ = Math.floor(parentVoxelMin[2] / parentChunkSize[2]); + + // Calculate the parent chunk's origin in absolute parent-voxel-space + const parentChunkOriginInParentVoxels = new Float32Array([ + parentChunkGridX * parentChunkSize[0], + parentChunkGridY * parentChunkSize[1], + parentChunkGridZ * parentChunkSize[2], + ]); + + // Calculate the region to iterate over in the parent chunk's LOCAL coordinate space (0 to chunkSize) + const parentLocalMin = new Float32Array(rank); + const parentLocalMax = new Float32Array(rank); + for (let i = 0; i < rank; ++i) { + parentLocalMin[i] = Math.max( + 0, + Math.floor(parentVoxelMin[i] - parentChunkOriginInParentVoxels[i]), + ); + parentLocalMax[i] = Math.min( + parentChunkSize[i], + Math.ceil(parentVoxelMax[i] - parentChunkOriginInParentVoxels[i]), + ); + } + + const [startX, startY, startZ] = parentLocalMin; + const [endX, endY, endZ] = parentLocalMax; + + const corners = new Array(8).fill(0).map(() => vec3.create()); + const transformedCorners = new Array(8).fill(0).map(() => vec3.create()); + const sourceVoxels: bigint[] = []; + const [childW, childH, childD] = childChunkSize; + const [parentW, parentH] = parentChunkSize; + + // Iterate over each voxel in the affected region of the parent chunk (in local coordinates) + for (let pz = startZ; pz < endZ; ++pz) { + for (let py = startY; py < endY; ++py) { + for (let px = startX; px < endX; ++px) { + // Convert from parent-chunk-local to absolute parent-voxel-space + const absParentX = parentChunkOriginInParentVoxels[0] + px; + const absParentY = parentChunkOriginInParentVoxels[1] + py; + const absParentZ = parentChunkOriginInParentVoxels[2] + pz; + + // Define the 8 corners of the current parent voxel in absolute parent-voxel-space + vec3.set(corners[0], absParentX, absParentY, absParentZ); + vec3.set(corners[1], absParentX + 1, absParentY, absParentZ); + vec3.set(corners[2], absParentX, absParentY + 1, 
absParentZ); + vec3.set(corners[3], absParentX + 1, absParentY + 1, absParentZ); + vec3.set(corners[4], absParentX, absParentY, absParentZ + 1); + vec3.set(corners[5], absParentX + 1, absParentY, absParentZ + 1); + vec3.set(corners[6], absParentX, absParentY + 1, absParentZ + 1); + vec3.set(corners[7], absParentX + 1, absParentY + 1, absParentZ + 1); + + // Transform corners to absolute child-voxel-space + for (let i = 0; i < 8; ++i) { + vec3.transformMat4( + transformedCorners[i], + corners[i], + parentVoxelToChildVoxelTransform, + ); + } + + // Find bounding box in absolute child-voxel-space + const childMin = vec3.clone(transformedCorners[0]); + const childMax = vec3.clone(transformedCorners[0]); + for (let i = 1; i < 8; ++i) { + vec3.min(childMin, childMin, transformedCorners[i]); + vec3.max(childMax, childMax, transformedCorners[i]); + } + + // Convert to child-chunk-local coordinates for array indexing + const localChildMin = vec3.create(); + const localChildMax = vec3.create(); + vec3.subtract(localChildMin, childMin, childChunkOrigin as any); + vec3.subtract(localChildMax, childMax, childChunkOrigin as any); + + // Collect all child voxels within this bounding box (in local coordinates) + sourceVoxels.length = 0; + const cStartX = Math.max(0, Math.floor(localChildMin[0])); + const cEndX = Math.min(childW, Math.ceil(localChildMax[0])); + const cStartY = Math.max(0, Math.floor(localChildMin[1])); + const cEndY = Math.min(childH, Math.ceil(localChildMax[1])); + const cStartZ = Math.max(0, Math.floor(localChildMin[2])); + const cEndZ = Math.min(childD, Math.ceil(localChildMax[2])); + + for (let cz = cStartZ; cz < cEndZ; ++cz) { + for (let cy = cStartY; cy < cEndY; ++cy) { + for (let cx = cStartX; cx < cEndX; ++cx) { + const srcIndex = cz * (childW * childH) + cy * childW + cx; + sourceVoxels.push(BigInt(childChunkData[srcIndex])); + } + } + } + + if (sourceVoxels.length > 0) { + const mode = this._calculateMode(sourceVoxels); + // Use local coordinates for the parent chunk index + const parentIndex = pz * (parentW * parentH) + py * parentW + px; + indices.push(parentIndex); + values.push(mode); + } + } + } + } + + return { indices, values }; + } + + private _calculateMode(values: (bigint | number)[]): bigint { + if (values.length === 0) return 0n; + const counts = new Map(); + let maxCount = 0; + let mode = 0n; + for (const v of values) { + const bigV = BigInt(v); + if (bigV === 0n) continue; + const c = (counts.get(bigV) ?? 0) + 1; + counts.set(bigV, c); + if (c > maxCount) { + maxCount = c; + mode = bigV; + } + } + return mode; + } + private notifyHistoryChanged(): void { + this.rpc?.invoke(VOX_EDIT_HISTORY_UPDATE_RPC_ID, { + rpcId: this.rpcId, + undoCount: this.undoStack.length, + redoCount: this.redoStack.length, + }); + } + + private async performUndoRedo( + sourceStack: EditAction[], + targetStack: EditAction[], + useOldValues: boolean, + actionDescription: "undo" | "redo", + ): Promise { + await this.flushPending(); + + if (sourceStack.length === 0) { + throw new Error(`Nothing to ${actionDescription}.`); + } + + const action = sourceStack.pop()!; + + const chunksToReload = new Set(); + let success = true; + + for (const [voxKey, change] of action.changes.entries()) { + const parsedKey = parseVoxChunkKey(voxKey); + if (!parsedKey) continue; + const source = this.sources.get(parsedKey.lodIndex); + if (!source) continue; + + const valuesToApply = useOldValues ? 
change.oldValues : change.newValues; + try { + await source.applyEdits( + parsedKey.chunkKey, + change.indices, + valuesToApply, + ); + chunksToReload.add(voxKey); + } catch (e) { + success = false; + console.error( + `performUndoRedo: failed to apply edits for ${voxKey}`, + e, + ); + this.rpc?.invoke(VOX_EDIT_FAILURE_RPC_ID, { + rpcId: this.rpcId, + voxChunkKeys: [voxKey], + message: useOldValues ? "Undo failed." : "Redo failed.", + }); + // Stop processing this action on the first failure + break; + } + } + + if (success) { + // Only move the action to the target stack if all operations succeeded. + targetStack.push(action); + } else { + // On failure, return the action to its original stack to maintain consistency. + sourceStack.push(action); + } + + if (chunksToReload.size > 0 && success) { + for (const key of chunksToReload) { + this.enqueueDownsample(key); + } + this.callChunkReload(Array.from(chunksToReload)); + } + + this.notifyHistoryChanged(); + } + + public async undo(): Promise { + await this.performUndoRedo(this.undoStack, this.redoStack, true, "undo"); + } + + public async redo(): Promise { + await this.performUndoRedo(this.redoStack, this.undoStack, false, "redo"); + } +} + +registerRPC(VOX_EDIT_COMMIT_VOXELS_RPC_ID, function (x: any) { + const obj = this.get(x.rpcId) as VoxelEditController; + void obj.commitVoxels(Array.isArray(x.edits) ? x.edits : []); +}); + +registerPromiseRPC(VOX_EDIT_UNDO_RPC_ID, async function (this: RPC, x: any) { + const obj = this.get(x.rpcId) as VoxelEditController; + await obj.undo(); + return { value: undefined }; +}); + +registerPromiseRPC(VOX_EDIT_REDO_RPC_ID, async function (this: RPC, x: any) { + const obj = this.get(x.rpcId) as VoxelEditController; + await obj.redo(); + return { value: undefined }; +}); diff --git a/src/voxel_annotation/edit_controller.ts b/src/voxel_annotation/edit_controller.ts new file mode 100644 index 0000000000..b52c3ee693 --- /dev/null +++ b/src/voxel_annotation/edit_controller.ts @@ -0,0 +1,650 @@ +/** + * @license + * Copyright 2025 Google Inc. + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import type { ChunkChannelAccessParameters } from "#src/render_coordinate_transform.js"; +import type { + VolumeChunkSource, + InMemoryVolumeChunkSource, +} from "#src/sliceview/volume/frontend.js"; +import { StatusMessage } from "#src/status.js"; +import { WatchableValue } from "#src/trackable_value.js"; +import { vec3 } from "#src/util/geom.js"; +import type { + VoxelEditControllerHost, + VoxelLayerResolution, +} from "#src/voxel_annotation/base.js"; +import { + BrushShape, + VOX_EDIT_BACKEND_RPC_ID, + VOX_EDIT_COMMIT_VOXELS_RPC_ID, + VOX_RELOAD_CHUNKS_RPC_ID, + VOX_EDIT_FAILURE_RPC_ID, + VOX_EDIT_UNDO_RPC_ID, + VOX_EDIT_REDO_RPC_ID, + VOX_EDIT_HISTORY_UPDATE_RPC_ID, + makeVoxChunkKey, + parseVoxChunkKey, +} from "#src/voxel_annotation/base.js"; +import { + registerRPC, + registerSharedObjectOwner, + SharedObject, +} from "#src/worker_rpc.js"; + +@registerSharedObjectOwner(VOX_EDIT_BACKEND_RPC_ID) +export class VoxelEditController extends SharedObject { + public undoCount = new WatchableValue(0); + public redoCount = new WatchableValue(0); + + constructor(private host: VoxelEditControllerHost) { + super(); + const rpc = this.host.rpc; + if (!rpc) { + throw new Error( + "VoxelEditController: Missing RPC from multiscale chunk manager.", + ); + } + + // Get all sources for all scales and orientations + const sourcesByScale = this.host.primarySource.getSources( + this.getIdentitySliceViewSourceOptions(), + ); + const sources = sourcesByScale[0]; + if (!sources) { + throw new Error( + "VoxelEditController: Could not retrieve sources from multiscale object.", + ); + } + + const resolutions: VoxelLayerResolution[] = []; + + for (let i = 0; i < sources.length; ++i) { + const source = sources[i]!.chunkSource; + const rpcId = source.rpcId; + if (rpcId == null) { + throw new Error( + `VoxelEditController: Source at LOD index ${i} has null rpcId during initialization.`, + ); + } + resolutions.push({ + lodIndex: i, + transform: Array.from(sources[i]!.chunkToMultiscaleTransform), + chunkSize: Array.from(source.spec.chunkDataSize), + sourceRpc: rpcId, + }); + } + + this.initializeCounterpart(rpc, { resolutions }); + } + + private morphologicalConfig = { + growthThresholds: [ + { count: 100, size: 1 }, + { count: 1000, size: 3 }, + { count: 10000, size: 5 }, + { count: 100000, size: 7 }, + ], + maxSize: 9, + }; + + readonly singleChannelAccess: ChunkChannelAccessParameters = { + numChannels: 1, + channelSpaceShape: new Uint32Array([]), + chunkChannelDimensionIndices: [], + chunkChannelCoordinates: new Uint32Array([0]), + }; + + private getIdentitySliceViewSourceOptions() { + const rank = this.host.primarySource.rank as number | undefined; + if (!Number.isInteger(rank) || (rank as number) <= 0) { + throw new Error("VoxelEditController: Invalid multiscale rank."); + } + const r = rank as number; + // Identity mapping from multiscale to view for our purposes. + const displayRank = r; + const multiscaleToViewTransform = new Float32Array(displayRank * r); + for (let chunkDim = 0; chunkDim < r; ++chunkDim) { + for (let displayDim = 0; displayDim < displayRank; ++displayDim) { + multiscaleToViewTransform[displayRank * chunkDim + displayDim] = + chunkDim === displayDim ? 
1 : 0; + } + } + return { + displayRank, + multiscaleToViewTransform, + modelChannelDimensionIndices: [], + } as const; + } + + getSourceForLOD(lodIndex: number): VolumeChunkSource { + const sourcesByScale = this.host.primarySource.getSources( + this.getIdentitySliceViewSourceOptions(), + ); + // Assuming a single orientation, which is correct for this use case. + const sources = sourcesByScale[0]; + if (!sources || sources.length <= lodIndex) { + throw new Error( + `VoxelEditController: LOD index ${lodIndex} is out of bounds.`, + ); + } + const source = sources[lodIndex]?.chunkSource; + if (!source) { + throw new Error( + `VoxelEditController: No chunk source found for LOD index ${lodIndex}.`, + ); + } + return source; + } + + // Paint a disk (slice-aligned via basis) or sphere in WORLD/ canonical units; we transform to LOD grid before sending. + paintBrushWithShape( + centerCanonical: Float32Array, + radiusCanonical: number, + value: bigint, + shape: BrushShape, + basis?: { u: Float32Array; v: Float32Array }, + ) { + if (!Number.isFinite(radiusCanonical) || radiusCanonical <= 0) { + void basis; // basis is currently unused for disk alignment in this refactor + throw new Error("paintBrushWithShape: 'radius' must be > 0."); + } + if (!centerCanonical || centerCanonical.length < 3) { + throw new Error( + "paintBrushWithShape: 'center' must be a Float32Array[3].", + ); + } + + // For V1 we use the minimum LOD (index 0) + const voxelSize = 1; + const sourceIndex = 0; + if (!this.host.previewSource) + throw new Error( + "paintBrushWithShape: ERROR Missing preview source from host.", + ); + const source = this.host.previewSource.getSources( + this.getIdentitySliceViewSourceOptions(), + )[0][sourceIndex]!.chunkSource as InMemoryVolumeChunkSource; + if (!source) { + throw new Error("paintBrushWithShape: Missing preview source"); + } + + // Convert center and radius to the level’s voxel grid. + const cx = Math.round((centerCanonical[0] ?? 0) / voxelSize); + const cy = Math.round((centerCanonical[1] ?? 0) / voxelSize); + const cz = Math.round((centerCanonical[2] ?? 
0) / voxelSize); + const r = Math.round(radiusCanonical / voxelSize); + if (r <= 0) { + throw new Error( + "paintBrushWithShape: radius too small for selected LOD.", + ); + } + const rr = r * r; + + const voxelsToPaint: Float32Array[] = []; + + if (shape === BrushShape.SPHERE) { + for (let dz = -r; dz <= r; ++dz) { + for (let dy = -r; dy <= r; ++dy) { + for (let dx = -r; dx <= r; ++dx) { + if (dx * dx + dy * dy + dz * dz <= rr) { + voxelsToPaint.push(new Float32Array([cx + dx, cy + dy, cz + dz])); + } + } + } + } + } else { + if (basis === undefined) { + throw new Error( + "paintBrushWithShape: 'basis' must be defined for disk alignment.", + ); + } + const { u, v } = basis; + for (let j = -r; j <= r; ++j) { + for (let i = -r; i <= r; ++i) { + if (i * i + j * j <= rr) { + const point = vec3.fromValues(cx, cy, cz); + vec3.scaleAndAdd(point, point, u as vec3, i); + vec3.scaleAndAdd(point, point, v as vec3, j); + voxelsToPaint.push(point as Float32Array); + } + } + } + } + + if (!voxelsToPaint || voxelsToPaint.length === 0) return; + const editsByVoxKey = new Map< + string, + { indices: number[]; value: bigint } + >(); + + for (const voxelCoord of voxelsToPaint) { + const { chunkGridPosition, positionWithinChunk } = + source.computeChunkIndices(voxelCoord); + const chunkKey = chunkGridPosition.join(); + const voxKey = makeVoxChunkKey(chunkKey, sourceIndex); + + let entry = editsByVoxKey.get(voxKey); + if (!entry) { + entry = { indices: [], value }; + editsByVoxKey.set(voxKey, entry); + } + + const { chunkDataSize } = source.spec; + const index = + (positionWithinChunk[2] * chunkDataSize[1] + positionWithinChunk[1]) * + chunkDataSize[0] + + positionWithinChunk[0]; + entry.indices.push(index); + } + + // Apply edits locally on the specific source for immediate feedback. + const localEdits = new Map(); + for (const [voxKey, edit] of editsByVoxKey.entries()) { + const parsed = parseVoxChunkKey(voxKey); + if (!parsed) continue; + localEdits.set(parsed.chunkKey, edit); + } + source.applyLocalEdits(localEdits); + + const backendEdits = [] as { + key: string; + indices: number[]; + value: bigint; + }[]; + for (const [voxKey, edit] of editsByVoxKey.entries()) { + backendEdits.push({ + key: voxKey, + indices: edit.indices, + value: edit.value, + }); + } + + this.commitEdits(backendEdits); + } + + /** Commit helper for UI tools. */ + commitEdits( + edits: { + key: string; + indices: number[] | Uint32Array; + value?: bigint; + values?: ArrayLike; + size?: number[]; + }[], + ): void { + if (!this.rpc) + throw new Error("VoxelEditController.commitEdits: RPC not initialized."); + if (!Array.isArray(edits)) { + throw new Error( + "VoxelEditController.commitEdits: edits must be an array.", + ); + } + this.rpc.invoke(VOX_EDIT_COMMIT_VOXELS_RPC_ID, { + rpcId: this.rpcId, + edits, + }); + } + + /** + * Frontend 2D flood fill helper: computes on currently selected LOD and returns an edits payload + * suitable for VOX_EDIT_COMMIT_VOXELS without committing. Hard-cap deny semantics. + * The seed is simply the first clicked voxel in canonical/world units. 
+      const { chunkDataSize } = source.spec;
+      const index =
+        (positionWithinChunk[2] * chunkDataSize[1] + positionWithinChunk[1]) *
+          chunkDataSize[0] +
+        positionWithinChunk[0];
+      entry.indices.push(index);
+    }
+
+    // Apply edits locally on the specific source for immediate feedback.
+    const localEdits = new Map<string, { indices: number[]; value: bigint }>();
+    for (const [voxKey, edit] of editsByVoxKey.entries()) {
+      const parsed = parseVoxChunkKey(voxKey);
+      if (!parsed) continue;
+      localEdits.set(parsed.chunkKey, edit);
+    }
+    source.applyLocalEdits(localEdits);
+
+    const backendEdits = [] as {
+      key: string;
+      indices: number[];
+      value: bigint;
+    }[];
+    for (const [voxKey, edit] of editsByVoxKey.entries()) {
+      backendEdits.push({
+        key: voxKey,
+        indices: edit.indices,
+        value: edit.value,
+      });
+    }
+
+    this.commitEdits(backendEdits);
+  }
+
+  /** Commit helper for UI tools. */
+  commitEdits(
+    edits: {
+      key: string;
+      indices: number[] | Uint32Array;
+      value?: bigint;
+      values?: ArrayLike<number | bigint>;
+      size?: number[];
+    }[],
+  ): void {
+    if (!this.rpc)
+      throw new Error("VoxelEditController.commitEdits: RPC not initialized.");
+    if (!Array.isArray(edits)) {
+      throw new Error(
+        "VoxelEditController.commitEdits: edits must be an array.",
+      );
+    }
+    this.rpc.invoke(VOX_EDIT_COMMIT_VOXELS_RPC_ID, {
+      rpcId: this.rpcId,
+      edits,
+    });
+  }
+
+  /**
+   * Frontend 2D flood fill helper: computes on the currently selected LOD, applies the
+   * resulting edits to the preview source, commits them via VOX_EDIT_COMMIT_VOXELS, and
+   * returns the committed edits payload. Hard-cap deny semantics: exceeding maxVoxels
+   * aborts the fill with an error.
+   * The seed is simply the first clicked voxel in canonical/world units.
+   */
+  async floodFillPlane2D(
+    startPositionCanonical: Float32Array,
+    fillValue: bigint,
+    maxVoxels: number,
+    planeNormal: vec3, // MUST be a normalized vector
+  ): Promise<{
+    edits: { key: string; indices: number[]; value: bigint }[];
+    filledCount: number;
+    originalValue: bigint;
+  }> {
+    const sourceIndex = 0;
+    const source = this.getSourceForLOD(sourceIndex);
+    const startVoxelLod = vec3.round(
+      vec3.create(),
+      startPositionCanonical as vec3,
+    );
+
+    const originalValueResult = await source.getEnsuredValueAt(
+      startVoxelLod as Float32Array,
+      this.singleChannelAccess,
+    );
+    if (originalValueResult === null) {
+      throw new Error(
+        "Flood fill seed is in an unloaded or out-of-bounds chunk.",
+      );
+    }
+    const originalValue =
+      typeof originalValueResult !== "bigint"
+        ? BigInt(originalValueResult as number)
+        : originalValueResult;
+
+    if (originalValue === fillValue) {
+      return { edits: [], filledCount: 0, originalValue };
+    }
+
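+    // Build an orthonormal in-plane basis for the fill plane: pick a helper axis
+    // that is not nearly parallel to planeNormal, then take two cross products so
+    // that U and V are both perpendicular to planeNormal. The 2D BFS coordinates
+    // (u, v) map back to 3D voxels as seed + u * U + v * V (see map2dTo3d below).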
+    const U = vec3.create();
+    const V = vec3.create();
+    const tempVec =
+      Math.abs(vec3.dot(planeNormal, vec3.fromValues(1, 0, 0))) < 0.9
+        ? vec3.fromValues(1, 0, 0)
+        : vec3.fromValues(0, 1, 0);
+    vec3.cross(U, tempVec, planeNormal);
+    vec3.normalize(U, U);
+    vec3.cross(V, planeNormal, U);
+    vec3.normalize(V, V);
+
+    const visited = new Set<string>();
+    const queue: [number, number][] = [];
+    let filledCount = 0;
+    const voxelsToFill: Float32Array[] = [];
+
+    const map2dTo3d = (u: number, v: number): vec3 => {
+      const point = vec3.clone(startVoxelLod);
+      vec3.scaleAndAdd(point, point, U, u);
+      vec3.scaleAndAdd(point, point, V, v);
+      return vec3.round(vec3.create(), point);
+    };
+
+    const isFillable = async (p: vec3): Promise<boolean> => {
+      const value = await source.getEnsuredValueAt(
+        p as Float32Array,
+        this.singleChannelAccess,
+      );
+      if (value === null) return false;
+      const bigValue =
+        typeof value !== "bigint" ? BigInt(value as number) : value;
+      if (originalValue === 0n) return bigValue === 0n;
+      return bigValue === originalValue;
+    };
+
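+    // Morphological growth control: once filledCount passes the configured
+    // growthThresholds, a neighbor is only enqueued if the fillable channel around
+    // it is at least `thickness` voxels wide (hasThickEnoughChannel); otherwise only
+    // a small bounded region around that border voxel is filled (fillBorderRegion),
+    // which limits how far the fill can leak through narrow gaps.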
+    const getCurrentThickness = (): number => {
+      let thickness = 1;
+      for (const threshold of this.morphologicalConfig.growthThresholds) {
+        if (filledCount >= threshold.count) {
+          thickness = Math.max(thickness, threshold.size);
+        }
+      }
+      return Math.min(thickness, this.morphologicalConfig.maxSize);
+    };
+
+    const hasThickEnoughChannel = async (
+      u: number,
+      v: number,
+      nu: number,
+      nv: number,
+      requiredThickness: number,
+    ): Promise<boolean> => {
+      if (requiredThickness <= 1) return true;
+
+      const halfThickness = Math.floor(requiredThickness / 2);
+      const du = nu - u;
+      const dv = nv - v;
+
+      // Perpendicular direction
+      const perpU = -dv;
+      const perpV = du;
+
+      // Check if the NEIGHBOR position has sufficient thickness on both sides
+      for (let offset = -halfThickness; offset <= halfThickness; ++offset) {
+        const testU = nu + perpU * offset;
+        const testV = nv + perpV * offset;
+        const pointToTest = map2dTo3d(testU, testV);
+
+        if (!(await isFillable(pointToTest))) {
+          return false;
+        }
+      }
+
+      return true;
+    };
+
+    const fillBorderRegion = async (
+      startU: number,
+      startV: number,
+      requiredThickness: number,
+    ) => {
+      const subQueue: [number, number][] = [];
+      // The bounding box for the local fill is defined in the (u, v) coordinate system
+      const halfSize = requiredThickness * 2; // multiply by 2 to avoid small artefacts
+      const startKey = `${startU},${startV}`;
+      if (visited.has(startKey)) return;
+
+      subQueue.push([startU, startV]);
+      visited.add(startKey);
+
+      while (subQueue.length > 0) {
+        if (filledCount >= maxVoxels) return;
+        const [u, v] = subQueue.shift()!;
+
+        const currentPoint = map2dTo3d(u, v);
+        filledCount++;
+        voxelsToFill.push(currentPoint as Float32Array);
+
+        const neighbors2d: [number, number][] = [
+          [u + 1, v],
+          [u - 1, v],
+          [u, v + 1],
+          [u, v - 1],
+        ];
+        for (const [nu, nv] of neighbors2d) {
+          const du = nu - startU;
+          const dv = nv - startV;
+          const distanceSquared = du * du + dv * dv;
+          if (distanceSquared > halfSize * halfSize) {
+            continue;
+          }
+
+          const neighborKey = `${nu},${nv}`;
+          if (visited.has(neighborKey)) continue;
+
+          const neighborPoint = map2dTo3d(nu, nv);
+          if (await isFillable(neighborPoint)) {
+            visited.add(neighborKey);
+            subQueue.push([nu, nv]);
+          }
+        }
+      }
+    };
+
+    queue.push([0, 0]);
+    visited.add("0,0");
+
+    while (queue.length > 0) {
+      if (filledCount >= maxVoxels) {
+        throw new Error(
+          `Flood fill region exceeds the limit of ${maxVoxels} voxels.`,
+        );
+      }
+      const [u, v] = queue.shift()!;
+
+      const currentPoint = map2dTo3d(u, v);
+      filledCount++;
+      voxelsToFill.push(currentPoint as Float32Array);
+
+      const requiredThickness = getCurrentThickness();
+      const neighbors2d: [number, number][] = [
+        [u + 1, v],
+        [u - 1, v],
+        [u, v + 1],
+        [u, v - 1],
+      ];
+
+      for (const [nu, nv] of neighbors2d) {
+        const k = `${nu},${nv}`;
+        if (visited.has(k)) continue;
+
+        const neighborPoint = map2dTo3d(nu, nv);
+        if (await isFillable(neighborPoint)) {
+          if (await hasThickEnoughChannel(u, v, nu, nv, requiredThickness)) {
+            visited.add(k);
+            queue.push([nu, nv]);
+          } else {
+            await fillBorderRegion(nu, nv, requiredThickness);
+          }
+        }
+      }
+    }
+    if (!this.host.previewSource)
+      throw new Error(
+        "floodFillPlane2D: ERROR Missing preview source from host.",
+      );
+    const previewSource = this.host.previewSource.getSources(
+      this.getIdentitySliceViewSourceOptions(),
+    )[0][sourceIndex]!.chunkSource as InMemoryVolumeChunkSource;
+    if (!previewSource) {
+      throw new Error("floodFillPlane2D: Missing preview source");
+    }
+    const editsByVoxKey = new Map<
+      string,
+      { indices: number[]; value: bigint }
+    >();
+    for (const voxelCoord of voxelsToFill) {
+      const { chunkGridPosition, positionWithinChunk } =
+        previewSource.computeChunkIndices(voxelCoord);
+      const chunkKey = chunkGridPosition.join();
+      const voxKey = makeVoxChunkKey(chunkKey, sourceIndex);
+      let entry = editsByVoxKey.get(voxKey);
+      if (!entry) {
+        entry = { indices: [], value: fillValue };
+        editsByVoxKey.set(voxKey, entry);
+      }
+      const { chunkDataSize } = previewSource.spec;
+      const index =
+        (positionWithinChunk[2] * chunkDataSize[1] + positionWithinChunk[1]) *
+          chunkDataSize[0] +
+        positionWithinChunk[0];
+      entry.indices.push(index);
+    }
+    const localEdits = new Map<string, { indices: number[]; value: bigint }>();
+    for (const [voxKey, edit] of editsByVoxKey.entries()) {
+      const parsed = parseVoxChunkKey(voxKey);
+      if (!parsed) continue;
+      localEdits.set(parsed.chunkKey, edit);
+    }
+    previewSource.applyLocalEdits(localEdits);
+    const backendEdits: { key: string; indices: number[]; value: bigint }[] =
+      [];
+    for (const [voxKey, edit] of editsByVoxKey.entries()) {
+      backendEdits.push({
+        key: voxKey,
+        indices: edit.indices,
+        value: edit.value,
+      });
+    }
+    this.commitEdits(backendEdits);
+
+    return { edits: backendEdits, filledCount, originalValue };
+  }
+
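+  /**
+   * Maps vox chunk keys (chunk key + LOD index) back to their per-LOD chunk sources
+   * on the preview or primary multiscale source and invalidates just those chunks,
+   * forcing the affected data to be re-fetched.
+   */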
+  callChunkReload(voxChunkKeys: string[], isForPreviewChunks: boolean) {
+    if (!Array.isArray(voxChunkKeys) || voxChunkKeys.length === 0) return;
+    const multiscaleSource = isForPreviewChunks
+      ? this.host.previewSource
+      : this.host.primarySource;
+    if (!multiscaleSource) {
+      throw new Error(
+        "VoxelEditController.callChunkReload: ERROR Missing source",
+      );
+    }
+    const sources = multiscaleSource.getSources(
+      this.getIdentitySliceViewSourceOptions(),
+    )[0];
+    if (!sources) {
+      throw new Error(
+        "VoxelEditController.callChunkReload: Missing base source",
+      );
+    }
+
+    const chunksToInvalidateBySource = new Map<VolumeChunkSource, string[]>();
+
+    for (const voxKey of voxChunkKeys) {
+      const parsed = parseVoxChunkKey(voxKey);
+      if (!parsed) continue;
+      const source = sources[parsed.lodIndex]?.chunkSource as
+        | VolumeChunkSource
+        | undefined;
+      if (source) {
+        let arr = chunksToInvalidateBySource.get(source);
+        if (!arr) {
+          arr = [];
+          chunksToInvalidateBySource.set(source, arr);
+        }
+        arr.push(parsed.chunkKey);
+      }
+    }
+
+    for (const [source, keys] of chunksToInvalidateBySource.entries()) {
+      if (keys.length > 0) {
+        source.invalidateChunks(keys);
+      }
+    }
+  }
+
+  handleCommitFailure(voxChunkKeys: string[], message: string): void {
+    try {
+      this.callChunkReload(voxChunkKeys, true);
+    } finally {
+      StatusMessage.showTemporaryMessage(message);
+    }
+  }
+
+  public undo(): void {
+    if (!this.rpc)
+      throw new Error("VoxelEditController.undo: RPC not initialized.");
+    this.rpc
+      .promiseInvoke(VOX_EDIT_UNDO_RPC_ID, { rpcId: this.rpcId })
+      .catch((error: unknown) => {
+        const message = error instanceof Error ? error.message : String(error);
+        StatusMessage.showTemporaryMessage(`Undo failed: ${message}`, 3000);
+      });
+  }
+
+  public redo(): void {
+    if (!this.rpc)
+      throw new Error("VoxelEditController.redo: RPC not initialized.");
+    this.rpc
+      .promiseInvoke(VOX_EDIT_REDO_RPC_ID, { rpcId: this.rpcId })
+      .catch((error: unknown) => {
+        const message = error instanceof Error ? error.message : String(error);
+        StatusMessage.showTemporaryMessage(`Redo failed: ${message}`, 3000);
+      });
+  }
+}
+
+registerRPC(VOX_RELOAD_CHUNKS_RPC_ID, function (x: any) {
+  const obj = this.get(x.rpcId) as VoxelEditController;
+  const keys: string[] = Array.isArray(x.voxChunkKeys) ? x.voxChunkKeys : [];
+  obj.callChunkReload(keys, x.isForPreviewChunks);
+});
+
+registerRPC(VOX_EDIT_FAILURE_RPC_ID, function (x: any) {
+  const obj = this.get(x.rpcId) as VoxelEditController;
+  const keys: string[] = Array.isArray(x.voxChunkKeys) ? x.voxChunkKeys : [];
+  const message: string =
+    typeof x.message === "string" ? x.message : "Voxel edit failed.";
+  obj.handleCommitFailure(keys, message);
+});
+
+registerRPC(VOX_EDIT_HISTORY_UPDATE_RPC_ID, function (x: any) {
+  const obj = this.get(x.rpcId) as VoxelEditController;
+  const undoCount = typeof x.undoCount === "number" ? x.undoCount : 0;
+  const redoCount = typeof x.redoCount === "number" ? x.redoCount : 0;
+  obj.undoCount.value = undoCount;
+  obj.redoCount.value = redoCount;
+});
diff --git a/src/webgl/shader.ts b/src/webgl/shader.ts
index ed2f4220db..3f7f4ac835 100644
--- a/src/webgl/shader.ts
+++ b/src/webgl/shader.ts
@@ -663,6 +663,34 @@ ${this.fragmentMain}
     }
     return shader;
   }
+
+  print() {
+    const vertexSource = `#version 300 es
+precision highp float;
+precision highp int;
+${this.uniformsCode}
+${this.attributesCode}
+${this.varyingsCodeVS}
+float defaultMaxProjectionIntensity = 0.0;
+${this.vertexCode}
+void main() {
+${this.vertexMain}
+}
+`;
+    const fragmentSource = `#version 300 es
+${this.fragmentExtensions}
+precision highp float;
+precision highp int;
+${this.uniformsCode}
+${this.varyingsCodeFS}
+${this.outputBufferCode}
+float defaultMaxProjectionIntensity = 0.0;
+${this.fragmentCode}
+${this.fragmentMain}
+`;
+    console.log("----- VERTEX SHADER -----\n" + vertexSource);
+    console.log("----- FRAGMENT SHADER -----\n" + fragmentSource);
+  }
 }
 
 export function shaderContainsIdentifiers(
diff --git a/src/widget/layer_control_button.ts b/src/widget/layer_control_button.ts
new file mode 100644
index 0000000000..273162bd62
--- /dev/null
+++ b/src/widget/layer_control_button.ts
@@ -0,0 +1,37 @@
+/**
+ * @license
+ * Copyright 2025 Google Inc.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+import type { UserLayer } from "#src/layer/index.js";
+import type { LayerControlFactory } from "#src/widget/layer_control.js";
+
+export function buttonLayerControl<LayerType extends UserLayer>(options: {
+  text: string;
+  onClick: (layer: LayerType) => void;
+}): LayerControlFactory<LayerType> {
+  return {
+    makeControl: (layer, context) => {
+      const control = document.createElement("button");
+      control.textContent = options.text;
+      context.registerEventListener(control, "click", () =>
+        options.onClick(layer),
+      );
+      return { control, controlElement: control };
+    },
+    activateTool: (activation) => {
+      options.onClick(activation.tool.layer as LayerType);
+    },
+  };
+}
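A minimal usage sketch for the new buttonLayerControl factory; the layer type, label
text, and onClick body below are illustrative assumptions, not part of this patch:

  import { buttonLayerControl } from "#src/widget/layer_control_button.js";
  import type { UserLayer } from "#src/layer/index.js";

  // Hypothetical button control; clicking it just logs the layer it belongs to.
  const reloadButton = buttonLayerControl<UserLayer>({
    text: "Reload",
    onClick: (layer) => {
      console.log("Reload requested for layer", layer);
    },
  });

  // reloadButton.makeControl(layer, context) returns { control, controlElement },
  // both referring to the created <button>, so the factory can presumably be listed
  // alongside the other LayerControlFactory-based controls in a layer's control tab.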