diff --git a/client/dive-common/constants.ts b/client/dive-common/constants.ts
index 288b59895..8cc7401b7 100644
--- a/client/dive-common/constants.ts
+++ b/client/dive-common/constants.ts
@@ -124,6 +124,7 @@ const zipFileTypes = [
 
 const stereoPipelineMarker = 'measurement';
 const multiCamPipelineMarkers = ['2-cam', '3-cam'];
+const pipelineCreatesDatasetMarkers = ['transcode', 'filter'];
 
 const JsonMetaRegEx = /^.*\.?(meta|config)\.json$/;
 
@@ -153,6 +154,7 @@ export {
   zipFileTypes,
   stereoPipelineMarker,
   multiCamPipelineMarkers,
+  pipelineCreatesDatasetMarkers,
   JsonMetaRegEx,
   simplifyTrainingName,
 };
diff --git a/client/platform/desktop/backend/native/viame.ts b/client/platform/desktop/backend/native/viame.ts
index 51e5c795b..728320357 100644
--- a/client/platform/desktop/backend/native/viame.ts
+++ b/client/platform/desktop/backend/native/viame.ts
@@ -10,6 +10,7 @@ import {
 import { cleanString } from 'platform/desktop/sharedUtils';
 import { serialize } from 'platform/desktop/backend/serializers/viame';
 import { observeChild } from 'platform/desktop/backend/native/processManager';
+import sendToRenderer from 'platform/desktop/background';
 import { MultiType, stereoPipelineMarker, multiCamPipelineMarkers } from 'dive-common/constants';
 
 import * as common from './common';
@@ -129,12 +130,27 @@ async function runPipeline(
     }
   }
 
+  let outputDir: string | undefined;
+  const DIVE_OUTPUT_PROJECT_DIR = 'DIVE_Jobs_Output';
   if (runPipelineArgs.pipeline.type === 'filter') {
-    command.push(`-s kwa_writer:output_directory="${npath.join(jobWorkDir, 'output')}"`);
-    command.push(`-s image_writer:file_name_prefix="${jobWorkDir}/"`);
+    if (runPipelineArgs.outputDatasetName) {
+      outputDir = runPipelineArgs.outputDatasetName;
+    } else {
+      const timestamp = (new Date()).toISOString().replace(/[:.]/g, '-');
+      outputDir = `${runPipelineArgs.pipeline.name}_${runPipelineArgs.datasetId}_${timestamp}`;
+    }
+    outputDir = `${npath.join(settings.dataPath, DIVE_OUTPUT_PROJECT_DIR, outputDir)}`;
+    if (outputDir !== jobWorkDir) {
+      await fs.mkdir(outputDir, { recursive: true });
+    }
+    command.push(`-s kwa_writer:output_directory="${outputDir}/"`);
+    command.push(`-s image_writer:file_name_prefix="${outputDir}/"`);
   }
+
+  let transcodedFilename: string | undefined;
   if (runPipelineArgs.pipeline.type === 'transcode') {
-    command.push(`-s video_writer:video_filename="${npath.join(jobWorkDir, `${datasetId}.mp4`)}"`);
+    transcodedFilename = npath.join(jobWorkDir, `${datasetId}.mp4`);
+    command.push(`-s video_writer:video_filename="${transcodedFilename}"`);
   }
 
   if (requiresInput && !stereoOrMultiCam) {
@@ -211,6 +227,31 @@ async function runPipeline(
     });
   });
 
+  if (['filter', 'transcode'].includes(runPipelineArgs.pipeline.type)) {
+    job.on('exit', async (code) => {
+      if (runPipelineArgs.pipeline.type === 'transcode') {
+        // TODO: work must be done to find the video file
+        return;
+      }
+      if (code === 0) {
+        // Ingest the output into a new dataset
+        updater({
+          ...jobBase,
+          body: ['Creating dataset from output...'],
+          exitCode: code,
+          endTime: new Date(),
+        });
+        // transcodedFilename will only be assigned for transcode pipelines
+        const importSource = transcodedFilename || outputDir;
+        if (importSource) {
+          const importPayload = await common.beginMediaImport(importSource);
+          const conversionJobArgs = await common.finalizeMediaImport(settings, importPayload);
+          sendToRenderer('filter-complete', conversionJobArgs.meta);
+        }
+      }
+    });
+  }
+
   return jobBase;
 }
 
diff --git a/client/platform/desktop/background.ts b/client/platform/desktop/background.ts
index 56cb8934e..9df8305c2 100644
--- a/client/platform/desktop/background.ts
+++ b/client/platform/desktop/background.ts
@@ -133,3 +133,9 @@ if (process.platform === 'win32') {
     cleanup();
   });
 }
+
+export default function sendToRenderer(channel: string, payload?: unknown) {
+  if (win) {
+    win.webContents.send(channel, payload);
+  }
+}
diff --git a/client/platform/desktop/constants.ts b/client/platform/desktop/constants.ts
index 4f4aab78a..f2109b1fe 100644
--- a/client/platform/desktop/constants.ts
+++ b/client/platform/desktop/constants.ts
@@ -170,6 +170,7 @@ export interface RunPipeline extends JobArgs {
   type: JobType.RunPipeline;
   datasetId: string;
   pipeline: Pipe;
+  outputDatasetName?: string;
 }
 
 export interface ExportTrainedPipeline extends JobArgs {
diff --git a/client/platform/desktop/frontend/api.ts b/client/platform/desktop/frontend/api.ts
index fb71cdd3b..c8b22eb35 100644
--- a/client/platform/desktop/frontend/api.ts
+++ b/client/platform/desktop/frontend/api.ts
@@ -100,6 +100,16 @@ async function runPipeline(itemId: string, pipeline: Pipe): Promise<void> {
   gpuJobQueue.enqueue(args);
 }
 
+async function runPipelineWithOutput(itemId: string, pipeline: Pipe, outputDatasetName: string): Promise<void> {
+  const args: RunPipeline = {
+    type: JobType.RunPipeline,
+    pipeline,
+    datasetId: itemId,
+    outputDatasetName,
+  };
+  gpuJobQueue.enqueue(args);
+}
+
 async function exportTrainedPipeline(path: string, pipeline: Pipe): Promise<void> {
   const args: ExportTrainedPipeline = {
     type: JobType.ExportTrainedPipeline,
@@ -277,4 +287,5 @@ export {
   openLink,
   nvidiaSmi,
   cancelJob,
+  runPipelineWithOutput,
 };
diff --git a/client/platform/desktop/frontend/components/MultiPipeline.vue b/client/platform/desktop/frontend/components/MultiPipeline.vue
index 8c6336fd9..13bf694b5 100644
--- a/client/platform/desktop/frontend/components/MultiPipeline.vue
+++ b/client/platform/desktop/frontend/components/MultiPipeline.vue
@@ -9,10 +9,12 @@ import {
 import { DataTableHeader } from 'vuetify';
 import { useRouter } from 'vue-router/composables';
 import { Pipe, Pipelines, useApi } from 'dive-common/apispec';
+import { runPipelineWithOutput } from 'platform/desktop/frontend/api';
 import {
   itemsPerPageOptions,
   stereoPipelineMarker,
   multiCamPipelineMarkers,
+  pipelineCreatesDatasetMarkers,
   MultiType,
 } from 'dive-common/constants';
 import { usePrompt } from 'dive-common/vue-utilities/prompt-service';
@@ -77,6 +79,23 @@ const stagedDatasetHeaders: DataTableHeader[] = headersTmpl.concat([
     width: 80,
   },
 ]);
+const createNewDatasetHeaders: DataTableHeader[] = headersTmpl.concat([
+  {
+    text: 'Output Dataset Name',
+    value: 'output',
+    sortable: false,
+  },
+  {
+    text: 'Remove',
+    value: 'remove',
+    sortable: false,
+    width: 80,
+  },
+]);
+function computeOutputDatasetName(item: JsonMetaCache) {
+  const timeStamp = (new Date()).toISOString().replace(/[:.]/g, '-');
+  return `${selectedPipeline.value?.name}_${item.name}_${timeStamp}`;
+}
 function getAvailableItems(): JsonMetaCache[] {
   if (!selectedPipelineType.value || !selectedPipeline.value) {
     return [];
   }
@@ -108,7 +127,16 @@ function toggleStaged(item: JsonMetaCache) {
 async function runPipelineForDatasets() {
   if (selectedPipeline.value !== null) {
     const results = await Promise.allSettled(
-      stagedDatasetIds.value.map((datasetId: string) => runPipeline(datasetId, selectedPipeline.value!)),
+      stagedDatasetIds.value.map((datasetId: string) => {
+        if (pipelineCreatesDatasetMarkers.includes(selectedPipeline.value?.type || '')) {
+          const datasetMeta = availableItems.value.find((item: JsonMetaCache) => item.id === datasetId);
+          if (!datasetMeta) {
+            throw new Error(`Attempted to run pipeline on nonexistent dataset ${datasetId}`);
+          }
+          return runPipelineWithOutput(datasetId, selectedPipeline.value!, computeOutputDatasetName(datasetMeta));
+        }
+        return runPipeline(datasetId, selectedPipeline.value!);
+      }),
     );
     const failed = results
       .map((result, i) => ({ result, datasetId: stagedDatasetIds.value[i] }))
@@ -178,7 +206,10 @@ onBeforeMount(async () => {
[template hunk: Vue markup not recoverable from the flattened source; surviving strings: "Datasets staged for selected pipeline", "mdi-minus"]
diff --git a/client/platform/desktop/frontend/store/jobs.ts b/client/platform/desktop/frontend/store/jobs.ts
index fd77916d5..dcacb06d7 100644
--- a/client/platform/desktop/frontend/store/jobs.ts
+++ b/client/platform/desktop/frontend/store/jobs.ts
@@ -15,9 +15,11 @@ import {
   JobType,
   RunPipeline,
   RunTraining,
+  JsonMeta,
 } from 'platform/desktop/constants';
 import AsyncGpuJobQueue from './queues/asyncGpuJobQueue';
 import AsyncCpuJobQueue from './queues/asyncCpuJobQueue';
+import { setRecents } from './dataset';
 
 interface DesktopJobHistory {
   job: DesktopJob;
@@ -94,6 +96,9 @@ function init() {
       ...args, body: ['Job cancelled by user'], exitCode: cancelledJobExitCode, endTime: new Date(), cancelledJob: true,
     });
   });
+  ipcRenderer.on('filter-complete', (event, args: JsonMeta) => {
+    setRecents(args);
+  });
 }
 
 init();
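
Taken together, these changes form one flow: the renderer enqueues a RunPipeline job carrying an optional outputDatasetName, the backend writes filter output under <dataPath>/DIVE_Jobs_Output/<name> and imports it with beginMediaImport/finalizeMediaImport, and the main process relays the resulting JsonMeta back to the renderer over the 'filter-complete' channel, where jobs.ts records it via setRecents. The sketch below shows how a renderer module could drive that flow end to end. It is illustrative only: onFilterComplete and runFilterAndFollow are hypothetical helpers, not part of this patch, and the imports assume the modules resolve as they do in the files above.

import { ipcRenderer } from 'electron';
import { Pipe } from 'dive-common/apispec';
import { JsonMeta } from 'platform/desktop/constants';
import { runPipelineWithOutput } from 'platform/desktop/frontend/api';

// Subscribe to the channel the main process forwards once a filter pipeline's
// output has been imported as a new dataset; returns an unsubscribe function.
function onFilterComplete(handler: (meta: JsonMeta) => void): () => void {
  const listener = (_event: unknown, meta: JsonMeta) => handler(meta);
  ipcRenderer.on('filter-complete', listener);
  return () => { ipcRenderer.removeListener('filter-complete', listener); };
}

// Enqueue a dataset-creating pipeline with an explicit output dataset name
// (the same timestamped convention computeOutputDatasetName uses) and react
// once the backend has finished importing the result.
async function runFilterAndFollow(datasetId: string, datasetName: string, pipeline: Pipe) {
  const timeStamp = (new Date()).toISOString().replace(/[:.]/g, '-');
  const outputName = `${pipeline.name}_${datasetName}_${timeStamp}`;
  const unsubscribe = onFilterComplete((meta) => {
    // meta is the JsonMeta produced by finalizeMediaImport for the new dataset
    console.log(`New dataset available: ${meta.name}`);
    unsubscribe();
  });
  await runPipelineWithOutput(datasetId, pipeline, outputName);
}

Note that for transcode pipelines the exit handler in viame.ts currently returns before importing (see the TODO), so only filter pipelines emit 'filter-complete' as of this change.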