@@ -2,7 +2,6 @@ import fs from 'fs';
 import { EJSON } from 'bson';
 import type { Document } from 'bson';
 import { pipeline } from 'stream/promises';
-import temp from 'temp';
 import { Transform } from 'stream';
 import type { Readable, Writable } from 'stream';
 import toNS from 'mongodb-ns';
@@ -11,6 +10,8 @@ import type { PreferencesAccess } from 'compass-preferences-model/provider';
 import { capMaxTimeMSAtPreferenceLimit } from 'compass-preferences-model/provider';
 import Parser from 'stream-json/Parser';
 import StreamValues from 'stream-json/streamers/StreamValues';
+import path from 'path';
+import os from 'os';
 
 import { lookupValueForPath, ColumnRecorder } from './export-utils';
 import {
@@ -31,6 +32,12 @@ import type { AggregationCursor, FindCursor } from 'mongodb';
 
 const debug = createDebug('export-csv');
 
+const generateTempFilename = (suffix: string) => {
+  const randomString = Math.random().toString(36).substring(2, 15);
+  const filename = `temp-${randomString}${suffix}`;
+  return path.join(os.tmpdir(), filename);
+};
+
 // First we download all the docs for the query/aggregation to a temporary file
 // while determining the unique set of columns we'll need and their order
 // (DOWNLOAD), then we write the header row, then process that temp file in
@@ -223,7 +230,7 @@ async function loadEJSONFileAndColumns({
   // while simultaneously determining the unique set of columns in the order
   // we'll have to write to the file.
   const inputStream = cursor.stream();
-  const filename = temp.path({ suffix: '.jsonl' });
+  const filename = generateTempFilename('.jsonl');
   const output = fs.createWriteStream(filename);
 
   const columnStream = new ColumnStream(progressCallback);
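For reference, the replacement helper depends only on Node's built-in os and path modules, just like the temp.path() call it replaces. A minimal standalone sketch of its behavior (the surrounding write/cleanup calls are illustrative and not part of this change):

import fs from 'fs';
import os from 'os';
import path from 'path';

const generateTempFilename = (suffix: string) => {
  // Up to 13 pseudo-random base-36 characters; unlike the temp package,
  // this makes no uniqueness guarantee, though collisions are unlikely.
  const randomString = Math.random().toString(36).substring(2, 15);
  const filename = `temp-${randomString}${suffix}`;
  return path.join(os.tmpdir(), filename);
};

// e.g. '/tmp/temp-4fzyo82mvyr.jsonl' on Linux
const filename = generateTempFilename('.jsonl');
fs.writeFileSync(filename, '{"foo":"bar"}\n');
// Like temp.path(), the helper only builds a path string; creating and
// deleting the file remains the caller's responsibility.
fs.unlinkSync(filename);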