@@ -1,4 +1,4 @@
-import { ColumnData, parquetQuery } from 'hyparquet'
+import { ColumnData, parquetRead, parquetReadObjects } from 'hyparquet'
 import { compressors } from 'hyparquet-compressors'
 import { getParquetColumn } from '../getParquetColumn.js'
 import { asyncBufferFrom } from '../utils.js'
@@ -51,11 +51,18 @@ self.onmessage = async ({ data }: { data: ClientMessage }) => {
     } catch (error) {
       postErrorMessage({ error: error as Error, queryId })
     }
+  } else if (data.chunks) {
+    function onChunk(chunk: ColumnData) { postChunkMessage({ chunk, queryId }) }
+    const { rowStart, rowEnd } = data
+    try {
+      await parquetRead({ metadata, file, rowStart, rowEnd, compressors, onChunk })
+    } catch (error) {
+      postErrorMessage({ error: error as Error, queryId })
+    }
   } else {
-    const { rowStart, rowEnd, chunks } = data
-    const onChunk = chunks ? (chunk: ColumnData) => { postChunkMessage({ chunk, queryId }) } : undefined
+    const { rowStart, rowEnd } = data
     try {
-      const result = await parquetQuery({ metadata, file, rowStart, rowEnd, compressors, onChunk })
+      const result = await parquetReadObjects({ metadata, file, rowStart, rowEnd, compressors })
       postResultMessage({ result, queryId })
     } catch (error) {
       postErrorMessage({ error: error as Error, queryId })
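For context, a minimal sketch of the two hyparquet read paths this diff switches to, as they might be used outside the worker. The URL and row range below are placeholder assumptions; asyncBufferFromUrl, parquetRead, parquetReadObjects, ColumnData, and compressors are real exports of hyparquet and hyparquet-compressors, whereas the worker above uses its own local asyncBufferFrom helper instead.

import { asyncBufferFromUrl, ColumnData, parquetRead, parquetReadObjects } from 'hyparquet'
import { compressors } from 'hyparquet-compressors'

// Placeholder URL; any AsyncBuffer source works the same way.
const file = await asyncBufferFromUrl({ url: 'https://example.com/data.parquet' })

// Streaming path (the data.chunks branch): parquetRead invokes onChunk
// for each decoded column chunk, so results can be forwarded incrementally.
await parquetRead({
  file,
  compressors,
  rowStart: 0,
  rowEnd: 100,
  onChunk(chunk: ColumnData) {
    console.log(chunk.columnName, chunk.rowStart, chunk.rowEnd)
  },
})

// Materialized path (the else branch): parquetReadObjects resolves once
// with an array of row objects for the requested range.
const rows = await parquetReadObjects({ file, compressors, rowStart: 0, rowEnd: 100 })
console.log(rows.length)

Splitting the two paths this way means the chunked branch never has to assemble row objects it will not post, and the non-chunked branch resolves with a single result message.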