 import R from 'ramda';
-import { getEnv } from '@cubejs-backend/shared';
+import { getEnv, getProcessUid } from '@cubejs-backend/shared';
 import { QueueDriverInterface } from '@cubejs-backend/base-driver';
-import { CubeStoreDriver, CubeStoreQueueDriver } from '@cubejs-backend/cubestore-driver';
+import { CubeStoreQueueDriver } from '@cubejs-backend/cubestore-driver';
 
 import { TimeoutError } from './TimeoutError';
 import { ContinueWaitError } from './ContinueWaitError';
 import { RedisQueueDriver } from './RedisQueueDriver';
 import { LocalQueueDriver } from './LocalQueueDriver';
-import { getProcessUid } from './utils';
 import { QueryStream } from './QueryStream';
 
 /**
@@ -140,20 +139,30 @@ export class QueryQueue {
   /**
    * Returns stream object which will be used to pipe data from data source.
    *
+   * @param {*} queryKeyHash
+   */
+  getQueryStream(queryKeyHash) {
+    if (!this.streams.queued.has(queryKeyHash)) {
+      throw new Error(`Unable to find stream for persisted query with id: ${queryKeyHash}`);
+    }
+
+    return this.streams.queued.get(queryKeyHash);
+  }
+
+  /**
    * @param {*} queryKey
    * @param {{ [alias: string]: string }} aliasNameToMember
    */
-  getQueryStream(queryKey, aliasNameToMember) {
+  setQueryStream(queryKey, aliasNameToMember) {
     const key = this.redisHash(queryKey);
-    if (!this.streams.queued.has(key)) {
-      const _stream = new QueryStream({
-        key,
-        maps: this.streams,
-        aliasNameToMember,
-      });
-      this.streams.queued.set(key, _stream);
-    }
-    return this.streams.queued.get(key);
+    const stream = new QueryStream({
+      key,
+      maps: this.streams,
+      aliasNameToMember,
+    });
+    this.streams.queued.set(key, stream);
+
+    return stream;
   }
 
   /**
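(Illustrative sketch, not part of the commit above: it shows how the split between setQueryStream and getQueryStream is meant to fit together, assuming `queue` is a constructed QueryQueue instance, the query key and alias map are made up for the example, and QueryStream behaves as a regular Node.js stream.)

// Submit side: register a stream for the persisted query under its hashed key.
const queryKey = ['SELECT * FROM orders', []];
queue.setQueryStream(queryKey, { orders__count: 'orders.count' });

// Processing side: look the stream up by the already-hashed key; a missing
// entry now throws instead of lazily creating one as getQueryStream used to.
const stream = queue.getQueryStream(queue.redisHash(queryKey));
stream.pipe(someWritable); // someWritable is a hypothetical destination stream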
@@ -559,7 +568,7 @@ export class QueryQueue {
         R.pipe(
           R.filter(p => {
             if (active.indexOf(p) === -1) {
-              const subKeys = p.split('::');
+              const subKeys = p.split('@');
               if (subKeys.length === 1) {
                 // common queries
                 return true;
@@ -729,10 +738,10 @@ export class QueryQueue {
   /**
    * Processes the query specified by `queryKey`. This method encapsulates most
    * of the logic related to queue updates, heartbeating, etc.
    *
-   * @param {string} queryKey
+   * @param {string} queryKeyHashed
    * @return {Promise<{ result: undefined | Object, error: string | undefined }>}
    */
-  async processQuery(queryKey) {
+  async processQuery(queryKeyHashed) {
     const queueConnection = await this.queueDriver.createConnection();
 
     let insertedCount;
@@ -743,15 +752,15 @@ export class QueryQueue {
     let processingLockAcquired;
     try {
       const processingId = await queueConnection.getNextProcessingId();
-      const retrieveResult = await queueConnection.retrieveForProcessing(queryKey, processingId);
+      const retrieveResult = await queueConnection.retrieveForProcessing(queryKeyHashed, processingId);
 
       if (retrieveResult) {
         [insertedCount, _removedCount, activeKeys, queueSize, query, processingLockAcquired] = retrieveResult;
       }
 
-      const activated = activeKeys && activeKeys.indexOf(this.redisHash(queryKey)) !== -1;
+      const activated = activeKeys && activeKeys.indexOf(queryKeyHashed) !== -1;
       if (!query) {
-        query = await queueConnection.getQueryDef(this.redisHash(queryKey));
+        query = await queueConnection.getQueryDef(queryKeyHashed);
       }
 
       if (query && insertedCount && activated && processingLockAcquired) {
@@ -771,19 +780,22 @@ export class QueryQueue {
           preAggregation: query.query?.preAggregation,
           addedToQueueTime: query.addedToQueueTime,
         });
-        await queueConnection.optimisticQueryUpdate(queryKey, { startQueryTime }, processingId);
+        await queueConnection.optimisticQueryUpdate(queryKeyHashed, { startQueryTime }, processingId);
 
         const heartBeatTimer = setInterval(
-          () => queueConnection.updateHeartBeat(queryKey),
+          () => queueConnection.updateHeartBeat(queryKeyHashed),
           this.heartBeatInterval * 1000
         );
         try {
           const handler = query?.queryHandler;
-          let target;
           switch (handler) {
             case 'stream':
-              target = this.getQueryStream(this.redisHash(queryKey));
-              await this.queryTimeout(this.queryHandlers.stream(query.query, target));
+              await this.queryTimeout(
+                this.queryHandlers.stream(query.query, this.getQueryStream(queryKeyHashed))
+              );
+
+              // CubeStore has special handling for null
+              executionResult = null;
               break;
             default:
               executionResult = {
@@ -792,7 +804,7 @@ export class QueryQueue {
                 query.query,
                 async (cancelHandler) => {
                   try {
-                    return queueConnection.optimisticQueryUpdate(queryKey, { cancelHandler }, processingId);
+                    return queueConnection.optimisticQueryUpdate(queryKeyHashed, { cancelHandler }, processingId);
                   } catch (e) {
                     this.logger('Error while query update', {
                       queryKey: query.queryKey,
@@ -848,7 +860,7 @@ export class QueryQueue {
            error: (e.stack || e).toString()
           });
           if (e instanceof TimeoutError) {
-            const queryWithCancelHandle = await queueConnection.getQueryDef(queryKey);
+            const queryWithCancelHandle = await queueConnection.getQueryDef(queryKeyHashed);
             if (queryWithCancelHandle) {
               this.logger('Cancelling query due to timeout', {
                 processingId,
@@ -868,7 +880,7 @@ export class QueryQueue {
 
         clearInterval(heartBeatTimer);
 
-        if (!(await queueConnection.setResultAndRemoveQuery(queryKey, executionResult, processingId))) {
+        if (!(await queueConnection.setResultAndRemoveQuery(queryKeyHashed, executionResult, processingId))) {
           this.logger('Orphaned execution result', {
             processingId,
             warn: 'Result for query was not set due to processing lock wasn\'t acquired',
@@ -887,7 +899,7 @@ export class QueryQueue {
       } else {
         this.logger('Skip processing', {
           processingId,
-          queryKey: query && query.queryKey || queryKey,
+          queryKey: query && query.queryKey || queryKeyHashed,
           requestId: query && query.requestId,
           queuePrefix: this.redisQueuePrefix,
           processingLockAcquired,
@@ -899,15 +911,15 @@ export class QueryQueue {
         });
         // closing stream
         if (query?.queryHandler === 'stream') {
-          const stream = this.getQueryStream(this.redisHash(queryKey));
+          const stream = this.getQueryStream(queryKeyHashed);
           stream.destroy();
         }
-        const currentProcessingId = await queueConnection.freeProcessingLock(queryKey, processingId, activated);
+        const currentProcessingId = await queueConnection.freeProcessingLock(queryKeyHashed, processingId, activated);
         if (currentProcessingId) {
           this.logger('Skipping free processing lock', {
             processingId,
             currentProcessingId,
-            queryKey: query && query.queryKey || queryKey,
+            queryKey: query && query.queryKey || queryKeyHashed,
             requestId: query && query.requestId,
             queuePrefix: this.redisQueuePrefix,
             processingLockAcquired,
@@ -921,7 +933,7 @@ export class QueryQueue {
       }
     } catch (e) {
       this.logger('Queue storage error', {
-        queryKey: query && query.queryKey || queryKey,
+        queryKey: query && query.queryKey || queryKeyHashed,
         requestId: query && query.requestId,
         error: (e.stack || e).toString(),
         queuePrefix: this.redisQueuePrefix