File tree Expand file tree Collapse file tree 4 files changed +40
-2
lines changed
packages/cubejs-ksql-driver/src
rust/cubestore/cubestore/src/streaming Expand file tree Collapse file tree 4 files changed +40
-2
lines changed Original file line number Diff line number Diff line change @@ -325,6 +325,28 @@ Refresh worker should be able to finish pre-aggregation refresh before
325325garbage collection starts. It means that all pre-aggregation partitions
326326should be built before any tables are removed.
327327
328+ # ### Supported file systems
329+
330+ The garbage collection mechanism relies on the ability of the underlying file
331+ system to report the creation time of a file.
332+
333+ If the file system does not support getting the creation time, you will see the
334+ following error message in Cube Store logs:
335+
336+ ` ` `
337+ ERROR [cubestore::remotefs::cleanup] <pid:1>
338+ error while getting created time for file "<name>.chunk.parquet":
339+ creation time is not available for the filesystem
340+ ` ` `
341+
342+ <ReferenceBox>
343+
344+ XFS is known not to support getting the creation time of a file.
345+ Please see [this issue](https://github.com/cube-js/cube/issues/7905#issuecomment-2504212623)
346+ for possible workarounds.
347+
348+ </ReferenceBox>
349+
328350# # Security
329351
330352# ## Authentication
Original file line number Diff line number Diff line change @@ -1635,6 +1635,13 @@ cubes:
16351635
16361636</CodeTabs>
16371637
1638+ <ReferenceBox>
1639+
1640+ In some cases, indexes may not work with `original_sql` pre-aggregations.
1641+ Please [track this issue](https://github.com/cube-js/cube/issues/7420).
1642+
1643+ </ReferenceBox>
1644+
16381645# ### `type`
16391646
16401647This option is used to define [aggregating indexes][ref-aggregating-indexes]
Original file line number Diff line number Diff line change @@ -131,7 +131,9 @@ export class KsqlDriver extends BaseDriver implements DriverInterface {
131131 if ( this . config . kafkaHost ) {
132132 this . kafkaClient = new Kafka ( {
133133 clientId : 'Cube' ,
134- brokers : [ this . config . kafkaHost ] ,
134+ brokers : this . config . kafkaHost
135+ . split ( ',' )
136+ . map ( h => h . trim ( ) ) ,
135137 // authenticationTimeout: 10000,
136138 // reauthenticationThreshold: 10000,
137139 ssl : this . config . kafkaUseSsl ,
Original file line number Diff line number Diff line change @@ -306,6 +306,13 @@ impl StreamingSource for KafkaStreamingSource {
306306 let unique_key_columns = self . unique_key_columns . clone ( ) ;
307307 let seq_column_index_to_move = self . seq_column_index ;
308308 let traffic_sender = TrafficSender :: new ( self . trace_obj . clone ( ) ) ;
309+ let hosts = self
310+ . host
311+ . clone ( )
312+ . split ( "," )
313+ . filter ( |s| !s. is_empty ( ) )
314+ . map ( |s| s. trim ( ) . to_string ( ) )
315+ . collect ( ) ;
309316 let stream = self
310317 . kafka_client
311318 . create_message_stream (
@@ -321,7 +328,7 @@ impl StreamingSource for KafkaStreamingSource {
321328 } )
322329 . unwrap_or ( Offset :: End ) ,
323330 ) ,
324- vec ! [ self . host . clone ( ) ] ,
331+ hosts ,
325332 & self . user ,
326333 & self . password ,
327334 self . use_ssl ,
You can’t perform that action at this time.
0 commit comments