Commit b75dbb9

Table: Add Vec of primary key column specs
We want to move the token calculation methods in ClusterState to accept SerializeRow. This in turn means we need to serialize those values, so we have to create a RowSerializationContext from the Table struct. RowSerializationContext currently needs a slice of ColumnSpec; Table has no such slice. Instead it has a hashmap from column name to ColumnSpec, and a Vec of primary key column names.

We have three options:
- Add a field with the required slice to the Table struct.
- Modify RowSerializationContext somehow so it can be created from the data that we already have in Table. I'm not sure how to do that; ideas would be appreciated.
- Hybrid: modify both Table and RowSerializationContext to make them work together.

This commit takes the first approach because it seemed to be the easiest one. Doing it a different way is of course open for discussion.
1 parent 77b2537 commit b75dbb9
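For context, here is a minimal sketch (not part of this commit) of the intended use: with pk_column_specs stored on Table, a RowSerializationContext can be built straight from table metadata and the caller's partition key values serialized for token calculation. The constructor RowSerializationContext::from_specs and the exact import paths are assumptions for illustration; the real API may differ.

use scylla_cql::frame::response::result::ColumnSpec;
use scylla_cql::serialize::row::{RowSerializationContext, SerializeRow, SerializedValues};
use scylla_cql::serialize::SerializationError;

// Hypothetical helper: serialize partition key values against the column specs
// kept on the Table. `RowSerializationContext::from_specs` is assumed here.
fn serialize_pk_values(
    pk_column_specs: &[ColumnSpec<'static>],
    key: &impl SerializeRow,
) -> Result<SerializedValues, SerializationError> {
    // Build the serialization context from the primary key column specs...
    let ctx = RowSerializationContext::from_specs(pk_column_specs);
    // ...and serialize the caller-provided values against it; the resulting
    // bytes can then be fed to the partitioner to compute a token.
    SerializedValues::from_serializable(&ctx, key)
}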

File tree

1 file changed: +21 -2 lines

scylla/src/cluster/metadata.rs

Lines changed: 21 additions & 2 deletions
@@ -35,6 +35,7 @@ use futures::Stream;
 use itertools::Itertools;
 use rand::seq::{IndexedRandom, SliceRandom};
 use rand::{rng, Rng};
+use scylla_cql::frame::response::result::{ColumnSpec, TableSpec};
 use scylla_macros::DeserializeRow;
 use std::borrow::BorrowMut;
 use std::cell::Cell;

@@ -197,13 +198,14 @@ pub struct Keyspace {
 #[derive(Clone, Debug, PartialEq, Eq)]
 pub struct Table {
     pub columns: HashMap<String, Column>,
-    /// Names of the column of partition key.
+    /// Names of the column of partition key.
     /// All of the names are guaranteed to be present in `columns` field.
     pub partition_key: Vec<String>,
-    /// Names of the column of clustering key.
+    /// Names of the column of clustering key.
     /// All of the names are guaranteed to be present in `columns` field.
     pub clustering_key: Vec<String>,
     pub partitioner: Option<String>,
+    pub(crate) pk_column_specs: Vec<ColumnSpec<'static>>,
 }

 #[derive(Clone, Debug, PartialEq, Eq)]

@@ -1410,6 +1412,7 @@ async fn query_tables(
             partition_key: vec![],
             clustering_key: vec![],
             partitioner: None,
+            pk_column_specs: vec![],
         }));

         let mut entry = result

@@ -1461,6 +1464,7 @@ async fn query_views(
             partition_key: vec![],
             clustering_key: vec![],
             partitioner: None,
+            pk_column_specs: vec![],
         }))
         .map(|table| MaterializedView {
             view_metadata: table,

@@ -1665,13 +1669,28 @@ async fn query_tables_schema(
             .remove(&keyspace_and_table_name)
             .unwrap_or_default();

+        // unwrap of get() result: all column names in `partition_key` are at this
+        // point guaranteed to be present in `columns`. See the construction of `partition_key`.
+        let pk_column_specs = partition_key
+            .iter()
+            .map(|column_name| (column_name, columns.get(column_name).unwrap().clone().typ))
+            .map(|(name, typ)| {
+                let table_spec = TableSpec::owned(
+                    keyspace_and_table_name.0.clone(),
+                    keyspace_and_table_name.1.clone(),
+                );
+                ColumnSpec::owned(name.to_owned(), typ, table_spec)
+            })
+            .collect();
+
         result.insert(
             keyspace_and_table_name,
             Ok(Table {
                 columns,
                 partition_key,
                 clustering_key,
                 partitioner,
+                pk_column_specs,
             }),
         );
     }
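As a companion to the last hunk, the following self-contained sketch shows the same construction pattern with simplified stand-in types (Column, ColumnSpec, and TableSpec below are local simplifications for illustration, not the scylla_cql definitions). It makes the invariant behind the unwrap explicit: every partition key name must also be a key of the columns map.

use std::collections::HashMap;

// Simplified stand-ins for illustration only.
#[derive(Clone, Debug)]
struct Column { typ: String }

#[derive(Debug)]
struct TableSpec { keyspace: String, table: String }

#[derive(Debug)]
struct ColumnSpec { name: String, typ: String, table: TableSpec }

fn pk_column_specs(
    keyspace: &str,
    table: &str,
    partition_key: &[String],
    columns: &HashMap<String, Column>,
) -> Vec<ColumnSpec> {
    partition_key
        .iter()
        .map(|name| {
            // Invariant: every partition key column name is also present in
            // `columns`, because both are built from the same schema rows.
            let column = columns
                .get(name)
                .expect("partition key column missing from `columns`");
            ColumnSpec {
                name: name.clone(),
                typ: column.typ.clone(),
                table: TableSpec {
                    keyspace: keyspace.to_owned(),
                    table: table.to_owned(),
                },
            }
        })
        .collect()
}

fn main() {
    let mut columns = HashMap::new();
    columns.insert("pk".to_owned(), Column { typ: "bigint".to_owned() });
    columns.insert("v".to_owned(), Column { typ: "text".to_owned() });

    let specs = pk_column_specs("ks", "t", &["pk".to_owned()], &columns);
    println!("{specs:?}");
}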
