Commit 9978132

chore: Clippy fixes (#182)
Signed-off-by: Anush008 <[email protected]>
1 parent 6e8244d commit 9978132

28 files changed: +185 -267 lines

src/base/json_schema.rs

Lines changed: 3 additions & 2 deletions
@@ -47,7 +47,7 @@ impl ToJsonSchema for schema::BasicValueType {
             }));
             schema
                 .metadata
-                .get_or_insert_with(|| Default::default())
+                .get_or_insert_with(Default::default)
                 .description =
                 Some("A range, start pos (inclusive), end pos (exclusive).".to_string());
         }
@@ -80,7 +80,8 @@ impl ToJsonSchema for schema::StructSchema {
             required: self
                 .fields
                 .iter()
-                .filter_map(|f| (!f.value_type.nullable).then(|| f.name.to_string()))
+                .filter(|&f| (!f.value_type.nullable))
+                .map(|f| f.name.to_string())
                 .collect(),
             additional_properties: Some(Schema::Bool(false).into()),
             ..Default::default()
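The two hunks above are typical of this commit: passing `Default::default` directly instead of wrapping it in a closure (Clippy's `redundant_closure`), and splitting a `filter_map` whose closure mixes a predicate with a projection into an explicit `filter` + `map`. A minimal sketch of both patterns, using hypothetical names rather than the repository's types:

#[derive(Debug)]
struct Field {
    name: String,
    nullable: bool,
}

fn main() {
    let mut description: Option<String> = None;

    // Before: `.get_or_insert_with(|| Default::default())`.
    // The closure only forwards to a function, so it can be passed directly.
    description.get_or_insert_with(Default::default);

    let fields = vec![
        Field { name: "id".into(), nullable: false },
        Field { name: "note".into(), nullable: true },
    ];

    // Before: `.filter_map(|f| (!f.nullable).then(|| f.name.clone()))`.
    // After: the predicate and the projection become two separate adapters.
    let required: Vec<String> = fields
        .iter()
        .filter(|f| !f.nullable)
        .map(|f| f.name.clone())
        .collect();

    println!("{:?} {:?}", description, required);
}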

src/base/schema.rs

Lines changed: 1 addition & 1 deletion
@@ -178,7 +178,7 @@ impl CollectionSchema {
         }
     }
 
-    pub fn key_field<'a>(&'a self) -> Option<&'a FieldSchema> {
+    pub fn key_field(&self) -> Option<&FieldSchema> {
         match self.kind {
             CollectionKind::Table => Some(self.row.fields.first().unwrap()),
             CollectionKind::Collection | CollectionKind::List => None,
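The `key_field` change drops lifetime annotations that the elision rules already supply, which is what Clippy's `needless_lifetimes` lint flags. A small illustration under hypothetical names:

struct FieldSchema {
    name: String,
}

struct Row {
    fields: Vec<FieldSchema>,
}

impl Row {
    // Equivalent to `fn first_field<'a>(&'a self) -> Option<&'a FieldSchema>`;
    // the output lifetime is already tied to `&self` by elision, so spelling
    // out `'a` adds nothing.
    fn first_field(&self) -> Option<&FieldSchema> {
        self.fields.first()
    }
}

fn main() {
    let row = Row {
        fields: vec![FieldSchema { name: "id".into() }],
    };
    println!("{:?}", row.first_field().map(|f| f.name.as_str()));
}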

src/base/spec.rs

Lines changed: 1 addition & 6 deletions
@@ -132,12 +132,7 @@ impl std::fmt::Display for ValueMapping {
                     .unwrap_or_else(|_| "#(invalid json value)".to_string())
             ),
             ValueMapping::Field(v) => {
-                write!(
-                    f,
-                    "{}.{}",
-                    v.scope.as_ref().map(|s| s.as_str()).unwrap_or(""),
-                    v.field_path
-                )
+                write!(f, "{}.{}", v.scope.as_deref().unwrap_or(""), v.field_path)
             }
             ValueMapping::Struct(v) => write!(
                 f,
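Here `v.scope.as_ref().map(|s| s.as_str())` collapses to `v.scope.as_deref()`, the rewrite suggested by Clippy's `option_as_ref_deref` lint, and the multi-line `write!` folds onto one line. A sketch with a stand-in `scope` value:

fn main() {
    let scope: Option<String> = Some("root".to_string());
    let field_path = "title";

    // Before: scope.as_ref().map(|s| s.as_str()).unwrap_or("")
    // `Option::as_deref` turns Option<String> into Option<&str> in one step.
    let prefix: &str = scope.as_deref().unwrap_or("");
    println!("{}.{}", prefix, field_path);
}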

src/base/value.rs

Lines changed: 8 additions & 10 deletions
@@ -405,7 +405,7 @@ impl From<KeyValue> for Value {
             KeyValue::Int64(v) => Value::Basic(BasicValue::Int64(v)),
             KeyValue::Range(v) => Value::Basic(BasicValue::Range(v)),
             KeyValue::Struct(v) => Value::Struct(FieldValues {
-                fields: v.into_iter().map(|k| Value::from(k)).collect(),
+                fields: v.into_iter().map(Value::from).collect(),
             }),
         }
     }
@@ -463,11 +463,9 @@ impl<VS> Value<VS> {
                     .map(|v| Value::<VS>::from_alternative_ref(v))
                     .collect(),
             }),
-            Value::Collection(v) => Value::Collection(v.into_iter().map(|v| v.into()).collect()),
-            Value::Table(v) => {
-                Value::Table(v.into_iter().map(|(k, v)| (k.clone(), v.into())).collect())
-            }
-            Value::List(v) => Value::List(v.into_iter().map(|v| v.into()).collect()),
+            Value::Collection(v) => Value::Collection(v.iter().map(|v| v.into()).collect()),
+            Value::Table(v) => Value::Table(v.iter().map(|(k, v)| (k.clone(), v.into())).collect()),
+            Value::List(v) => Value::List(v.iter().map(|v| v.into()).collect()),
         }
     }
 
@@ -669,7 +667,7 @@ where
             if v.len() != fields_schema.len() {
                 api_bail!("unmatched value length");
             }
-            Self::from_json_values(fields_schema.iter().zip(v.into_iter()))
+            Self::from_json_values(fields_schema.iter().zip(v))
         }
         serde_json::Value::Object(v) => Self::from_json_object(v, fields_schema.iter()),
         _ => api_bail!("invalid value type"),
@@ -741,7 +739,7 @@ impl BasicValue {
             ) => {
                 let vec = v
                     .into_iter()
-                    .map(|v| BasicValue::from_json(v, &element_type))
+                    .map(|v| BasicValue::from_json(v, element_type))
                     .collect::<Result<Vec<_>>>()?;
                 BasicValue::Vector(Arc::from(vec))
             }
@@ -774,7 +772,7 @@ impl serde::Serialize for Value<ScopeValue> {
     }
 }
 
-impl<'a> serde::Serialize for TableEntry<'a> {
+impl serde::Serialize for TableEntry<'_> {
     fn serialize<S: serde::Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
         let &TableEntry(key, value) = self;
         let mut seq = serializer.serialize_seq(Some(value.0.fields.len() + 1))?;
@@ -873,7 +871,7 @@ pub struct TypedValue<'a> {
     pub v: &'a Value,
 }
 
-impl<'a> Serialize for TypedValue<'a> {
+impl Serialize for TypedValue<'_> {
     fn serialize<S: serde::Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
         match (self.t, self.v) {
             (ValueType::Basic(_), v) => v.serialize(serializer),
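Several recurring fixes show up in this file: redundant closures around plain functions (`map(Value::from)`), calling `.iter()` on a reference instead of `.into_iter()`, dropping a needless `&` on an argument that is already a reference, and replacing impl lifetimes that are never referred to by name with the anonymous `'_`. A compact sketch of the closure and lifetime patterns, using hypothetical types:

#[derive(Debug)]
struct Wrapper(i64);

impl From<i64> for Wrapper {
    fn from(v: i64) -> Self {
        Wrapper(v)
    }
}

struct Labeled<'a> {
    label: &'a str,
}

// Before: `impl<'a> Labeled<'a> { ... }`. The lifetime is never mentioned by
// name inside the block, so the anonymous form used in the diff works as well.
impl Labeled<'_> {
    fn label(&self) -> &str {
        self.label
    }
}

fn main() {
    // Before: `.map(|k| Wrapper::from(k))`; the closure only forwards its
    // argument, so the function path can be passed directly.
    let wrapped: Vec<Wrapper> = vec![1i64, 2, 3].into_iter().map(Wrapper::from).collect();

    let tagged = Labeled { label: "text" };
    println!("{:?} {}", wrapped, tagged.label());
}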

src/builder/analyzed_flow.rs

Lines changed: 2 additions & 2 deletions
@@ -83,9 +83,9 @@ impl AnalyzedTransientFlow {
         analyzer::analyze_transient_flow(&transient_flow, &ctx, registry)?;
         Ok(Self {
             transient_flow_instance: transient_flow,
-            data_schema: data_schema,
+            data_schema,
             execution_plan: execution_plan_fut.await?,
-            output_type: output_type,
+            output_type,
         })
     }
 }
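When a local variable shares the name of the struct field it initializes, Clippy's `redundant_field_names` lint prefers the shorthand, as in the hunk above. A minimal sketch with made-up types:

struct TransientFlow {
    data_schema: String,
    output_type: String,
}

fn main() {
    let data_schema = "schema".to_string();
    let output_type = "Int64".to_string();

    // Before: TransientFlow { data_schema: data_schema, output_type: output_type }
    let flow = TransientFlow { data_schema, output_type };
    println!("{} -> {}", flow.data_schema, flow.output_type);
}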

src/builder/analyzer.rs

Lines changed: 15 additions & 22 deletions
@@ -107,7 +107,7 @@ impl TryInto<StructSchema> for &StructSchemaBuilder {
             fields: Arc::new(
                 self.fields
                     .iter()
-                    .map(|f| FieldSchema::<ValueType>::from_alternative(f))
+                    .map(FieldSchema::<ValueType>::from_alternative)
                     .collect::<Result<Vec<_>>>()?,
             ),
         })
@@ -306,7 +306,7 @@ impl CollectorBuilder {
             api_bail!("Collector is already used");
         }
         let common_schema =
-            try_make_common_struct_schemas(&self.schema, &schema).with_context(|| {
+            try_make_common_struct_schemas(&self.schema, schema).with_context(|| {
                 format!(
                     "Collectors are sent with entries in incompatible schemas:\n {}\n {}\n",
                     self.schema, schema
@@ -377,7 +377,7 @@ impl DataScopeBuilder {
             i += 1;
 
             struct_schema = match &field.value_type.typ {
-                ValueTypeBuilder::Struct(struct_type) => &struct_type,
+                ValueTypeBuilder::Struct(struct_type) => struct_type,
                 _ => {
                     api_bail!("Field {} is not a struct", field_path[0..(i + 1)].join("."));
                 }
@@ -508,7 +508,7 @@ fn analyze_value_mapping(
             (
                 AnalyzedValueMapping::Field(AnalyzedFieldReference {
                     local: local_field_ref,
-                    scope_up_level: scope_up_level as u32,
+                    scope_up_level,
                 }),
                 EnrichedValueType::from_alternative(value_type)?,
             )
@@ -559,7 +559,7 @@ fn add_collector(
     })
 }
 
-impl<'a> AnalyzerContext<'a> {
+impl AnalyzerContext<'_> {
     pub(super) fn analyze_source_op(
         &self,
         scope: &mut DataScopeBuilder,
@@ -592,8 +592,7 @@ impl<'a> AnalyzerContext<'a> {
         let source_id = metadata.map(|metadata| {
             let existing_source_ids = existing_source_states
                 .iter()
-                .map(|v| v.iter())
-                .flatten()
+                .flat_map(|v| v.iter())
                 .filter_map(|state| {
                     if state.key_schema == key_schema_no_attrs {
                         Some(state.source_id)
@@ -741,7 +740,7 @@ impl<'a> AnalyzerContext<'a> {
             self.analyze_op_scope(
                 &mut exec_scope,
                 &op.op_scope.ops,
-                parent_scopes.prepend(&scope),
+                parent_scopes.prepend(scope),
             )?
         };
         let op_name = reactive_op.name.clone();
@@ -873,8 +872,7 @@ impl<'a> AnalyzerContext<'a> {
             .map(|setup_state| -> Result<i32> {
                 let existing_target_ids = existing_target_states
                     .iter()
-                    .map(|v| v.iter())
-                    .flatten()
+                    .flat_map(|v| v.iter())
                     .map(|state| state.common.target_id)
                     .collect::<HashSet<_>>();
                 let target_id = if existing_target_ids.len() == 1 {
@@ -888,15 +886,13 @@ impl<'a> AnalyzerContext<'a> {
         };
         let max_schema_version_id = existing_target_states
             .iter()
-            .map(|v| v.iter())
-            .flatten()
+            .flat_map(|v| v.iter())
             .map(|s| s.common.max_schema_version_id)
             .max()
             .unwrap_or(0);
         let reusable_schema_version_ids = existing_target_states
             .iter()
-            .map(|v| v.iter())
-            .flatten()
+            .flat_map(|v| v.iter())
             .map(|s| {
                 Ok({
                     if export_factory.will_keep_all_existing_data(
@@ -1010,8 +1006,7 @@ pub fn analyze_flow(
     let existing_metadata_versions = || {
         existing_flow_ss
             .iter()
-            .map(|flow_ss| flow_ss.metadata.possible_versions())
-            .flatten()
+            .flat_map(|flow_ss| flow_ss.metadata.possible_versions())
     };
 
     let mut source_states_by_name = HashMap::<&str, Vec<&SourceSetupState>>::new();
@@ -1037,8 +1032,7 @@ pub fn analyze_flow(
 
     let mut setup_state = setup::FlowSetupState::<setup::DesiredMode> {
         seen_flow_metadata_version: existing_flow_ss
-            .map(|flow_ss| flow_ss.seen_flow_metadata_version)
-            .flatten(),
+            .and_then(|flow_ss| flow_ss.seen_flow_metadata_version),
         metadata: FlowSetupMetadata {
             last_source_id: existing_metadata_versions()
                 .map(|metadata| metadata.last_source_id)
@@ -1052,14 +1046,13 @@ pub fn analyze_flow(
         },
         tracking_table: db_tracking_setup::TrackingTableSetupState {
             table_name: existing_flow_ss
-                .map(|flow_ss| {
+                .and_then(|flow_ss| {
                     flow_ss
                         .tracking_table
                         .current
                         .as_ref()
                         .map(|v| v.table_name.clone())
                 })
-                .flatten()
                 .unwrap_or_else(|| db_tracking_setup::default_tracking_table_name(&flow_inst.name)),
             version_id: db_tracking_setup::CURRENT_TRACKING_TABLE_VERSION,
         },
@@ -1078,7 +1071,7 @@ pub fn analyze_flow(
         .map(|source_op| {
             let existing_source_states = source_states_by_name.get(source_op.name.as_str());
             analyzer_ctx.analyze_source_op(
-                &mut root_exec_scope.data,
+                root_exec_scope.data,
                 source_op.clone(),
                 Some(&mut setup_state.metadata),
                 existing_source_states,
@@ -1095,7 +1088,7 @@ pub fn analyze_flow(
         .iter()
         .map(|export_op| {
             analyzer_ctx.analyze_export_op(
-                &mut root_exec_scope.data,
+                root_exec_scope.data,
                 export_op.clone(),
                 Some(&mut setup_state),
                 &target_states_by_name_type,
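Most of the churn in this file replaces `.map(...).flatten()` chains with `flat_map` on iterators and `and_then` on `Option`, the rewrite suggested by Clippy's `map_flatten` lint, plus a handful of needless borrows and a now-unneeded `as u32` cast. A small sketch of the `map_flatten` rewrites:

fn main() {
    let nested: Vec<Vec<u32>> = vec![vec![1, 2], vec![3]];

    // Before: `nested.iter().map(|v| v.iter()).flatten()`.
    // `flat_map` fuses the two adapters into one step.
    let flat: Vec<u32> = nested.iter().flat_map(|v| v.iter()).copied().collect();

    // The Option form of the same pattern:
    // before: `Some("42").map(|s| s.parse::<u32>().ok()).flatten()`
    // after:  `Some("42").and_then(|s| s.parse::<u32>().ok())`
    let parsed: Option<u32> = Some("42").and_then(|s| s.parse::<u32>().ok());

    println!("{:?} {:?}", flat, parsed);
}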

src/builder/flow_builder.rs

Lines changed: 10 additions & 11 deletions
@@ -85,13 +85,13 @@ impl DataScopeRef {
                 if let Some(child) = child {
                     DataScopeRef(child)
                 } else {
-                    let new_scope = self.make_child_scope(&entry.key())?;
+                    let new_scope = self.make_child_scope(entry.key())?;
                     entry.insert(Arc::downgrade(&new_scope.0));
                     new_scope
                 }
             }
             Entry::Vacant(entry) => {
-                let new_scope = self.make_child_scope(&entry.key())?;
+                let new_scope = self.make_child_scope(entry.key())?;
                 entry.insert(Arc::downgrade(&new_scope.0));
                 new_scope
             }
@@ -118,7 +118,7 @@ impl DataScopeRef {
             .1
             .value_type
             .typ;
-        for field in (&field_path[1..]).iter() {
+        for field in field_path[1..].iter() {
             let struct_builder = match field_typ {
                 ValueTypeBuilder::Struct(struct_type) => struct_type,
                 _ => bail!("expect struct type"),
@@ -250,10 +250,9 @@ impl DataSlice {
             spec::ValueMapping::Field(v) => &v.field_path,
             _ => return Err(PyException::new_err("expect field path")),
         };
-        Ok(self
-            .scope
+        self.scope
             .get_child_scope(field_path.clone())
-            .into_py_result()?)
+            .into_py_result()
     }
 }
 
@@ -420,10 +419,10 @@ impl FlowBuilder {
         Ok(result)
     }
 
-    pub fn constant<'py>(
+    pub fn constant(
         &self,
         value_type: py::Pythonized<schema::EnrichedValueType>,
-        value: Bound<'py, PyAny>,
+        value: Bound<'_, PyAny>,
     ) -> PyResult<DataSlice> {
         let schema = value_type.into_inner();
         let value = py::value_from_py_object(&schema.typ, &value)?;
@@ -502,7 +501,7 @@ impl FlowBuilder {
                 let _ = analyzer_ctx.analyze_reactive_op(scope, &reactive_op, parent_scopes)?;
 
                 reactive_ops.push(reactive_op);
-                let result = Self::last_field_to_data_slice(&scope.data, common_scope.clone())
+                let result = Self::last_field_to_data_slice(scope.data, common_scope.clone())
                     .into_py_result()?;
                 Ok(result)
             },
@@ -706,10 +705,10 @@ impl std::fmt::Display for FlowBuilder {
             )?;
         }
         for field in self.direct_input_fields.iter() {
-            write!(f, "Direct input {}: {}\n", field.name, field.value_type)?;
+            writeln!(f, "Direct input {}: {}", field.name, field.value_type)?;
         }
         if !self.direct_input_fields.is_empty() {
-            write!(f, "\n")?;
+            writeln!(f)?;
        }
        for op in self.reactive_ops.iter() {
            write!(
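Two patterns worth calling out here: `write!(f, "...\n")` becomes `writeln!`, which Clippy's `write_with_newline` lint asks for, and `Ok(expr?)` collapses to `expr` when the types already line up (`needless_question_mark`). A sketch with hypothetical types:

use std::fmt;

struct Field {
    name: String,
    value_type: String,
}

struct FlowDisplay {
    direct_input_fields: Vec<Field>,
}

impl fmt::Display for FlowDisplay {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        for field in self.direct_input_fields.iter() {
            // Before: write!(f, "Direct input {}: {}\n", ...)?
            writeln!(f, "Direct input {}: {}", field.name, field.value_type)?;
        }
        if !self.direct_input_fields.is_empty() {
            // Before: write!(f, "\n")?
            writeln!(f)?;
        }
        Ok(())
    }
}

// Before: `Ok(s.parse::<u32>()?)` — wrapping the `?` result back into `Ok` is
// redundant when the function returns the same Result type.
fn parse_count(s: &str) -> Result<u32, std::num::ParseIntError> {
    s.parse::<u32>()
}

fn main() {
    let d = FlowDisplay {
        direct_input_fields: vec![Field { name: "text".into(), value_type: "Str".into() }],
    };
    print!("{}", d);
    println!("{:?}", parse_count("3"));
}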

src/execution/db_tracking.rs

Lines changed: 1 addition & 1 deletion
@@ -91,7 +91,7 @@ pub async fn precommit_source_tracking_info(
         .bind(source_key_json) // $2
         .bind(max_process_ordinal) // $3
         .bind(sqlx::types::Json(staging_target_keys)) // $4
-        .bind(memoization_info.map(|m| sqlx::types::Json(m))) // $5
+        .bind(memoization_info.map(sqlx::types::Json)) // $5
         .execute(db_executor)
         .await?;
     Ok(())
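Tuple-struct constructors are ordinary functions, so `map(|m| sqlx::types::Json(m))` is another instance of `redundant_closure`. A sketch with a local stand-in for the wrapper type (not the real `sqlx` type):

// `Json` here is only a stand-in for a newtype wrapper such as
// `sqlx::types::Json`; it is not the real sqlx type.
#[derive(Debug)]
struct Json<T>(T);

fn main() {
    let memoization_info: Option<Vec<u8>> = Some(vec![1, 2, 3]);

    // Before: memoization_info.map(|m| Json(m))
    // The constructor itself is passed as the mapping function.
    let wrapped = memoization_info.map(Json);
    println!("{:?}", wrapped);
}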

src/execution/db_tracking_setup.rs

Lines changed: 3 additions & 3 deletions
@@ -101,15 +101,15 @@ impl ResourceSetupStatusCheck for TrackingTableSetupStatusCheck {
 
     fn describe_changes(&self) -> Vec<String> {
         let mut changes: Vec<String> = vec![];
-        if self.desired_state.is_some() && self.legacy_table_names.len() > 0 {
+        if self.desired_state.is_some() && !self.legacy_table_names.is_empty() {
             changes.push(format!(
                 "Rename legacy tracking tables: {}. ",
                 self.legacy_table_names.join(", ")
             ));
         }
         match (self.min_existing_version_ids, &self.desired_state) {
             (None, Some(state)) => {
-                changes.push(format!("Create the tracking table: {}. ", state.table_name).into())
+                changes.push(format!("Create the tracking table: {}. ", state.table_name))
             }
             (Some(min_version_id), Some(desired)) => {
                 if min_version_id < desired.version_id {
@@ -139,7 +139,7 @@ impl ResourceSetupStatusCheck for TrackingTableSetupStatusCheck {
         match (self.min_existing_version_ids, &self.desired_state) {
             (None, Some(_)) => SetupChangeType::Create,
             (Some(min_version_id), Some(desired)) => {
-                if min_version_id == desired.version_id && self.legacy_table_names.len() == 0 {
+                if min_version_id == desired.version_id && self.legacy_table_names.is_empty() {
                     SetupChangeType::NoChange
                 } else if min_version_id < desired.version_id {
                     SetupChangeType::Update
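The last two hunks swap `len() > 0` / `len() == 0` comparisons for `is_empty()` (Clippy's `len_zero`) and drop a `.into()` on a value that is already a `String` (`useless_conversion`). A brief sketch, with a placeholder table name:

fn main() {
    let legacy_table_names: Vec<String> = vec![];
    let mut changes: Vec<String> = vec![];

    // Before: `legacy_table_names.len() > 0`
    if !legacy_table_names.is_empty() {
        changes.push(format!(
            "Rename legacy tracking tables: {}. ",
            legacy_table_names.join(", ")
        ));
    }

    // Before: `changes.push(format!(...).into())` — `format!` already returns
    // a String, so the conversion is a no-op.
    changes.push(format!("Create the tracking table: {}. ", "example_table"));

    println!("{:?} (legacy empty: {})", changes, legacy_table_names.is_empty());
}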
