
Commit 26f7a0b

style: clippy
1 parent 6aaab2e commit 26f7a0b

12 files changed: +22 -24 lines changed


encoderfile/build.rs

Lines changed: 2 additions & 2 deletions
@@ -1,4 +1,4 @@
-const BUILD_VARS: &[&'static str] = &[
+const BUILD_VARS: &[&str] = &[
     "MODEL_WEIGHTS_PATH",
     "TOKENIZER_PATH",
     "MODEL_CONFIG_PATH",
@@ -18,7 +18,7 @@ fn main() -> Result<(), Box<dyn std::error::Error>> {
     for var in BUILD_VARS {
         let val = std::env::var(var).expect("Missing required environment variable: {var}");

-        println!("cargo:rustc-env={}={}", var, val);
+        println!("cargo:rustc-env={var}={val}");
     }

     Ok(())
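
Both changes in this file are routine clippy fixes: the first drops the redundant `'static` lifetime (clippy's `redundant_static_lifetimes` lint treats `'static` as implied on references in `const`/`static` items), the second inlines the format arguments. A minimal standalone sketch of the lifetime change, hypothetical and not taken from this repository:

    // Illustrative example of clippy::redundant_static_lifetimes.
    const EXPLICIT: &[&'static str] = &["a", "b"]; // lint: 'static is implied here
    const ELIDED: &[&str] = &["a", "b"];           // preferred, equivalent type

    fn main() {
        assert_eq!(EXPLICIT, ELIDED);
    }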

encoderfile/src/assets.rs

Lines changed: 2 additions & 3 deletions
@@ -71,7 +71,7 @@ embed_in_section!(
 embed_in_section!(MODEL_TYPE_STR, "MODEL_TYPE", "model_type", Env);
 embed_in_section!(MODEL_ID, "MODEL_NAME", "model_id", Env);

-pub const BANNER: &'static str = include_str!("../../assets/banner.txt");
+pub const BANNER: &str = include_str!("../../assets/banner.txt");

 pub fn get_banner() -> String {
     let model_id_len = MODEL_ID.len();
@@ -82,8 +82,7 @@ pub fn get_banner() -> String {
     let spaces = " ".repeat(remaining_len);

     format!(
-        "{}\nModel ID: {}{}{}\n",
-        BANNER, MODEL_ID, spaces, signature
+        "{BANNER}\nModel ID: {MODEL_ID}{spaces}{signature}\n"
     )
 }
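
The `format!` rewrite is the pattern clippy's `uninlined_format_args` lint suggests throughout this commit: when an argument is a plain identifier, it can be captured directly in the format string, and format specifiers such as `{e:?}` still apply (see the `panic!` and `tracing` changes in the files below). A small illustrative sketch, not from this repo:

    // Illustrative example of clippy::uninlined_format_args.
    fn main() {
        let model_id = "bert-base";
        let err = std::io::Error::from(std::io::ErrorKind::NotFound);

        // Before: positional arguments.
        let old = format!("Model ID: {}, error: {:?}", model_id, err);
        // After: identifiers captured inline; specifiers like :? still work.
        let new = format!("Model ID: {model_id}, error: {err:?}");

        assert_eq!(old, new);
    }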

encoderfile/src/common/embedding.rs

Lines changed: 1 addition & 1 deletion
@@ -33,7 +33,7 @@ impl From<EmbeddingResponse> for crate::generated::embedding::EmbeddingResponse
         Self {
             results: val.results.into_iter().map(|embs| embs.into()).collect(),
             model_id: val.model_id,
-            metadata: val.metadata.unwrap_or(HashMap::new()),
+            metadata: val.metadata.unwrap_or_default(),
         }
     }
 }
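
This is clippy's `unwrap_or_default` suggestion, and the same change appears in the three files below: `unwrap_or(HashMap::new())` evaluates its argument regardless of the variant, while `unwrap_or_default()` defers to `Default` and reads more directly. A minimal sketch, assuming a field shaped like `Option<HashMap<String, String>>` (hypothetical, not this crate's actual types):

    use std::collections::HashMap;

    // Illustrative example of the unwrap_or_default pattern.
    fn main() {
        let metadata: Option<HashMap<String, String>> = None;

        // Before: the fallback HashMap is built even when metadata is Some.
        let eager = metadata.clone().unwrap_or(HashMap::new());
        // After: falls back to HashMap::default() only when metadata is None.
        let lazy = metadata.unwrap_or_default();

        assert_eq!(eager, lazy);
    }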

encoderfile/src/common/model_metadata.rs

Lines changed: 1 addition & 1 deletion
@@ -13,7 +13,7 @@ impl From<GetModelMetadataResponse> for crate::generated::encoderfile::GetModelM
         Self {
             model_id: val.model_id,
             model_type: crate::generated::encoderfile::ModelType::from(val.model_type).into(),
-            id2label: val.id2label.unwrap_or(HashMap::new()),
+            id2label: val.id2label.unwrap_or_default(),
         }
     }
 }

encoderfile/src/common/sequence_classification.rs

Lines changed: 1 addition & 1 deletion
@@ -35,7 +35,7 @@ impl From<SequenceClassificationResponse>
         Self {
             results: val.results.into_iter().map(|i| i.into()).collect(),
             model_id: val.model_id,
-            metadata: val.metadata.unwrap_or(HashMap::new()),
+            metadata: val.metadata.unwrap_or_default(),
         }
     }
 }

encoderfile/src/common/token_classification.rs

Lines changed: 1 addition & 1 deletion
@@ -35,7 +35,7 @@ impl From<TokenClassificationResponse>
         Self {
             results: val.results.into_iter().map(|i| i.into()).collect(),
             model_id: val.model_id,
-            metadata: val.metadata.unwrap_or(HashMap::new()),
+            metadata: val.metadata.unwrap_or_default(),
         }
     }
 }

encoderfile/src/config.rs

Lines changed: 2 additions & 2 deletions
@@ -30,7 +30,7 @@ pub fn get_model_type() -> ModelType {
         "embedding" => ModelType::Embedding,
         "sequence_classification" => ModelType::SequenceClassification,
         "token_classification" => ModelType::TokenClassification,
-        other => panic!("Invalid model type: {}", other),
+        other => panic!("Invalid model type: {other}"),
     })
     .clone()
 }
@@ -49,6 +49,6 @@ impl ModelConfig {
     }

     pub fn label2id(&self, label: &str) -> Option<u32> {
-        self.label2id.as_ref()?.get(label).map(|i| *i)
+        self.label2id.as_ref()?.get(label).copied()
     }
 }
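
The `label2id` change swaps the dereferencing closure for `Option::copied`, which clippy (the `map_clone` family of lints) prefers when the value is `Copy`, as `u32` is. A small sketch of the equivalent lookup, assuming a `HashMap<String, u32>` mapping like the one this config presumably holds:

    use std::collections::HashMap;

    // Illustrative example: Option::copied instead of .map(|i| *i).
    fn lookup(map: &HashMap<String, u32>, label: &str) -> Option<u32> {
        // Before: map.get(label).map(|i| *i)
        // After: Option<&u32> -> Option<u32> via copied(), since u32 is Copy.
        map.get(label).copied()
    }

    fn main() {
        let mut label2id = HashMap::new();
        label2id.insert("positive".to_string(), 1);
        assert_eq!(lookup(&label2id, "positive"), Some(1));
        assert_eq!(lookup(&label2id, "missing"), None);
    }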

encoderfile/src/inference/token_classification.rs

Lines changed: 2 additions & 3 deletions
@@ -43,8 +43,7 @@ pub fn token_classification<'a>(
             Some(l) => l.to_string(),
             None => {
                 panic!(
-                    "FATAL: No label found for ID {}. Check to make sure that your config is correct.",
-                    argmax
+                    "FATAL: No label found for ID {argmax}. Check to make sure that your config is correct."
                 )
             }
         };
@@ -61,7 +60,7 @@ pub fn token_classification<'a>(
                 start,
                 end,
             },
-            score: score,
+            score,
             label,
             logits: logits
                 .index_axis(Axis(0), i)
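
Besides inlining the `panic!` argument, this hunk fixes clippy's `redundant_field_names` lint: when a local variable has the same name as the struct field, field init shorthand is preferred. A tiny illustrative sketch, with made-up types rather than this crate's:

    // Illustrative example of clippy::redundant_field_names.
    struct Prediction {
        score: f32,
        label: String,
    }

    fn main() {
        let score = 0.97_f32;
        let label = "B-PER".to_string();

        // `score: score` would trigger the lint; the shorthand is equivalent.
        let pred = Prediction { score, label };
        assert_eq!(pred.score, 0.97);
        assert_eq!(pred.label, "B-PER");
    }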

encoderfile/src/inference/utils.rs

Lines changed: 2 additions & 2 deletions
@@ -73,13 +73,13 @@ pub fn requires_token_type_ids<'a>(session: &MutexGuard<'a, Session>) -> bool {
 #[macro_export]
 macro_rules! run_model {
     ($session:expr, $a_ids:expr, $a_mask:expr, $a_type_ids:expr) => {{
-        match crate::inference::utils::requires_token_type_ids(&$session) {
+        match $crate::inference::utils::requires_token_type_ids(&$session) {
             true => $session.run(ort::inputs!($a_ids, $a_mask, $a_type_ids)),
             false => $session.run(ort::inputs!($a_ids, $a_mask)),
         }
         .map_err(|e| {
             tracing::error!("Error running model: {:?}", e);
-            crate::error::ApiError::InternalError("Error running model")
+            $crate::error::ApiError::InternalError("Error running model")
         })
     }};
 }
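
This one is about macro hygiene rather than pure style: inside a `#[macro_export]` macro, `crate::` resolves relative to whichever crate expands the macro, while `$crate::` always points back to the crate that defines it (clippy flags the former as `crate_in_macro_def`). A minimal sketch of the idea, using hypothetical names rather than this repo's modules:

    // Illustrative example of $crate in an exported macro.
    pub mod utils {
        pub fn answer() -> u32 {
            42
        }
    }

    #[macro_export]
    macro_rules! get_answer {
        () => {
            // $crate resolves to the defining crate even when the macro is
            // expanded inside a downstream crate; plain `crate::` would not.
            $crate::utils::answer()
        };
    }

    fn main() {
        assert_eq!(get_answer!(), 42);
    }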

encoderfile/src/tokenizer.rs

Lines changed: 1 addition & 1 deletion
@@ -25,7 +25,7 @@ pub fn get_tokenizer_from_string(s: &str, config: &Arc<ModelConfig>) -> Tokenize

     let mut tokenizer = match Tokenizer::from_str(s) {
         Ok(t) => t,
-        Err(e) => panic!("FATAL: Error loading tokenizer: {:?}", e),
+        Err(e) => panic!("FATAL: Error loading tokenizer: {e:?}"),
     };

     let pad_token = match tokenizer.id_to_token(pad_token_id) {

0 commit comments
