Skip to content

Commit 966aa6a

Browse files
committed
Apply new styles
1 parent adad65f commit 966aa6a

File tree

6 files changed

+23
-23
lines changed

6 files changed

+23
-23
lines changed

src/index.rs

Lines changed: 6 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,14 +1,16 @@
11
//! Building an `Index` to efficiently map vocabulary tokens to state transitions.
22
3-
use crate::prelude::*;
4-
use crate::vocabulary::Vocabulary;
5-
use crate::{Error, Result};
63
use bincode::{Decode, Encode};
7-
use regex_automata::dfa::{dense::DFA, Automaton};
4+
use regex_automata::dfa::dense::DFA;
5+
use regex_automata::dfa::Automaton;
86
use regex_automata::util::primitives::StateID as AutomataStateId;
97
use regex_automata::Anchored;
108
use rustc_hash::{FxHashMap as HashMap, FxHashSet as HashSet};
119

10+
use crate::prelude::*;
11+
use crate::vocabulary::Vocabulary;
12+
use crate::{Error, Result};
13+
1214
/// `Index` efficiently maps vocabulary tokens to state transitions.
1315
#[derive(Clone, Debug, PartialEq, Encode, Decode)]
1416
pub struct Index {

src/json_schema/parsing.rs

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -3,8 +3,7 @@
33
use std::num::NonZeroU64;
44

55
use regex::escape;
6-
use serde_json::json;
7-
use serde_json::Value;
6+
use serde_json::{json, Value};
87

98
use crate::json_schema::types;
109
use crate::Error;

src/prelude.rs

Lines changed: 4 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -2,9 +2,7 @@
22
33
pub use tokenizers::FromPretrainedParameters;
44

5-
pub use super::{
6-
index::Index,
7-
json_schema,
8-
primitives::{StateId, Token, TokenId},
9-
vocabulary::Vocabulary,
10-
};
5+
pub use super::index::Index;
6+
pub use super::json_schema;
7+
pub use super::primitives::{StateId, Token, TokenId};
8+
pub use super::vocabulary::Vocabulary;

src/python_bindings/mod.rs

Lines changed: 6 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -2,18 +2,18 @@
22
33
use std::sync::Arc;
44

5-
use crate::index::Index;
6-
use crate::json_schema;
7-
use crate::prelude::*;
8-
use bincode::config;
9-
use bincode::{Decode, Encode};
5+
use bincode::{config, Decode, Encode};
106
use pyo3::exceptions::PyValueError;
117
use pyo3::prelude::*;
128
use pyo3::types::{PyAny, PyDict};
139
use pyo3::wrap_pyfunction;
1410
use rustc_hash::{FxHashMap as HashMap, FxHashSet as HashSet};
1511
use tokenizers::FromPretrainedParameters;
1612

13+
use crate::index::Index;
14+
use crate::json_schema;
15+
use crate::prelude::*;
16+
1717
macro_rules! type_name {
1818
($obj:expr) => {
1919
// Safety: obj is always initialized and tp_name is a C-string
@@ -148,6 +148,7 @@ impl PyIndex {
148148
fn get_initial_state(&self) -> StateId {
149149
self.0.initial_state()
150150
}
151+
151152
fn __repr__(&self) -> String {
152153
format!("{:#?}", self.0)
153154
}

src/vocabulary/locator.rs

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,7 @@
11
//! Parsing known locations in order to retrieve `eos_token_id` information.
22
3-
use hf_hub::{api::sync::ApiBuilder, Repo, RepoType};
3+
use hf_hub::api::sync::ApiBuilder;
4+
use hf_hub::{Repo, RepoType};
45
use serde::{Deserialize, Serialize};
56
use tokenizers::{FromPretrainedParameters, Tokenizer};
67

src/vocabulary/mod.rs

Lines changed: 4 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -1,17 +1,15 @@
11
//! Creates `Vocabulary` manually or from pretrained large language model.
22
33
use bincode::{Decode, Encode};
4+
use locator::{HFLocator, Locator};
5+
use processor::TokenProcessor;
46
use rustc_hash::FxHashMap as HashMap;
5-
67
use tokenizers::normalizers::Sequence;
78
use tokenizers::{NormalizerWrapper, Tokenizer};
89

910
use crate::prelude::*;
1011
use crate::{Error, Result};
1112

12-
use locator::{HFLocator, Locator};
13-
use processor::TokenProcessor;
14-
1513
mod locator;
1614
mod processor;
1715

@@ -241,9 +239,10 @@ impl TryFrom<(TokenId, HashMap<String, Vec<TokenId>>)> for Vocabulary {
241239

242240
#[cfg(test)]
243241
mod tests {
244-
use super::*;
245242
use rustc_hash::FxHashSet as HashSet;
246243

244+
use super::*;
245+
247246
#[test]
248247
fn basic_interface() {
249248
let eos_token_id = 3;

0 commit comments

Comments (0)