Skip to content

Commit 548b425

Browse files
authored
Bump bindgen 0.63.0 -> 0.64.0 (tikv#734)
1 parent c1314a3 commit 548b425

File tree

11 files changed: +41 additions, −54 deletions

librocksdb-sys/Cargo.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -37,6 +37,6 @@ uuid = { version = "1.0", features = ["v4"] }
3737

3838
[build-dependencies]
3939
cc = { version = "1.0", features = ["parallel"] }
40-
bindgen = { version = "0.63", default-features = false, features = ["runtime"] }
40+
bindgen = { version = "0.64", default-features = false, features = ["runtime"] }
4141
glob = "0.3"
4242
pkg-config = { version = "0.3", optional = true }

librocksdb-sys/build.rs

Lines changed: 10 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,7 @@ fn link(name: &str, bundled: bool) {
66
let target = var("TARGET").unwrap();
77
let target: Vec<_> = target.split('-').collect();
88
if target.get(2) == Some(&"windows") {
9-
println!("cargo:rustc-link-lib=dylib={}", name);
9+
println!("cargo:rustc-link-lib=dylib={name}");
1010
if bundled && target.get(3) == Some(&"gnu") {
1111
let dir = var("CARGO_MANIFEST_DIR").unwrap();
1212
println!("cargo:rustc-link-search=native={}/{}", dir, target[0]);
@@ -16,10 +16,7 @@ fn link(name: &str, bundled: bool) {
1616

1717
fn fail_on_empty_directory(name: &str) {
1818
if fs::read_dir(name).unwrap().count() == 0 {
19-
println!(
20-
"The `{}` directory is empty, did you forget to pull the submodules?",
21-
name
22-
);
19+
println!("The `{name}` directory is empty, did you forget to pull the submodules?");
2320
println!("Try `git submodule update --init --recursive`");
2421
panic!();
2522
}
@@ -288,19 +285,19 @@ fn build_snappy() {
288285
}
289286

290287
fn try_to_find_and_link_lib(lib_name: &str) -> bool {
291-
println!("cargo:rerun-if-env-changed={}_COMPILE", lib_name);
292-
if let Ok(v) = env::var(format!("{}_COMPILE", lib_name)) {
288+
println!("cargo:rerun-if-env-changed={lib_name}_COMPILE");
289+
if let Ok(v) = env::var(format!("{lib_name}_COMPILE")) {
293290
if v.to_lowercase() == "true" || v == "1" {
294291
return false;
295292
}
296293
}
297294

298-
println!("cargo:rerun-if-env-changed={}_LIB_DIR", lib_name);
299-
println!("cargo:rerun-if-env-changed={}_STATIC", lib_name);
295+
println!("cargo:rerun-if-env-changed={lib_name}_LIB_DIR");
296+
println!("cargo:rerun-if-env-changed={lib_name}_STATIC");
300297

301-
if let Ok(lib_dir) = env::var(format!("{}_LIB_DIR", lib_name)) {
302-
println!("cargo:rustc-link-search=native={}", lib_dir);
303-
let mode = match env::var_os(format!("{}_STATIC", lib_name)) {
298+
if let Ok(lib_dir) = env::var(format!("{lib_name}_LIB_DIR")) {
299+
println!("cargo:rustc-link-search=native={lib_dir}");
300+
let mode = match env::var_os(format!("{lib_name}_STATIC")) {
304301
Some(_) => "static",
305302
None => "dylib",
306303
};
@@ -313,7 +310,7 @@ fn try_to_find_and_link_lib(lib_name: &str) -> bool {
313310
fn cxx_standard() -> String {
314311
env::var("ROCKSDB_CXX_STD").map_or("-std=c++17".to_owned(), |cxx_std| {
315312
if !cxx_std.starts_with("-std=") {
316-
format!("-std={}", cxx_std)
313+
format!("-std={cxx_std}")
317314
} else {
318315
cxx_std
319316
}

src/db.rs

Lines changed: 4 additions & 8 deletions
Original file line numberDiff line numberDiff line change
@@ -603,8 +603,7 @@ impl<T: ThreadMode> DBWithThreadMode<T> {
603603

604604
if let Err(e) = fs::create_dir_all(&path) {
605605
return Err(Error::new(format!(
606-
"Failed to create RocksDB directory: `{:?}`.",
607-
e
606+
"Failed to create RocksDB directory: `{e:?}`."
608607
)));
609608
}
610609

@@ -1733,8 +1732,7 @@ impl<T: ThreadMode, D: DBInner> DBCommon<T, D> {
17331732
Ok(prop_name) => get_property(prop_name.as_ptr()),
17341733
Err(e) => {
17351734
return Err(Error::new(format!(
1736-
"Failed to convert property name to CString: {}",
1737-
e
1735+
"Failed to convert property name to CString: {e}"
17381736
)));
17391737
}
17401738
};
@@ -1744,8 +1742,7 @@ impl<T: ThreadMode, D: DBInner> DBCommon<T, D> {
17441742
let result = match unsafe { CStr::from_ptr(value) }.to_str() {
17451743
Ok(s) => parse(s).map(|value| Some(value)),
17461744
Err(e) => Err(Error::new(format!(
1747-
"Failed to convert property value to string: {}",
1748-
e
1745+
"Failed to convert property value to string: {e}"
17491746
))),
17501747
};
17511748
unsafe {
@@ -1787,8 +1784,7 @@ impl<T: ThreadMode, D: DBInner> DBCommon<T, D> {
17871784
fn parse_property_int_value(value: &str) -> Result<u64, Error> {
17881785
value.parse::<u64>().map_err(|err| {
17891786
Error::new(format!(
1790-
"Failed to convert property value {} to int: {}",
1791-
value, err
1787+
"Failed to convert property value {value} to int: {err}"
17921788
))
17931789
})
17941790
}

src/ffi_util.rs

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -54,8 +54,7 @@ pub(crate) fn to_cpath<P: AsRef<Path>>(path: P) -> Result<CString, Error> {
5454
match CString::new(path.as_ref().to_string_lossy().as_bytes()) {
5555
Ok(c) => Ok(c),
5656
Err(e) => Err(Error::new(format!(
57-
"Failed to convert path to CString: {}",
58-
e,
57+
"Failed to convert path to CString: {e}"
5958
))),
6059
}
6160
}

src/transactions/optimistic_transaction_db.rs

Lines changed: 1 addition & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -134,8 +134,7 @@ impl<T: ThreadMode> OptimisticTransactionDB<T> {
134134

135135
if let Err(e) = fs::create_dir_all(&path) {
136136
return Err(Error::new(format!(
137-
"Failed to create RocksDB directory: `{:?}`.",
138-
e
137+
"Failed to create RocksDB directory: `{e:?}`."
139138
)));
140139
}
141140

src/transactions/transaction_db.rs

Lines changed: 3 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -234,8 +234,7 @@ impl<T: ThreadMode> TransactionDB<T> {
234234

235235
if let Err(e) = fs::create_dir_all(&path) {
236236
return Err(Error::new(format!(
237-
"Failed to create RocksDB directory: `{:?}`.",
238-
e
237+
"Failed to create RocksDB directory: `{e:?}`."
239238
)));
240239
}
241240

@@ -360,13 +359,12 @@ impl<T: ThreadMode> TransactionDB<T> {
360359
name: &str,
361360
opts: &Options,
362361
) -> Result<*mut ffi::rocksdb_column_family_handle_t, Error> {
363-
let cf_name = if let Ok(c) = CString::new(name.as_bytes()) {
364-
c
365-
} else {
362+
let Ok(cf_name) = CString::new(name.as_bytes()) else {
366363
return Err(Error::new(
367364
"Failed to convert path to CString when creating cf".to_owned(),
368365
));
369366
};
367+
370368
Ok(unsafe {
371369
ffi_try!(ffi::rocksdb_transactiondb_create_column_family(
372370
self.inner,

tests/test_checkpoint.rs

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,7 @@ pub fn test_single_checkpoint() {
2424
const PATH_PREFIX: &str = "_rust_rocksdb_cp_single_";
2525

2626
// Create DB with some data
27-
let db_path = DBPath::new(&format!("{}db1", PATH_PREFIX));
27+
let db_path = DBPath::new(&format!("{PATH_PREFIX}db1"));
2828

2929
let mut opts = Options::default();
3030
opts.create_if_missing(true);
@@ -37,7 +37,7 @@ pub fn test_single_checkpoint() {
3737

3838
// Create checkpoint
3939
let cp1 = Checkpoint::new(&db).unwrap();
40-
let cp1_path = DBPath::new(&format!("{}cp1", PATH_PREFIX));
40+
let cp1_path = DBPath::new(&format!("{PATH_PREFIX}cp1"));
4141
cp1.create_checkpoint(&cp1_path).unwrap();
4242

4343
// Verify checkpoint
@@ -54,7 +54,7 @@ pub fn test_multi_checkpoints() {
5454
const PATH_PREFIX: &str = "_rust_rocksdb_cp_multi_";
5555

5656
// Create DB with some data
57-
let db_path = DBPath::new(&format!("{}db1", PATH_PREFIX));
57+
let db_path = DBPath::new(&format!("{PATH_PREFIX}db1"));
5858

5959
let mut opts = Options::default();
6060
opts.create_if_missing(true);
@@ -67,7 +67,7 @@ pub fn test_multi_checkpoints() {
6767

6868
// Create first checkpoint
6969
let cp1 = Checkpoint::new(&db).unwrap();
70-
let cp1_path = DBPath::new(&format!("{}cp1", PATH_PREFIX));
70+
let cp1_path = DBPath::new(&format!("{PATH_PREFIX}cp1"));
7171
cp1.create_checkpoint(&cp1_path).unwrap();
7272

7373
// Verify checkpoint
@@ -88,7 +88,7 @@ pub fn test_multi_checkpoints() {
8888

8989
// Create another checkpoint
9090
let cp2 = Checkpoint::new(&db).unwrap();
91-
let cp2_path = DBPath::new(&format!("{}cp2", PATH_PREFIX));
91+
let cp2_path = DBPath::new(&format!("{PATH_PREFIX}cp2"));
9292
cp2.create_checkpoint(&cp2_path).unwrap();
9393

9494
// Verify second checkpoint

tests/test_column_family.rs

Lines changed: 6 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -334,11 +334,11 @@ fn test_merge_operator() {
334334
db.merge_cf(&cf1, b"k1", b"d").unwrap();
335335
db.merge_cf(&cf1, b"k1", b"efg").unwrap();
336336
let m = db.merge_cf(&cf1, b"k1", b"h");
337-
println!("m is {:?}", m);
337+
println!("m is {m:?}");
338338
// TODO assert!(m.is_ok());
339339
match db.get(b"k1") {
340340
Ok(Some(value)) => match std::str::from_utf8(&value) {
341-
Ok(v) => println!("retrieved utf8 value: {}", v),
341+
Ok(v) => println!("retrieved utf8 value: {v}"),
342342
Err(_) => println!("did not read valid utf-8 out of the db"),
343343
},
344344
Err(_) => println!("error reading value"),
@@ -458,13 +458,13 @@ fn test_no_leaked_column_family() {
458458
// repeat creating and dropping cfs many time to indirectly detect
459459
// possible leak via large dir.
460460
for cf_index in 0..20 {
461-
let cf_name = format!("cf{}", cf_index);
461+
let cf_name = format!("cf{cf_index}");
462462
db.create_cf(&cf_name, &Options::default()).unwrap();
463463
let cf = db.cf_handle(&cf_name).unwrap();
464464

465465
let mut batch = rocksdb::WriteBatch::default();
466466
for key_index in 0..100 {
467-
batch.put_cf(&cf, format!("k{}", key_index), &large_blob);
467+
batch.put_cf(&cf, format!("k{key_index}"), &large_blob);
468468
}
469469
db.write_opt(batch, &write_options).unwrap();
470470

@@ -480,11 +480,8 @@ fn test_no_leaked_column_family() {
480480

481481
// if we're not leaking, the dir bytes should be well under 10M bytes in total
482482
let dir_bytes = dir_size(&n).unwrap();
483-
assert!(
484-
dir_bytes < 10_000_000,
485-
"{} is too large (maybe leaking...)",
486-
dir_bytes
487-
);
483+
let leak_msg = format!("{dir_bytes} is too large (maybe leaking...)");
484+
assert!(dir_bytes < 10_000_000, "{}", leak_msg);
488485

489486
// only if MultiThreaded, cf can outlive db.drop_cf() and shouldn't cause SEGV...
490487
#[cfg(feature = "multi-threaded-cf")]

tests/test_db.rs

Lines changed: 7 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -710,7 +710,7 @@ fn fifo_compaction_test() {
710710

711711
let block_cache_hit_count = ctx.metric(PerfMetric::BlockCacheHitCount);
712712
if block_cache_hit_count > 0 {
713-
let expect = format!("block_cache_hit_count = {}", block_cache_hit_count);
713+
let expect = format!("block_cache_hit_count = {block_cache_hit_count}");
714714
assert!(ctx.report(true).contains(&expect));
715715
}
716716

@@ -829,7 +829,7 @@ fn get_with_cache_and_bulkload_test() {
829829
// write a lot
830830
let mut batch = WriteBatch::default();
831831
for i in 0..10_000 {
832-
batch.put(format!("{:0>4}", i).as_bytes(), b"v");
832+
batch.put(format!("{i:0>4}").as_bytes(), b"v");
833833
}
834834
assert!(db.write(batch).is_ok());
835835

@@ -858,7 +858,7 @@ fn get_with_cache_and_bulkload_test() {
858858
// try to get key
859859
let iter = db.iterator(IteratorMode::Start);
860860
for (expected, (k, _)) in iter.map(Result::unwrap).enumerate() {
861-
assert_eq!(k.as_ref(), format!("{:0>4}", expected).as_bytes());
861+
assert_eq!(k.as_ref(), format!("{expected:0>4}").as_bytes());
862862
}
863863

864864
// check live files (sst files meta)
@@ -919,7 +919,7 @@ fn get_with_cache_and_bulkload_test() {
919919
// try to get key
920920
let iter = db.iterator(IteratorMode::Start);
921921
for (expected, (k, _)) in iter.map(Result::unwrap).enumerate() {
922-
assert_eq!(k.as_ref(), format!("{:0>4}", expected).as_bytes());
922+
assert_eq!(k.as_ref(), format!("{expected:0>4}").as_bytes());
923923
}
924924
}
925925
}
@@ -964,7 +964,7 @@ fn get_with_cache_and_bulkload_and_blobs_test() {
964964
// write a lot
965965
let mut batch = WriteBatch::default();
966966
for i in 0..10_000 {
967-
batch.put(format!("{:0>4}", i).as_bytes(), b"v");
967+
batch.put(format!("{i:0>4}").as_bytes(), b"v");
968968
}
969969
assert!(db.write(batch).is_ok());
970970

@@ -993,7 +993,7 @@ fn get_with_cache_and_bulkload_and_blobs_test() {
993993
// try to get key
994994
let iter = db.iterator(IteratorMode::Start);
995995
for (expected, (k, _)) in iter.map(Result::unwrap).enumerate() {
996-
assert_eq!(k.as_ref(), format!("{:0>4}", expected).as_bytes());
996+
assert_eq!(k.as_ref(), format!("{expected:0>4}").as_bytes());
997997
}
998998

999999
// check live files (sst files meta)
@@ -1054,7 +1054,7 @@ fn get_with_cache_and_bulkload_and_blobs_test() {
10541054
// try to get key
10551055
let iter = db.iterator(IteratorMode::Start);
10561056
for (expected, (k, _)) in iter.map(Result::unwrap).enumerate() {
1057-
assert_eq!(k.as_ref(), format!("{:0>4}", expected).as_bytes());
1057+
assert_eq!(k.as_ref(), format!("{expected:0>4}").as_bytes());
10581058
}
10591059
}
10601060
}

tests/test_iterator.rs

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -77,7 +77,8 @@ fn test_iterator() {
7777
let mut it = db.iterator(IteratorMode::From(key, dir));
7878
let value = it.next();
7979
if valid {
80-
assert!(matches!(value, Some(Ok(_))), "{:?}", value);
80+
let expect = format!("{value:?}");
81+
assert!(matches!(value, Some(Ok(_))), "{:?}", &expect);
8182
} else {
8283
assert_eq!(None, value);
8384
assert_eq!(None, it.next()); // Iterator is fused

0 commit comments

Comments (0)