Skip to content

Commit e993b65

Browse files
authored
chore: Use comparison requirement for mlx-sys version when used as dep (#208)
* fix mlx-sys dep patch version * restrict hf-hub dep to be <0.4.2 * fix tokenizers version * clippy & fmt
1 parent 86f4959 commit e993b65

File tree

5 files changed

+29
-25
lines changed

5 files changed

+29
-25
lines changed

Cargo.toml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,7 @@
11
[workspace.package]
22
# All but mlx-sys should follow the same version. mlx-sys should follow
33
# the version of mlx-c.
4-
version = "0.21.0"
4+
version = "0.21.1"
55
edition = "2021"
66
authors = [
77
"Minghua Wu <michael.wu1107@gmail.com>",
@@ -28,7 +28,7 @@ resolver = "2"
2828

2929
[workspace.dependencies]
3030
# workspace local dependencies
31-
mlx-sys = { version = "0.1.0", path = "mlx-sys" }
31+
mlx-sys = { version = "=0.1.0", path = "mlx-sys" }
3232
mlx-macros = { version = "0.21.0", path = "mlx-macros" }
3333
mlx-internal-macros = { version = "0.21.0", path = "mlx-internal-macros" }
3434
mlx-rs = { version = "0.21.0", path = "mlx-rs" }

examples/mistral/Cargo.toml

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -9,10 +9,10 @@ authors.workspace = true
99
mlx-rs.workspace = true
1010

1111
# External dependencies
12-
tokenizers = "0.21"
12+
tokenizers = "=0.21.0" # 0.21.1 uses features that went stable in 1.82 while our MSRV is 1.81
1313
thiserror = "1.0"
1414
anyhow = "1.0"
15-
hf-hub = "0.4"
15+
hf-hub = "=0.4.1" # 0.4.2 uses features that went stable in 1.82 while our MSRV is 1.81
1616
dotenv = "0.15"
1717
serde = { version = "1", features = ["derive"] }
1818
serde_json = "1"

mlx-rs/CHANGELOG.md

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,9 @@
11
# Changelog
22

3+
## 0.21.1
4+
5+
- Pin `mlx-sys` dependency to an exact patch version in the workspace
6+
37
## 0.21.0
48

59
- Initial feature-complete release

mlx-rs/src/nn/activation.rs

Lines changed: 18 additions & 18 deletions
Original file line numberDiff line numberDiff line change
@@ -285,7 +285,7 @@ impl Module<&Array> for Glu {
285285
type Output = Array;
286286

287287
fn forward(&mut self, x: &Array) -> Result<Array> {
288-
glu(x, self.axis).map_err(Into::into)
288+
glu(x, self.axis)
289289
}
290290

291291
fn training_mode(&mut self, _: bool) {}
@@ -336,7 +336,7 @@ impl Module<&Array> for Mish {
336336
type Output = Array;
337337

338338
fn forward(&mut self, x: &Array) -> Result<Array> {
339-
mish(x).map_err(Into::into)
339+
mish(x)
340340
}
341341

342342
fn training_mode(&mut self, _: bool) {}
@@ -358,7 +358,7 @@ impl Module<&Array> for Relu {
358358
type Output = Array;
359359

360360
fn forward(&mut self, x: &Array) -> Result<Array> {
361-
relu(x).map_err(Into::into)
361+
relu(x)
362362
}
363363

364364
fn training_mode(&mut self, _: bool) {}
@@ -393,7 +393,7 @@ impl Module<&Array> for LeakyRelu {
393393
type Output = Array;
394394

395395
fn forward(&mut self, x: &Array) -> Result<Array> {
396-
leaky_relu(x, self.neg_slope).map_err(Into::into)
396+
leaky_relu(x, self.neg_slope)
397397
}
398398

399399
fn training_mode(&mut self, _: bool) {}
@@ -415,7 +415,7 @@ impl Module<&Array> for Relu6 {
415415
type Output = Array;
416416

417417
fn forward(&mut self, x: &Array) -> Result<Array> {
418-
relu6(x).map_err(Into::into)
418+
relu6(x)
419419
}
420420

421421
fn training_mode(&mut self, _: bool) {}
@@ -472,7 +472,7 @@ impl Module<&Array> for Softplus {
472472
type Output = Array;
473473

474474
fn forward(&mut self, x: &Array) -> Result<Array> {
475-
softplus(x).map_err(Into::into)
475+
softplus(x)
476476
}
477477

478478
fn training_mode(&mut self, _: bool) {}
@@ -494,7 +494,7 @@ impl Module<&Array> for Softsign {
494494
type Output = Array;
495495

496496
fn forward(&mut self, x: &Array) -> Result<Array> {
497-
softsign(x).map_err(Into::into)
497+
softsign(x)
498498
}
499499

500500
fn training_mode(&mut self, _: bool) {}
@@ -530,7 +530,7 @@ impl Module<&Array> for Celu {
530530
type Output = Array;
531531

532532
fn forward(&mut self, x: &Array) -> Result<Array> {
533-
celu(x, self.alpha).map_err(Into::into)
533+
celu(x, self.alpha)
534534
}
535535

536536
fn training_mode(&mut self, _: bool) {}
@@ -552,7 +552,7 @@ impl Module<&Array> for Silu {
552552
type Output = Array;
553553

554554
fn forward(&mut self, x: &Array) -> Result<Array> {
555-
silu(x).map_err(Into::into)
555+
silu(x)
556556
}
557557

558558
fn training_mode(&mut self, _: bool) {}
@@ -587,7 +587,7 @@ impl Module<&Array> for LogSoftmax {
587587
type Output = Array;
588588

589589
fn forward(&mut self, x: &Array) -> Result<Array> {
590-
log_softmax(x, self.axis).map_err(Into::into)
590+
log_softmax(x, self.axis)
591591
}
592592

593593
fn training_mode(&mut self, _: bool) {}
@@ -609,7 +609,7 @@ impl Module<&Array> for LogSigmoid {
609609
type Output = Array;
610610

611611
fn forward(&mut self, x: &Array) -> Result<Array> {
612-
log_sigmoid(x).map_err(Into::into)
612+
log_sigmoid(x)
613613
}
614614

615615
fn training_mode(&mut self, _: bool) {}
@@ -671,7 +671,7 @@ impl Module<&Array> for Prelu {
671671
type Output = Array;
672672

673673
fn forward(&mut self, x: &Array) -> Result<Array> {
674-
prelu(x, &self.weight).map_err(Into::into)
674+
prelu(x, &self.weight)
675675
}
676676

677677
fn training_mode(&mut self, _: bool) {}
@@ -716,9 +716,9 @@ impl Module<&Array> for Gelu {
716716

717717
fn forward(&mut self, x: &Array) -> Result<Array> {
718718
match self.approximate {
719-
GeluApprox::None => gelu(x).map_err(Into::into),
720-
GeluApprox::Precise => gelu_approximate(x).map_err(Into::into),
721-
GeluApprox::Fast => gelu_fast_approximate(x).map_err(Into::into),
719+
GeluApprox::None => gelu(x),
720+
GeluApprox::Precise => gelu_approximate(x),
721+
GeluApprox::Fast => gelu_fast_approximate(x),
722722
}
723723
}
724724

@@ -757,7 +757,7 @@ impl Module<&Array> for HardSwish {
757757
type Output = Array;
758758

759759
fn forward(&mut self, x: &Array) -> Result<Array> {
760-
hard_swish(x).map_err(Into::into)
760+
hard_swish(x)
761761
}
762762

763763
fn training_mode(&mut self, _: bool) {}
@@ -795,7 +795,7 @@ impl Module<&Array> for Step {
795795
type Output = Array;
796796

797797
fn forward(&mut self, x: &Array) -> Result<Array> {
798-
step(x, self.threshold).map_err(Into::into)
798+
step(x, self.threshold)
799799
}
800800

801801
fn training_mode(&mut self, _: bool) {}
@@ -817,7 +817,7 @@ impl Module<&Array> for Selu {
817817
type Output = Array;
818818

819819
fn forward(&mut self, x: &Array) -> Result<Array> {
820-
selu(x).map_err(Into::into)
820+
selu(x)
821821
}
822822

823823
fn training_mode(&mut self, _: bool) {}

mlx-rs/src/nn/dropout.rs

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -70,7 +70,7 @@ impl Module<&Array> for Dropout {
7070

7171
let p1 = array!(self.one_minus_p);
7272
let mask = bernoulli(&p1, x.shape(), None)?;
73-
multiply(multiply(array!(1.0 / self.one_minus_p), mask)?, x).map_err(Into::into)
73+
multiply(multiply(array!(1.0 / self.one_minus_p), mask)?, x)
7474
}
7575

7676
fn training_mode(&mut self, mode: bool) {
@@ -170,7 +170,7 @@ impl Module<&Array> for Dropout2d {
170170
let p1 = array!(self.one_minus_p);
171171
let mask = bernoulli(&p1, &mask_shape, None)?;
172172

173-
multiply(multiply(array!(1.0 / self.one_minus_p), mask)?, x).map_err(Into::into)
173+
multiply(multiply(array!(1.0 / self.one_minus_p), mask)?, x)
174174
}
175175

176176
fn training_mode(&mut self, mode: bool) {
@@ -267,7 +267,7 @@ impl Module<&Array> for Dropout3d {
267267
let p1 = array!(self.one_minus_p);
268268
let mask = bernoulli(&p1, &mask_shape, None)?;
269269

270-
multiply(multiply(array!(1.0 / self.one_minus_p), mask)?, x).map_err(Into::into)
270+
multiply(multiply(array!(1.0 / self.one_minus_p), mask)?, x)
271271
}
272272

273273
fn training_mode(&mut self, mode: bool) {

0 commit comments

Comments
 (0)