
Commit 419110c

test: add environment variable to control hardware options; merge the single-device and distributed CPU tests
Signed-off-by: YdrMaster <ydrml@hotmail.com>
1 parent 9ae4f93 commit 419110c

13 files changed: +30 -119 lines changed

Cargo.toml

Lines changed: 2 additions & 0 deletions

@@ -25,6 +25,8 @@ test-utils.path = "test-utils"
 
 ggus = "0.3"
 itertools = "0.13"
+regex = "1.11"
+env_logger = "0.11"
 build-script-cfg = "0.0"
 
 ndarray-layout = { git = "https://github.com/YdrMaster/ndarray-layout", rev = "f1fdd24" }
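
env_logger is pulled into the workspace here alongside regex. This excerpt does not show where logging is initialized, so the following is only a hypothetical sketch of the usual way tests enable env_logger (filtering is controlled by the RUST_LOG environment variable); the helper name is invented for illustration:

// Hypothetical sketch, not part of this commit's diff.
fn init_test_logging() {
    // `is_test(true)` cooperates with the test harness' output capture;
    // `try_init` tolerates being called from several tests.
    let _ = env_logger::builder().is_test(true).try_init();
}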

models/llama/common-cpu/Cargo.toml

Lines changed: 1 addition & 0 deletions

@@ -13,3 +13,4 @@ operators = { workspace = true, features = ["common-cpu"] }
 [dev-dependencies]
 test-utils.workspace = true
 gguf.workspace = true
+regex.workspace = true
Lines changed: 14 additions & 2 deletions

@@ -7,6 +7,7 @@ use operators::{
     random_sample::{KVPair, SampleArgs},
     Blob,
 };
+use regex::Regex;
 use std::{
     iter::zip,
     ptr::copy_nonoverlapping,
@@ -22,9 +23,10 @@ use test_utils::{Inference, TokenizerAndPrompt};
 type Worker<'w> = LlamaWorker<Operators<InprocNode<usize>, AllReduce>, Weights<'w>>;
 
 #[test]
-fn test_dist() {
+fn test_infer() {
     let Some(Inference {
         model,
+        devices,
         prompt,
         as_user,
         temperature,
@@ -49,7 +51,17 @@ fn test_dist() {
     let sample_args = SampleArgs::new(temperature, top_p, top_k).expect("invalid sample args");
     println!("{sample_args:?}");
 
-    let lens = [1; 4];
+    let lens = match devices {
+        Some(devices) => {
+            let regex = Regex::new(r"\d+").unwrap();
+            regex
+                .find_iter(&devices)
+                .map(|c| c.as_str().parse::<usize>().unwrap())
+                .collect::<Vec<_>>()
+        }
+        None => vec![1],
+    };
+    println!("distribution: {lens:?}");
     let count = lens.iter().sum();
     let (seeds, senders) = WorkerSeed::new(lens.len());
     thread::scope(|s| {
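
For reference, the new device handling in isolation: when a devices string is provided, every integer in it becomes one entry of the per-worker distribution; otherwise the test falls back to a single worker. A minimal standalone sketch follows (the exact format of the devices string is decided by test-utils' Inference, which this excerpt does not show; "1,1,1,1" is only an assumed example):

use regex::Regex;

// Standalone version of the parsing above: collect every integer found in
// the devices string, or default to one single-device worker.
fn parse_distribution(devices: Option<&str>) -> Vec<usize> {
    match devices {
        Some(devices) => Regex::new(r"\d+")
            .unwrap()
            .find_iter(devices)
            .map(|m| m.as_str().parse().unwrap())
            .collect(),
        None => vec![1],
    }
}

fn main() {
    assert_eq!(parse_distribution(Some("1,1,1,1")), vec![1, 1, 1, 1]);
    assert_eq!(parse_distribution(None), vec![1]);
}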

models/llama/common-cpu/src/lib.rs

Lines changed: 1 addition & 4 deletions

@@ -287,7 +287,4 @@ impl WeightLoader for Weights<'_> {
 }
 
 #[cfg(test)]
-mod test_infer;
-
-#[cfg(test)]
-mod test_dist;
+mod infer;

models/llama/common-cpu/src/test_infer.rs

Lines changed: 0 additions & 109 deletions
This file was deleted.
Lines changed: 1 addition & 0 deletions

@@ -22,6 +22,7 @@ fn test_infer() {
         top_p,
         top_k,
         max_steps,
+        ..
     }) = Inference::load()
     else {
         return;
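
The added `..` is Rust's rest pattern: this single-device test destructures only the fields it uses and ignores the rest of `Inference`, including the `devices` field introduced for the distributed test. A minimal illustration with a simplified stand-in struct (not the real `test_utils::Inference`):

#[allow(dead_code)] // `devices` is deliberately unused in this sketch
struct Inference {
    temperature: f32,
    max_steps: usize,
    devices: Option<String>,
}

fn main() {
    let inference = Inference { temperature: 0.7, max_steps: 64, devices: None };
    // `..` skips every unlisted field, so adding new fields to `Inference`
    // does not break this destructuring.
    let Inference { temperature, max_steps, .. } = inference;
    println!("temperature = {temperature}, max_steps = {max_steps}");
}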

models/llama/infini/src/lib.rs

Lines changed: 1 addition & 1 deletion

@@ -169,4 +169,4 @@ impl WeightLoader for Weights {
 }
 
 #[cfg(test)]
-mod test_infer;
+mod infer;
Lines changed: 2 additions & 1 deletion

@@ -23,13 +23,14 @@ fn test_infer() {
         top_p,
         top_k,
         max_steps,
+        ..
     }) = Inference::load()
     else {
         return;
     };
 
     let roll_cache_size = load_roll_cache_size();
-    println!("roll_cache_size: {}", roll_cache_size);
+    println!("roll_cache_size: {roll_cache_size}");
     let gguf = GGufModel::read(model.iter().map(|s| &**s));
 
     let TokenizerAndPrompt {
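
The println! change above is purely cosmetic: since Rust 2021, format strings can capture identifiers directly, so both forms below print the same text.

fn main() {
    let roll_cache_size = 8usize;
    println!("roll_cache_size: {}", roll_cache_size); // positional argument
    println!("roll_cache_size: {roll_cache_size}");   // captured identifier
}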

models/llama/nvidia-gpu/src/lib.rs

Lines changed: 1 addition & 1 deletion

@@ -316,4 +316,4 @@ impl<'ctx> WeightLoader for Weights<'ctx> {
 }
 
 #[cfg(test)]
-mod test_infer;
+mod infer;
Lines changed: 1 addition & 0 deletions

@@ -22,6 +22,7 @@ fn test_infer() {
         top_p,
         top_k,
         max_steps,
+        ..
     }) = Inference::load()
     else {
         return;
