14 changes: 13 additions & 1 deletion pldm-file/examples/host.rs
@@ -10,9 +10,11 @@ use std::cell::RefCell;
 use std::fs::File;
 use std::io::{Read, Seek, SeekFrom};
 use std::os::unix::fs::MetadataExt;
+use std::time::{Duration, Instant};
 
 struct Host {
     file: RefCell<File>,
+    stamp: RefCell<(Instant, usize)>,
 }
 
 const FILENAME: &str = "pldm-file-host.bin";
@@ -25,12 +27,21 @@ impl Host {
             file: RefCell::new(File::open(FILENAME).with_context(|| {
                 format!("cannot open input file {FILENAME}")
             })?),
+            stamp: RefCell::new((Instant::now(), 0)),
         })
     }
 }
 
 impl pldm_file::host::Host for Host {
     fn read(&self, buf: &mut [u8], offset: usize) -> std::io::Result<usize> {
+        let mut stamp = self.stamp.borrow_mut();
+        let now = Instant::now();
+        let del = now - stamp.0;
+        if del > Duration::from_secs(2) {
+            let rate = (offset - stamp.1) / del.as_millis() as usize;
+            info!("{rate} kB/s, offset {offset}");
+            *stamp = (now, offset);
+        }
         let mut file = self.file.borrow_mut();
         file.seek(SeekFrom::Start(offset as u64))?;
         file.read(buf)
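The rate units work out because bytes divided by elapsed milliseconds is numerically the same as kB/s (1 byte/ms = 1000 B/s). A standalone sketch of the same rate-stamping pattern, with a simulated transfer loop (all names here are illustrative, not from the PR):

```rust
use std::thread::sleep;
use std::time::{Duration, Instant};

fn main() {
    let mut stamp = (Instant::now(), 0usize);
    let mut offset = 0usize;
    for _ in 0..1000 {
        // Pretend each iteration transfers another 4 kB.
        offset += 4096;
        sleep(Duration::from_millis(10));
        let now = Instant::now();
        let del = now - stamp.0;
        if del > Duration::from_secs(2) {
            // bytes / milliseconds is numerically kB/s.
            let rate = (offset - stamp.1) / del.as_millis() as usize;
            println!("{rate} kB/s, offset {offset}");
            stamp = (now, offset);
        }
    }
}
```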
@@ -40,7 +51,8 @@ impl pldm_file::host::Host for Host {
 type FileResponder = pldm_file::host::Responder<1>;
 
 fn main() -> Result<()> {
-    env_logger::init();
+    let log_env = env_logger::Env::default().filter_or("RUST_LOG", "info");
+    env_logger::init_from_env(log_env);
 
     let mut listener = MctpLinuxAsyncListener::new(mctp::MCTP_TYPE_PLDM, None)?;
     let mut pldm_ctrl = pldm::control::responder::Responder::<2>::new();
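The logger change makes the new rate messages visible without any environment setup, while still letting RUST_LOG override the level. A minimal sketch of that behavior (the log lines are illustrative):

```rust
fn main() {
    // Default to `info` unless RUST_LOG is set; RUST_LOG still wins.
    let log_env = env_logger::Env::default().filter_or("RUST_LOG", "info");
    env_logger::init_from_env(log_env);

    log::info!("shown by default");
    log::trace!("shown only with e.g. RUST_LOG=trace");
}
```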
38 changes: 20 additions & 18 deletions pldm-file/src/host.rs
@@ -26,7 +26,10 @@ pub trait Host {
 // Created at the first stage (XFER_FIRST_PART) of a MultipartReceive,
 // where we have the offset and size.
 struct FileTransferContext {
-    buf: Vec<u8>,
+    // File starting offset
+    start: usize,
+    len: usize,
+    // Current transfer offset, 0..len
     offset: usize,
 }
 
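With `buf` gone, the context no longer owns any data; the three fields just describe a window into the host's file. A sketch of the offset arithmetic this implies (the `next_read` helper is hypothetical, added only to illustrate; it is not part of the PR):

```rust
struct FileTransferContext {
    start: usize,  // file offset of the requested window
    len: usize,    // total bytes requested
    offset: usize, // progress within the window, 0..len
}

impl FileTransferContext {
    // Hypothetical helper: absolute file offset and clamped length for
    // the next part of at most `part_len` bytes.
    fn next_read(&self, part_len: usize) -> (usize, usize) {
        (self.start + self.offset, part_len.min(self.len - self.offset))
    }
}

fn main() {
    let ctx = FileTransferContext { start: 4096, len: 1024, offset: 256 };
    assert_eq!(ctx.next_read(512), (4352, 512));
}
```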
@@ -345,14 +348,14 @@ impl<const N: usize> Responder<N> {
             if let Some(ctx) = file_ctx.xfer_ctx.as_mut() {
                 ctx.offset = 0;
             } else {
-                let new_ctx = Self::init_read(&cmd, host)?;
+                let new_ctx = Self::init_read(&cmd)?;
                 // a repeated FIRST_PART is valid, and restarts the transfer
                 file_ctx.xfer_ctx.replace(new_ctx);
             };
         }
 
         let xfer_ctx = file_ctx.xfer_ctx.as_mut().ok_or(CCode::ERROR)?;
-        let full_len = xfer_ctx.buf.len();
+        let full_len = xfer_ctx.len;
 
         let offset = match cmd.xfer_op {
             pldm::control::xfer_op::FIRST_PART
@@ -361,7 +364,7 @@ impl<const N: usize> Responder<N> {
             _ => Err(CCode::ERROR_INVALID_DATA)?,
         };
 
-        if offset >= xfer_ctx.buf.len() {
+        if offset >= xfer_ctx.len {
             Err(CCode::ERROR_INVALID_DATA)?;
         }
 
@@ -394,11 +397,13 @@ impl<const N: usize> Responder<N> {
         let mut resp_data = Vec::new();
         resp_data.extend_from_slice(&dfread_resp.to_bytes()?);
 
-        let data = &xfer_ctx.buf[offset..offset + len];
+        let l = resp_data.len();
+        resp_data.resize(resp_data.len() + len, 0);
+        let data = &mut resp_data[l..];
+        host.read(data, xfer_ctx.start + offset)
+            .map_err(|_| CCode::ERROR)?;
         let crc32 = crc::Crc::<u32>::new(&crc::CRC_32_ISO_HDLC);
         let cs = crc32.checksum(data);
-
-        resp_data.extend_from_slice(data);
         resp_data.extend_from_slice(&cs.to_le_bytes());
 
         xfer_ctx.offset = offset;
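The response buffer now carries the DFRead header, then the file data read straight into place, then a CRC-32 (ISO HDLC polynomial) over the data bytes, appended little-endian. A standalone sketch of the checksum-and-append step using the `crc` crate (the helper name and payload are illustrative):

```rust
fn append_crc(resp_data: &mut Vec<u8>, data_start: usize) {
    let crc32 = crc::Crc::<u32>::new(&crc::CRC_32_ISO_HDLC);
    // The checksum covers only the data bytes, not the response header.
    let cs = crc32.checksum(&resp_data[data_start..]);
    resp_data.extend_from_slice(&cs.to_le_bytes());
}

fn main() {
    let mut resp = b"header".to_vec();
    resp.extend_from_slice(b"file contents");
    append_crc(&mut resp, "header".len());
    assert_eq!(resp.len(), 6 + 13 + 4); // header + data + 4-byte CRC
}
```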
@@ -409,18 +414,15 @@ impl<const N: usize> Responder<N> {
 
     fn init_read(
         req: &pldm::control::MultipartReceiveReq,
-        host: &mut impl Host,
     ) -> Result<FileTransferContext> {
-        let offset = req.req_offset;
-        let len = req.req_length;
-
-        let mut buf = vec![0; len as usize];
-        let read_len =
-            host.read(&mut buf, offset as usize).or(Err(CCode::ERROR))?;
-
-        buf.truncate(read_len);
-
-        Ok(FileTransferContext { buf, offset: 0 })
+        trace!("init_read {req:?}");
+        let start = req.req_offset as usize;
+        let len = req.req_length as usize;
+        Ok(FileTransferContext {
+            start,
+            len,
+            offset: 0,
+        })
     }
 }
 
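Since `init_read` no longer touches the host, each part of a MultipartReceive becomes exactly one `host.read()` at `start + offset`, so a `Host` implementation needs only random-access reads and no whole-request buffering. A sketch of a backing store with the same `read` signature as the example host above (`SliceHost` is illustrative, not part of the crate):

```rust
struct SliceHost<'a> {
    data: &'a [u8],
}

impl<'a> SliceHost<'a> {
    // Same shape as the example host's `read`: copy up to `buf.len()`
    // bytes starting at an absolute file `offset`, returning the count.
    fn read(&self, buf: &mut [u8], offset: usize) -> std::io::Result<usize> {
        let avail = self.data.len().saturating_sub(offset);
        let n = avail.min(buf.len());
        buf[..n].copy_from_slice(&self.data[offset..offset + n]);
        Ok(n)
    }
}

fn main() {
    let host = SliceHost { data: b"hello pldm" };
    let mut part = [0u8; 4];
    // The responder issues one such read per MultipartReceive part,
    // at file position start + offset.
    let n = host.read(&mut part, 6).unwrap();
    assert_eq!(&part[..n], b"pldm");
}
```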