From 574e5b9a69fbcbcdab65b49ee3334a7bb4f6f82a Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E5=91=A8=E7=9D=BF?= Date: Fri, 28 Nov 2025 13:17:00 +0800 Subject: [PATCH 1/2] feat: add devspace management commands - Introduced `devspace` module for managing local development dependencies. - Added `start` and `stop` commands to the `devspace` functionality. - Updated `.cargo/config.toml` to include an alias for `xtask`. - Modified `.gitignore` to exclude `.devspace/` directory. - Added new dependencies in `Cargo.lock` and `xtask/Cargo.toml`. - Refactored `xtask/src/cargo.rs` and `xtask/src/image.rs` for improved code structure and readability. - Cleaned up unnecessary whitespace in various files. --- .cargo/config.toml | 4 +- .gitignore | 1 + .gitmodules | 0 modules/axconfig/src/lib.rs | 4 +- platform/x86-qemu-q35/src/mp.rs | 2 +- xtask/Cargo.toml | 14 +- xtask/src/cargo.rs | 4 +- xtask/src/devspace.rs | 498 ++++++++++++++++++++++++++++++++ xtask/src/image.rs | 172 ++++++----- xtask/src/main.rs | 21 +- 10 files changed, 630 insertions(+), 90 deletions(-) create mode 100644 .gitmodules create mode 100644 xtask/src/devspace.rs diff --git a/.cargo/config.toml b/.cargo/config.toml index 55866245..676b0e8a 100644 --- a/.cargo/config.toml +++ b/.cargo/config.toml @@ -1,6 +1,3 @@ -# [patch."https://github.com/arceos-hypervisor/axvm.git".axvm] -# path = "crates/axvm" - [target.aarch64-unknown-none-softfloat] rustflags = [ "-Clink-args=-no-pie", @@ -20,3 +17,4 @@ rustflags = [ [alias] xtask = "run --package xtask --" + diff --git a/.gitignore b/.gitignore index 772fd371..0dc4eff8 100644 --- a/.gitignore +++ b/.gitignore @@ -30,6 +30,7 @@ __pycache__/ /crates/* !/crates/nop/ !/crates/nop/** +.devspace/ /Cargo.toml.bk diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 00000000..e69de29b diff --git a/modules/axconfig/src/lib.rs b/modules/axconfig/src/lib.rs index d7aa3e1b..82d22f6a 100644 --- a/modules/axconfig/src/lib.rs +++ b/modules/axconfig/src/lib.rs @@ -34,7 +34,7 @@ pub mod devices { #[doc = " End PCI bus number."] pub const PCI_BUS_END: usize = 0xff; #[doc = " Base physical address of the PCIe ECAM space."] - pub const PCI_ECAM_BASE: usize = 0xb000_0000 ; + pub const PCI_ECAM_BASE: usize = 0xb000_0000; #[doc = " PCI device memory ranges."] pub const PCI_RANGES: &[(usize, usize)] = &[]; #[doc = " Timer interrupt num (PPI, physical timer)."] @@ -55,7 +55,7 @@ pub mod plat { #[doc = " Kernel address space size."] pub const KERNEL_ASPACE_SIZE: usize = 0x0000_7fff_ffff_f000; #[doc = " No need."] - pub const KERNEL_BASE_PADDR: usize = 0x20_0000 ; + pub const KERNEL_BASE_PADDR: usize = 0x20_0000; #[doc = " Base virtual address of the kernel image."] pub const KERNEL_BASE_VADDR: usize = 0xffff_8000_0020_0000; #[doc = " Offset of bus address and phys address. 
some boards, the bus address is"]
diff --git a/platform/x86-qemu-q35/src/mp.rs b/platform/x86-qemu-q35/src/mp.rs
index 30d24eb6..14e8e671 100644
--- a/platform/x86-qemu-q35/src/mp.rs
+++ b/platform/x86-qemu-q35/src/mp.rs
@@ -12,7 +12,7 @@ core::arch::global_asm!(
 );
 
 unsafe fn setup_startup_page(stack_top: PhysAddr) {
-    unsafe extern "C" {
+    unsafe extern {
         fn ap_entry32();
         fn ap_start();
         fn ap_end();
diff --git a/xtask/Cargo.toml b/xtask/Cargo.toml
index eca6c7f2..8fe39b17 100644
--- a/xtask/Cargo.toml
+++ b/xtask/Cargo.toml
@@ -8,18 +8,16 @@ version = "0.1.0"
 anyhow = "1.0"
 cargo_metadata = "0.23"
 chrono = "0.4"
-clap = { version = "4.4", features = ["derive"] }
+clap = {version = "4.4", features = ["derive"]}
 colored = "3"
-ostool = "0.8"
 jkconfig = "0.1"
+ostool = "0.8"
 reqwest = "0.12"
-schemars = { version = "1", features = ["derive"] }
-serde = { version = "1.0", features = ["derive"] }
+schemars = {version = "1", features = ["derive"]}
+serde = {version = "1.0", features = ["derive"]}
 serde_json = "1"
 sha2 = "0.10"
-tokio = { version = "1", features = ["full"] }
+tokio = {version = "1", features = ["full"]}
 toml.workspace = true
 
-axvmconfig = { workspace = true, features = ["std"] }
-
-
+axvmconfig = {workspace = true, features = ["std"]}
diff --git a/xtask/src/cargo.rs b/xtask/src/cargo.rs
index 8cfb1a8c..faaef975 100644
--- a/xtask/src/cargo.rs
+++ b/xtask/src/cargo.rs
@@ -7,7 +7,7 @@ use crate::ctx::Context;
 impl Context {
     pub async fn run_qemu(&mut self, config_path: Option<PathBuf>) -> anyhow::Result<()> {
         let build_config = self.load_config()?;
-        
+
         let arch = if build_config.target.contains("aarch64") {
             Arch::Aarch64
         } else if build_config.target.contains("x86_64") {
@@ -18,7 +18,7 @@ impl Context {
                 build_config.target
             ));
         };
-        
+
         let config_path = if let Some(path) = config_path {
             path
         } else {
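The devspace module introduced below keeps its edits to `.cargo/config.toml` between a pair of marker comments so they can be added and removed mechanically. With one crates.io-sourced crate and one git-sourced crate under management, the generated block would look roughly like this (the git URL is illustrative; `render_devspace_section` below is what actually emits it, grouped by source key):

    # >>> devspace patches >>>
    # Managed by `cargo xtask devspace`
    [patch."crates-io"]
    axdevice_base = { path = "modules/axdevice_base" }

    [patch."https://github.com/arceos-hypervisor/axaddrspace"]
    axaddrspace = { path = "modules/axaddrspace" }
    # <<< devspace patches <<<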
diff --git a/xtask/src/devspace.rs b/xtask/src/devspace.rs
new file mode 100644
index 00000000..feae20b6
--- /dev/null
+++ b/xtask/src/devspace.rs
@@ -0,0 +1,498 @@
+use std::collections::{BTreeMap, HashMap, hash_map::Entry};
+use std::fs;
+use std::path::{Path, PathBuf};
+use std::process::Command;
+
+use anyhow::{Context, Result, anyhow};
+use cargo_metadata::{Metadata, MetadataCommand, Package};
+use serde::{Deserialize, Serialize};
+
+const STATE_DIR: &str = ".devspace";
+const STATE_FILE: &str = ".devspace/state.json";
+const PATCH_BEGIN_MARKER: &str = "# >>> devspace patches >>>";
+const PATCH_END_MARKER: &str = "# <<< devspace patches <<<";
+const CRATES_IO_SOURCE_KEY: &str = "crates-io";
+
+const DEVSPACE_REPOS: &[&str] = &[
+    "arm_vcpu",
+    "arm_vgic",
+    "axaddrspace",
+    "axdevice_base",
+    "x86_vcpu",
+    "x86_vlapic",
+];
+
+const DEVSPACE_REPO_OVERRIDES: &[(&str, &str)] = &[(
+    "axdevice_base",
+    "https://github.com/arceos-hypervisor/axdevice_base.git",
+)];
+
+pub fn start() -> Result<()> {
+    let metadata = MetadataCommand::new()
+        .exec()
+        .context("Failed to run cargo metadata")?;
+    let repos = resolve_dev_repos(&metadata)?;
+
+    let mut state = load_state()?;
+    ensure_submodules(&mut state, &repos)?;
+    save_state(&state)?;
+
+    let specs = compute_patch_specs(&metadata, &repos)?;
+    apply_patches(&specs)?;
+
+    state.patches = specs
+        .into_iter()
+        .map(|spec| PatchRecord {
+            source: spec.source,
+            crate_name: spec.crate_name,
+        })
+        .collect();
+    save_state(&state)?;
+
+    println!("devspace start completed");
+    Ok(())
+}
+
+pub fn stop() -> Result<()> {
+    let mut state = load_state()?;
+
+    if !state.patches.is_empty() {
+        remove_patches(&state.patches)?;
+        state.patches.clear();
+    }
+
+    if !state.modules.is_empty() {
+        remove_submodules(&state.modules)?;
+        state.modules.clear();
+    }
+
+    save_state(&state)?;
+    println!("devspace stop completed");
+    Ok(())
+}
+
+fn ensure_submodules(state: &mut DevspaceState, repos: &[DevRepo]) -> Result<()> {
+    for repo in repos {
+        let dest_path = Path::new(&repo.dest);
+        if dest_path.exists() {
+            continue;
+        }
+
+        println!("Adding submodule {} -> {}", repo.git_url, repo.dest);
+        run_git(&[
+            "submodule",
+            "add",
+            "--force",
+            repo.git_url.as_str(),
+            repo.dest.as_str(),
+        ])?;
+        run_git(&[
+            "submodule",
+            "update",
+            "--init",
+            "--recursive",
+            repo.dest.as_str(),
+        ])?;
+        match state.modules.entry(repo.name.clone()) {
+            Entry::Occupied(mut entry) => {
+                entry.get_mut().path = repo.dest.clone();
+            }
+            Entry::Vacant(entry) => {
+                entry.insert(ManagedModule {
+                    name: repo.name.clone(),
+                    path: repo.dest.clone(),
+                });
+            }
+        }
+    }
+
+    Ok(())
+}
+
+fn remove_submodules(modules: &HashMap<String, ManagedModule>) -> Result<()> {
+    for module in modules.values() {
+        println!("Removing submodule {}", module.path);
+        let path = module.path.as_str();
+        let _ = run_git(&["submodule", "deinit", "-f", "--", path]);
+        let git_modules_dir = Path::new(".git/modules").join(path);
+        if git_modules_dir.exists() {
+            fs::remove_dir_all(&git_modules_dir)
+                .with_context(|| format!("Failed to remove {:?}", git_modules_dir))?;
+        }
+        if Path::new(path).exists() {
+            let _ = run_git(&["rm", "-f", "--", path]);
+            if Path::new(path).exists() {
+                fs::remove_dir_all(path).with_context(|| format!("Failed to remove {}", path))?;
+            }
+        }
+    }
+    Ok(())
+}
+
+fn compute_patch_specs(metadata: &Metadata, repos: &[DevRepo]) -> Result<Vec<PatchSpec>> {
+    let repo_map = build_repo_lookup(repos);
+    let mut specs = BTreeMap::new();
+
+    for pkg in &metadata.packages {
+        if let Some(spec) = package_patch_spec(&pkg, &repo_map) {
+            specs
+                .entry((spec.source.clone(), spec.crate_name.clone()))
+                .or_insert(spec);
+        }
+    }
+
+    for repo in repos {
+        if !specs.contains_key(&(repo.source.clone(), repo.name.clone())) {
+            return Err(anyhow!(
+                "Failed to prepare patch for crate {} (source: {})",
+                repo.name,
+                repo.source
+            ));
+        }
+    }
+
+    Ok(specs.into_values().collect())
+}
+
+fn package_patch_spec<'a>(
+    pkg: &Package,
+    repo_map: &HashMap<String, &'a DevRepo>,
+) -> Option<PatchSpec> {
+    let source_raw = pkg.source.as_ref()?.to_string();
+    let normalized = normalize_source(&source_raw)?;
+    let key = repo_lookup_key(&normalized, pkg.name.as_str());
+    let repo = repo_map.get(&key)?;
+    let manifest = Path::new(pkg.manifest_path.as_str());
+    let local_path = manifest_relative_dir(manifest)
+        .map(|subdir| Path::new(&repo.dest).join(subdir))
+        .unwrap_or_else(|| PathBuf::from(&repo.dest));
+    Some(PatchSpec {
+        source: repo.source.to_string(),
+        crate_name: pkg.name.to_string(),
+        path: to_unix_path(&local_path),
+    })
+}
+
+fn apply_patches(specs: &[PatchSpec]) -> Result<()> {
+    if specs.is_empty() {
+        println!("No git dependencies matched managed repos; skipping patch stage");
+        return Ok(());
+    }
+
+    let config_path = Path::new(".cargo/config.toml");
+    let mut contents = if config_path.exists() {
+        fs::read_to_string(config_path)
+            .with_context(|| format!("Failed to read {:?}", config_path))?
+    } else {
+        String::new()
+    };
+
+    let (cleaned, _) = strip_devspace_section(&contents);
+    contents = cleaned;
+
+    if !contents.is_empty() && !contents.ends_with('\n') {
+        contents.push('\n');
+    }
+    if !contents.is_empty() && !contents.ends_with("\n\n") {
+        contents.push('\n');
+    }
+
+    contents.push_str(&render_devspace_section(specs));
+    if !contents.ends_with('\n') {
+        contents.push('\n');
+    }
+
+    fs::write(config_path, contents)
+        .with_context(|| format!("Failed to write {:?}", config_path))?;
+    Ok(())
+}
+
+fn remove_patches(_: &[PatchRecord]) -> Result<()> {
+    let config_path = Path::new(".cargo/config.toml");
+    if !config_path.exists() {
+        return Ok(());
+    }
+
+    let original = fs::read_to_string(config_path)
+        .with_context(|| format!("Failed to read {:?}", config_path))?;
+    let (cleaned, removed) = strip_devspace_section(&original);
+
+    if removed {
+        fs::write(config_path, cleaned)
+            .with_context(|| format!("Failed to write {:?}", config_path))?;
+    }
+    Ok(())
+}
+
+fn load_state() -> Result<DevspaceState> {
+    let path = Path::new(STATE_FILE);
+    if !path.exists() {
+        return Ok(DevspaceState::default());
+    }
+
+    let contents =
+        fs::read_to_string(path).with_context(|| format!("Failed to read {:?}", path))?;
+    let state =
+        serde_json::from_str(&contents).with_context(|| format!("Failed to parse {:?}", path))?;
+    Ok(state)
+}
+
+fn save_state(state: &DevspaceState) -> Result<()> {
+    fs::create_dir_all(STATE_DIR).context("Failed to create devspace state dir")?;
+    let data = serde_json::to_string_pretty(state)?;
+    fs::write(STATE_FILE, data).context("Failed to write devspace state")?;
+    Ok(())
+}
+
+fn resolve_dev_repos(metadata: &Metadata) -> Result<Vec<DevRepo>> {
+    let override_map: HashMap<&str, &str> = DEVSPACE_REPO_OVERRIDES.iter().copied().collect();
+
+    DEVSPACE_REPOS
+        .iter()
+        .map(|crate_name| {
+            let override_url = override_map.get(*crate_name).copied();
+
+            let matches: Vec<&Package> = metadata
+                .packages
+                .iter()
+                .filter(|pkg| pkg.name == *crate_name)
+                .collect();
+
+            if matches.is_empty() {
+                return Err(anyhow!(
+                    "crate {} not found in workspace metadata",
+                    crate_name
+                ));
+            }
+
+            let pkg = matches
+                .iter()
+                .copied()
+                .find(|pkg| {
+                    pkg.source
+                        .as_ref()
+                        .map(|src| src.to_string().starts_with("git+"))
+                        .unwrap_or(false)
+                })
+                .unwrap_or(*matches.first().unwrap());
+
+            let source_raw = pkg
+                .source
+                .as_ref()
+                .map(|s| s.to_string())
+                .ok_or_else(|| anyhow!("crate {} has no source information", crate_name))?;
+
+            let (patch_source, git_url) = if source_raw.starts_with("git+") {
+                let normalized = normalize_source(&source_raw).ok_or_else(|| {
+                    anyhow!(
+                        "crate {} has unsupported source {}",
+                        crate_name,
+                        source_raw.clone()
+                    )
+                })?;
+                let git_url = extract_git_url(&source_raw).ok_or_else(|| {
+                    anyhow!(
+                        "crate {} has unsupported source {}",
+                        crate_name,
+                        source_raw.clone()
+                    )
+                })?;
+                (normalized, git_url)
+            } else if source_raw == "registry+https://github.com/rust-lang/crates.io-index" {
+                let repo_url = if let Some(url) = pkg.repository.clone() {
+                    url
+                } else if let Some(url) = override_url {
+                    println!(
+                        "crate {} is missing repository metadata; using override {}",
+                        crate_name, url
+                    );
+                    url.to_string()
+                } else {
+                    return Err(anyhow!(
+                        "crate {} is from crates.io but missing repository metadata",
+                        crate_name
+                    ));
+                };
+                (CRATES_IO_SOURCE_KEY.to_string(), repo_url)
+            } else {
+                return Err(anyhow!(
+                    "crate {} uses unsupported source {}",
+                    crate_name,
+                    source_raw
+                ));
+            };
+
+            Ok(DevRepo {
+                name: crate_name.to_string(),
+                git_url,
+                source: patch_source,
+                dest: format!("modules/{}", crate_name),
+            })
+        })
+        .collect()
+}
+
+fn build_repo_lookup<'a>(repos: &'a [DevRepo]) -> HashMap<String, &'a DevRepo> {
+    repos
+        .iter()
+        .map(|repo| (repo_lookup_key(&repo.source, &repo.name), repo))
+        .collect()
+}
+
+fn manifest_relative_path(path: &Path) -> Option<PathBuf> {
+    let components: Vec<_> = path.components().collect();
+    let idx = components
+        .iter()
+        .position(|comp| comp.as_os_str() == "checkouts")?;
+    if idx + 3 >= components.len() {
+        return None;
+    }
+    let mut rel = PathBuf::new();
+    for comp in &components[idx + 3..] {
+        rel.push(comp.as_os_str());
+    }
+    Some(rel)
+}
+
+fn manifest_relative_dir(path: &Path) -> Option<PathBuf> {
+    let mut rel = manifest_relative_path(path)?;
+    if rel.pop() {
+        Some(rel)
+    } else {
+        Some(PathBuf::new())
+    }
+}
+
+fn normalize_source(raw: &str) -> Option<String> {
+    if raw.starts_with("git+") {
+        let trimmed = &raw[4..];
+        let no_fragment = trimmed.split('#').next().unwrap_or(trimmed);
+        let no_query = no_fragment.split('?').next().unwrap_or(no_fragment);
+        let without_git = no_query.trim_end_matches(".git");
+        let normalized = without_git.trim_end_matches('/');
+        Some(normalized.to_string())
+    } else if raw == "registry+https://github.com/rust-lang/crates.io-index" {
+        Some(CRATES_IO_SOURCE_KEY.to_string())
+    } else {
+        None
+    }
+}
+
+fn extract_git_url(raw: &str) -> Option<String> {
+    if !raw.starts_with("git+") {
+        return None;
+    }
+    let trimmed = &raw[4..];
+    let no_query = trimmed.split('?').next().unwrap_or(trimmed);
+    let no_fragment = no_query.split('#').next().unwrap_or(no_query);
+    Some(no_fragment.to_string())
+}
+
+fn render_devspace_section(specs: &[PatchSpec]) -> String {
+    let mut grouped: BTreeMap<String, BTreeMap<String, String>> = BTreeMap::new();
+    for spec in specs {
+        grouped
+            .entry(spec.source.clone())
+            .or_default()
+            .insert(spec.crate_name.clone(), spec.path.clone());
+    }
+
+    let mut section = String::new();
+    section.push_str(PATCH_BEGIN_MARKER);
+    section.push('\n');
+    section.push_str("# Managed by `cargo xtask devspace`");
+    section.push('\n');
+
+    let mut iter = grouped.iter().peekable();
+    while let Some((source, crates)) = iter.next() {
+        section.push_str(&format!("[patch.\"{}\"]\n", source));
+        for (crate_name, path) in crates {
+            section.push_str(&format!("{} = {{ path = \"{}\" }}\n", crate_name, path));
+        }
+        if iter.peek().is_some() {
+            section.push('\n');
+        }
+    }
+
+    section.push('\n');
+    section.push_str(PATCH_END_MARKER);
+    section.push('\n');
+    section
+}
+
+fn strip_devspace_section(contents: &str) -> (String, bool) {
+    if let Some(start_idx) = contents.find(PATCH_BEGIN_MARKER) {
+        if let Some(end_rel) = contents[start_idx..].find(PATCH_END_MARKER) {
+            let end_idx = start_idx + end_rel + PATCH_END_MARKER.len();
+            let mut removal_end = end_idx;
+            let tail = &contents[removal_end..];
+            if tail.starts_with("\r\n") {
+                removal_end += 2;
+            } else if tail.starts_with('\n') {
+                removal_end += 1;
+            }
+            let mut result = String::with_capacity(contents.len());
+            result.push_str(&contents[..start_idx]);
+            result.push_str(&contents[removal_end..]);
+            return (result, true);
+        }
+    }
+    (contents.to_string(), false)
+}
+
+fn to_unix_path(path: &Path) -> String {
+    path.to_string_lossy().replace('\\', "/")
+}
+
+fn run_git(args: &[&str]) -> Result<()> {
+    let status = Command::new("git")
+        .current_dir(workspace_root()?)
+        .args(args)
+        .status()
+        .with_context(|| format!("Failed to run git {}", args.join(" ")))?;
+    if !status.success() {
+        return Err(anyhow!("git command failed: git {}", args.join(" ")));
+    }
+    Ok(())
+}
+
+fn workspace_root() -> Result<PathBuf> {
+    std::env::current_dir().context("Failed to resolve workspace root")
+}
+
+#[derive(Default, Serialize, Deserialize)]
+struct DevspaceState {
+    modules: HashMap<String, ManagedModule>,
+    patches: Vec<PatchRecord>,
+}
+
+#[derive(Clone, Serialize, Deserialize)]
+struct ManagedModule {
+    name: String,
+    path: String,
+}
+
+#[derive(Clone, Serialize, Deserialize)]
+struct PatchRecord {
+    source: String,
+    crate_name: String,
+}
+
+#[derive(Clone)]
+struct PatchSpec {
+    source: String,
+    crate_name: String,
+    path: String,
+}
+
+#[derive(Clone)]
+struct DevRepo {
+    name: String,
+    git_url: String,
+    source: String,
+    dest: String,
+}
+
+fn repo_lookup_key(source: &str, crate_name: &str) -> String {
+    format!("{}::{}", source, crate_name)
+}
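A note on the two URL helpers above: `normalize_source` produces the `[patch."…"]` table key (the `git+` prefix, query, fragment, and a trailing `.git` are all stripped), while `extract_git_url` keeps `.git` because it is the clone URL handed to `git submodule add`. A minimal illustrative check, assuming it lives in a `#[cfg(test)]` module inside `devspace.rs` (the URL and revision are made up):

    use super::*;

    #[test]
    fn source_normalization_sketch() {
        let raw = "git+https://github.com/arceos-hypervisor/axaddrspace.git?branch=main#0123abcd";
        // Patch-table key: no "git+", no query/fragment, no ".git" suffix.
        assert_eq!(
            normalize_source(raw).as_deref(),
            Some("https://github.com/arceos-hypervisor/axaddrspace")
        );
        // Clone URL: only "git+" and the query/fragment are dropped.
        assert_eq!(
            extract_git_url(raw).as_deref(),
            Some("https://github.com/arceos-hypervisor/axaddrspace.git")
        );
    }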
diff --git a/xtask/src/image.rs b/xtask/src/image.rs
index 66f4d08c..b180b20d 100644
--- a/xtask/src/image.rs
+++ b/xtask/src/image.rs
@@ -21,16 +21,17 @@
 use anyhow::{Result, anyhow};
 use clap::{Parser, Subcommand};
-use sha2::{Sha256, Digest};
-use std::path::{Path};
-use std::process::Command;
-use std::fs;
+use sha2::{Digest, Sha256};
 use std::env;
+use std::fs;
 use std::io::Read;
+use std::path::Path;
+use std::process::Command;
 use tokio::io::{AsyncWriteExt, BufWriter};
 
 /// Base URL for downloading images
-const IMAGE_URL_BASE: &str = "https://github.com/arceos-hypervisor/axvisor-guest/releases/download/v0.0.20/";
+const IMAGE_URL_BASE: &str =
+    "https://github.com/arceos-hypervisor/axvisor-guest/releases/download/v0.0.20/";
 
 /// Image management command line arguments.
 #[derive(Parser)]
@@ -50,13 +51,15 @@ pub enum ImageCommands {
         image_name: String,
         #[arg(short, long)]
         output_dir: Option<String>,
-        #[arg(short, long, help = "Automatically extract after download (default: true)")]
+        #[arg(
+            short,
+            long,
+            help = "Automatically extract after download (default: true)"
+        )]
         extract: Option<bool>,
     },
     /// Remove the specified image from temp directory
-    Rm {
-        image_name: String,
-    },
+    Rm { image_name: String },
 }
 
 /// Representation of a guest image
@@ -76,42 +79,42 @@ impl Image {
         sha256: "c9f197408f14f2cd9d3b9d2e077a9e91d233479713cb24d5280f7dc5562ae800",
         arch: "aarch64",
     };
-    
+
     pub const EVM3588_LINUX: Self = Self {
         name: "evm3588_linux",
         description: "Linux for EVM3588 development board",
         sha256: "cc12be121e75b0eb6588a774106582ee7c7b279895d73558f31ce34712a8fea3",
         arch: "aarch64",
     };
-    
+
     pub const ORANGEPI_ARCEOS: Self = Self {
         name: "orangepi_arceos",
        description: "ArceOS for Orange Pi development board",
         sha256: "2a95477e1e18d9ca95f666de93cd8ba53ffafb3f285fbdf4fde1e0cdfb0d8f1d",
         arch: "aarch64",
     };
-    
+
     pub const ORANGEPI_LINUX: Self = Self {
         name: "orangepi_linux",
         description: "Linux for Orange Pi development board",
         sha256: "7a1fd69f10dd223988c436ea461bed15ddae4351fc7a47fb7b3fee9792afac86",
         arch: "aarch64",
     };
-    
+
     pub const PHYTIUMPI_ARCEOS: Self = Self {
         name: "phytiumpi_arceos",
         description: "ArceOS for Phytium Pi development board",
         sha256: "c774824e36319f2f20575e488861a61c6ef7a5d2e5f219edd03a2c3c29ca3d05",
         arch: "aarch64",
     };
-    
+
     pub const PHYTIUMPI_LINUX: Self = Self {
         name: "phytiumpi_linux",
         description: "Linux for Phytium Pi development board",
         sha256: "78a27021b76b6d20a5420938473cf92ac59dc4674d528295b75ecfabdf9bea69",
         arch: "aarch64",
     };
-    
+
     pub const QEMU_AARCH64_ARCEOS: Self = Self {
         name: "qemu_aarch64_arceos",
         description: "ArceOS for QEMU aarch64 virtualization",
@@ -153,7 +156,7 @@ impl Image {
         sha256: "064f75df290905687221b2554dd4e4efc077a6a95cafcbf7f98e2181441c24e3",
         arch: "riscv64",
     };
-    
+
     pub const QEMU_X86_64_ARCEOS: Self = Self {
         name: "qemu_x86_64_arceos",
         description: "ArceOS for QEMU x86_64 virtualization",
@@ -174,35 +177,35 @@ impl Image {
         sha256: "55d73898f9f98fca80e15387b1e5149ba6bbf74d3631281ea1ece75de3529078",
         arch: "x86_64",
     };
-    
+
     pub const ROC_RK3568_PC_ARCEOS: Self = Self {
         name: "roc-rk3568-pc_arceos",
         description: "ArceOS for ROC-RK3568-PC development board",
         sha256: "4dd2f727c2a46ff1e64632616c308c9504ef5ddb4b519acf3f69c928e4475ca7",
         arch: "aarch64",
     };
-    
+
     pub const ROC_RK3568_PC_LINUX: Self = Self {
         name: "roc-rk3568-pc_linux",
         description: "Linux for ROC-RK3568-PC development board",
         sha256: "73feb8b84473603252dbadc4c81446f9a68098bd899fd524ec26f68761a35cf8",
         arch: "aarch64",
     };
-    
+
     pub const TAC_E400_PLC_ARCEOS: Self = Self {
         name: "tac-e400-plc_arceos",
         description: "ArceOS for TAC-E400-PLC industrial control board",
         sha256: "a2504506c81871c84ba421a94f77028f067c5589886f37c0c389a545d7e57aeb",
         arch: "aarch64",
     };
-    
+
     pub const TAC_E400_PLC_LINUX: Self = Self {
         name: "tac-e400-plc_linux",
         description: "Linux for TAC-E400-PLC industrial control board",
         sha256: "920743161a73da228e714d71f55d8ba77b91ed37092d4f80e774f4e809b34403",
         arch: "aarch64",
     };
-    
+
     /// Get all supported images
     pub fn all() -> &'static [Image] {
         &[
@@ -227,7 +230,7 @@ impl Image {
             Self::TAC_E400_PLC_LINUX,
         ]
     }
-    
+
     /// Find image by name
     pub fn find_by_name(name: &str) -> Option<&'static Image> {
         Self::all().iter().find(|image| image.name == name)
@@ -246,7 +249,7 @@ fn image_verify_sha256(file_path: &Path, expected_sha256: &str) -> Result<bool>
     let mut file = fs::File::open(file_path)?;
     let mut hasher = Sha256::new();
     let mut buffer = [0; 8192];
-    
+
     loop {
         let bytes_read = file.read(&mut buffer)?;
         if bytes_read == 0 {
@@ -254,10 +257,10 @@ fn image_verify_sha256(file_path: &Path, expected_sha256: &str) -> Result<bool>
         }
         hasher.update(&buffer[..bytes_read]);
     }
-    
+
     let result = hasher.finalize();
     let actual_sha256 = format!("{result:x}");
-    
+
     Ok(actual_sha256 == expected_sha256)
 }
 
@@ -274,23 +277,28 @@ fn image_verify_sha256(file_path: &Path, expected_sha256: &str) -> Result<bool>
 fn image_list() -> Result<()> {
     // Retrieve all images from the database or storage
     let images = Image::all();
-    
+
     // Print table headers with specific column widths
-    println!("{:<25} {:<30} {:<50}", "Name", "Architecture", "Description");
+    println!(
+        "{:<25} {:<30} {:<50}",
+        "Name", "Architecture", "Description"
+    );
     // Print a separator line for better readability
     println!("{}", "-".repeat(90));
-    
+
     // Iterate through each image and print its details
     for image in images {
         // Print image information formatted to match column widths
-        println!("{:<25} {:<15} {:<50}",
-            // Image name
-            image.name,
-            // Architecture type
-            image.arch,
-            image.description);
+        println!(
+            "{:<25} {:<15} {:<50}",
+            // Image name
+            image.name,
+            // Architecture type
+            image.arch,
+            image.description
+        );
     }
-    
+
     Ok(())
 }
@@ -311,9 +319,10 @@ fn image_list() -> Result<()> {
 /// xtask image pull evm3588_arceos --output-dir ./images
 /// ```
 async fn image_download(image_name: &str, output_dir: Option<String>, extract: bool) -> Result<()> {
-    let image = Image::find_by_name(image_name)
-        .ok_or_else(|| anyhow!("Image not found: {image_name}. Use 'xtask image ls' to view available images"))?;
-    
+    let image = Image::find_by_name(image_name).ok_or_else(|| {
+        anyhow!("Image not found: {image_name}. Use 'xtask image ls' to view available images")
+    })?;
+
     let output_path = match output_dir {
         Some(dir) => {
             // Check if it's an absolute path
@@ -330,10 +339,12 @@ async fn image_download(image_name: &str, output_dir: Option<String>, extract: b
         None => {
             // If not specified, use system temporary directory
             let temp_dir = env::temp_dir();
-            temp_dir.join("axvisor").join(format!("{image_name}.tar.gz"))
+            temp_dir
+                .join("axvisor")
+                .join(format!("{image_name}.tar.gz"))
         }
     };
-    
+
     // Check if file exists, if so verify SHA256
     if output_path.exists() {
         match image_verify_sha256(&output_path, image.sha256) {
@@ -353,22 +364,25 @@ async fn image_download(image_name: &str, output_dir: Option<String>, extract: b
             }
         }
     }
-    
+
     // Ensure target directory exists
     if let Some(parent) = output_path.parent() {
         fs::create_dir_all(parent)?;
     }
-    
+
     // Build download URL
     let download_url = format!("{}{}.tar.gz", IMAGE_URL_BASE, image.name);
     println!("Downloading: {download_url}");
-    
+
     // Use reqwest to download the file
     let mut response = reqwest::get(&download_url).await?;
     if !response.status().is_success() {
-        return Err(anyhow!("Failed to download file: HTTP {}", response.status()));
+        return Err(anyhow!(
+            "Failed to download file: HTTP {}",
+            response.status()
+        ));
     }
-    
+
     // Create file with buffered writer for efficient streaming
     let file = tokio::fs::OpenOptions::new()
         .write(true)
@@ -377,35 +391,41 @@ async fn image_download(image_name: &str, output_dir: Option<String>, extract: b
         .open(&output_path)
         .await?;
     let mut writer = BufWriter::new(file);
-    
+
     // Get content length for progress reporting (if available)
     let content_length = response.content_length();
     let mut downloaded = 0u64;
-    
+
     // Stream the response body to file using chunks
     while let Some(chunk) = response.chunk().await? {
         // Write chunk to file
-        writer.write_all(&chunk).await
+        writer
+            .write_all(&chunk)
+            .await
             .map_err(|e| anyhow!("Error writing to file: {}", e))?;
-        
+
         // Update progress
         downloaded += chunk.len() as u64;
         if let Some(total) = content_length {
             let percent = (downloaded * 100) / total;
-            print!("\rDownloading: {}% ({}/{} bytes)", percent, downloaded, total);
+            print!(
+                "\rDownloading: {}% ({}/{} bytes)",
+                percent, downloaded, total
+            );
         } else {
             print!("\rDownloaded: {} bytes", downloaded);
         }
         std::io::Write::flush(&mut std::io::stdout()).unwrap();
     }
-    
+
     // Flush the writer to ensure all data is written to disk
-    writer.flush().await
+    writer
+        .flush()
+        .await
         .map_err(|e| anyhow!("Error flushing file: {}", e))?;
-    
+
     println!("\nDownload completed");
-    
-    
+
     // Verify downloaded file
     match image_verify_sha256(&output_path, image.sha256) {
         Ok(true) => {
@@ -422,19 +442,20 @@ async fn image_download(image_name: &str, output_dir: Option<String>, extract: b
             return Err(anyhow!("Error verifying downloaded file: {e}"));
         }
     }
-    
+
     // If extract flag is true, extract the downloaded file
     if extract {
         println!("Extracting image...");
-        
+
         // Determine extraction output directory
-        let extract_dir = output_path.parent()
+        let extract_dir = output_path
+            .parent()
             .ok_or_else(|| anyhow!("Unable to determine parent directory of downloaded file"))?
             .join(image_name);
-        
+
         // Ensure extraction directory exists
         fs::create_dir_all(&extract_dir)?;
-        
+
         // Use tar command to extract file
         let mut child = Command::new("tar")
             .arg("-xzf")
@@ -442,15 +463,15 @@ async fn image_download(image_name: &str, output_dir: Option<String>, extract: b
             .arg("-C")
             .arg(&extract_dir)
             .spawn()?;
-        
+
         let status = child.wait()?;
         if !status.success() {
             return Err(anyhow!("Extraction failed, tar exit code: {status}"));
         }
-        
+
         println!("Image extracted to: {}", extract_dir.display());
     }
-    
+
     Ok(())
 }
 
@@ -468,33 +489,34 @@ async fn image_download(image_name: &str, output_dir: Option<String>, extract: b
 /// ```
 fn image_remove(image_name: &str) -> Result<()> {
     // Check if the image name is valid by looking it up
-    let _image = Image::find_by_name(image_name)
-        .ok_or_else(|| anyhow!("Image not found: {image_name}. Use 'xtask image ls' to view available images"))?;
-    
+    let _image = Image::find_by_name(image_name).ok_or_else(|| {
+        anyhow!("Image not found: {image_name}. Use 'xtask image ls' to view available images")
+    })?;
+
     let temp_dir = env::temp_dir().join("axvisor");
     let tar_file = temp_dir.join(format!("{image_name}.tar.gz"));
     let extract_dir = temp_dir.join(image_name);
-    
+
     let mut removed = false;
-    
+
     // Remove the tar file if it exists
     if tar_file.exists() {
         fs::remove_file(&tar_file)?;
         removed = true;
     }
-    
+
     // Remove the extracted directory if it exists
     if extract_dir.exists() {
         fs::remove_dir_all(&extract_dir)?;
         removed = true;
     }
-    
+
     if !removed {
         println!("No files found for image: {image_name}");
     } else {
         println!("Image removed successfully");
     }
-    
+
     Ok(())
 }
 
@@ -519,13 +541,17 @@ pub async fn run_image(args: ImageArgs) -> Result<()> {
         ImageCommands::Ls => {
             image_list()?;
         }
-        ImageCommands::Download { image_name, output_dir, extract } => {
+        ImageCommands::Download {
+            image_name,
+            output_dir,
+            extract,
+        } => {
            image_download(&image_name, output_dir, extract.unwrap_or(true)).await?;
         }
         ImageCommands::Rm { image_name } => {
             image_remove(&image_name)?;
         }
     }
-    
+
     Ok(())
 }
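For reference, with no `--output-dir` the download and extraction paths above resolve under the system temp directory, e.g. on Linux (illustrative listing):

    /tmp/axvisor/qemu_aarch64_arceos.tar.gz   # downloaded archive, SHA256-checked before reuse
    /tmp/axvisor/qemu_aarch64_arceos/         # extraction directory passed to `tar -xzf … -C`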
diff --git a/xtask/src/main.rs b/xtask/src/main.rs
index ccc3bf17..0a398c8c 100644
--- a/xtask/src/main.rs
+++ b/xtask/src/main.rs
@@ -4,13 +4,14 @@
 use anyhow::{Context, Result, anyhow};
 use chrono::Utc;
-use clap::{Parser, Subcommand};
+use clap::{Args, Parser, Subcommand};
 use std::fs;
 use std::path::{Path, PathBuf};
 
 mod cargo;
 mod clippy;
 mod ctx;
+mod devspace;
 mod image;
 mod menuconfig;
 mod tbuld;
@@ -40,6 +41,8 @@ enum Commands {
     Menuconfig,
     /// Guest Image management
     Image(image::ImageArgs),
+    /// Manage local devspace dependencies
+    Devspace(DevspaceArgs),
 }
 
 #[derive(Parser)]
@@ -84,6 +87,18 @@ struct UbootArgs {
     vmconfigs: Vec<PathBuf>,
 }
 
+#[derive(Args)]
+struct DevspaceArgs {
+    #[command(subcommand)]
+    action: DevspaceCommand,
+}
+
+#[derive(Subcommand)]
+enum DevspaceCommand {
+    Start,
+    Stop,
+}
+
 #[tokio::main]
 async fn main() -> Result<()> {
     let cli = Cli::parse();
@@ -121,6 +136,10 @@ async fn main() -> Result<()> {
         Commands::Image(args) => {
             image::run_image(args).await?;
         }
+        Commands::Devspace(args) => match args.action {
+            DevspaceCommand::Start => devspace::start()?,
+            DevspaceCommand::Stop => devspace::stop()?,
+        },
     }
 
     Ok(())
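With the `xtask` alias from `.cargo/config.toml`, the wiring added in `main.rs` is driven as follows (sketch; run from the workspace root):

    cargo xtask devspace start   # add the managed repos as submodules and write the patch block
    cargo xtask devspace stop    # strip the patch block and remove the recorded submodules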
From eb8d7dac0048e0056b2e457fcd9f68375c10ea40 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?=E5=91=A8=E7=9D=BF?=
Date: Fri, 28 Nov 2025 13:19:24 +0800
Subject: [PATCH 2/2] refactor: simplify error messages and string formatting
 in devspace and image modules

---
 xtask/src/devspace.rs | 81 +++++++++++++++++++------------------------
 xtask/src/image.rs    | 11 +++----
 xtask/src/tbuld.rs    |  2 +-
 3 files changed, 40 insertions(+), 54 deletions(-)

diff --git a/xtask/src/devspace.rs b/xtask/src/devspace.rs
index feae20b6..0bcaae63 100644
--- a/xtask/src/devspace.rs
+++ b/xtask/src/devspace.rs
@@ -117,12 +117,12 @@ fn remove_submodules(modules: &HashMap<String, ManagedModule>) -> Result<()> {
         let git_modules_dir = Path::new(".git/modules").join(path);
         if git_modules_dir.exists() {
             fs::remove_dir_all(&git_modules_dir)
-                .with_context(|| format!("Failed to remove {:?}", git_modules_dir))?;
+                .with_context(|| format!("Failed to remove {git_modules_dir:?}"))?;
         }
         if Path::new(path).exists() {
             let _ = run_git(&["rm", "-f", "--", path]);
             if Path::new(path).exists() {
-                fs::remove_dir_all(path).with_context(|| format!("Failed to remove {}", path))?;
+                fs::remove_dir_all(path).with_context(|| format!("Failed to remove {path}"))?;
             }
         }
     }
@@ -134,7 +134,7 @@ fn compute_patch_specs(metadata: &Metadata, repos: &[DevRepo]) -> Result
     let mut specs = BTreeMap::new();
 
     for pkg in &metadata.packages {
-        if let Some(spec) = package_patch_spec(&pkg, &repo_map) {
+        if let Some(spec) = package_patch_spec(pkg, &repo_map) {
             specs
                 .entry((spec.source.clone(), spec.crate_name.clone()))
                 .or_insert(spec);
@@ -155,10 +155,7 @@ fn compute_patch_specs(metadata: &Metadata, repos: &[DevRepo]) -> Result
 }
 
-fn package_patch_spec<'a>(
-    pkg: &Package,
-    repo_map: &HashMap<String, &'a DevRepo>,
-) -> Option<PatchSpec> {
+fn package_patch_spec(pkg: &Package, repo_map: &HashMap<String, &DevRepo>) -> Option<PatchSpec> {
     let source_raw = pkg.source.as_ref()?.to_string();
     let normalized = normalize_source(&source_raw)?;
     let key = repo_lookup_key(&normalized, pkg.name.as_str());
@@ -182,7 +179,7 @@ fn apply_patches(specs: &[PatchSpec]) -> Result<()> {
     let config_path = Path::new(".cargo/config.toml");
     let mut contents = if config_path.exists() {
         fs::read_to_string(config_path)
-            .with_context(|| format!("Failed to read {:?}", config_path))?
+            .with_context(|| format!("Failed to read {config_path:?}"))?
     } else {
         String::new()
     };
@@ -202,8 +199,7 @@ fn apply_patches(specs: &[PatchSpec]) -> Result<()> {
         contents.push('\n');
     }
 
-    fs::write(config_path, contents)
-        .with_context(|| format!("Failed to write {:?}", config_path))?;
+    fs::write(config_path, contents).with_context(|| format!("Failed to write {config_path:?}"))?;
     Ok(())
 }
@@ -214,12 +210,12 @@ fn remove_patches(_: &[PatchRecord]) -> Result<()> {
     }
 
     let original = fs::read_to_string(config_path)
-        .with_context(|| format!("Failed to read {:?}", config_path))?;
+        .with_context(|| format!("Failed to read {config_path:?}"))?;
     let (cleaned, removed) = strip_devspace_section(&original);
 
     if removed {
         fs::write(config_path, cleaned)
-            .with_context(|| format!("Failed to write {:?}", config_path))?;
+            .with_context(|| format!("Failed to write {config_path:?}"))?;
     }
     Ok(())
 }
@@ -230,10 +226,9 @@ fn load_state() -> Result<DevspaceState> {
         return Ok(DevspaceState::default());
     }
 
-    let contents =
-        fs::read_to_string(path).with_context(|| format!("Failed to read {:?}", path))?;
+    let contents = fs::read_to_string(path).with_context(|| format!("Failed to read {path:?}"))?;
     let state =
-        serde_json::from_str(&contents).with_context(|| format!("Failed to parse {:?}", path))?;
+        serde_json::from_str(&contents).with_context(|| format!("Failed to parse {path:?}"))?;
     Ok(state)
 }
@@ -260,8 +255,7 @@ fn resolve_dev_repos(metadata: &Metadata) -> Result<Vec<DevRepo>> {
 
             if matches.is_empty() {
                 return Err(anyhow!(
-                    "crate {} not found in workspace metadata",
-                    crate_name
+                    "crate {crate_name} not found in workspace metadata"
                 ));
             }
 
@@ -280,7 +274,7 @@ fn resolve_dev_repos(metadata: &Metadata) -> Result<Vec<DevRepo>> {
                 .source
                 .as_ref()
                 .map(|s| s.to_string())
-                .ok_or_else(|| anyhow!("crate {} has no source information", crate_name))?;
+                .ok_or_else(|| anyhow!("crate {crate_name} has no source information"))?;
 
             let (patch_source, git_url) = if source_raw.starts_with("git+") {
                 let normalized = normalize_source(&source_raw).ok_or_else(|| {
@@ -303,22 +297,18 @@ fn resolve_dev_repos(metadata: &Metadata) -> Result<Vec<DevRepo>> {
                     url
                 } else if let Some(url) = override_url {
                     println!(
-                        "crate {} is missing repository metadata; using override {}",
-                        crate_name, url
+                        "crate {crate_name} is missing repository metadata; using override {url}"
                     );
                     url.to_string()
                 } else {
                     return Err(anyhow!(
-                        "crate {} is from crates.io but missing repository metadata",
-                        crate_name
+                        "crate {crate_name} is from crates.io but missing repository metadata"
                     ));
                 };
                 (CRATES_IO_SOURCE_KEY.to_string(), repo_url)
             } else {
                 return Err(anyhow!(
-                    "crate {} uses unsupported source {}",
-                    crate_name,
-                    source_raw
+                    "crate {crate_name} uses unsupported source {source_raw}"
                 ));
             };
 
@@ -326,13 +316,13 @@ fn resolve_dev_repos(metadata: &Metadata) -> Result<Vec<DevRepo>> {
                 name: crate_name.to_string(),
                 git_url,
                 source: patch_source,
-                dest: format!("modules/{}", crate_name),
+                dest: format!("modules/{crate_name}"),
             })
         })
         .collect()
 }
 
-fn build_repo_lookup<'a>(repos: &'a [DevRepo]) -> HashMap<String, &'a DevRepo> {
+fn build_repo_lookup(repos: &[DevRepo]) -> HashMap<String, &DevRepo> {
     repos
         .iter()
         .map(|repo| (repo_lookup_key(&repo.source, &repo.name), repo))
@@ -364,8 +354,7 @@ fn manifest_relative_dir(path: &Path) -> Option<PathBuf> {
 }
 
 fn normalize_source(raw: &str) -> Option<String> {
-    if raw.starts_with("git+") {
-        let trimmed = &raw[4..];
+    if let Some(trimmed) = raw.strip_prefix("git+") {
         let no_fragment = trimmed.split('#').next().unwrap_or(trimmed);
         let no_query = no_fragment.split('?').next().unwrap_or(no_fragment);
         let without_git = no_query.trim_end_matches(".git");
@@ -405,9 +394,9 @@ fn render_devspace_section(specs: &[PatchSpec]) -> String {
 
     let mut iter = grouped.iter().peekable();
     while let Some((source, crates)) = iter.next() {
-        section.push_str(&format!("[patch.\"{}\"]\n", source));
+        section.push_str(&format!("[patch.\"{source}\"]\n"));
         for (crate_name, path) in crates {
-            section.push_str(&format!("{} = {{ path = \"{}\" }}\n", crate_name, path));
+            section.push_str(&format!("{crate_name} = {{ path = \"{path}\" }}\n"));
         }
         if iter.peek().is_some() {
             section.push('\n');
@@ -421,21 +410,21 @@ fn render_devspace_section(specs: &[PatchSpec]) -> String {
 }
 
 fn strip_devspace_section(contents: &str) -> (String, bool) {
-    if let Some(start_idx) = contents.find(PATCH_BEGIN_MARKER) {
-        if let Some(end_rel) = contents[start_idx..].find(PATCH_END_MARKER) {
-            let end_idx = start_idx + end_rel + PATCH_END_MARKER.len();
-            let mut removal_end = end_idx;
-            let tail = &contents[removal_end..];
-            if tail.starts_with("\r\n") {
-                removal_end += 2;
-            } else if tail.starts_with('\n') {
-                removal_end += 1;
-            }
-            let mut result = String::with_capacity(contents.len());
-            result.push_str(&contents[..start_idx]);
-            result.push_str(&contents[removal_end..]);
-            return (result, true);
+    if let Some(start_idx) = contents.find(PATCH_BEGIN_MARKER)
+        && let Some(end_rel) = contents[start_idx..].find(PATCH_END_MARKER)
+    {
+        let end_idx = start_idx + end_rel + PATCH_END_MARKER.len();
+        let mut removal_end = end_idx;
+        let tail = &contents[removal_end..];
+        if tail.starts_with("\r\n") {
+            removal_end += 2;
+        } else if tail.starts_with('\n') {
+            removal_end += 1;
         }
+        let mut result = String::with_capacity(contents.len());
+        result.push_str(&contents[..start_idx]);
+        result.push_str(&contents[removal_end..]);
+        return (result, true);
     }
     (contents.to_string(), false)
 }
@@ -494,5 +483,5 @@ struct DevRepo {
 }
 
 fn repo_lookup_key(source: &str, crate_name: &str) -> String {
-    format!("{}::{}", source, crate_name)
+    format!("{source}::{crate_name}")
 }
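The collapsed `if let … && let …` in `strip_devspace_section` is a let-chain, so this refactor assumes a toolchain where let-chains are stable (Rust 2024 edition). Behavior is unchanged: stripping a freshly rendered section removes it entirely. A minimal round-trip check (a sketch, assuming a `#[cfg(test)]` module with `use super::*`):

    #[test]
    fn devspace_section_roundtrip() {
        let specs = vec![PatchSpec {
            source: "crates-io".into(),
            crate_name: "axdevice_base".into(),
            path: "modules/axdevice_base".into(),
        }];
        let section = render_devspace_section(&specs);
        // Rendered section begins with the begin marker, so stripping removes everything.
        let (stripped, removed) = strip_devspace_section(&section);
        assert!(removed);
        assert!(stripped.is_empty());
    }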
diff --git a/xtask/src/image.rs b/xtask/src/image.rs
index b180b20d..6cfcbfe5 100644
--- a/xtask/src/image.rs
+++ b/xtask/src/image.rs
@@ -402,18 +402,15 @@ async fn image_download(image_name: &str, output_dir: Option<String>, extract: b
             writer
                 .write_all(&chunk)
                 .await
-                .map_err(|e| anyhow!("Error writing to file: {}", e))?;
+                .map_err(|e| anyhow!("Error writing to file: {e}"))?;
 
             // Update progress
             downloaded += chunk.len() as u64;
             if let Some(total) = content_length {
                 let percent = (downloaded * 100) / total;
-                print!(
-                    "\rDownloading: {}% ({}/{} bytes)",
-                    percent, downloaded, total
-                );
+                print!("\rDownloading: {percent}% ({downloaded}/{total} bytes)");
             } else {
-                print!("\rDownloaded: {} bytes", downloaded);
+                print!("\rDownloaded: {downloaded} bytes");
             }
             std::io::Write::flush(&mut std::io::stdout()).unwrap();
         }
@@ -422,7 +419,7 @@ async fn image_download(image_name: &str, output_dir: Option<String>, extract: b
     writer
         .flush()
         .await
-        .map_err(|e| anyhow!("Error flushing file: {}", e))?;
+        .map_err(|e| anyhow!("Error flushing file: {e}"))?;
 
     println!("\nDownload completed");
 
diff --git a/xtask/src/tbuld.rs b/xtask/src/tbuld.rs
index bcb9a555..f9e0ec79 100644
--- a/xtask/src/tbuld.rs
+++ b/xtask/src/tbuld.rs
@@ -79,7 +79,7 @@ impl Context {
 
         if !vm_config_paths.is_empty() {
             let value = std::env::join_paths(&vm_config_paths)
-                .map_err(|e| anyhow::anyhow!("Failed to join VM config paths: {}", e))?
+                .map_err(|e| anyhow::anyhow!("Failed to join VM config paths: {e}"))?
                 .to_string_lossy()
                 .into_owned();
             cargo.env.insert("AXVISOR_VM_CONFIGS".to_string(), value);
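One behavioral note on the `tbuld.rs` hunk: `std::env::join_paths` joins with the platform's PATH separator, so consumers of `AXVISOR_VM_CONFIGS` must split it the same way. Illustrative (Unix separator shown; the paths are made up):

    // ':' on Unix, ';' on Windows.
    let joined = std::env::join_paths(["configs/vm1.toml", "configs/vm2.toml"]).unwrap();
    assert_eq!(joined.to_string_lossy(), "configs/vm1.toml:configs/vm2.toml");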