|
| 1 | +use std::collections::BTreeMap; |
| 2 | +use std::path::{Path, PathBuf}; |
| 3 | +use std::io::{prelude::*, SeekFrom}; |
| 4 | +use std::time::SystemTime; |
| 5 | + |
| 6 | +use clap::ValueHint; |
| 7 | +use serde::de::{self, Deserialize}; |
| 8 | +use eyre_span::emit; |
| 9 | + |
| 10 | +use themelios_archive::dirdat::{self, DirEntry, Name}; |
| 11 | + |
// CLI arguments for this subcommand: reconstructs .dir/.dat archive pairs
// from previously extracted .json index files.
//
// NOTE: the `///` doc comments below double as clap's user-visible help text,
// so their wording is part of the CLI surface.
#[derive(Debug, Clone, clap::Args)]
#[command(arg_required_else_help = true)]
pub struct Command {
	/// Directory to place resulting .dir/.dat in
	// When absent, `create` falls back to the directory containing each json file.
	#[clap(long, short, value_hint = ValueHint::DirPath)]
	output: Option<PathBuf>,

	/// The .json indexes to reconstruct
	// `required = true` plus `arg_required_else_help` means running with no
	// arguments prints help instead of silently doing nothing.
	#[clap(value_hint = ValueHint::FilePath, required = true)]
	json_file: Vec<PathBuf>,
}
| 23 | + |
// Index of an entry slot within the archive, parsed from a hexadecimal
// "0x…" json object key (see the `Deserialize` impl at the bottom of this
// file). Ordering matters: the largest key determines the archive size.
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
struct FileId(u16);
| 26 | + |
// One file slot in the json index. A slot deserializes either from a bare
// path string or from a map; `#[serde(remote = "Entry")]` makes the derived
// map deserializer available under a separate name so the manual
// `Deserialize` impl below can delegate to it without recursing into itself.
#[derive(Debug, Clone, serde::Deserialize)]
#[serde(remote = "Entry")]
struct Entry {
	// Path to the file's contents, resolved relative to the json file.
	path: Option<PathBuf>,
	// Archive-internal name; when absent, `path`'s file name is used instead.
	name: Option<String>,
	// Compression mode: 1 or 2 in json, null/absent for uncompressed.
	#[serde(default, deserialize_with="parse_compress_mode")]
	compress: Option<bzip::CompressMode>,
	// Minimum byte size to reserve in the .dat; data is zero-padded up to it.
	reserve: Option<usize>,
	// Copied verbatim into DirEntry::unk1/unk2 — meaning unknown.
	#[serde(default)]
	unknown1: u32,
	#[serde(default)]
	unknown2: usize,
}
| 40 | + |
| 41 | +pub fn run(cmd: &Command) -> eyre::Result<()> { |
| 42 | + for json_file in &cmd.json_file { |
| 43 | + emit(create(cmd, json_file)); |
| 44 | + } |
| 45 | + Ok(()) |
| 46 | +} |
| 47 | + |
| 48 | +#[tracing::instrument(skip_all, fields(path=%json_file.display(), out))] |
| 49 | +fn create(cmd: &Command, json_file: &Path) -> eyre::Result<()> { |
| 50 | + let json: BTreeMap<FileId, Option<Entry>> |
| 51 | + = serde_json::from_reader(std::fs::File::open(json_file)?)?; |
| 52 | + |
| 53 | + let out_dir = cmd.output.as_ref() |
| 54 | + .map_or_else(|| json_file.parent().unwrap(), |v| v.as_path()) |
| 55 | + .join(json_file.file_name().unwrap()) |
| 56 | + .with_extension("dir"); |
| 57 | + |
| 58 | + tracing::Span::current().record("out", tracing::field::display(out_dir.display())); |
| 59 | + std::fs::create_dir_all(out_dir.parent().unwrap())?; |
| 60 | + |
| 61 | + let size = json.last_key_value().map(|a| a.0.0 + 1).unwrap_or_default() as usize; |
| 62 | + let mut entries = vec![None; size]; |
| 63 | + for (k, v) in json { |
| 64 | + entries[k.0 as usize] = v |
| 65 | + } |
| 66 | + |
| 67 | + // TODO lots of duplicated code between here and rebuild |
| 68 | + |
| 69 | + let mut out_dat = std::fs::File::create(out_dir.with_extension("dat.tmp"))?; |
| 70 | + out_dat.write_all(b"LB DAT\x1A\0")?; |
| 71 | + out_dat.write_all(&u64::to_le_bytes(size as u64))?; |
| 72 | + for _ in 0..=size { |
| 73 | + out_dat.write_all(&u32::to_le_bytes(0))?; |
| 74 | + } |
| 75 | + |
| 76 | + let mut dir = Vec::with_capacity(size); |
| 77 | + for (id, e) in entries.into_iter().enumerate() { |
| 78 | + let mut ent = DirEntry::default(); |
| 79 | + if let Some(e) = e { |
| 80 | + let name = match &e { |
| 81 | + Entry { name: Some(name), .. } => name.as_str(), |
| 82 | + Entry { path: Some(path), .. } => path.file_name().unwrap().to_str().unwrap(), |
| 83 | + _ => unreachable!() |
| 84 | + }; |
| 85 | + let _span = tracing::info_span!("file", name=%name, path=tracing::field::Empty).entered(); |
| 86 | + ent.name = Name::try_from(name)?; |
| 87 | + ent.unk1 = e.unknown1; |
| 88 | + ent.unk2 = e.unknown2; |
| 89 | + |
| 90 | + let pos = out_dat.seek(SeekFrom::End(0))?; |
| 91 | + ent.offset = pos as usize; |
| 92 | + |
| 93 | + if let Some(path) = &e.path { |
| 94 | + let path = json_file.parent().unwrap().join(path); |
| 95 | + _span.record("path", tracing::field::display(path.display())); |
| 96 | + |
| 97 | + let data = std::fs::read(&path)?; |
| 98 | + let mut data = match e.compress { |
| 99 | + Some(method) => bzip::compress_ed6_to_vec(&data, method), |
| 100 | + None => data, |
| 101 | + }; |
| 102 | + ent.size = data.len(); |
| 103 | + ent.reserved_size = e.reserve.unwrap_or(data.len()); |
| 104 | + |
| 105 | + while data.len() < e.reserve.unwrap_or(0) { |
| 106 | + data.push(0); |
| 107 | + } |
| 108 | + out_dat.write_all(&data)?; |
| 109 | + |
| 110 | + let timestamp = std::fs::metadata(path)? |
| 111 | + .modified() |
| 112 | + .unwrap_or_else(|_| SystemTime::now()); |
| 113 | + ent.timestamp = timestamp.duration_since(SystemTime::UNIX_EPOCH)?.as_secs() as u32; |
| 114 | + } |
| 115 | + |
| 116 | + let pos2 = out_dat.seek(SeekFrom::End(0))?; |
| 117 | + out_dat.seek(SeekFrom::Start(16 + 4 * id as u64))?; |
| 118 | + out_dat.write_all(&u32::to_le_bytes(pos as u32))?; |
| 119 | + out_dat.write_all(&u32::to_le_bytes(pos2 as u32))?; |
| 120 | + } |
| 121 | + dir.push(ent) |
| 122 | + } |
| 123 | + |
| 124 | + std::fs::rename(out_dir.with_extension("dat.tmp"), out_dir.with_extension("dat"))?; |
| 125 | + std::fs::write(&out_dir, dirdat::write_dir(&dir))?; |
| 126 | + |
| 127 | + tracing::info!("created"); |
| 128 | + |
| 129 | + Ok(()) |
| 130 | +} |
| 131 | + |
| 132 | +fn parse_compress_mode<'de, D: serde::Deserializer<'de>>(des: D) -> Result<Option<bzip::CompressMode>, D::Error> { |
| 133 | + match <Option<u8>>::deserialize(des)? { |
| 134 | + Some(1) => Ok(Some(bzip::CompressMode::Mode1)), |
| 135 | + Some(2) => Ok(Some(bzip::CompressMode::Mode2)), |
| 136 | + None => Ok(None), |
| 137 | + Some(v) => Err(de::Error::invalid_value( |
| 138 | + de::Unexpected::Unsigned(v as _), |
| 139 | + &"1, 2, or null"), |
| 140 | + ), |
| 141 | + } |
| 142 | +} |
| 143 | + |
| 144 | +impl std::str::FromStr for Entry { |
| 145 | + type Err = std::convert::Infallible; |
| 146 | + |
| 147 | + fn from_str(s: &str) -> Result<Self, Self::Err> { |
| 148 | + Ok(Entry { |
| 149 | + path: Some(PathBuf::from(s)), |
| 150 | + name: None, |
| 151 | + compress: None, |
| 152 | + reserve: None, |
| 153 | + unknown1: 0, |
| 154 | + unknown2: 0, |
| 155 | + }) |
| 156 | + } |
| 157 | +} |
| 158 | + |
| 159 | +impl<'de> Deserialize<'de> for Entry { |
| 160 | + fn deserialize<D: de::Deserializer<'de>>(des: D) -> Result<Self, D::Error> { |
| 161 | + struct V; |
| 162 | + impl<'de> de::Visitor<'de> for V { |
| 163 | + type Value = Entry; |
| 164 | + |
| 165 | + fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result { |
| 166 | + formatter.write_str("string or map") |
| 167 | + } |
| 168 | + |
| 169 | + fn visit_str<E: de::Error>(self, value: &str) -> Result<Self::Value, E> { |
| 170 | + std::str::FromStr::from_str(value).map_err(de::Error::custom) |
| 171 | + } |
| 172 | + |
| 173 | + fn visit_map<M: de::MapAccess<'de>>(self, map: M) -> Result<Self::Value, M::Error> { |
| 174 | + Entry::deserialize(de::value::MapAccessDeserializer::new(map)) |
| 175 | + } |
| 176 | + } |
| 177 | + |
| 178 | + let v = des.deserialize_any(V)?; |
| 179 | + if v.path.is_none() && v.name.is_none() { |
| 180 | + return Err(de::Error::custom("at least one of `path` and `name` must be present")) |
| 181 | + } |
| 182 | + Ok(v) |
| 183 | + } |
| 184 | +} |
| 185 | + |
| 186 | +impl<'de> Deserialize<'de> for FileId { |
| 187 | + fn deserialize<D: de::Deserializer<'de>>(des: D) -> Result<Self, D::Error> { |
| 188 | + let s = String::deserialize(des)?; |
| 189 | + let err = || de::Error::invalid_value( |
| 190 | + de::Unexpected::Str(&s), |
| 191 | + &"a hexadecimal number", |
| 192 | + ); |
| 193 | + |
| 194 | + let s = s.strip_prefix("0x").ok_or_else(err)?; |
| 195 | + let v = u32::from_str_radix(s, 16).map_err(|_| err())?; |
| 196 | + Ok(FileId(v as u16)) |
| 197 | + } |
| 198 | +} |
0 commit comments