This repository has been archived by the owner on Apr 21, 2023. It is now read-only.

Commit

Deny unwrap
NiceneNerd committed Dec 23, 2022
1 parent 1e7accb commit 7bfc61f
Showing 8 changed files with 137 additions and 79 deletions.
16 changes: 10 additions & 6 deletions src/lib.rs
@@ -1,4 +1,5 @@
#![feature(let_chains)]
#![deny(clippy::unwrap_used)]
pub mod manager;
pub mod mergers;
pub mod settings;
@@ -38,14 +39,14 @@ fn find_modified_files(py: Python, mod_dir: String) -> PyResult<Vec<String>> {
let content = mod_dir.join(util::content());
let dlc = mod_dir.join(util::dlc());
let files: Vec<PathBuf> = py.allow_threads(|| {
glob::glob(mod_dir.join("**/*").to_str().unwrap())
.unwrap()
glob::glob(&mod_dir.join("**/*").to_string_lossy())
.expect("Bad glob?!?!?!")
.filter_map(std::result::Result::ok)
.par_bridge()
.filter(|f| {
f.is_file()
&& (f.starts_with(&content) || f.starts_with(&dlc))
&& util::get_canon_name(f.strip_prefix(mod_dir).unwrap())
&& util::get_canon_name(unsafe { f.strip_prefix(mod_dir).unwrap_unchecked() })
.and_then(|canon| {
fs::read(f)
.ok()
@@ -60,7 +61,7 @@ fn find_modified_files(py: Python, mod_dir: String) -> PyResult<Vec<String>> {
Ok(files
.par_iter()
.filter(|f| {
fs::metadata(f).unwrap().len() > 4
fs::metadata(f).expect("No file metadata!?!?!?!").len() > 4
&& f.extension()
.and_then(|ext| ext.to_str())
.map(|ext| botw_utils::extensions::SARC_EXTS.contains(&ext))
Expand All @@ -71,7 +72,7 @@ fn find_modified_files(py: Python, mod_dir: String) -> PyResult<Vec<String>> {
find_modded_sarc_files(
&sarc,
file.starts_with(&dlc),
&file.strip_prefix(mod_dir).unwrap().to_slash_lossy(),
&unsafe { file.strip_prefix(mod_dir).unwrap_unchecked() }.to_slash_lossy(),
)
})
.collect::<Result<Vec<_>>>()?
@@ -110,7 +111,7 @@ fn find_modded_sarc_files(sarc: &Sarc, aoc: bool, path: &str) -> Result<Vec<Stri
modded_files.extend(find_modded_sarc_files(
&sarc,
aoc,
modded_files.first().as_ref().unwrap(),
modded_files
.first()
.as_ref()
.expect("What a strange filename"),
)?);
}
Ok(modded_files)
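
For context (an editor's sketch, not part of the commit itself): the new crate attribute #![deny(clippy::unwrap_used)] added to lib.rs turns every bare .unwrap() on an Option or Result into a hard error when the crate is checked with Clippy, which is what drives the call-site rewrites in the rest of this diff. A minimal, self-contained illustration of the replacement patterns it pushes toward; the function names and the Cargo.toml path below are invented for the example:

    #![deny(clippy::unwrap_used)]

    use std::{fs, io, path::Path};

    // Rejected once the lint is denied; `cargo clippy` reports a hard error:
    // fn size_bare(path: &Path) -> u64 {
    //     fs::metadata(path).unwrap().len()
    // }

    // Propagate the error to the caller with `?`.
    fn size_propagated(path: &Path) -> io::Result<u64> {
        Ok(fs::metadata(path)?.len())
    }

    // Fall back to a default value instead of panicking.
    fn size_or_zero(path: &Path) -> u64 {
        fs::metadata(path).map(|m| m.len()).unwrap_or_default()
    }

    // Still panic on failure, but with a message; `expect` is policed by the
    // separate clippy::expect_used lint, not by clippy::unwrap_used.
    fn size_or_panic(path: &Path) -> u64 {
        fs::metadata(path).expect("No file metadata!?!?!?!").len()
    }

    fn main() {
        let path = Path::new("Cargo.toml");
        println!("{:?}", size_propagated(path));
        println!("{}", size_or_zero(path));
        println!("{}", size_or_panic(path));
    }

As the diff also shows, unsafe { ... unwrap_unchecked() } is another escape hatch this lint leaves alone; there is a short note on that pattern after the maps.rs hunks below.
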
14 changes: 7 additions & 7 deletions src/manager.rs
@@ -93,7 +93,7 @@ impl<'py, 'set> ModLinker<'py, 'set> {
}
let mod_folders: Vec<PathBuf> =
glob::glob(&settings.mods_dir().join("*").to_string_lossy())
.unwrap()
.expect("Bad glob?!?!?")
.filter_map(|p| p.ok())
.filter(|p| p.is_dir() && !p.join(".disabled").exists())
.collect::<std::collections::BTreeSet<PathBuf>>()
@@ -103,7 +103,7 @@ impl<'py, 'set> ModLinker<'py, 'set> {
std::iter::once(p)
.chain(
glob::glob(&glob_str)
.unwrap()
.expect("Bad glob?!?!?")
.filter_map(|p| p.ok())
.filter(|p| p.is_dir()),
)
@@ -118,10 +118,10 @@ impl<'py, 'set> ModLinker<'py, 'set> {
.try_for_each(|folder| -> Result<()> {
let mod_files: Vec<(PathBuf, PathBuf)> =
glob::glob(&folder.join("**/*").to_string_lossy())
.unwrap()
.expect("Bad glob?!?!?!")
.filter_map(|p| {
p.ok().map(|p| {
(p.clone(), p.strip_prefix(&folder).unwrap().to_owned())
(p.clone(), unsafe {p.strip_prefix(&folder).unwrap_unchecked()}.to_owned())
})
})
.filter(|(item, rel)| {
@@ -145,7 +145,7 @@ impl<'py, 'set> ModLinker<'py, 'set> {
.map(fs::create_dir_all)
.transpose()
.with_context(|| jstr!("Failed to create parent folder for file {rel.to_str().unwrap()}"))?
.unwrap();
.expect("Whoa, why is there no parent folder?");
fs::hard_link(&item, &out)
.with_context(|| jstr!("Failed to hard link {rel.to_str().unwrap()} to {out.to_str().unwrap()}"))
.or_else(|_| {
@@ -302,11 +302,11 @@ impl<'py, 'set> ModLinker<'py, 'set> {
}
}
if glob::glob(&output.join("*").to_string_lossy())
.unwrap()
.expect("Bad glob?!?!?!")
.filter_map(|p| p.ok())
.count()
== 0
&& std::fs::read_dir(settings.mods_dir()).unwrap().count() > 1
&& std::fs::read_dir(settings.mods_dir())?.count() > 1
{
Err(anyhow::anyhow!("Output folder is empty"))
} else {
14 changes: 7 additions & 7 deletions src/mergers/actorinfo.rs
@@ -23,7 +23,7 @@ static STOCK_ACTORINFO: Lazy<Result<Arc<ActorMap>>> = Lazy::new(|| {
.iter()
.map(|actor| -> Result<(u32, Byml)> {
Ok((
roead::aamp::hash_name(actor.as_hash().unwrap()["name"].as_string()?),
roead::aamp::hash_name(actor.as_hash()?["name"].as_string()?),
actor.clone(),
))
})
@@ -107,7 +107,7 @@ fn diff_actorinfo(py: Python, actorinfo_path: String) -> PyResult<PyObject> {
})
})
.collect();
Ok(Byml::Hash(diff).to_text().unwrap().as_bytes().to_vec())
Ok(Byml::Hash(diff).to_text()?.as_bytes().to_vec())
} else {
anyhow::bail!("Modded actor info is not a hash???")
}
@@ -120,11 +120,11 @@ fn merge_actorinfo(py: Python, modded_actors: Vec<u8>) -> PyResult<()> {
let merge = || -> Result<()> {
let modded_actor_root = Byml::from_binary(&modded_actors)?;
let modded_actors: ActorMap = py.allow_threads(|| -> Result<ActorMap> {
Ok(modded_actor_root
modded_actor_root
.as_hash()?
.into_par_iter()
.map(|(h, a)| (h.parse::<u32>().unwrap(), a.clone()))
.collect())
.map(|(h, a)| Ok((h.parse::<u32>()?, a.clone())))
.collect()
})?;
let mut merged_actors = stock_actorinfo()?.as_ref().clone();
merge_actormap(&mut merged_actors, &modded_actors);
@@ -152,8 +152,8 @@ fn merge_actorinfo(py: Python, modded_actors: Vec<u8>) -> PyResult<()> {
let output = util::settings()
.master_content_dir()
.join("Actor/ActorInfo.product.sbyml");
if !output.parent().unwrap().exists() {
fs::create_dir_all(output.parent().unwrap())?;
if !output.parent().expect("No parent folder?!?!?").exists() {
fs::create_dir_all(output.parent().expect("No parent folder?!?!?"))?;
}
fs::write(
output,
67 changes: 37 additions & 30 deletions src/mergers/maps.rs
@@ -62,11 +62,16 @@ impl Display for MapUnit {
impl TryFrom<&Path> for MapUnit {
type Error = anyhow::Error;
fn try_from(value: &Path) -> Result<Self> {
let mut split = value.file_stem().unwrap().to_str().unwrap().split('_');
let mut split = value
.file_stem()
.unwrap_or_default()
.to_str()
.unwrap_or_default()
.split('_');
Ok(MapUnit {
unit: split.next().context("Not a map unitt")?.into(),
kind: split.next().context("Not a map unitt")?.into(),
aocfield: value.to_str().unwrap().contains("AocField"),
aocfield: value.to_str().unwrap_or_default().contains("AocField"),
})
}
}
@@ -96,7 +101,10 @@ impl MapUnit {
}

fn get_aoc_path(&self) -> PathBuf {
util::settings().dlc_dir().unwrap().join(self.get_path())
util::settings()
.dlc_dir()
.expect("There's no DLC folder")
.join(self.get_path())
}

fn get_resource_path(&self) -> String {
@@ -126,8 +134,7 @@ impl MapUnit {
MapUnitType::Static => {
let pack = util::get_stock_pack("TitleBG")?;
Ok(Byml::from_binary(&decompress(
pack
.get_data(&self.get_path())
pack.get_data(&self.get_path())
.with_context(|| {
jstr!("Failed to read {&self.get_path()} from TitleBG.pack")
})?
@@ -146,8 +153,7 @@ impl MapUnit {
MapUnitType::Static => {
let pack = util::get_stock_pack("AocMainField")?;
Ok(Byml::from_binary(&decompress(
pack
.get_data(&self.get_path())
pack.get_data(&self.get_path())
.with_context(|| {
jstr!("Failed to read {&self.get_path()} from TitleBG.pack")
})?
@@ -161,11 +167,11 @@ impl MapUnit {
fn merge_entries(diff: &Hash, entries: &mut Vec<Byml>) -> Result<()> {
let stock_hashes: Vec<u32> = entries
.iter()
.map(|e| e["HashId"].as_u32().unwrap())
.collect();
.map(|e| Ok(e["HashId"].as_u32()?))
.collect::<Result<_>>()?;
let mut orphans: Vec<Byml> = vec![];
for (hash, entry) in diff["mod"].as_hash()? {
let hash = hash.parse::<u32>().unwrap();
let hash = hash.parse::<u32>()?;
if let Some(idx) = stock_hashes.iter().position(|h| *h == hash) {
entries[idx] = entry.clone();
} else {
@@ -188,19 +194,18 @@ fn merge_entries(diff: &Hash, entries: &mut Vec<Byml>) -> Result<()> {
.cloned()
.chain(orphans.into_iter())
.filter(|e| {
!stock_hashes.contains(
&(e["HashId"]
.as_u32()
.or_else(|_| e["HashId"].as_i32().map(|i| i as u32))
.unwrap()),
)
e["HashId"]
.as_u32()
.or_else(|_| e["HashId"].as_i32().map(|i| i as u32))
.map(|h| !stock_hashes.contains(&h))
.unwrap_or(false)
}),
);
entries.sort_by_cached_key(|e| {
e["HashId"]
.as_u32()
.or_else(|_| e["HashId"].as_i32().map(|i| i as u32))
.unwrap()
.unwrap_or(0)
});
Ok(())
}
@@ -218,16 +223,18 @@ fn merge_map(map_unit: MapUnit, diff: &Hash, settings: &Settings) -> Result<(Str
merge_entries(diff["Rails"].as_hash()?, rails)?;
}
let data = new_map.to_binary(settings.endian());
let size = rstb::calc::calc_from_size_and_name(
data.len(),
"dummy.mubin",
if settings.wiiu {
rstb::Endian::Big
} else {
rstb::Endian::Little
},
)
.unwrap();
let size = unsafe {
rstb::calc::calc_from_size_and_name(
data.len(),
"dummy.mubin",
if settings.wiiu {
rstb::Endian::Big
} else {
rstb::Endian::Little
},
)
.unwrap_unchecked()
};
let out = settings
.master_mod_dir()
.join(if util::settings().dlc_dir().is_some() {
Expand All @@ -236,8 +243,8 @@ fn merge_map(map_unit: MapUnit, diff: &Hash, settings: &Settings) -> Result<(Str
util::content()
})
.join(map_unit.get_path());
if !out.parent().unwrap().exists() {
fs::create_dir_all(out.parent().unwrap())?;
if !out.parent().expect("Folder has no parent?!?").exists() {
fs::create_dir_all(out.parent().expect("Folder has no parent?!?"))?;
}
fs::write(out, compress(data))?;
Ok((
@@ -252,7 +259,7 @@ fn merge_map(map_unit: MapUnit, diff: &Hash, settings: &Settings) -> Result<(Str

#[pyfunction]
pub fn merge_maps(py: Python, diff_bytes: Vec<u8>) -> PyResult<PyObject> {
let diffs = Byml::from_binary(&diff_bytes).unwrap();
let diffs = Byml::from_binary(&diff_bytes).map_err(anyhow::Error::from)?;
let rstb_values: HashMap<String, u32> = if let Byml::Hash(diffs) = diffs {
py.allow_threads(|| -> Result<HashMap<String, u32>> {
let settings = util::settings().clone();
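
A note on the unsafe { ... unwrap_unchecked() } wrapper around the RSTB size calculation above (an editor's reading of the pattern, not stated in the commit): judging by this diff, clippy::unwrap_used does not flag unwrap_unchecked, so it satisfies the lint without adding an error message, but at a price: if the value were ever None the result is undefined behavior rather than a panic, so it only makes sense where the call is known to succeed, as with the fixed "dummy.mubin" name here. A tiny sketch of the trade-off, using hypothetical helper functions:

    // Panics with a readable message if the assumption is ever wrong.
    fn size_checked(size: Option<u32>) -> u32 {
        size.expect("RSTB size calculation failed")
    }

    // SAFETY contract: the caller must guarantee `size` is Some;
    // passing None is undefined behavior, not a panic.
    fn size_unchecked(size: Option<u32>) -> u32 {
        unsafe { size.unwrap_unchecked() }
    }

    fn main() {
        assert_eq!(size_checked(Some(42)), 42);
        assert_eq!(size_unchecked(Some(42)), 42);
    }
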
12 changes: 9 additions & 3 deletions src/mergers/pack.rs
@@ -94,7 +94,7 @@ fn merge_sarc(sarcs: Vec<Sarc>, endian: Endian) -> Result<Vec<u8>> {
let mut merged = merge_sarc(nest_sarcs, endian)?;
if file_path
.extension()
.map(|e| e.to_str().unwrap().starts_with('s'))
.map(|e| e.to_str().unwrap_or_default().starts_with('s'))
.unwrap_or_default()
{
merged = compress(&merged);
@@ -133,10 +133,16 @@ pub fn merge_sarcs(py: Python, diffs: HashMap<PathBuf, Vec<PathBuf>>) -> PyResul
})
.collect::<Result<Vec<Sarc>>>()?;
let mut merged = merge_sarc(sarcs, settings.endian())?;
if out.extension().unwrap().to_str().unwrap().starts_with('s') {
if out
.extension()
.unwrap_or_default()
.to_str()
.unwrap_or_default()
.starts_with('s')
{
merged = compress(merged);
}
fs::create_dir_all(out.parent().unwrap())?;
fs::create_dir_all(out.parent().expect("No parent folder??!?"))?;
fs::write(out, merged)?;
Ok(())
})?;
17 changes: 9 additions & 8 deletions src/mergers/texts.rs
@@ -31,9 +31,9 @@ pub fn diff_language(
let diff = py.allow_threads(|| -> Result<IndexMap<String, Diff>> {
let language = &Path::new(&mod_bootup_path)
.file_stem()
.unwrap()
.expect("Okay, how does this path have no name?")
.to_str()
.unwrap()[7..];
.expect("And this should definitely work, too")[7..];
let mod_bootup = Sarc::new(fs::read(&mod_bootup_path)?)?;
let stock_bootup = Sarc::new(fs::read(&stock_bootup_path)?)?;
let message_path = jstr!("Message/Msg_{&language}.product.ssarc");
@@ -67,7 +67,8 @@ pub fn diff_language(
.with_context(|| jstr!("Invalid MSBT file: {&path}"))?;
if let Some(stock_text) = stock_message
.get_data(&path)
.unwrap()
.ok()
.flatten()
.and_then(|data| Msyt::from_msbt_bytes(data).ok())
{
if mod_text == stock_text {
Expand All @@ -80,8 +81,7 @@ pub fn diff_language(
if only_new_keys {
!stock_text.entries.contains_key(*e)
} else {
!stock_text.entries.contains_key(*e)
|| *t != stock_text.entries.get(*e).unwrap()
stock_text.entries.get(*e) != Some(t)
}
})
.map(|(e, t)| (e.to_owned(), t.clone()))
@@ -105,7 +105,8 @@ pub fn diff_language(
.collect();
Ok(diffs)
})?;
let diff_text = serde_json::to_string(&diff).unwrap();
let diff_text =
serde_json::to_string(&diff).expect("It's whack if this diff doesn't serialize");
let json = PyModule::import(py, "json")?;
#[allow(deprecated)]
let dict = json.call_method1("loads", (&diff_text,))?;
@@ -130,9 +131,9 @@ pub fn merge_language(
py.allow_threads(|| -> Result<()> {
let language = &Path::new(&stock_bootup_path)
.file_stem()
.unwrap()
.expect("Okay, how does this path have no name?")
.to_str()
.unwrap()[7..];
.expect("And this should definitely work, too")[7..];
let stock_bootup = Sarc::new(fs::read(&stock_bootup_path)?)?;
let message_path = format!("Message/Msg_{}.product.ssarc", &language);
let stock_message = Sarc::new(decompress(
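
One more pattern from the texts.rs hunks worth spelling out (general Rust, not specific to this crate): get_data here appears to return a Result wrapping an Option, and the replacement chain .ok().flatten() collapses both failure modes, a broken archive and a missing entry, into a single Option that feeds straight into and_then, where the old code panicked via .unwrap(). A self-contained sketch with a toy lookup standing in for the SARC archive:

    use std::collections::HashMap;

    // Hypothetical stand-in for an archive lookup that can fail outright
    // (bad archive) or succeed while finding nothing (missing entry).
    fn get_data<'a>(
        archive: &'a HashMap<String, Vec<u8>>,
        name: &str,
    ) -> Result<Option<&'a [u8]>, String> {
        if archive.is_empty() {
            return Err("archive not loaded".into());
        }
        Ok(archive.get(name).map(|v| v.as_slice()))
    }

    fn main() {
        let archive =
            HashMap::from([("Message/Msg_USen.product.ssarc".to_string(), vec![1u8, 2, 3])]);

        // Result<Option<&[u8]>, _>  --.ok()-->  Option<Option<&[u8]>>  --.flatten()-->  Option<&[u8]>
        let parsed = get_data(&archive, "Message/Msg_USen.product.ssarc")
            .ok()
            .flatten()
            .and_then(|data| Some(data.len())); // stand-in for Msyt::from_msbt_bytes(data).ok()

        assert_eq!(parsed, Some(3));
    }
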
