A major design point of Elpe is to keep its language as inexpressive as possible, which normally rules out altering upstream packages from a distribution.
However, there are times when that is necessary: for example, the way
rustc finds the path to its standard library is with a function calling
dlinfo on itself: this returns the library in the package libstd-rust
(which rustc is dynamically linked against), whereas the one required is
in libstd-rust-dev.
This is solved by copying everything from libstd-rust into libstd-rust-dev. However, since that can be quite costly in terms of disk space, we let the user specify the custom packages for which this must be done.
LIUJQXB752UIHJFF32LKJ7ECUZ2VDIFDDHBLAUVNBRTPDYNCAUYQC let p = download_extract_deps(&index, &self.deb_client, &r.name)
let link_extra: Vec<_> = r.link_extra.into_iter().map(|l| {(regex::Regex::new(&l.pkg).unwrap(),regex::Regex::new(&l.dep).unwrap(),)}).collect();let p = download_extract_deps(&index, &self.deb_client, &r.name, &link_extra)
let mut context_hasher = blake3::Hasher::new();context_hasher.update(vertices[*v].pkg.sha256.unwrap().as_bytes());for d in vertices[*v].deps.iter() {let w = &vertices[*d];if w.scc == vertices[*v].scc {// If in the same SCC, we can't know anything other than the SHA256.context_hasher.update(w.pkg.sha256.unwrap().as_bytes());} else {context_hasher.update(w.final_path.as_ref().unwrap().to_str().unwrap().as_bytes());}}vertices[*v].context_path =Some(Arc::new(client.store_path.join(data_encoding::HEXLOWER.encode(context_hasher.finalize().as_bytes()),)));
hash_context(&mut vertices, *v, &client.store_path, link_extra);
fn hash_context(vertices: &mut [Vertex],v: usize,store_path: &Path,link_extra: &[(regex::Regex, regex::Regex)],) {let mut context_hasher = blake3::Hasher::new();context_hasher.update(vertices[v].pkg.sha256.unwrap().as_bytes());for d in vertices[v].deps.iter() {let w = &vertices[*d];if w.scc == vertices[v].scc {// If in the same SCC, we can't know anything other than the SHA256.context_hasher.update(w.pkg.sha256.unwrap().as_bytes());} else {context_hasher.update(w.final_path.as_ref().unwrap().to_str().unwrap().as_bytes());}}for (a, b) in link_extra {context_hasher.update(a.to_string().as_bytes());context_hasher.update(b.to_string().as_bytes());}vertices[v].context_path = Some(Arc::new(store_path.join(data_encoding::HEXLOWER.encode(context_hasher.finalize().as_bytes())),));}
async fn patch_elf(&mut self,f: &Path,dest_path: &Path,files: &Files,) -> Result<bool, Error> {use elfedit::*;info!("patch_elf {:?}", f);let file = std::fs::OpenOptions::new().read(true).write(true).open(&f)?;
async fn patch_elf<'a>(vertices: &mut [Vertex<'a>],v: usize,f: &Path,dest_path: &Path,files: &Files,) -> Result<bool, Error> {use elfedit::*;info!("patch_elf {:?}", f);let file = std::fs::OpenOptions::new().read(true).write(true).open(&f)?;
let mut elf = match Elf::open(&file) {Ok(elf) => elf,Err(e) => {info!("error opening {:?}: {:?}", file, e);return Ok(false);}};info!("patching {:?}", f);let Some(parsed) = elf.parse().unwrap() else {info!("No dynamic section");
let mut elf = match Elf::open(&file) {Ok(elf) => elf,Err(e) => {info!("error opening {:?}: {:?}", file, e);
};let needed: Vec<_> = parsed.needed().map(|x| x.unwrap().to_str().unwrap().to_string()).collect();let interp = parsed.interpreter();if let Some(interp) = interp.unwrap() {let interp = interp.to_str().unwrap();let files = files.lock().unwrap();
}};info!("patching {:?}", f);let Some(parsed) = elf.parse().unwrap() else {info!("No dynamic section");return Ok(false);};let needed: Vec<_> = parsed.needed().map(|x| x.unwrap().to_str().unwrap().to_string()).collect();let interp = parsed.interpreter();if let Some(interp) = interp.unwrap() {let interp = interp.to_str().unwrap();let files = files.lock().unwrap();
let subst = if let Some(subst) = files.get(p) {subst.join(p)} else if interp.starts_with("/usr")|| interp.starts_with("/lib")|| interp.starts_with("/bin"){// https://www.freedesktop.org/wiki/Software/systemd/TheCaseForTheUsrMerge/let p2 = "usr".to_string() + interp;let p2 = Path::new(&p2);debug!("{:?}", p2);if let Some(subst) = files.get(p2) {subst.join(p2)} else {error!("No subst for {:?}", p2);let mut p2 = p2.to_path_buf();while p2.pop() {debug!("p2 = {:?} {:?}", p2, files.get(&p2));}return Ok(false);}
let subst = if let Some(subst) = files.get(p) {subst.join(p)} else if interp.starts_with("/usr")|| interp.starts_with("/lib")|| interp.starts_with("/bin"){// https://www.freedesktop.org/wiki/Software/systemd/TheCaseForTheUsrMerge/let p2 = "usr".to_string() + interp;let p2 = Path::new(&p2);debug!("{:?}", p2);if let Some(subst) = files.get(p2) {subst.join(p2)
// TODO: not sure what else to do here, we// might want to set the interpreter to a// different value (equivalent to "recompiling// everything" on Nix).info!("Interpreter is {interp}. Already patched?");
error!("No subst for {:?}", p2);let mut p2 = p2.to_path_buf();while p2.pop() {debug!("p2 = {:?} {:?}", p2, files.get(&p2));}
};let subst = CString::new(subst.to_str().unwrap()).unwrap();info!("set interpreter {:?}", subst);elf.set_interpreter(subst.to_bytes_with_nul());} else if needed.is_empty() {// No need to patch
}} else {// TODO: not sure what else to do here, we// might want to set the interpreter to a// different value (equivalent to "recompiling// everything" on Nix).info!("Interpreter is {interp}. Already patched?");
}let mut deps_h = BTreeSet::new();let mut path = String::new();debug!("needed: {:?} -> {:?}", f, needed);
};let subst = CString::new(subst.to_str().unwrap()).unwrap();info!("set interpreter {:?}", subst);elf.set_interpreter(subst.to_bytes_with_nul());} else if needed.is_empty() {// No need to patchreturn Ok(false);}
for dep in self.transitive_deps.iter().rev() {debug!("dep of {:?}: {:?}", f, dep);if !deps_h.insert(dep) {
let mut deps_h = BTreeSet::new();let mut path = String::new();debug!("needed: {:?} -> {:?}", f, needed);let mut satisfied = HashMap::new();// First, prefer local deps.for ld in vertices[v].ld_path.iter() {let mut p = vertices[v].context_path.clone().unwrap().join(ld.strip_prefix("/").unwrap());let mut at_least_one = false;for n in needed.iter() {let e = satisfied.entry(n);use std::collections::hash_map::Entry;if let Entry::Occupied(_) = e {
let mut dep = dep.to_path_buf();let mut is_needed = false;for n in needed.iter() {dep.push(&n);let exists = tokio::fs::metadata(&dep).await.is_ok();if exists {debug!("exists {:?}", dep);
p.push(&n);debug!("trying local path {:?}", p);if tokio::fs::metadata(&p).await.is_ok() {if let Entry::Vacant(e) = e {e.insert(());
// Add potential local libs.let mut last_added = None;for (d, _) in find_files(self.downloaded.path.clone())? {if last_added.as_deref() == d.parent() {
for dep_ in vertices[v].transitive_deps.iter().rev() {let (dep, dep_path) = dep_.as_ref();if !deps_h.insert(dep) {continue;}let mut dep = if vertices[*dep].scc == vertices[v].scc {vertices[*dep].context_path.clone().unwrap().join(dep_path)} else {vertices[*dep].final_path.clone().unwrap().join(dep_path)};debug!("dep of {:?}: {:?}", f, dep);let mut is_needed = false;for n in needed.iter() {let e = satisfied.entry(n);use std::collections::hash_map::Entry;if let Entry::Occupied(_) = e {
for n in needed.iter() {if d.file_name().unwrap().to_str().unwrap() == n {last_added = d.parent().map(|x| x.to_path_buf());if !path.is_empty() {path.push(':')}let suffix = d.parent().unwrap().strip_prefix(&self.downloaded.path).unwrap();path.push_str(self.context_path.as_ref().unwrap().join(suffix).to_str().unwrap(),);
dep.push(&n);let exists = tokio::fs::metadata(&dep).await.is_ok();if exists {if let Entry::Vacant(e) = e {e.insert(());
path.push('\0');info!("Setting path {:?}", path);if path.len() > 1 {elf.set_runpath(&path.as_bytes());
/*// Add potential local libs.let mut last_added = None;for (d, _) in find_files(vertices[v].downloaded.path.clone())? {if last_added.as_deref() == d.parent() {continue;
debug!("patching into {:?}", dest_path);Ok(elf.update(Some(dest_path)).unwrap()) // map_err(From::from)
for n in needed.iter() {if d.file_name().unwrap().to_str().unwrap() == n {last_added = d.parent().map(|x| x.to_path_buf());if !path.is_empty() {path.push(':')}let suffix = d.parent().unwrap().strip_prefix(&vertices[v].downloaded.path).unwrap();path.push_str(vertices[v].context_path.as_ref().unwrap().join(suffix).to_str().unwrap(),);}}
deps_paths.push(if vertices[*dep].scc == vertices[v].scc {
debug!("adding direct dep: {:?} {:?} {:?}",vertices[*dep].scc, vertices[v].scc, direct);deps_paths.push(direct)}debug!("transitive {:#?}", transitive_deps);// Direct deps should come after transitive ones, in order to be// picked up earlier when patching the ELFs.for dep in vertices[v].deps.iter() {let direct = if vertices[*dep].scc == vertices[v].scc {
})
};for ld in vertices[*dep].ld_path.iter() {let l = ld.strip_prefix("/").unwrap();if std::fs::metadata(&direct.join(l)).is_ok() {let path = Arc::new((*dep, l.to_path_buf()));// Push even if it already exists since we need direct// deps to come after indirect ones.transitive_deps.push(path);}}
info!("create final path for {dest:?}");match vertices[v].create_final_path(client, &files, &dest, &base_package_name).await
info!("create final path for {dest:?} ({:?})",vertices[v].pkg.package);match create_final_path(client,&files,vertices,v,&dest,&base_package_name,link_extra,).await
impl<'a> Vertex<'a> {async fn create_final_path(&mut self,client: &Client,files: &Files,dest: &Path,base_package_name: &str,) -> Result<PathBuf, Error> {// Link the required libexec before hashing.let tmp = async_tempfile::TempDir::new_in(dest.parent().unwrap()).await?;self.link_extra(&tmp.dir_path(), &["usr/libexec", "usr/lib/gcc"]).await?;// Patch the ELFs, now that we have all the deps.let mut hashing = Vec::new();let mut hashes = Vec::with_capacity(hashing.len());debug!("create_final_path {:?}", self.downloaded.path);for (f, meta) in find_files(self.downloaded.path.to_path_buf())? {debug!("f = {:?}", f);let rel = f.strip_prefix(&self.downloaded.path).unwrap();let dest_path = tmp.dir_path().join(&rel);
if meta.is_dir() {tokio::fs::create_dir_all(dest_path).await.unwrap_or(());continue;
async fn copy(from: &Path, to: &Path) -> Result<(), std::io::Error> {let mut stack = vec![from.to_path_buf()];while let Some(elt) = stack.pop() {if let Ok(mut dir) = tokio::fs::read_dir(&elt).await {let p = elt.strip_prefix(&from).unwrap();tokio::fs::create_dir_all(&to.join(&p)).await.unwrap();while let Some(e) = dir.next_entry().await.unwrap() {stack.push(e.path());
if meta.is_symlink() {// Relink the file to the subst.let target = tokio::fs::read_link(&f).await?;debug!("relink {:?} -> {:?} {:?}", f, target, target.is_relative());let subst = {let l = files.lock().unwrap();let target_rel = rel.parent().unwrap().join(&target);if let Some(subst) = l.get(&target_rel) {Some(subst.join(&target_rel))} else {None}};if let Some(subst) = subst {debug!("relink to {:?}", dest_path);tokio::fs::symlink(&subst, &dest_path).await?;hashes.push((f, subst.to_str().unwrap().to_string()))} else {// Leave the symlink untouched// tokio::fs::create_dir_all(dest_path.parent().unwrap()).await.unwrap_or(());debug!("symlink {:?} {:?} {:?}", target, dest_path, f);tokio::fs::symlink(&target, &dest_path).await?;hashes.push((f, target.to_str().unwrap().to_string()));
async fn create_final_path<'a>(client: &Client,files: &Files,vertices: &mut [Vertex<'a>],v: usize,dest: &Path,base_package_name: &str,link_extra: &[(regex::Regex, regex::Regex)],) -> Result<PathBuf, Error> {// Link the required libexec before hashing.let tmp = async_tempfile::TempDir::new_in(dest.parent().unwrap()).await?;vertices[v].link_extra(&tmp.dir_path(), &["usr/libexec", "usr/lib/gcc"]).await.unwrap();for (pkg, dep) in link_extra {if pkg.is_match(vertices[v].pkg.package) {for d in (0..v).rev() {if dep.is_match(vertices[d].pkg.package) {debug!("match, copying {:?} to {:?}",vertices[d].context_path, vertices[v].context_path);copy(&vertices[d].context_path.clone().unwrap(), &tmp.dir_path()).await?
if !self.patch_elf(&f, &dest_path, &files).await.unwrap_or(false){// Hard linkdebug!("hard link {:?} {:?}", f, dest_path);tokio::fs::hard_link(&f, &dest_path).await.unwrap_or(());}
// Patch the ELFs, now that we have all the deps.let mut hashing = Vec::new();let mut hashes = Vec::with_capacity(hashing.len());debug!("create_final_path {:?}", vertices[v].downloaded.path);for (f, meta) in find_files(vertices[v].downloaded.path.to_path_buf())? {debug!("f = {:?}", f);let rel = f.strip_prefix(&vertices[v].downloaded.path).unwrap();let dest_path = tmp.dir_path().join(&rel);
hashing.push(tokio::spawn(async move {// hash + writeinfo!("hashing {:?}", f);if let Ok(file) = tokio::fs::File::open(&dest_path).await {let mut hasher = blake3::Hasher::new();hash_reader(file, &mut hasher).await.unwrap();let hex = data_encoding::HEXLOWER.encode(hasher.finalize().as_bytes());Ok::<_, Error>(Some((f, hex)))} else {Ok(None)}}));
if meta.is_dir() {tokio::fs::create_dir_all(dest_path).await.unwrap_or(());continue;
for h in hashing.into_iter() {if let Some(h) = h.await.unwrap().unwrap() {hashes.push(h)
if meta.is_symlink() {// Relink the file to the subst.let target = tokio::fs::read_link(&f).await?;debug!("relink {:?} -> {:?} {:?}", f, target, target.is_relative());let subst = {let l = files.lock().unwrap();let target_rel = rel.parent().unwrap().join(&target);if let Some(subst) = l.get(&target_rel) {Some(subst.join(&target_rel))} else {None}};if let Some(subst) = subst {debug!("relink to {:?}", dest_path);tokio::fs::symlink(&subst, &dest_path).await?;hashes.push((f, subst.to_str().unwrap().to_string()))} else {// Leave the symlink untouched// tokio::fs::create_dir_all(dest_path.parent().unwrap()).await.unwrap_or(());debug!("symlink {:?} {:?} {:?}", target, dest_path, f);tokio::fs::symlink(&target, &dest_path).await?;hashes.push((f, target.to_str().unwrap().to_string()));
let blakesums = tmp.dir_path().join("blake3sums");let mut file = tokio::fs::File::create(&blakesums).await?;for (path, hash) in hashes {let path = path.strip_prefix(&self.downloaded.path).unwrap().to_str().unwrap();file.write_all(hash.as_bytes()).await?;file.write_all(b" ").await?;file.write_all(path.as_bytes()).await?;file.write_all(b"\n").await?;writeln!(output_hasher, "{} {}", hash, path)?;}
// Hard linkdebug!("hard link {:?} {:?}", f, dest_path);tokio::fs::hard_link(&f, &dest_path).await.unwrap_or(());
{let transitive = tmp.dir_path().join("paths");let mut file = tokio::fs::File::create(&transitive).await?;for path in self.files.iter() {file.write_all(path.as_bytes()).await?;file.write_all(b"\n").await?;
hashing.push(tokio::spawn(async move {// hash + writeinfo!("hashing {:?}", f);if let Ok(file) = tokio::fs::File::open(&dest_path).await {let mut hasher = blake3::Hasher::new();hash_reader(file, &mut hasher).await.unwrap();let hex = data_encoding::HEXLOWER.encode(hasher.finalize().as_bytes());Ok::<_, Error>(Some((f, hex)))} else {Ok(None)
for (f, _) in find_dirs(self.downloaded.path.to_path_buf())? {let rel = f.strip_prefix(&self.downloaded.path).unwrap();self.files.insert(Arc::new(final_path.join(rel).to_str().unwrap().to_string()));
let mut output_hasher = blake3::Hasher::new();{let blakesums = tmp.dir_path().join("blake3sums");let mut file = tokio::fs::File::create(&blakesums).await?;for (path, hash) in hashes {let path = path.strip_prefix(&vertices[v].downloaded.path).unwrap().to_str().unwrap();file.write_all(hash.as_bytes()).await?;file.write_all(b" ").await?;file.write_all(path.as_bytes()).await?;file.write_all(b"\n").await?;writeln!(output_hasher, "{} {}", hash, path)?;
info!("rename {:?} to {:?}", tmp.dir_path(), dest);tokio::fs::rename(tmp.dir_path(), dest).await?;std::mem::forget(tmp);
{let transitive = tmp.dir_path().join("paths");let mut file = tokio::fs::File::create(&transitive).await?;for path in vertices[v].files.iter() {file.write_all(path.as_bytes()).await?;file.write_all(b"\n").await?;}}