use std::fmt::{self, Debug, Formatter};
use std::fs;
use std::path::{Path, PathBuf};
use filetime::FileTime;
use ignore::gitignore::GitignoreBuilder;
use ignore::Match;
use log::{trace, warn};
use crate::core::source::MaybePackage;
use crate::core::{Dependency, Package, PackageId, Source, SourceId, Summary};
use crate::ops;
use crate::util::{internal, paths, CargoResult, CargoResultExt, Config};
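/// A `Source` backed by a directory on the local filesystem. It either
/// represents the single package rooted at `path` or, in recursive mode,
/// every package discovered underneath that root.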
pub struct PathSource<'cfg> {
source_id: SourceId,
path: PathBuf,
updated: bool,
packages: Vec<Package>,
config: &'cfg Config,
recursive: bool,
}
impl<'cfg> PathSource<'cfg> {
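    /// Creates a source for the package whose `Cargo.toml` lives directly in
    /// the directory at `path`.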
pub fn new(path: &Path, source_id: SourceId, config: &'cfg Config) -> PathSource<'cfg> {
PathSource {
source_id,
path: path.to_path_buf(),
updated: false,
packages: Vec::new(),
config,
recursive: false,
}
}
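    /// Creates a source that walks `root` recursively and gathers every
    /// package found beneath it, rather than requiring a package at `root`
    /// itself.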
pub fn new_recursive(root: &Path, id: SourceId, config: &'cfg Config) -> PathSource<'cfg> {
PathSource {
recursive: true,
..PathSource::new(root, id, config)
}
}
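    /// Preloads a package for this source. The source is then considered
    /// updated, so no filesystem or git scan will take place.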
pub fn preload_with(&mut self, pkg: Package) {
assert!(!self.updated);
assert!(!self.recursive);
assert!(self.packages.is_empty());
self.updated = true;
self.packages.push(pkg);
}
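    /// Returns the package rooted exactly at this source's path, updating the
    /// source first if needed.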
pub fn root_package(&mut self) -> CargoResult<Package> {
trace!("root_package; source={:?}", self);
self.update()?;
match self.packages.iter().find(|p| p.root() == &*self.path) {
Some(pkg) => Ok(pkg.clone()),
None => Err(internal("no package found in source")),
}
}
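    /// Reads the packages provided by this source: the cached set if the
    /// source is already updated, every package under the root in recursive
    /// mode, or the single package described by the root `Cargo.toml`.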
pub fn read_packages(&self) -> CargoResult<Vec<Package>> {
if self.updated {
Ok(self.packages.clone())
} else if self.recursive {
ops::read_packages(&self.path, self.source_id, self.config)
} else {
let path = self.path.join("Cargo.toml");
let (pkg, _) = ops::read_package(&path, self.source_id, self.config)?;
Ok(vec![pkg])
}
}
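    /// Lists all files relevant to building `pkg`, honoring the manifest's
    /// `include`/`exclude` rules and, when the package lives in a git
    /// repository, the repository's tracked and ignored files.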
pub fn list_files(&self, pkg: &Package) -> CargoResult<Vec<PathBuf>> {
let root = pkg.root();
let no_include_option = pkg.manifest().include().is_empty();
let mut exclude_builder = GitignoreBuilder::new(root);
for rule in pkg.manifest().exclude() {
exclude_builder.add_line(None, rule)?;
}
let ignore_exclude = exclude_builder.build()?;
let mut include_builder = GitignoreBuilder::new(root);
for rule in pkg.manifest().include() {
include_builder.add_line(None, rule)?;
}
let ignore_include = include_builder.build()?;
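        // With no `include` list a file is packaged unless `exclude` matches
        // it; with an `include` list only matching files are packaged. Note
        // that the `ignore` crate reports a positive `include` match as
        // `Match::Ignore`, hence the inverted arms below.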
let ignore_should_package = |relative_path: &Path| -> CargoResult<bool> {
if no_include_option {
                match ignore_exclude
                    .matched_path_or_any_parents(relative_path, /* is_dir */ false)
                {
Match::None => Ok(true),
Match::Ignore(_) => Ok(false),
Match::Whitelist(_) => Ok(true),
}
} else {
                match ignore_include
                    .matched_path_or_any_parents(relative_path, /* is_dir */ false)
                {
Match::None => Ok(false),
Match::Ignore(_) => Ok(true),
Match::Whitelist(_) => Ok(false),
}
}
};
let mut filter = |path: &Path| -> CargoResult<bool> {
let relative_path = path.strip_prefix(root)?;
let rel = relative_path.as_os_str();
if rel == "Cargo.lock" {
return Ok(pkg.include_lockfile());
} else if rel == "Cargo.toml" {
return Ok(true);
}
ignore_should_package(relative_path)
};
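        // Enumerate files through git only when there is no `include` list;
        // otherwise fall back to walking the filesystem.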
if no_include_option {
if let Some(result) = self.discover_git_and_list_files(pkg, root, &mut filter) {
return result;
}
}
self.list_files_walk(pkg, &mut filter)
}
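    /// Walks up from the package root looking for a git repository whose
    /// index tracks this package's `Cargo.toml`. Returns `Some` with the
    /// git-based file listing if one is found, or `None` so the caller can
    /// fall back to a plain filesystem walk.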
fn discover_git_and_list_files(
&self,
pkg: &Package,
root: &Path,
filter: &mut dyn FnMut(&Path) -> CargoResult<bool>,
) -> Option<CargoResult<Vec<PathBuf>>> {
let mut cur = root;
loop {
if cur.join("Cargo.toml").is_file() {
if let Ok(repo) = git2::Repository::open(cur) {
let index = match repo.index() {
Ok(index) => index,
Err(err) => return Some(Err(err.into())),
};
let path = root.strip_prefix(cur).unwrap().join("Cargo.toml");
if index.get_path(&path, 0).is_some() {
return Some(self.list_files_git(pkg, &repo, filter));
}
}
}
if cur.join(".git").is_dir() {
break;
}
match cur.parent() {
Some(parent) => cur = parent,
None => break,
}
}
None
}
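    /// Lists the package's files using the git index plus working-tree
    /// status, recursing into submodules and pruning files that belong to
    /// nested subpackages.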
fn list_files_git(
&self,
pkg: &Package,
repo: &git2::Repository,
filter: &mut dyn FnMut(&Path) -> CargoResult<bool>,
) -> CargoResult<Vec<PathBuf>> {
warn!("list_files_git {}", pkg.package_id());
let index = repo.index()?;
let root = repo
.workdir()
.ok_or_else(|| internal("Can't list files on a bare repository."))?;
let pkg_path = pkg.root();
let mut ret = Vec::<PathBuf>::new();
let index_files = index.iter().map(|entry| {
use libgit2_sys::{GIT_FILEMODE_COMMIT, GIT_FILEMODE_LINK};
let is_dir = if entry.mode == GIT_FILEMODE_LINK as u32 {
None
} else {
Some(entry.mode == GIT_FILEMODE_COMMIT as u32)
};
(join(root, &entry.path), is_dir)
});
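        // Also ask git for untracked files, restricted to this package's
        // directory, so newly added files are picked up as well.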
let mut opts = git2::StatusOptions::new();
opts.include_untracked(true);
if let Ok(suffix) = pkg_path.strip_prefix(root) {
opts.pathspec(suffix);
}
let statuses = repo.statuses(Some(&mut opts))?;
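        // Skip an untracked `Cargo.lock`; packaging regenerates it as needed.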
let untracked = statuses.iter().filter_map(|entry| match entry.status() {
git2::Status::WT_NEW if entry.path() != Some("Cargo.lock") => {
Some((join(root, entry.path_bytes()), None))
}
_ => None,
});
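        // Paths of nested packages discovered below; their files are excluded.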
let mut subpackages_found = Vec::new();
for (file_path, is_dir) in index_files.chain(untracked) {
let file_path = file_path?;
if !file_path.starts_with(pkg_path) {
continue;
}
match file_path.file_name().and_then(|s| s.to_str()) {
Some("target") => continue,
Some("Cargo.toml") => {
let path = file_path.parent().unwrap();
if path != pkg_path {
warn!("subpackage found: {}", path.display());
ret.retain(|p| !p.starts_with(path));
subpackages_found.push(path.to_path_buf());
continue;
}
}
_ => {}
}
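            // Skip files that live inside a previously discovered subpackage.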
if subpackages_found.iter().any(|p| file_path.starts_with(p)) {
continue;
}
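            // A directory entry is either a git submodule (a gitlink in the
            // index) or an untracked directory: try to open it as a submodule
            // first, and fall back to walking it directly.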
if is_dir.unwrap_or_else(|| file_path.is_dir()) {
warn!(" found submodule {}", file_path.display());
let rel = file_path.strip_prefix(root)?;
let rel = rel.to_str().ok_or_else(|| {
failure::format_err!("invalid utf-8 filename: {}", rel.display())
})?;
let rel = rel.replace(r"\", "/");
match repo.find_submodule(&rel).and_then(|s| s.open()) {
Ok(repo) => {
let files = self.list_files_git(pkg, &repo, filter)?;
ret.extend(files.into_iter());
}
Err(..) => {
PathSource::walk(&file_path, &mut ret, false, filter)?;
}
}
} else if (*filter)(&file_path)? {
warn!(" found {}", file_path.display());
ret.push(file_path);
}
}
return Ok(ret);
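        // Joins raw path bytes from git onto `path`. This is lossless on
        // Unix; on Windows the bytes must be valid UTF-8.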
#[cfg(unix)]
fn join(path: &Path, data: &[u8]) -> CargoResult<PathBuf> {
use std::ffi::OsStr;
use std::os::unix::prelude::*;
Ok(path.join(<OsStr as OsStrExt>::from_bytes(data)))
}
#[cfg(windows)]
fn join(path: &Path, data: &[u8]) -> CargoResult<PathBuf> {
use std::str;
match str::from_utf8(data) {
Ok(s) => Ok(path.join(s)),
Err(..) => Err(internal(
"cannot process path in git with a non \
unicode filename",
)),
}
}
}
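    /// Lists files by walking the filesystem; used when the package is not
    /// tracked by a git repository.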
fn list_files_walk(
&self,
pkg: &Package,
filter: &mut dyn FnMut(&Path) -> CargoResult<bool>,
) -> CargoResult<Vec<PathBuf>> {
let mut ret = Vec::new();
PathSource::walk(pkg.root(), &mut ret, true, filter)?;
Ok(ret)
}
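    /// Recursively collects files under `path`, skipping hidden entries, the
    /// top-level `target` directory, and any nested package roots.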
fn walk(
path: &Path,
ret: &mut Vec<PathBuf>,
is_root: bool,
filter: &mut dyn FnMut(&Path) -> CargoResult<bool>,
) -> CargoResult<()> {
if !fs::metadata(&path).map(|m| m.is_dir()).unwrap_or(false) {
if (*filter)(path)? {
ret.push(path.to_path_buf());
}
return Ok(());
}
if !is_root && fs::metadata(&path.join("Cargo.toml")).is_ok() {
return Ok(());
}
let mut entries: Vec<PathBuf> = fs::read_dir(path)
.chain_err(|| format!("cannot read {:?}", path))?
.map(|e| e.unwrap().path())
.collect();
entries.sort_unstable_by(|a, b| a.as_os_str().cmp(b.as_os_str()));
for path in entries {
let name = path.file_name().and_then(|s| s.to_str());
if name.map(|s| s.starts_with('.')) == Some(true) {
continue;
}
if is_root && name == Some("target") {
continue;
}
PathSource::walk(&path, ret, false, filter)?;
}
Ok(())
}
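    /// Returns the most recent modification time among the package's files,
    /// together with the path of that file.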
pub fn last_modified_file(&self, pkg: &Package) -> CargoResult<(FileTime, PathBuf)> {
if !self.updated {
return Err(internal("BUG: source was not updated"));
}
let mut max = FileTime::zero();
let mut max_path = PathBuf::new();
for file in self.list_files(pkg)? {
let mtime = paths::mtime(&file).unwrap_or_else(|_| FileTime::zero());
if mtime > max {
max = mtime;
max_path = file;
}
}
trace!("last modified file {}: {}", self.path.display(), max);
Ok((max, max_path))
}
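    /// Returns the root path of this source.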
pub fn path(&self) -> &Path {
&self.path
}
}
impl<'cfg> Debug for PathSource<'cfg> {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
write!(f, "the paths source")
}
}
impl<'cfg> Source for PathSource<'cfg> {
fn query(&mut self, dep: &Dependency, f: &mut dyn FnMut(Summary)) -> CargoResult<()> {
for s in self.packages.iter().map(|p| p.summary()) {
if dep.matches(s) {
f(s.clone())
}
}
Ok(())
}
fn fuzzy_query(&mut self, _dep: &Dependency, f: &mut dyn FnMut(Summary)) -> CargoResult<()> {
for s in self.packages.iter().map(|p| p.summary()) {
f(s.clone())
}
Ok(())
}
fn supports_checksums(&self) -> bool {
false
}
fn requires_precise(&self) -> bool {
false
}
fn source_id(&self) -> SourceId {
self.source_id
}
fn update(&mut self) -> CargoResult<()> {
if !self.updated {
let packages = self.read_packages()?;
self.packages.extend(packages.into_iter());
self.updated = true;
}
Ok(())
}
fn download(&mut self, id: PackageId) -> CargoResult<MaybePackage> {
trace!("getting packages; id={}", id);
let pkg = self.packages.iter().find(|pkg| pkg.package_id() == id);
pkg.cloned()
.map(MaybePackage::Ready)
.ok_or_else(|| internal(format!("failed to find {} in path source", id)))
}
fn finish_download(&mut self, _id: PackageId, _data: Vec<u8>) -> CargoResult<Package> {
panic!("no download should have started")
}
fn fingerprint(&self, pkg: &Package) -> CargoResult<String> {
let (max, max_path) = self.last_modified_file(pkg)?;
Ok(format!("{} ({})", max, max_path.display()))
}
fn describe(&self) -> String {
match self.source_id.url().to_file_path() {
Ok(path) => path.display().to_string(),
Err(_) => self.source_id.to_string(),
}
}
fn add_to_yanked_whitelist(&mut self, _pkgs: &[PackageId]) {}
fn is_yanked(&mut self, _pkg: PackageId) -> CargoResult<bool> {
Ok(false)
}
}