use crate::config::StarshipConfig;
use crate::module::Module;
use crate::modules;
use clap::ArgMatches;
use git2::{ErrorCode::UnbornBranch, Repository, RepositoryState};
use once_cell::sync::OnceCell;
use std::collections::{HashMap, HashSet};
use std::env;
use std::ffi::OsString;
use std::fs;
use std::path::{Path, PathBuf};
use std::string::String;
use std::time::{Duration, Instant};

/// Context contains data or common methods that may be used by multiple modules.
///
/// The data contained within Context will be relevant to this particular rendering
/// of the prompt.
pub struct Context<'a> {
    /// The deserialized configuration map from the user's `starship.toml` file.
    pub config: StarshipConfig,

    /// The current working directory that starship is being called in.
    pub current_dir: PathBuf,

    /// A struct containing directory contents in a lookup-optimised format.
    dir_contents: OnceCell<DirContents>,

    /// Properties to provide to modules.
    pub properties: HashMap<&'a str, String>,

    /// Private field to store Git information for modules that need it.
    repo: OnceCell<Repo>,

    /// The shell the user is assumed to be running.
    pub shell: Shell,

    /// A HashMap of environment variable mocks.
    pub env: HashMap<&'a str, String>,
}

impl<'a> Context<'a> {
    /// Identify the current working directory and create an instance of Context
    /// for it.
    pub fn new(arguments: ArgMatches) -> Context {
        // Retrieve the "path" flag. If unavailable, use the current directory instead.
        let path = arguments
            .value_of("path")
            .map(From::from)
            .unwrap_or_else(|| {
                env::var("PWD").map(PathBuf::from).unwrap_or_else(|err| {
                    log::debug!("Unable to get path from $PWD: {}", err);
                    env::current_dir().expect("Unable to identify current directory. Error")
                })
            });

        Context::new_with_dir(arguments, path)
    }

    /// Create a new instance of Context for the provided directory
    pub fn new_with_dir<T>(arguments: ArgMatches, dir: T) -> Context
    where
        T: Into<PathBuf>,
    {
        let config = StarshipConfig::initialize();

        // Unwrap the clap arguments into a simple hashtable.
        // We only care about single arguments at this point; there isn't a
        // use case for a list of arguments yet.
        let properties: HashMap<&str, std::string::String> = arguments
            .args
            .iter()
            .filter(|(_, v)| !v.vals.is_empty())
            .map(|(a, b)| (*a, b.vals.first().cloned().unwrap().into_string().unwrap()))
            .collect();

        // TODO: Currently gets the physical directory. Get the logical directory.
        let current_dir = Context::expand_tilde(dir.into());

        let shell = Context::get_shell();

        Context {
            config,
            properties,
            current_dir,
            dir_contents: OnceCell::new(),
            repo: OnceCell::new(),
            shell,
            env: HashMap::new(),
        }
    }

    // Retrieves an environment variable from the OS, or from the mock table when running tests.
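    //
    // A minimal illustration (a sketch, not part of the test suite): in a
    // `#[cfg(test)]` build, `context.get_env("HOME")` reads from `context.env`,
    // so a test can pre-populate that map instead of touching the real process
    // environment; outside of tests it falls through to `env::var`.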
    pub fn get_env<K: AsRef<str>>(&self, key: K) -> Option<String> {
        if cfg!(test) {
            self.env.get(key.as_ref()).map(|val| val.to_string())
        } else {
            env::var(key.as_ref()).ok()
        }
    }

    // Retrieves an environment variable as an `OsString` from the OS, or from the mock table
    // when running tests (OS-string version of `get_env`).
    pub fn get_env_os<K: AsRef<str>>(&self, key: K) -> Option<OsString> {
        if cfg!(test) {
            self.env.get(key.as_ref()).map(OsString::from)
        } else {
            env::var_os(key.as_ref())
        }
    }

    /// Convert a `~` in a path to the home directory
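    ///
    /// For example, `expand_tilde(PathBuf::from("~/projects"))` resolves to
    /// `<home>/projects`; paths that do not start with `~` are returned unchanged.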
    pub fn expand_tilde(dir: PathBuf) -> PathBuf {
        if dir.starts_with("~") {
            let without_home = dir.strip_prefix("~").unwrap();
            return dirs_next::home_dir().unwrap().join(without_home);
        }
        dir
    }

    /// Create a new module
    pub fn new_module(&self, name: &str) -> Module {
        let config = self.config.get_module_config(name);
        let desc = modules::description(name);

        Module::new(name, desc, config)
    }

    /// Check if the `disabled` option of the module is true in the configuration file.
    pub fn is_module_disabled_in_config(&self, name: &str) -> bool {
        let config = self.config.get_module_config(name);

        // If the segment has "disabled" set to "true", don't show it
        let disabled = config.and_then(|table| table.as_table()?.get("disabled")?.as_bool());

        disabled == Some(true)
    }

    /// Return whether the specified custom module has a `disabled` option set to true.
    /// If it doesn't exist, `None` is returned.
    pub fn is_custom_module_disabled_in_config(&self, name: &str) -> Option<bool> {
        let config = self.config.get_custom_module_config(name)?;
        let disabled = Some(config).and_then(|table| table.as_table()?.get("disabled")?.as_bool());

        Some(disabled == Some(true))
    }

    // Returns a new ScanDir struct holding a reference to this context's directory contents;
    // see ScanDir for the builder methods.
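    //
    // A typical use from a module looks roughly like this (an illustrative
    // sketch; the file and extension names are made up):
    //
    //     let looks_like_node = context
    //         .try_begin_scan()?
    //         .set_files(&["package.json"])
    //         .set_extensions(&["js"])
    //         .is_match();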
    pub fn try_begin_scan(&'a self) -> Option<ScanDir<'a>> {
        Some(ScanDir {
            dir_contents: self.dir_contents().ok()?,
            files: &[],
            folders: &[],
            extensions: &[],
        })
    }

    /// Lazily determine the Git repository information (root, branch, state, remote)
    /// the first time a module requests it.
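    ///
    /// A sketch of typical use from a module (error handling elided):
    ///
    /// ```ignore
    /// let repo = context.get_repo().ok()?;
    /// let branch = repo.branch.as_deref().unwrap_or("(no branch)");
    /// ```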
    pub fn get_repo(&self) -> Result<&Repo, std::io::Error> {
        self.repo
            .get_or_try_init(|| -> Result<Repo, std::io::Error> {
                let repository = if env::var("GIT_DIR").is_ok() {
                    Repository::open_from_env().ok()
                } else {
                    Repository::discover(&self.current_dir).ok()
                };
                let branch = repository
                    .as_ref()
                    .and_then(|repo| get_current_branch(repo));
                let root = repository
                    .as_ref()
                    .and_then(|repo| repo.workdir().map(Path::to_path_buf));
                let state = repository.as_ref().map(|repo| repo.state());
                let remote = repository.as_ref().and_then(|repo| get_remote_branch(repo));
                Ok(Repo {
                    branch,
                    root,
                    state,
                    remote,
                })
            })
    }

    pub fn dir_contents(&self) -> Result<&DirContents, std::io::Error> {
        self.dir_contents.get_or_try_init(|| {
            let timeout = Duration::from_millis(self.config.get_root_config().scan_timeout);
            DirContents::from_path_with_timeout(&self.current_dir, timeout)
        })
    }
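
    /// Map the `STARSHIP_SHELL` environment variable onto a `Shell` variant
    /// (e.g. "fish" becomes `Shell::Fish`); unset or unrecognised values fall
    /// back to `Shell::Unknown`.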
    fn get_shell() -> Shell {
        let shell = env::var("STARSHIP_SHELL").unwrap_or_default();
        match shell.as_str() {
            "bash" => Shell::Bash,
            "fish" => Shell::Fish,
            "ion" => Shell::Ion,
            "powershell" => Shell::PowerShell,
            "zsh" => Shell::Zsh,
            _ => Shell::Unknown,
        }
    }
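
    /// The elapsed time of the last command, read from the `cmd_duration`
    /// property and parsed as a `u128`.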
    pub fn get_cmd_duration(&self) -> Option<u128> {
        self.properties.get("cmd_duration")?.parse::<u128>().ok()
    }
}

#[derive(Debug)]
pub struct DirContents {
    // HashSet of all files (not folders), relative to the base directory given at construction.
    files: HashSet<PathBuf>,
    // HashSet of all file names (the final path component), as strings.
    file_names: HashSet<String>,
    // HashSet of all folders, relative to the base directory given at construction.
    folders: HashSet<PathBuf>,
    // HashSet of all extensions found, without dots, e.g. "js" instead of ".js".
    extensions: HashSet<String>,
}

impl DirContents {
    #[cfg(test)]
    fn from_path(base: &PathBuf) -> Result<Self, std::io::Error> {
        Self::from_path_with_timeout(base, Duration::from_secs(30))
    }

    fn from_path_with_timeout(base: &PathBuf, timeout: Duration) -> Result<Self, std::io::Error> {
        let start = Instant::now();

        let mut folders: HashSet<PathBuf> = HashSet::new();
        let mut files: HashSet<PathBuf> = HashSet::new();
        let mut file_names: HashSet<String> = HashSet::new();
        let mut extensions: HashSet<String> = HashSet::new();

        fs::read_dir(base)?
            .enumerate()
            .take_while(|(n, _)| {
                n & 0xFF != 0 // only check timeout once every 2^8 entries
                    || start.elapsed() < timeout
            })
            .filter_map(|(_, entry)| entry.ok())
            .for_each(|entry| {
                let path = PathBuf::from(entry.path().strip_prefix(base).unwrap());
                if entry.path().is_dir() {
                    folders.insert(path);
                } else {
                    if !path.to_string_lossy().starts_with('.') {
                        path.extension()
                            .map(|ext| extensions.insert(ext.to_string_lossy().to_string()));
                    }
                    if let Some(file_name) = path.file_name() {
                        file_names.insert(file_name.to_string_lossy().to_string());
                    }
                    files.insert(path);
                }
            });

        log::trace!(
            "Building HashSets of directory files, folders and extensions took {:?}",
            start.elapsed()
        );

        Ok(DirContents {
            folders,
            files,
            file_names,
            extensions,
        })
    }

    pub fn files(&self) -> impl Iterator<Item = &PathBuf> {
        self.files.iter()
    }

    pub fn has_file(&self, path: &str) -> bool {
        self.files.contains(Path::new(path))
    }

    pub fn has_file_name(&self, name: &str) -> bool {
        self.file_names.contains(name)
    }

    pub fn has_any_file_name(&self, names: &[&str]) -> bool {
        names.iter().any(|name| self.has_file_name(name))
    }

    pub fn has_folder(&self, path: &str) -> bool {
        self.folders.contains(Path::new(path))
    }

    pub fn has_any_folder(&self, paths: &[&str]) -> bool {
        paths.iter().any(|path| self.has_folder(path))
    }

    pub fn has_extension(&self, ext: &str) -> bool {
        self.extensions.contains(ext)
    }

    pub fn has_any_extension(&self, exts: &[&str]) -> bool {
        exts.iter().any(|ext| self.has_extension(ext))
    }
}

pub struct Repo {
    /// If `current_dir` is a git repository or is contained within one,
    /// this is the current branch name of that repo.
    pub branch: Option<String>,

    /// If `current_dir` is a git repository or is contained within one,
    /// this is the path to the root of that repo.
    pub root: Option<PathBuf>,

    /// The current state of the repository (e.g. an in-progress merge or rebase).
    pub state: Option<RepositoryState>,

    /// Remote branch name, if an upstream is configured.
    pub remote: Option<String>,
}

// A set of criteria used to check whether the current directory looks like a
// project of a given language; the criteria are set via the builder pattern.
pub struct ScanDir<'a> {
    dir_contents: &'a DirContents,
    files: &'a [&'a str],
    folders: &'a [&'a str],
    extensions: &'a [&'a str],
}

impl<'a> ScanDir<'a> {
    pub const fn set_files(mut self, files: &'a [&'a str]) -> Self {
        self.files = files;
        self
    }

    pub const fn set_extensions(mut self, extensions: &'a [&'a str]) -> Self {
        self.extensions = extensions;
        self
    }

    pub const fn set_folders(mut self, folders: &'a [&'a str]) -> Self {
        self.folders = folders;
        self
    }

    /// Check whether any of the configured criteria (file names, folders, or
    /// extensions) are present in the scanned directory contents.
    pub fn is_match(&self) -> bool {
        self.dir_contents.has_any_extension(self.extensions)
            || self.dir_contents.has_any_folder(self.folders)
            || self.dir_contents.has_any_file_name(self.files)
    }
}

fn get_current_branch(repository: &Repository) -> Option<String> {
    let head = match repository.head() {
        Ok(reference) => reference,
        Err(e) => {
            return if e.code() == UnbornBranch {
                // HEAD should only be an unborn branch if the repository is fresh;
                // in that case read directly from `.git/HEAD`.
                let mut head_path = repository.path().to_path_buf();
                head_path.push("HEAD");

                // Get the first line, then the last path segment.
                fs::read_to_string(&head_path)
                    .ok()?
                    .lines()
                    .next()?
                    .trim()
                    .split('/')
                    .last()
                    .map(|r| r.to_owned())
            } else {
                None
            };
        }
    };

    let shorthand = head.shorthand();

    shorthand.map(std::string::ToString::to_string)
}

fn get_remote_branch(repository: &Repository) -> Option<String> {
    if let Ok(head) = repository.head() {
        if let Some(local_branch_ref) = head.name() {
            let remote_ref = match repository.branch_upstream_name(local_branch_ref) {
                Ok(remote_ref) => remote_ref.as_str()?.to_owned(),
                Err(_) => return None,
            };

            let remote = remote_ref.split('/').last().map(|r| r.to_owned())?;
            return Some(remote);
        }
    }
    None
}

#[derive(Debug, Clone, Copy, PartialEq)]
pub enum Shell {
    Bash,
    Fish,
    Ion,
    PowerShell,
    Zsh,
    Unknown,
}

#[cfg(test)]
mod tests {
    use super::*;

    fn testdir(paths: &[&str]) -> Result<tempfile::TempDir, std::io::Error> {
        let dir = tempfile::tempdir()?;
        for path in paths {
            let p = dir.path().join(Path::new(path));
            if let Some(parent) = p.parent() {
                fs::create_dir_all(parent)?;
            }
            fs::File::create(p)?.sync_all()?;
        }
        Ok(dir)
    }

    #[test]
    fn test_scan_dir() -> Result<(), Box<dyn std::error::Error>> {
        let empty = testdir(&[])?;
        let empty_dc = DirContents::from_path(&PathBuf::from(empty.path()))?;

        assert_eq!(
            ScanDir {
                dir_contents: &empty_dc,
                files: &["package.json"],
                extensions: &["js"],
                folders: &["node_modules"],
            }
            .is_match(),
            false
        );
        empty.close()?;

        let rust = testdir(&["README.md", "Cargo.toml", "src/main.rs"])?;
        let rust_dc = DirContents::from_path(&PathBuf::from(rust.path()))?;
        assert_eq!(
            ScanDir {
                dir_contents: &rust_dc,
                files: &["package.json"],
                extensions: &["js"],
                folders: &["node_modules"],
            }
            .is_match(),
            false
        );
        rust.close()?;

        let java = testdir(&["README.md", "src/com/test/Main.java", "pom.xml"])?;
        let java_dc = DirContents::from_path(&PathBuf::from(java.path()))?;
        assert_eq!(
            ScanDir {
                dir_contents: &java_dc,
                files: &["package.json"],
                extensions: &["js"],
                folders: &["node_modules"],
            }
            .is_match(),
            false
        );
        java.close()?;

        let node = testdir(&["README.md", "node_modules/lodash/main.js", "package.json"])?;
        let node_dc = DirContents::from_path(&PathBuf::from(node.path()))?;
        assert_eq!(
            ScanDir {
                dir_contents: &node_dc,
                files: &["package.json"],
                extensions: &["js"],
                folders: &["node_modules"],
            }
            .is_match(),
            true
        );
        node.close()?;

        Ok(())
    }
}