add ignore file support to the add command, with a force option to bypass it

grimhilt
2023-06-16 18:37:09 +02:00
parent ec6f1ebc96
commit 9956727cc9
6 changed files with 122 additions and 69 deletions
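
For context, a minimal sketch of how the new AddArgs struct and its force flag might be wired up from clap on the caller side. The argument names ("files", "force"), the handler name, and the module path are assumptions for illustration; they are not part of this commit.

use clap::ArgMatches;
use crate::commands::add::{add, AddArgs}; // module path assumed

// Hypothetical CLI handler: collect the positional paths and the --force
// flag, then hand them to the reworked add() entry point.
pub fn handle_add(matches: &ArgMatches) {
    add(AddArgs {
        files: matches.values_of("files").unwrap(), // paths to stage
        force: matches.is_present("force"),         // bypass the .nextsyncignore check
    });
}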

View File

@@ -1,42 +1,74 @@
 use clap::Values;
-use crate::utils;
+use crate::utils::{self, nextsyncignore};
 use crate::store;
-use std::path::Path;
+use std::path::{Path, PathBuf};
 use std::io::Write;
-use glob::glob;
 
-pub fn add(files: Values<'_>) {
-    let root = match utils::path::nextsync_root() {
-        Some(path) => path,
-        None => {
-            eprintln!("fatal: not a nextsync repository (or any of the parent directories): .nextsync");
-            std::process::exit(1);
-        }
-    };
+pub struct AddArgs<'a> {
+    pub files: Values<'a>,
+    pub force: bool,
+}
 
-    let mut index_path = root.clone();
-    index_path.push(".nextsync");
-    let mut index_file = store::index::open(index_path);
+// todo avoid duplicate
+// ./folder ./folder/file
+pub fn add(args: AddArgs) {
+    let mut index_file = store::index::open();
+    let mut added_files: Vec<String> = vec![];
 
-    let file_vec: Vec<&str> = files.collect();
+    let file_vec: Vec<&str> = args.files.collect();
     for file in file_vec {
         let path = Path::new(file);
-        println!("{}", file);
         match path.exists() {
             true => {
-                match writeln!(index_file, "{}", path.display()) {
-                    Ok(()) => (),
-                    Err(err) => eprintln!("{}", err),
+                if path.is_dir() {
+                    added_files.push(String::from(path.to_str().unwrap()));
+                    add_folder_content(path.to_path_buf(), &mut added_files);
+                } else {
+                    added_files.push(String::from(path.to_str().unwrap()));
                 }
             },
             false => {
-                match writeln!(index_file, "{}", path.display()) {
-                    Ok(()) => (),
-                    Err(err) => eprintln!("{}", err),
-                }
+                // todo can be regex
+                // todo deleted file/folder verif if exists
+                added_files.push(String::from(path.to_str().unwrap()));
             }
         }
     }
+
+    // check ignored file if not forced
+    if !args.force {
+        let (ignored, ignored_files) = nextsyncignore::ignore_files(&mut added_files);
+        if ignored {
+            // todo multiple nextsyncignore
+            println!("The following paths are ignored by your .nextsyncignore file:");
+            for file in ignored_files {
+                println!("{}", file);
+            }
+        }
+    }
+
+    // save all added_files in index
+    for file in added_files {
+        match writeln!(index_file, "{}", file) {
+            Ok(()) => (),
+            Err(err) => eprintln!("{}", err),
+        }
+    }
+    drop(index_file);
+}
+
+fn add_folder_content(path: PathBuf, added_files: &mut Vec<String>) {
+    let mut folders: Vec<PathBuf> = vec![];
+    folders.push(path);
+    while let Some(folder) = folders.pop() {
+        if let Ok(entries) = utils::read::read_folder(folder.clone()) {
+            for entry in entries {
+                let path_entry = PathBuf::from(entry);
+                if path_entry.is_dir() {
+                    folders.push(path_entry.clone());
+                }
+                added_files.push(String::from(path_entry.to_str().unwrap()));
+            }
+        }
+    }
+}
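
The new code relies on nextsyncignore::ignore_files(&mut added_files) returning a flag plus the list of paths that were filtered out, but that module is not part of this diff. The stand-in below only illustrates that contract; it uses naive literal/prefix matching against the lines of a .nextsyncignore file in the current directory, not whatever pattern syntax nextsync actually implements.

use std::fs;

// Illustrative stand-in, not the crate's real implementation: drop every path
// matching a line of .nextsyncignore and report which ones were dropped.
pub fn ignore_files(files: &mut Vec<String>) -> (bool, Vec<String>) {
    // A missing ignore file simply means nothing is ignored.
    let patterns: Vec<String> = fs::read_to_string(".nextsyncignore")
        .unwrap_or_default()
        .lines()
        .map(str::trim)
        .filter(|l| !l.is_empty() && !l.starts_with('#'))
        .map(String::from)
        .collect();

    let mut ignored = Vec::new();
    // Keep only the paths that match no pattern; collect the rest for reporting.
    files.retain(|f| {
        let hit = patterns
            .iter()
            .any(|p| f == p || f.starts_with(&format!("{}/", p)));
        if hit {
            ignored.push(f.clone());
        }
        !hit
    });

    (!ignored.is_empty(), ignored)
}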

View File

@@ -15,9 +15,7 @@ enum RemoveSide {
     Right,
 }
 
-#[derive(PartialEq)]
-#[derive(Debug)]
-#[derive(Clone)]
+#[derive(PartialEq, Debug, Clone)]
 pub enum State {
     Default,
     New,
@@ -39,8 +37,7 @@ pub fn status() {
     print_status(staged_objs, objs);
 }
 
-#[derive(Debug)]
-#[derive(Clone)]
+#[derive(Debug, Clone)]
 pub struct Obj {
     pub otype: String,
     pub name: String,
@@ -324,10 +321,16 @@ mod tests {
         let hash4 = hasher.result_str();
         hasher.reset();
 
-        let mut hashes = HashSet::new();
-        hashes.insert(hash1.clone());
-        hashes.insert(hash2.clone());
-        hashes.insert(hash4.clone());
+        let mut hashes = HashMap::new();
+        let default_obj = Obj {
+            otype: String::from("tree"),
+            name: String::from("test"),
+            path: PathBuf::from(""),
+            state: State::Default,
+        };
+        hashes.insert(hash1.clone(), default_obj.clone());
+        hashes.insert(hash2.clone(), default_obj.clone());
+        hashes.insert(hash4.clone(), default_obj.clone());
 
         let mut objects: Vec<String> = vec![];
         objects.push(String::from("file1"));
@@ -336,7 +339,7 @@ mod tests {
         remove_duplicate(&mut hashes, &mut objects, RemoveSide::Both);
         dbg!(hashes.clone());
         dbg!(objects.clone());
-        assert_eq!(hashes.contains(&hash4), true);
+        assert_eq!(hashes.contains_key(&hash4), true);
         assert_eq!(hashes.len(), 1);
         assert_eq!(objects, vec!["file3"]);
     }