Compare commits

..

5 Commits

Author SHA1 Message Date
grimhilt
a35c7b20d8 cleaning warnings 2023-08-27 22:57:05 +02:00
grimhilt
863e3bd68a find deletion on pull 2023-08-27 22:50:51 +02:00
grimhilt
57647e5df2 implement -all option to add 2023-08-25 18:52:29 +02:00
grimhilt
41c4796555 push copy file 2023-08-25 16:34:16 +02:00
grimhilt
aced8b992a create IntoPathBuf 2023-08-25 16:25:29 +02:00
20 changed files with 499 additions and 136 deletions

12
Cargo.lock generated
View File

@ -777,18 +777,18 @@ checksum = "767eb9f07d4a5ebcb39bbf2d452058a93c011373abf6832e24194a1c3f004794"
[[package]]
name = "proc-macro2"
version = "1.0.59"
version = "1.0.66"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6aeca18b86b413c660b781aa319e4e2648a3e6f9eadc9b47e9038e6fe9f3451b"
checksum = "18fb31db3f9bddb2ea821cde30a9f70117e3f119938b5ee630b7403aa6e2ead9"
dependencies = [
"unicode-ident",
]
[[package]]
name = "quote"
version = "1.0.28"
version = "1.0.32"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "1b9ab9c7eadfd8df19006f1cf1a4aed13540ed5cbc047010ece5826e10825488"
checksum = "50f3b39ccfb720540debaa0164757101c08ecb8d326b15358ce76a62c7e85965"
dependencies = [
"proc-macro2",
]
@ -1069,9 +1069,9 @@ checksum = "8ea5119cdb4c55b55d432abb513a0429384878c15dde60cc77b1c99de1a95a6a"
[[package]]
name = "syn"
version = "2.0.18"
version = "2.0.28"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "32d41677bcbe24c20c52e7c70b0d8db04134c5d1066bf98662e2871ad200ea3e"
checksum = "04361975b3f5e348b2189d8dc55bc942f278b2d482a6a0365de5bdd62d351567"
dependencies = [
"proc-macro2",
"quote",

View File

@ -1,20 +1,30 @@
use std::io::Write;
use std::fs::OpenOptions;
use std::path::{Path, PathBuf};
use clap::Values;
use crate::store::index;
use crate::store::{self, object::Object};
use crate::utils;
use crate::utils::nextsyncignore::{self, ignore_file};
use crate::utils::path::{normalize_relative, repo_root};
use crate::utils::path::{normalize_relative, repo_root, path_buf_to_string};
use super::status::get_all_objs;
pub struct AddArgs<'a> {
pub files: Values<'a>,
pub files: Option<Values<'a>>,
pub force: bool,
pub all: bool,
}
// todo match deleted files
// todo match weird reg expression
// todo -A == .
pub fn add(args: AddArgs) {
// write all modification in the index
if args.all {
write_all();
return;
}
let mut index_file = store::index::open();
let mut added_files: Vec<String> = vec![];
@ -24,7 +34,7 @@ pub fn add(args: AddArgs) {
};
let mut ignored_f = vec![];
let file_vec: Vec<&str> = args.files.collect();
let file_vec: Vec<&str> = args.files.unwrap().collect();
for file in file_vec {
let f = match normalize_relative(file) {
@ -93,5 +103,15 @@ fn add_folder_content(path: PathBuf, added_files: &mut Vec<String>) {
}
}
}
}
// Rebuild the index so it lists every object of the working tree
// (used by `add --all`): each path returned by `get_all_objs` is
// written on its own line.
fn write_all() {
    let objs = get_all_objs();
    let mut index_file = OpenOptions::new()
        .write(true)
        .create(true)
        // truncate so stale entries of a previous, longer index cannot
        // survive the rewrite (write+create alone overwrites in place)
        .truncate(true)
        .open(index::path())
        .expect("Cannot open index file");
    for obj in objs {
        // report write failures instead of silently dropping entries
        if let Err(err) = writeln!(index_file, "{}", path_buf_to_string(obj.path.clone())) {
            eprintln!("err: failed to write index entry {}: {}", obj.path.display(), err);
        }
    }
}

View File

@ -70,7 +70,7 @@ pub fn clone(args: CloneArgs) {
let depth = &args.depth.clone().unwrap_or(DEPTH.to_string());
let (folders, files) = enumerate_remote(
|a| req(&api_props, depth, a),
&should_skip,
None,
EnumerateOptions {
depth: Some(depth.to_owned()),
relative_s: None
@ -91,10 +91,10 @@ pub fn clone(args: CloneArgs) {
}
}
let downloader = Downloader::new()
Downloader::new()
.set_api_props(api_props.clone())
.set_files(files)
//.should_log()
.should_log()
.download(ref_path.clone(), Some(&save_blob));
}
@ -107,10 +107,6 @@ fn save_blob(obj: ObjProps) {
}
}
fn should_skip(_: ObjProps) -> bool {
return false;
}
fn req(api_props: &ApiProps, depth: &str, relative_s: &str) -> Result<Vec<ObjProps>, ApiError> {
ReqProps::new()
.set_request(relative_s, &api_props)

View File

@ -35,7 +35,7 @@ pub fn pull() {
}
}
let downloader = Downloader::new()
Downloader::new()
.set_api_props(get_api_props())
.set_files(files)
.should_log()

View File

@ -12,6 +12,7 @@ pub mod rm_dir;
pub mod deleted;
pub mod modified;
pub mod moved;
pub mod copied;
pub fn push() {
// todo err when pushing new folder
@ -27,6 +28,12 @@ pub fn push() {
let staged_objs = status::get_all_staged();
// exit if there is nothing to push
if staged_objs.len() == 0 {
println!("Everything up-to-date");
std::process::exit(0);
}
// path certifying that all its children can be pushed without hesitation
// (e.g. if remote dir has no changes since last sync all children
// can be pushed without verification)

View File

@ -0,0 +1,83 @@
use std::path::PathBuf;
use std::io;
use crate::services::api::ApiError;
use crate::services::r#copy::Copy;
use crate::services::req_props::ReqProps;
use crate::commands::status::LocalObj;
use crate::commands::push::push_factory::{PushState, PushChange, PushFlowState};
use crate::store::object::blob::Blob;
use crate::utils::path::path_buf_to_string;
// Push strategy for a file detected as copied locally: `obj.path` is the
// copy destination and `obj.path_from` the still-existing source (see push()).
pub struct Copied {
pub obj: LocalObj,
}
// Pushes a locally-copied file by issuing a server-side COPY, then records
// the resulting blob with the remote's lastmodified timestamp.
impl PushChange for Copied {
// Map the shared push-flow state to a push decision.
// NOTE(review): both RemoteIsNewer and LocalIsNewer become Conflict — a
// copy is only pushed when the destination is absent remotely; confirm.
fn can_push(&self, whitelist: &mut Option<PathBuf>) -> PushState {
match self.flow(&self.obj, whitelist.clone()) {
PushFlowState::Whitelisted => PushState::Done,
PushFlowState::NotOnRemote => PushState::Valid,
PushFlowState::RemoteIsNewer => PushState::Conflict,
PushFlowState::LocalIsNewer => PushState::Conflict,
PushFlowState::Error => PushState::Error,
}
}
// Perform the remote copy and create the destination blob locally.
// Exits the whole process on any API error (consistent with the other
// push strategies' fatal handling).
fn push(&self) -> io::Result<()> {
let obj = &self.obj;
// server-side COPY: source = path_from, destination = obj.path
let res = Copy::new()
.set_url(
&path_buf_to_string(obj.path_from.clone().unwrap()),
obj.path.to_str().unwrap())
.send_with_err();
match res {
Err(ApiError::IncorrectRequest(err)) => {
eprintln!("fatal: error copying file {}: {}", obj.name, err.status());
std::process::exit(1);
},
Err(ApiError::RequestError(_)) => {
eprintln!("fatal: request error copying file {}", obj.name);
std::process::exit(1);
}
_ => (),
}
// get lastmodified props to update it
let props = ReqProps::new()
.set_url(obj.path.to_str().unwrap())
.getlastmodified()
.send_req_single();
let prop = match props {
Ok(o) => o,
Err(ApiError::IncorrectRequest(err)) => {
eprintln!("fatal: {}", err.status());
std::process::exit(1);
},
Err(ApiError::EmptyError(_)) => {
eprintln!("Failed to get body");
std::process::exit(1);
}
Err(ApiError::RequestError(err)) => {
eprintln!("fatal: {}", err);
std::process::exit(1);
},
Err(ApiError::Unexpected(_)) => todo!()
};
// assumes the PROPFIND response always carries lastmodified — TODO confirm
let lastmodified = prop.lastmodified.unwrap().timestamp_millis();
// create destination blob
if let Err(err) = Blob::new(obj.path.clone()).create(&lastmodified.to_string(), false) {
eprintln!("err: creating ref of {}: {}", obj.name.clone(), err);
}
Ok(())
}
// download file with .distant at the end
fn conflict(&self) {
todo!()
}
}

View File

@ -9,6 +9,7 @@ use crate::commands::push::rm_dir::RmDir;
use crate::commands::push::deleted::Deleted;
use crate::commands::push::modified::Modified;
use crate::commands::push::moved::Moved;
use crate::commands::push::copied::Copied;
use crate::store::object::blob::Blob;
#[derive(Debug)]
@ -93,8 +94,8 @@ impl PushFactory {
State::Modified => Box::new(Modified { obj }),
State::Deleted => Box::new(Deleted { obj }),
State::Moved => Box::new(Moved { obj }),
State::Copied => Box::new(Copied { obj }),
State::Default => todo!(),
_ => todo!(),
}
}

View File

@ -23,12 +23,12 @@ pub fn remote_diff() {
pub fn get_diff(path: PathBuf) -> (Vec<ObjProps>, Vec<ObjProps>) {
let depth = "2"; // todo
let depth = "2"; // todo opti
let api_props = get_api_props();
enumerate_remote(
|a| req(&api_props, depth, a),
&should_skip,
Some(&should_skip),
EnumerateOptions {
depth: Some(depth.to_owned()),
relative_s: Some(path.to_str().unwrap().to_owned())

View File

@ -1,6 +1,6 @@
use std::fs::File;
use std::path::PathBuf;
use std::io::{self, Lines, BufReader};
use std::io::{Lines, BufReader};
use std::collections::HashMap;
use crypto::digest::Digest;
use crypto::sha1::Sha1;
@ -32,6 +32,25 @@ pub enum State {
// todo: relative path, filename
// todo: not catch added empty folder
pub fn status() {
let mut all_hashes = get_all_objs_hashes();
let staged_objs = get_staged(&mut all_hashes);
let objs: Vec<LocalObj> = all_hashes.iter().map(|x| {
x.1.clone()
}).collect();
print_status(staged_objs, objs);
}
pub fn get_all_objs() -> Vec<LocalObj> {
let all_hashes = get_all_objs_hashes();
all_hashes.iter().map(|x| {
x.1.clone()
}).collect()
}
fn get_all_objs_hashes() -> HashMap<String, LocalObj> {
let (mut new_objs_hashes, mut del_objs_hashes, objs_modified) = get_diff();
let move_copy_hashes = get_move_copy_objs(&mut new_objs_hashes, &mut del_objs_hashes);
@ -58,14 +77,7 @@ pub fn status() {
all_hashes.extend(new_objs_hashes);
all_hashes.extend(modified_objs_hashes);
let staged_objs = get_staged(&mut all_hashes);
let objs: Vec<LocalObj> = all_hashes.iter().map(|x| {
x.1.clone()
}).collect();
print_status(staged_objs, objs);
all_hashes
}
fn should_retain(hasher: &mut Sha1, key: String, obj: LocalObj, move_copy_hashes: &mut HashMap<String, LocalObj>, del_objs_h: &mut HashMap<String, LocalObj>) -> bool {
@ -145,7 +157,7 @@ pub fn get_all_staged() -> Vec<LocalObj> {
let mut staged_objs = vec![];
for line in lines {
let obj = Blob::new(PathBuf::from(line)).get_local_obj();
let obj = Blob::new(line).get_local_obj();
if obj.state != State::Default {
staged_objs.push(obj);
}

View File

@ -81,6 +81,7 @@ fn main() {
.arg(
Arg::with_name("files")
.required(true)
.conflicts_with("all")
.multiple(true)
.takes_value(true)
.value_name("FILE")
@ -92,6 +93,12 @@ fn main() {
.long("force")
.help("Allow adding otherwise ignored files."),
)
.arg(
Arg::with_name("all")
.short("A")
.long("all")
.help("This adds, modifies, and removes index entries to match the working tree"),
)
.about("Add changes to the index")
)
.subcommand(
@ -138,8 +145,15 @@ fn main() {
} else if let Some(matches) = matches.subcommand_matches("add") {
if let Some(files) = matches.values_of("files") {
commands::add::add(AddArgs {
files,
files: Some(files),
force: matches.is_present("force"),
all: matches.is_present("all"),
});
} else {
commands::add::add(AddArgs {
files: None,
force: matches.is_present("force"),
all: matches.is_present("all"),
});
}
} else if let Some(_) = matches.subcommand_matches("reset") {

View File

@ -5,3 +5,6 @@ pub mod req_props;
pub mod upload_file;
pub mod delete_path;
pub mod downloader;
pub mod r#move;
pub mod r#copy;
//pub mod bulk_upload;

61
src/services/copy.rs Normal file
View File

@ -0,0 +1,61 @@
use reqwest::{Method, Response, Error, header::HeaderValue};
use crate::services::api::{ApiBuilder, ApiError};
use crate::clone::get_url_props;
use crate::commands::config;
// Builder for a WebDAV COPY request (server-side copy of a remote file).
pub struct Copy {
api_builder: ApiBuilder,
}
impl Copy {
// Create an empty COPY request builder.
pub fn new() -> Self {
Copy {
api_builder: ApiBuilder::new(),
}
}
// Prepare a COPY of `url` (source path) to `destination` (repo-relative
// target). The destination is expanded to a full dav URL and sent in the
// `Destination` header as WebDAV COPY requires.
// Exits the process if no `remote` is configured.
pub fn set_url(&mut self, url: &str, destination: &str) -> &mut Copy {
// COPY is not a predefined reqwest Method, so build it from raw bytes
self.api_builder.build_request(Method::from_bytes(b"COPY").unwrap(), url);
let remote = match config::get("remote") {
Some(r) => r,
None => {
eprintln!("fatal: unable to find a remote");
std::process::exit(1);
}
};
let (host, username, root) = get_url_props(&remote);
// Destination: <host>/remote.php/dav/files/<user><root>/<destination>
let mut url = String::from(host);
url.push_str("/remote.php/dav/files/");
// NOTE(review): panics if the remote URL has no username — confirm callers
url.push_str(username.unwrap());
url.push_str(&root);
url.push_str("/");
if destination != "/" {
url.push_str(destination);
}
self.api_builder.set_header("Destination", HeaderValue::from_str(&url).unwrap());
self
}
// Send the built request asynchronously.
pub async fn send(&mut self) -> Result<Response, Error> {
self.api_builder.send().await
}
// Set the WebDAV `Overwrite` header ("T"/"F"). Currently unused (underscore).
pub fn _overwrite(&mut self, overwrite: bool) -> &mut Copy {
self.api_builder.set_header("Overwrite", HeaderValue::from_str({
if overwrite { "T" } else { "F" }
}).unwrap());
self
}
// Send on a fresh blocking tokio runtime; non-success HTTP statuses are
// mapped to ApiError::IncorrectRequest, transport failures to RequestError.
pub fn send_with_err(&mut self) -> Result<(), ApiError> {
let res = tokio::runtime::Runtime::new().unwrap().block_on(async {
self.send().await
}).map_err(ApiError::RequestError)?;
if res.status().is_success() {
Ok(())
} else {
Err(ApiError::IncorrectRequest(res))
}
}
}

View File

@ -42,7 +42,7 @@ impl Downloader {
self
}
pub fn add_file(&mut self, file: ObjProps) -> &mut Downloader {
pub fn _add_file(&mut self, file: ObjProps) -> &mut Downloader {
self.files.push(file);
self
}

View File

@ -41,7 +41,7 @@ impl Move {
self.api_builder.send().await
}
pub fn overwrite(&mut self, overwrite: bool) -> &mut Move {
pub fn _overwrite(&mut self, overwrite: bool) -> &mut Move {
self.api_builder.set_header("Overwrite", HeaderValue::from_str({
if overwrite { "T" } else { "F" }
}).unwrap());

View File

@ -1,12 +1,17 @@
use std::io;
use std::path::PathBuf;
use std::fs::File;
use std::fs::OpenOptions;
use crate::utils::{read, path};
pub fn open() -> File {
pub fn path() -> PathBuf {
let mut path = path::nextsync();
path.push("index");
path
}
pub fn open() -> File {
let path = path();
OpenOptions::new()
.read(true)
.write(true)

View File

@ -58,9 +58,10 @@ impl Object {
match read::read_lines(&self.obj_p) {
Ok(mut reader) => {
if let Some(Ok(line)) = reader.next() {
let mut data = line.rsplit(' ');
if data.clone().count() >= 2 {
self.ts = Some(data.next().unwrap().parse::<i64>().unwrap())
let mut data = line.rsplit(' ').collect::<Vec<_>>();
data.reverse();
if data.clone().len() >= 2 {
self.ts = Some(data[1].parse::<i64>().unwrap())
}
}
},
@ -222,32 +223,3 @@ fn create_obj(name: String, content: &str) -> io::Result<()> {
Ok(())
}
// get the last time a blob synced with remote
pub fn get_timestamp(path_s: String) -> Option<i64> {
let mut obj_p = path::objects();
let (dir, res) = hash_obj(&path_s);
obj_p.push(dir);
obj_p.push(res);
match read::read_lines(obj_p) {
Ok(mut reader) => {
match reader.next() {
Some(Ok(line)) => {
let mut data = line.rsplit(' ');
if data.clone().count() >= 2 {
Some(data.nth_back(1).unwrap().parse::<i64>().unwrap())
} else {
None
}
},
_ => None,
}
},
Err(err) => {
eprintln!("error reading object: {}", err);
None
},
}
}

View File

@ -7,6 +7,7 @@ use std::time::SystemTime;
use crypto::sha1::Sha1;
use crypto::digest::Digest;
use crate::commands::status::{LocalObj, State};
use crate::utils::into::IntoPathBuf;
use crate::utils::path::path_buf_to_string;
use crate::utils::{path, read};
use crate::store::head;
@ -24,7 +25,8 @@ pub struct Blob {
}
impl Blob {
pub fn new(r_path: PathBuf) -> Blob {
pub fn new<S>(r_path: S) -> Blob where S: IntoPathBuf {
let r_path = r_path.into();
let mut hasher = Sha1::new();
hasher.input_str(r_path.to_str().unwrap());
let hash = hasher.result_str();
@ -111,7 +113,7 @@ impl Blob {
refs_p.push(dir);
if !refs_p.exists() {
fs::create_dir_all(refs_p.clone());
let _ = fs::create_dir_all(refs_p.clone());
}
refs_p.push(res);
refs_p
@ -250,6 +252,7 @@ impl Blob {
}
self.data.reverse();
// remove \n of last element
if let Some(last) = self.data.last_mut() {
if last.ends_with("\n") {
last.pop();
@ -340,9 +343,7 @@ impl Blob {
!self.has_same_size() || (self.is_newer() && !self.has_same_hash())
}
pub fn get_local_obj(&mut self) -> LocalObj {
let mut path_from = None;
let state = {
pub fn status(&mut self, path_from: &mut Option<PathBuf>) -> State {
let has_obj_ref = self.obj_p.clone().exists();
let blob_exists = self.a_path.clone().exists();
@ -351,13 +352,13 @@ impl Blob {
} else if !has_obj_ref && blob_exists {
let identical_blobs = self.get_all_identical_blobs();
if identical_blobs.len() != 0 {
let identical_blob = Blob::new(identical_blobs[0].clone().into())
let identical_blob = Blob::new(identical_blobs[0].clone())
.get_local_obj();
if identical_blob.state == State::Deleted {
path_from = Some(identical_blob.path);
*path_from = Some(identical_blob.path);
State::Moved
} else if identical_blob.state == State::Default {
path_from = Some(identical_blob.path);
*path_from = Some(identical_blob.path);
State::Copied
} else {
State::New
@ -372,7 +373,11 @@ impl Blob {
} else {
State::Default
}
};
}
pub fn get_local_obj(&mut self) -> LocalObj {
let mut path_from = None;
let state = self.status(&mut path_from);
LocalObj {
otype: String::from("blob"),

View File

@ -4,3 +4,4 @@ pub mod nextsyncignore;
pub mod api;
pub mod time;
pub mod remote;
pub mod into;

18
src/utils/into.rs Normal file
View File

@ -0,0 +1,18 @@
use std::path::PathBuf;
/// Conversion of path-like values into an owned `PathBuf`.
///
/// Lets functions such as `Blob::new` accept any supported path-like
/// argument. A crate-local trait (instead of `Into<PathBuf>`) keeps the
/// set of accepted types explicit and extensible here.
pub trait IntoPathBuf {
    /// Consume `self` and produce the equivalent `PathBuf`.
    fn into(self) -> PathBuf;
}

impl IntoPathBuf for PathBuf {
    // Identity conversion: already an owned PathBuf.
    fn into(self) -> PathBuf {
        self
    }
}

impl IntoPathBuf for String {
    fn into(self) -> PathBuf {
        PathBuf::from(self)
    }
}

impl IntoPathBuf for &str {
    // Convenience for string literals, e.g. `Blob::new("foo/bar")`.
    fn into(self) -> PathBuf {
        PathBuf::from(self)
    }
}

View File

@ -1,4 +1,8 @@
use crate::services::{req_props::ObjProps, api::ApiError};
use std::path::PathBuf;
use crate::{services::{req_props::ObjProps, api::ApiError}, store::object::{blob::Blob, Object}, commands::status::State};
use std::collections::HashMap;
use super::{path::{path_buf_to_string, self}, read};
pub struct EnumerateOptions {
pub depth: Option<String>,
@ -7,13 +11,18 @@ pub struct EnumerateOptions {
pub fn enumerate_remote(
req: impl Fn(&str) -> Result<Vec<ObjProps>, ApiError>,
should_skip: &dyn Fn(ObjProps) -> bool,
should_skip: Option<&dyn Fn(ObjProps) -> bool>,
options: EnumerateOptions
) -> (Vec<ObjProps>, Vec<ObjProps>) {
let mut folders: Vec<ObjProps> = vec![ObjProps::new()];
let mut all_folders: Vec<ObjProps> = vec![];
let mut deleted: Vec<PathBuf> = vec![];
let mut files: Vec<ObjProps> = vec![];
let mut objs_hashmap: HashMap<String, Vec<String>> = HashMap::new();
objs_hashmap.insert(
options.relative_s.clone().unwrap_or(String::new()),
Vec::new());
while folders.len() > 0 {
let folder = folders.pop().unwrap();
@ -44,14 +53,83 @@ pub fn enumerate_remote(
};
// separate folders and files in response
let mut iter = objs.iter();
// first element is not used as it is the fetched folder
let default_depth = calc_depth(iter.next().unwrap());
let d = options.depth.clone().unwrap_or("0".to_owned()).parse::<u16>().unwrap();
// first element is not used as it is the fetched folder
if let Some(should_skip_fct) = should_skip.clone() {
iter_with_skip_fct(
objs,
d,
&mut files,
&mut folders,
should_skip_fct,
&mut objs_hashmap,
&mut all_folders);
// check for deletion only when folder are not empty
// as the folder's content may not have been fetched yet
for (key, children) in objs_hashmap.clone() {
if children.len() != 0 {
get_deleted(key.clone(), children, &mut deleted);
objs_hashmap.remove(&key);
}
}
} else {
iter_without_skip_fct(
objs,
d,
&mut files,
&mut folders,
&mut all_folders);
}
}
// go through all folders not checked for deletion before
// as they were empty
if let Some(_) = should_skip.clone() {
for (key, children) in objs_hashmap.clone() {
get_deleted(key.clone(), children, &mut deleted);
objs_hashmap.remove(&key);
}
}
dbg!(deleted);
dbg!(objs_hashmap);
(all_folders, files)
}
// Depth of a remote object, derived from its relative path; objects with
// no relative path are treated as the empty string (depth 1).
fn calc_depth(obj: &ObjProps) -> u16 {
    let relative = obj.relative_s.clone().unwrap_or_default();
    calc_depth_string(relative)
}
// Number of `/`-separated segments in `s`; the empty string counts as 1.
fn calc_depth_string(s: String) -> u16 {
    let segments = s.split('/');
    segments.count() as u16
}
fn iter_with_skip_fct(
objs: Vec<ObjProps>,
d: u16,
files: &mut Vec<ObjProps>,
folders: &mut Vec<ObjProps>,
should_skip: &dyn Fn(ObjProps) -> bool,
objs_hashmap: &mut HashMap<String, Vec<String>>,
all_folders: &mut Vec<ObjProps>) {
let mut iter = objs.iter();
let default_depth = calc_depth(iter.next().unwrap());
let mut skip_depth = 0;
for object in iter {
if object.is_dir() {
let current_depth = calc_depth(object);
if object.is_dir() {
// add folder to parent folder only if exists
let mut r_path = PathBuf::from(object.relative_s.clone().unwrap());
r_path.pop();
let r_ps = path_buf_to_string(r_path);
if let Some(values) = objs_hashmap.get_mut(&r_ps.clone()) {
values.push(object.relative_s.clone().unwrap());
}
// skip children of skipped folder
if skip_depth != 0 && skip_depth < current_depth {
continue;
@ -61,6 +139,12 @@ pub fn enumerate_remote(
if should_skip {
skip_depth = current_depth;
} else {
// if this folder is not skipped then we initialised its vector
let r_ps_dir = object.relative_s.clone().unwrap();
let mut r_ps_key = r_ps_dir.chars();
r_ps_key.next_back();
objs_hashmap.insert(r_ps_key.as_str().to_owned(), Vec::new());
skip_depth = 0;
all_folders.push(object.clone());
}
@ -70,7 +154,14 @@ pub fn enumerate_remote(
folders.push(object.clone());
}
} else {
let current_depth = calc_depth(object);
// add file to parent folder only if exists
let mut r_path = PathBuf::from(object.relative_s.clone().unwrap());
r_path.pop();
let r_ps = path_buf_to_string(r_path);
if let Some(values) = objs_hashmap.get_mut(&r_ps.clone()) {
values.push(object.relative_s.clone().unwrap());
}
// skip children of skipped folder
if skip_depth != 0 && skip_depth < current_depth {
continue;
@ -84,10 +175,84 @@ pub fn enumerate_remote(
}
}
(all_folders, files)
fn iter_without_skip_fct(
objs: Vec<ObjProps>,
d: u16,
files: &mut Vec<ObjProps>,
folders: &mut Vec<ObjProps>,
all_folders: &mut Vec<ObjProps>) {
let mut iter = objs.iter();
let default_depth = calc_depth(iter.next().unwrap());
for object in iter {
if object.is_dir() {
// should get content of this folder if it is not already in this response
let current_depth = calc_depth(object);
if current_depth - default_depth == d {
folders.push(object.clone());
}
all_folders.push(object.clone());
} else {
files.push(object.clone());
}
}
fn calc_depth(obj: &ObjProps) -> u16 {
obj.relative_s.clone().unwrap_or(String::new()).split("/").count() as u16
}
// Advance `iter` to the next path already known to the store, skipping
// entries that only exist locally and were never synced: directories with
// no stored Object, and files whose blob status is `State::New`.
// Returns None once the iterator is exhausted.
fn get_non_new_local_element(iter: &mut dyn Iterator<Item = &PathBuf>) -> Option<PathBuf> {
let mut el = iter.next();
// the block after `&&` is the skip predicate, evaluated per candidate
while !el.is_none() && {
if el.unwrap().is_dir() {
// ignore newly created directory (not sync)
!Object::new(el.unwrap().clone().to_str().unwrap()).exists()
} else {
// ignore newly created file (not sync)
Blob::new(el.unwrap().clone()).status(&mut None) == State::New
}
} {
el = iter.next();
}
match el {
Some(e) => Some(e.to_owned()),
None => None
}
}
// Collect into `deleted` every already-synced local entry of folder `source`
// that no longer appears in `children` (the entries listed by the remote for
// that folder) — i.e. paths that were deleted on the remote side.
fn get_deleted(source: String, children: Vec<String>, deleted: &mut Vec<PathBuf>) {
let root = path::repo_root();
let abs_p = root.join(PathBuf::from(source.clone()));
let folder_read = read::read_folder(abs_p.clone());
if let Ok(mut local_objs) = folder_read {
// set path to be ref one not abs: strip the repo root prefix
local_objs.iter_mut().for_each(|e| {
*e = e.strip_prefix(path_buf_to_string(root.clone())).unwrap().to_path_buf();
});
let mut iter = local_objs.iter();
// only consider synced elements; brand-new local files are not "deleted"
let mut local_element = get_non_new_local_element(&mut iter);
while let Some(local) = local_element {
// a local element absent from the remote listing was deleted remotely
if let None = children.iter().position(|child| {
let child_compared = {
// remove trailing / of directory before comparing
if child.ends_with("/") {
let t = child.clone();
let mut ts = t.chars();
ts.next_back();
ts.as_str().to_owned()
} else {
child.clone()
}
};
child_compared == path_buf_to_string(local.clone())
}) {
deleted.push(local.clone());
}
local_element = get_non_new_local_element(&mut iter);
}
}
}