From 863e3bd68a80c41d31097c3d29d2001aba61b834 Mon Sep 17 00:00:00 2001
From: grimhilt
Date: Sun, 27 Aug 2023 22:50:51 +0200
Subject: [PATCH] find deletion on pull

---
 src/commands/clone.rs       |   6 +-
 src/commands/remote_diff.rs |   4 +-
 src/commands/status.rs      |  28 +++--
 src/store/index.rs          |   2 +-
 src/store/object.rs         |   7 +-
 src/store/object/blob.rs    |  57 +++++----
 src/utils/remote.rs         | 237 ++++++++++++++++++++++++++++++------
 7 files changed, 260 insertions(+), 81 deletions(-)

diff --git a/src/commands/clone.rs b/src/commands/clone.rs
index db07d28..1bda8cf 100644
--- a/src/commands/clone.rs
+++ b/src/commands/clone.rs
@@ -70,7 +70,7 @@ pub fn clone(args: CloneArgs) {
     let depth = &args.depth.clone().unwrap_or(DEPTH.to_string());
     let (folders, files) = enumerate_remote(
         |a| req(&api_props, depth, a),
-        &should_skip,
+        None,
         EnumerateOptions {
             depth: Some(depth.to_owned()),
             relative_s: None
@@ -107,10 +107,6 @@ fn save_blob(obj: ObjProps) {
     }
 }
 
-fn should_skip(_: ObjProps) -> bool {
-    return false;
-}
-
 fn req(api_props: &ApiProps, depth: &str, relative_s: &str) -> Result<Vec<ObjProps>, ApiError> {
     ReqProps::new()
         .set_request(relative_s, &api_props)
diff --git a/src/commands/remote_diff.rs b/src/commands/remote_diff.rs
index bf40b73..aa8376d 100644
--- a/src/commands/remote_diff.rs
+++ b/src/commands/remote_diff.rs
@@ -12,6 +12,8 @@ pub fn remote_diff() {
         .unwrap()
         .strip_prefix(path::repo_root()).unwrap().to_path_buf();
     let (folders, files) = get_diff(relative_p);
+    dbg!(files);
+    return;
 
     for folder in folders {
         println!("should pull {}", folder.clone().relative_s.unwrap());
@@ -28,7 +30,7 @@ pub fn get_diff(path: PathBuf) -> (Vec<ObjProps>, Vec<ObjProps>) {
 
     enumerate_remote(
         |a| req(&api_props, depth, a),
-        &should_skip,
+        Some(&should_skip),
         EnumerateOptions {
             depth: Some(depth.to_owned()),
             relative_s: Some(path.to_str().unwrap().to_owned())
diff --git a/src/commands/status.rs b/src/commands/status.rs
index c6fe93b..0b64bf1 100644
--- a/src/commands/status.rs
+++ b/src/commands/status.rs
@@ -32,6 +32,25 @@ pub enum State {
 // todo: relative path, filename
 // todo: not catch added empty folder
 pub fn status() {
+    let mut all_hashes = get_all_objs_hashes();
+    let staged_objs = get_staged(&mut all_hashes);
+
+    let objs: Vec<LocalObj> = all_hashes.iter().map(|x| {
+        x.1.clone()
+    }).collect();
+
+
+    print_status(staged_objs, objs);
+}
+
+pub fn get_all_objs() -> Vec<LocalObj> {
+    let all_hashes = get_all_objs_hashes();
+    all_hashes.iter().map(|x| {
+        x.1.clone()
+    }).collect()
+}
+
+fn get_all_objs_hashes() -> HashMap<String, LocalObj> {
     let (mut new_objs_hashes, mut del_objs_hashes, objs_modified) = get_diff();
     let move_copy_hashes = get_move_copy_objs(&mut new_objs_hashes, &mut del_objs_hashes);
@@ -58,14 +77,7 @@
     all_hashes.extend(new_objs_hashes);
     all_hashes.extend(modified_objs_hashes);
 
-    let staged_objs = get_staged(&mut all_hashes);
-
-    let objs: Vec<LocalObj> = all_hashes.iter().map(|x| {
-        x.1.clone()
-    }).collect();
-
-
-    print_status(staged_objs, objs);
+    all_hashes
 }
 
 fn should_retain(hasher: &mut Sha1, key: String, obj: LocalObj, move_copy_hashes: &mut HashMap<String, LocalObj>, del_objs_h: &mut HashMap<String, LocalObj>) -> bool {
diff --git a/src/store/index.rs b/src/store/index.rs
index 7ecca48..2af4d2b 100644
--- a/src/store/index.rs
+++ b/src/store/index.rs
@@ -11,7 +11,7 @@ pub fn path() -> PathBuf {
 }
 
 pub fn open() -> File {
-    let mut path = path();
+    let path = path();
     OpenOptions::new()
         .read(true)
         .write(true)
diff --git a/src/store/object.rs b/src/store/object.rs
index b49d338..4d2a422 100644
--- a/src/store/object.rs
+++ b/src/store/object.rs
@@ -58,9 +58,10 @@ impl Object {
     match read::read_lines(&self.obj_p) {
         Ok(mut reader) => {
             if let Some(Ok(line)) = reader.next() {
-                let mut data = line.rsplit(' ');
-                if data.clone().count() >= 2 {
-                    self.ts = Some(data.next().unwrap().parse::().unwrap())
+                let mut data = line.rsplit(' ').collect::<Vec<&str>>();
+                data.reverse();
+                if data.clone().len() >= 2 {
+                    self.ts = Some(data[1].parse::().unwrap())
                 }
             }
         },
diff --git a/src/store/object/blob.rs b/src/store/object/blob.rs
index 966ea93..523c587 100644
--- a/src/store/object/blob.rs
+++ b/src/store/object/blob.rs
@@ -252,6 +252,7 @@ impl Blob {
         }
 
         self.data.reverse();
+        // remove \n of last element
        if let Some(last) = self.data.last_mut() {
             if last.ends_with("\n") {
                 last.pop();
@@ -342,39 +343,41 @@ impl Blob {
         !self.has_same_size() || (self.is_newer() && !self.has_same_hash())
     }
 
-    pub fn get_local_obj(&mut self) -> LocalObj {
-        let mut path_from = None;
-        let state = {
-            let has_obj_ref = self.obj_p.clone().exists();
-            let blob_exists = self.a_path.clone().exists();
+    pub fn status(&mut self, path_from: &mut Option<PathBuf>) -> State {
+        let has_obj_ref = self.obj_p.clone().exists();
+        let blob_exists = self.a_path.clone().exists();
 
-            if has_obj_ref && !blob_exists {
-                State::Deleted
-            } else if !has_obj_ref && blob_exists {
-                let identical_blobs = self.get_all_identical_blobs();
-                if identical_blobs.len() != 0 {
-                    let identical_blob = Blob::new(identical_blobs[0].clone())
-                        .get_local_obj();
-                    if identical_blob.state == State::Deleted {
-                        path_from = Some(identical_blob.path);
-                        State::Moved
-                    } else if identical_blob.state == State::Default {
-                        path_from = Some(identical_blob.path);
-                        State::Copied
-                    } else {
-                        State::New
-                    }
+        if has_obj_ref && !blob_exists {
+            State::Deleted
+        } else if !has_obj_ref && blob_exists {
+            let identical_blobs = self.get_all_identical_blobs();
+            if identical_blobs.len() != 0 {
+                let identical_blob = Blob::new(identical_blobs[0].clone())
+                    .get_local_obj();
+                if identical_blob.state == State::Deleted {
+                    *path_from = Some(identical_blob.path);
+                    State::Moved
+                } else if identical_blob.state == State::Default {
+                    *path_from = Some(identical_blob.path);
+                    State::Copied
                 } else {
                     State::New
                 }
-            } else if !has_obj_ref && !blob_exists {
-                State::Default
-            } else if self.has_change() {
-                State::Modified
             } else {
-                State::Default
+                State::New
             }
-        };
+        } else if !has_obj_ref && !blob_exists {
+            State::Default
+        } else if self.has_change() {
+            State::Modified
+        } else {
+            State::Default
+        }
+    }
+
+    pub fn get_local_obj(&mut self) -> LocalObj {
+        let mut path_from = None;
+        let state = self.status(&mut path_from);
 
         LocalObj {
             otype: String::from("blob"),
diff --git a/src/utils/remote.rs b/src/utils/remote.rs
index b88a2a8..3345c23 100644
--- a/src/utils/remote.rs
+++ b/src/utils/remote.rs
@@ -1,4 +1,8 @@
-use crate::services::{req_props::ObjProps, api::ApiError};
+use std::path::PathBuf;
+use crate::{services::{req_props::{ObjProps, ReqProps}, api::ApiError}, store::object::{blob::Blob, Object}, commands::status::State};
+use std::collections::HashMap;
+
+use super::{path::{path_buf_to_string, self}, read};
 
 pub struct EnumerateOptions {
     pub depth: Option<String>,
@@ -7,13 +11,18 @@
 pub fn enumerate_remote(
     req: impl Fn(&str) -> Result<Vec<ObjProps>, ApiError>,
-    should_skip: &dyn Fn(ObjProps) -> bool,
+    should_skip: Option<&dyn Fn(ObjProps) -> bool>,
     options: EnumerateOptions
     ) -> (Vec<ObjProps>, Vec<ObjProps>) {
     let mut folders: Vec<ObjProps> = vec![ObjProps::new()];
     let mut all_folders: Vec<ObjProps> = vec![];
+    let mut deleted: Vec<PathBuf> = vec![];
     let mut files: Vec<ObjProps> = vec![];
+    let mut objs_hashmap: HashMap<String, Vec<String>> = HashMap::new();
HashMap> = HashMap::new(); + objs_hashmap.insert( + options.relative_s.clone().unwrap_or(String::new()), + Vec::new()); while folders.len() > 0 { let folder = folders.pop().unwrap(); @@ -44,50 +53,206 @@ pub fn enumerate_remote( }; // separate folders and files in response - let mut iter = objs.iter(); - // first element is not used as it is the fetched folder - let default_depth = calc_depth(iter.next().unwrap()); let d = options.depth.clone().unwrap_or("0".to_owned()).parse::().unwrap(); - let mut skip_depth = 0; - for object in iter { - if object.is_dir() { - let current_depth = calc_depth(object); - // skip children of skiped folder - if skip_depth != 0 && skip_depth < current_depth { - continue; - } + // first element is not used as it is the fetched folder + if let Some(should_skip_fct) = should_skip.clone() { + iter_with_skip_fct( + objs, + d, + &mut files, + &mut folders, + should_skip_fct, + &mut objs_hashmap, + &mut all_folders); - let should_skip = should_skip(object.clone()); - if should_skip { - skip_depth = current_depth; - } else { - skip_depth = 0; - all_folders.push(object.clone()); - } - - // should get content of this folder if it is not already in this reponse - if current_depth - default_depth == d && !should_skip { - folders.push(object.clone()); - } - } else { - let current_depth = calc_depth(object); - // skip children of skiped folder - if skip_depth != 0 && skip_depth < current_depth { - continue; - } - - if !should_skip(object.clone()) { - skip_depth = 0; - files.push(object.clone()); + // check for deletion only when folder are not empty + // as the folder's content may not have been fetched yet + for (key, children) in objs_hashmap.clone() { + if children.len() != 0 { + get_deleted(key.clone(), children, &mut deleted); + objs_hashmap.remove(&key); } } + } else { + iter_without_skip_fct( + objs, + d, + &mut files, + &mut folders, + &mut all_folders); } } + // go through all folders not checked for deletion before + // as they were empty + if let Some(_) = should_skip.clone() { + for (key, children) in objs_hashmap.clone() { + get_deleted(key.clone(), children, &mut deleted); + objs_hashmap.remove(&key); + } + } + dbg!(deleted); + dbg!(objs_hashmap); (all_folders, files) } fn calc_depth(obj: &ObjProps) -> u16 { - obj.relative_s.clone().unwrap_or(String::new()).split("/").count() as u16 + calc_depth_string(obj.relative_s.clone().unwrap_or(String::new())) +} + +fn calc_depth_string(s: String) -> u16 { + s.split("/").count() as u16 +} + +fn iter_with_skip_fct( + objs: Vec, + d: u16, + files: &mut Vec, + folders: &mut Vec, + should_skip: &dyn Fn(ObjProps) -> bool, + objs_hashmap: &mut HashMap>, + all_folders: &mut Vec) { + + let mut iter = objs.iter(); + let default_depth = calc_depth(iter.next().unwrap()); + let mut skip_depth = 0; + + for object in iter { + let current_depth = calc_depth(object); + + if object.is_dir() { + + // add folder to parent folder only if exists + let mut r_path = PathBuf::from(object.relative_s.clone().unwrap()); + r_path.pop(); + let r_ps = path_buf_to_string(r_path); + if let Some(values) = objs_hashmap.get_mut(&r_ps.clone()) { + values.push(object.relative_s.clone().unwrap()); + } + + // skip children of skiped folder + if skip_depth != 0 && skip_depth < current_depth { + continue; + } + + let should_skip = should_skip(object.clone()); + if should_skip { + skip_depth = current_depth; + } else { + // if this folder is not skipped then we initialised its vector + let r_ps_dir = object.relative_s.clone().unwrap(); + let mut r_ps_key = 
+                r_ps_key.next_back();
+                objs_hashmap.insert(r_ps_key.as_str().to_owned(), Vec::new());
+
+                skip_depth = 0;
+                all_folders.push(object.clone());
+            }
+
+            // should get content of this folder if it is not already in this response
+            if current_depth - default_depth == d && !should_skip {
+                folders.push(object.clone());
+            }
+        } else {
+            // add file to parent folder only if it exists
+            let mut r_path = PathBuf::from(object.relative_s.clone().unwrap());
+            r_path.pop();
+            let r_ps = path_buf_to_string(r_path);
+            if let Some(values) = objs_hashmap.get_mut(&r_ps.clone()) {
+                values.push(object.relative_s.clone().unwrap());
+            }
+
+            // skip children of skipped folder
+            if skip_depth != 0 && skip_depth < current_depth {
+                continue;
+            }
+
+            if !should_skip(object.clone()) {
+                skip_depth = 0;
+                files.push(object.clone());
+            }
+        }
+    }
+}
+
+fn iter_without_skip_fct(
+    objs: Vec<ObjProps>,
+    d: u16,
+    files: &mut Vec<ObjProps>,
+    folders: &mut Vec<ObjProps>,
+    all_folders: &mut Vec<ObjProps>) {
+
+    let mut iter = objs.iter();
+    let default_depth = calc_depth(iter.next().unwrap());
+
+    for object in iter {
+        if object.is_dir() {
+            // should get content of this folder if it is not already in this response
+            let current_depth = calc_depth(object);
+            if current_depth - default_depth == d {
+                folders.push(object.clone());
+            }
+            all_folders.push(object.clone());
+        } else {
+            files.push(object.clone());
+        }
+    }
+
+}
+
+fn get_non_new_local_element(iter: &mut dyn Iterator<Item = &PathBuf>) -> Option<PathBuf> {
+    let mut el = iter.next();
+    while !el.is_none() && {
+        if el.unwrap().is_dir() {
+            // ignore newly created directory (not synced)
+            !Object::new(el.unwrap().clone().to_str().unwrap()).exists()
+        } else {
+            // ignore newly created file (not synced)
+            Blob::new(el.unwrap().clone()).status(&mut None) == State::New
+        }
+    } {
+        el = iter.next();
+    }
+    match el {
+        Some(e) => Some(e.to_owned()),
+        None => None
+    }
+}
+
+fn get_deleted(source: String, children: Vec<String>, deleted: &mut Vec<PathBuf>) {
+    let root = path::repo_root();
+    let abs_p = root.join(PathBuf::from(source.clone()));
+
+    let folder_read = read::read_folder(abs_p.clone());
+    if let Ok(mut local_objs) = folder_read {
+        // make paths relative to the repo root instead of absolute
+        local_objs.iter_mut().for_each(|e| {
+            *e = e.strip_prefix(path_buf_to_string(root.clone())).unwrap().to_path_buf();
+        });
+
+        let mut iter = local_objs.iter();
+        let mut local_element = get_non_new_local_element(&mut iter);
+
+        while let Some(local) = local_element {
+            if let None = children.iter().position(|child| {
+                let child_compared = {
+                    // remove trailing / of directory
+                    if child.ends_with("/") {
+                        let t = child.clone();
+                        let mut ts = t.chars();
+                        ts.next_back();
+                        ts.as_str().to_owned()
+                    } else {
+                        child.clone()
+                    }
+                };
+
+                child_compared == path_buf_to_string(local.clone())
+            }) {
+                deleted.push(local.clone());
+            }
+            local_element = get_non_new_local_element(&mut iter);
+        }
+    }
+}
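
The deletion detection added in src/utils/remote.rs boils down to a per-folder set difference: for every remote folder whose children have been fetched into objs_hashmap, any local entry that is already synced but no longer listed remotely is reported as deleted on the remote, while never-synced local files are skipped. The self-contained sketch below restates that idea outside the patch; find_remote_deletions and is_new_local are illustrative stand-ins, not functions from this repository.

use std::collections::{HashMap, HashSet};
use std::path::PathBuf;

// Report every synced local entry that no longer appears in the fetched remote listing.
fn find_remote_deletions(
    remote_children: &HashMap<String, Vec<String>>, // remote folder -> children returned by the server
    local_entries: &[PathBuf],                      // repo-relative local paths
    is_new_local: impl Fn(&PathBuf) -> bool,        // "never synced yet" predicate
) -> Vec<PathBuf> {
    // Normalize remote names once: directory entries may carry a trailing '/'.
    let remote: HashSet<String> = remote_children
        .values()
        .flatten()
        .map(|c| c.trim_end_matches('/').to_owned())
        .collect();

    local_entries
        .iter()
        // A brand-new local file is not a remote deletion: the server has never seen it.
        .filter(|p| !is_new_local(p))
        // Synced locally but absent from the remote listing means deleted on the remote.
        .filter(|p| !remote.contains(&*p.to_string_lossy()))
        .cloned()
        .collect()
}

fn main() {
    let mut remote_children = HashMap::new();
    remote_children.insert("docs".to_owned(), vec!["docs/a.md".to_owned()]);

    let local = vec![PathBuf::from("docs/a.md"), PathBuf::from("docs/b.md")];
    let deleted = find_remote_deletions(&remote_children, &local, |_| false);
    assert_eq!(deleted, vec![PathBuf::from("docs/b.md")]);
    println!("deleted on remote: {:?}", deleted);
}

In the patch itself, objs_hashmap plays the role of remote_children, get_non_new_local_element plays the role of the is_new_local filter, and the trailing '/' on directory children is stripped inside the position() closure before the comparison.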