find deletion on pull

grimhilt 2023-08-27 22:50:51 +02:00
parent 57647e5df2
commit 863e3bd68a
7 changed files with 260 additions and 81 deletions
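
In short: enumerate_remote now records, for every remote folder, the children the server actually reported (objs_hashmap), and get_deleted then compares each folder's reported children against the tracked local content. Whatever exists locally and is already synced but is no longer listed remotely ends up in deleted. A minimal standalone sketch of the idea (hypothetical names, not this repo's API):

    use std::collections::HashSet;

    // anything tracked locally but absent from the remote listing is
    // treated as deleted on the remote
    fn find_deleted(remote_children: &[&str], tracked_local: &[&str]) -> Vec<String> {
        // directories may be reported with a trailing '/'
        let remote: HashSet<String> = remote_children
            .iter()
            .map(|p| p.trim_end_matches('/').to_owned())
            .collect();

        tracked_local
            .iter()
            .filter(|p| !remote.contains(**p))
            .map(|p| (*p).to_owned())
            .collect()
    }

    fn main() {
        let remote = ["dir/a.txt", "dir/sub/"];
        let local = ["dir/a.txt", "dir/b.txt", "dir/sub"];
        assert_eq!(find_deleted(&remote, &local), ["dir/b.txt"]);
    }

Newly created local entries are deliberately skipped (get_non_new_local_element below), so files that have never been synced are not mistaken for remote deletions.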

View File

@@ -70,7 +70,7 @@ pub fn clone(args: CloneArgs) {
     let depth = &args.depth.clone().unwrap_or(DEPTH.to_string());
     let (folders, files) = enumerate_remote(
         |a| req(&api_props, depth, a),
-        &should_skip,
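+        // a fresh clone fetches everything, so there is nothing to skip
+        // and no deletions to track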
+        None,
         EnumerateOptions {
             depth: Some(depth.to_owned()),
             relative_s: None
@@ -107,10 +107,6 @@ fn save_blob(obj: ObjProps) {
         }
     }
 
-fn should_skip(_: ObjProps) -> bool {
-    return false;
-}
-
 fn req(api_props: &ApiProps, depth: &str, relative_s: &str) -> Result<Vec<ObjProps>, ApiError> {
     ReqProps::new()
         .set_request(relative_s, &api_props)

View File

@@ -12,6 +12,8 @@ pub fn remote_diff() {
         .unwrap()
         .strip_prefix(path::repo_root()).unwrap().to_path_buf();
     let (folders, files) = get_diff(relative_p);
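+    // debug: dump the remote diff and stop before pulling anything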
+    dbg!(files);
+    return;
 
     for folder in folders {
         println!("should pull {}", folder.clone().relative_s.unwrap());
@@ -28,7 +30,7 @@ pub fn get_diff(path: PathBuf) -> (Vec<ObjProps>, Vec<ObjProps>) {
     enumerate_remote(
         |a| req(&api_props, depth, a),
-        &should_skip,
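+        // Some(...) turns on the new deletion tracking in enumerate_remote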
+        Some(&should_skip),
         EnumerateOptions {
             depth: Some(depth.to_owned()),
             relative_s: Some(path.to_str().unwrap().to_owned())

View File

@@ -32,6 +32,25 @@ pub enum State {
 // todo: relative path, filename
 // todo: not catch added empty folder
 pub fn status() {
+    let mut all_hashes = get_all_objs_hashes();
+    let staged_objs = get_staged(&mut all_hashes);
+
+    let objs: Vec<LocalObj> = all_hashes.iter().map(|x| {
+        x.1.clone()
+    }).collect();
+
+    print_status(staged_objs, objs);
+}
+
+pub fn get_all_objs() -> Vec<LocalObj> {
+    let all_hashes = get_all_objs_hashes();
+
+    all_hashes.iter().map(|x| {
+        x.1.clone()
+    }).collect()
+}
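+
+// gathers every detected change (new, deleted, moved, copied, modified)
+// into a single map of hash -> LocalObj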
+fn get_all_objs_hashes() -> HashMap<String, LocalObj> {
     let (mut new_objs_hashes, mut del_objs_hashes, objs_modified) = get_diff();
     let move_copy_hashes = get_move_copy_objs(&mut new_objs_hashes, &mut del_objs_hashes);
@@ -58,14 +77,7 @@ pub fn status() {
     all_hashes.extend(new_objs_hashes);
     all_hashes.extend(modified_objs_hashes);
 
-    let staged_objs = get_staged(&mut all_hashes);
-
-    let objs: Vec<LocalObj> = all_hashes.iter().map(|x| {
-        x.1.clone()
-    }).collect();
-
-    print_status(staged_objs, objs);
+    all_hashes
 }
 
 fn should_retain(hasher: &mut Sha1, key: String, obj: LocalObj, move_copy_hashes: &mut HashMap<String, LocalObj>, del_objs_h: &mut HashMap<String, LocalObj>) -> bool {

View File

@@ -11,7 +11,7 @@ pub fn path() -> PathBuf {
 }
 
 pub fn open() -> File {
-    let mut path = path();
+    let path = path();
     OpenOptions::new()
         .read(true)
         .write(true)

View File

@@ -58,9 +58,10 @@ impl Object {
         match read::read_lines(&self.obj_p) {
             Ok(mut reader) => {
                 if let Some(Ok(line)) = reader.next() {
-                    let mut data = line.rsplit(' ');
-                    if data.clone().count() >= 2 {
-                        self.ts = Some(data.next().unwrap().parse::<i64>().unwrap())
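+                    // read the timestamp from the second space-separated
+                    // field of the line instead of the last one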
+                    let mut data = line.rsplit(' ').collect::<Vec<_>>();
+                    data.reverse();
+                    if data.len() >= 2 {
+                        self.ts = Some(data[1].parse::<i64>().unwrap())
                     }
                 }
             },

View File

@@ -252,6 +252,7 @@ impl Blob {
         }
         self.data.reverse();
 
+        // remove \n of last element
         if let Some(last) = self.data.last_mut() {
             if last.ends_with("\n") {
                 last.pop();
@@ -342,39 +343,41 @@ impl Blob {
         !self.has_same_size() || (self.is_newer() && !self.has_same_hash())
     }
 
-    pub fn get_local_obj(&mut self) -> LocalObj {
-        let mut path_from = None;
-        let state = {
-            let has_obj_ref = self.obj_p.clone().exists();
-            let blob_exists = self.a_path.clone().exists();
-
-            if has_obj_ref && !blob_exists {
-                State::Deleted
-            } else if !has_obj_ref && blob_exists {
-                let identical_blobs = self.get_all_identical_blobs();
-                if identical_blobs.len() != 0 {
-                    let identical_blob = Blob::new(identical_blobs[0].clone())
-                        .get_local_obj();
-                    if identical_blob.state == State::Deleted {
-                        path_from = Some(identical_blob.path);
-                        State::Moved
-                    } else if identical_blob.state == State::Default {
-                        path_from = Some(identical_blob.path);
-                        State::Copied
-                    } else {
-                        State::New
-                    }
-                } else {
-                    State::New
-                }
-            } else if !has_obj_ref && !blob_exists {
-                State::Default
-            } else if self.has_change() {
-                State::Modified
-            } else {
-                State::Default
-            }
-        };
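+    // state computation extracted from get_local_obj so callers can get a
+    // State without building a LocalObj; path_from is written through the
+    // out-parameter for Moved/Copied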
+    pub fn status(&mut self, path_from: &mut Option<PathBuf>) -> State {
+        let has_obj_ref = self.obj_p.clone().exists();
+        let blob_exists = self.a_path.clone().exists();
+
+        if has_obj_ref && !blob_exists {
+            State::Deleted
+        } else if !has_obj_ref && blob_exists {
+            let identical_blobs = self.get_all_identical_blobs();
+            if identical_blobs.len() != 0 {
+                let identical_blob = Blob::new(identical_blobs[0].clone())
+                    .get_local_obj();
+                if identical_blob.state == State::Deleted {
+                    *path_from = Some(identical_blob.path);
+                    State::Moved
+                } else if identical_blob.state == State::Default {
+                    *path_from = Some(identical_blob.path);
+                    State::Copied
+                } else {
+                    State::New
+                }
+            } else {
+                State::New
+            }
+        } else if !has_obj_ref && !blob_exists {
+            State::Default
+        } else if self.has_change() {
+            State::Modified
+        } else {
+            State::Default
+        }
+    }
+
+    pub fn get_local_obj(&mut self) -> LocalObj {
+        let mut path_from = None;
+        let state = self.status(&mut path_from);
 
         LocalObj {
             otype: String::from("blob"),

View File

@@ -1,4 +1,8 @@
-use crate::services::{req_props::ObjProps, api::ApiError};
+use std::path::PathBuf;
+use crate::{services::{req_props::{ObjProps, ReqProps}, api::ApiError}, store::object::{blob::Blob, Object}, commands::status::State};
+use std::collections::HashMap;
+use super::{path::{path_buf_to_string, self}, read};
 
 pub struct EnumerateOptions {
     pub depth: Option<String>,
@@ -7,13 +11,18 @@ pub struct EnumerateOptions {
 
 pub fn enumerate_remote(
     req: impl Fn(&str) -> Result<Vec<ObjProps>, ApiError>,
-    should_skip: &dyn Fn(ObjProps) -> bool,
+    should_skip: Option<&dyn Fn(ObjProps) -> bool>,
     options: EnumerateOptions
 ) -> (Vec<ObjProps>, Vec<ObjProps>) {
     let mut folders: Vec<ObjProps> = vec![ObjProps::new()];
     let mut all_folders: Vec<ObjProps> = vec![];
+    let mut deleted: Vec<PathBuf> = vec![];
     let mut files: Vec<ObjProps> = vec![];
+    let mut objs_hashmap: HashMap<String, Vec<String>> = HashMap::new();
+    objs_hashmap.insert(
+        options.relative_s.clone().unwrap_or(String::new()),
+        Vec::new());
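+    // objs_hashmap maps each remote folder (by relative path) to the
+    // children seen in responses; it is seeded with the enumeration root
+    // so deletions directly under it are detected too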
 
     while folders.len() > 0 {
         let folder = folders.pop().unwrap();
@@ -44,50 +53,206 @@ pub fn enumerate_remote(
         };
 
         // separate folders and files in response
-        let mut iter = objs.iter();
-        // first element is not used as it is the fetched folder
-        let default_depth = calc_depth(iter.next().unwrap());
         let d = options.depth.clone().unwrap_or("0".to_owned()).parse::<u16>().unwrap();
-        let mut skip_depth = 0;
-        for object in iter {
-            if object.is_dir() {
-                let current_depth = calc_depth(object);
-                // skip children of skipped folder
-                if skip_depth != 0 && skip_depth < current_depth {
-                    continue;
-                }
-                let should_skip = should_skip(object.clone());
-                if should_skip {
-                    skip_depth = current_depth;
-                } else {
-                    skip_depth = 0;
-                    all_folders.push(object.clone());
-                }
-                // should get content of this folder if it is not already in this response
-                if current_depth - default_depth == d && !should_skip {
-                    folders.push(object.clone());
-                }
-            } else {
-                let current_depth = calc_depth(object);
-                // skip children of skipped folder
-                if skip_depth != 0 && skip_depth < current_depth {
-                    continue;
-                }
-                if !should_skip(object.clone()) {
-                    skip_depth = 0;
-                    files.push(object.clone());
-                }
-            }
-        }
-    }
+        // first element is not used as it is the fetched folder
+        if let Some(should_skip_fct) = should_skip.clone() {
+            iter_with_skip_fct(
+                objs,
+                d,
+                &mut files,
+                &mut folders,
+                should_skip_fct,
+                &mut objs_hashmap,
+                &mut all_folders);
+
+            // check for deletion only when folders are not empty
+            // as the folder's content may not have been fetched yet
+            for (key, children) in objs_hashmap.clone() {
+                if children.len() != 0 {
+                    get_deleted(key.clone(), children, &mut deleted);
+                    objs_hashmap.remove(&key);
+                }
+            }
+        } else {
+            iter_without_skip_fct(
+                objs,
+                d,
+                &mut files,
+                &mut folders,
+                &mut all_folders);
+        }
+    }
+
+    // go through all folders not checked for deletion before
+    // as they were empty
+    if let Some(_) = should_skip.clone() {
+        for (key, children) in objs_hashmap.clone() {
+            get_deleted(key.clone(), children, &mut deleted);
+            objs_hashmap.remove(&key);
+        }
+    }
+
+    dbg!(deleted);
+    dbg!(objs_hashmap);
+
     (all_folders, files)
 }
 
 fn calc_depth(obj: &ObjProps) -> u16 {
-    obj.relative_s.clone().unwrap_or(String::new()).split("/").count() as u16
+    calc_depth_string(obj.relative_s.clone().unwrap_or(String::new()))
 }
+
+fn calc_depth_string(s: String) -> u16 {
+    s.split("/").count() as u16
+}
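+
+// walks one response: skips objects matched by should_skip (and everything
+// below them), records every child seen in objs_hashmap under its parent
+// folder, and queues folders at the fetch depth to be requested in turn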
+fn iter_with_skip_fct(
+    objs: Vec<ObjProps>,
+    d: u16,
+    files: &mut Vec<ObjProps>,
+    folders: &mut Vec<ObjProps>,
+    should_skip: &dyn Fn(ObjProps) -> bool,
+    objs_hashmap: &mut HashMap<String, Vec<String>>,
+    all_folders: &mut Vec<ObjProps>) {
+    let mut iter = objs.iter();
+    let default_depth = calc_depth(iter.next().unwrap());
+    let mut skip_depth = 0;
+
+    for object in iter {
+        let current_depth = calc_depth(object);
+        if object.is_dir() {
+            // add folder to parent folder only if it exists
+            let mut r_path = PathBuf::from(object.relative_s.clone().unwrap());
+            r_path.pop();
+            let r_ps = path_buf_to_string(r_path);
+            if let Some(values) = objs_hashmap.get_mut(&r_ps.clone()) {
+                values.push(object.relative_s.clone().unwrap());
+            }
+
+            // skip children of skipped folder
+            if skip_depth != 0 && skip_depth < current_depth {
+                continue;
+            }
+
+            let should_skip = should_skip(object.clone());
+            if should_skip {
+                skip_depth = current_depth;
+            } else {
+                // if this folder is not skipped then we initialise its vector
+                let r_ps_dir = object.relative_s.clone().unwrap();
+                let mut r_ps_key = r_ps_dir.chars();
+                r_ps_key.next_back();
+                objs_hashmap.insert(r_ps_key.as_str().to_owned(), Vec::new());
+
+                skip_depth = 0;
+                all_folders.push(object.clone());
+            }
+
+            // should get content of this folder if it is not already in this response
+            if current_depth - default_depth == d && !should_skip {
+                folders.push(object.clone());
+            }
+        } else {
+            // add file to parent folder only if it exists
+            let mut r_path = PathBuf::from(object.relative_s.clone().unwrap());
+            r_path.pop();
+            let r_ps = path_buf_to_string(r_path);
+            if let Some(values) = objs_hashmap.get_mut(&r_ps.clone()) {
+                values.push(object.relative_s.clone().unwrap());
+            }

+            // skip children of skipped folder
+            if skip_depth != 0 && skip_depth < current_depth {
+                continue;
+            }
+
+            if !should_skip(object.clone()) {
+                skip_depth = 0;
+                files.push(object.clone());
+            }
+        }
+    }
+}
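+
+// same traversal without a skip callback: nothing is filtered and no
+// per-folder children are recorded, so this path does no deletion tracking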
+fn iter_without_skip_fct(
+    objs: Vec<ObjProps>,
+    d: u16,
+    files: &mut Vec<ObjProps>,
+    folders: &mut Vec<ObjProps>,
+    all_folders: &mut Vec<ObjProps>) {
+    let mut iter = objs.iter();
+    let default_depth = calc_depth(iter.next().unwrap());
+
+    for object in iter {
+        if object.is_dir() {
+            // should get content of this folder if it is not already in this response
+            let current_depth = calc_depth(object);
+            if current_depth - default_depth == d {
+                folders.push(object.clone());
+            }
+            all_folders.push(object.clone());
+        } else {
+            files.push(object.clone());
+        }
+    }
+}
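+
+// advances the iterator to the next local entry that is already tracked,
+// skipping paths whose state is New (created locally, never synced)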
+fn get_non_new_local_element(iter: &mut dyn Iterator<Item = &PathBuf>) -> Option<PathBuf> {
+    let mut el = iter.next();
+    while !el.is_none() && {
+        if el.unwrap().is_dir() {
+            // ignore newly created directory (not synced)
+            !Object::new(el.unwrap().clone().to_str().unwrap()).exists()
+        } else {
+            // ignore newly created file (not synced)
+            Blob::new(el.unwrap().clone()).status(&mut None) == State::New
+        }
+    } {
+        el = iter.next();
+    }
+
+    match el {
+        Some(e) => Some(e.to_owned()),
+        None => None
+    }
+}
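+
+// compares the local content of source with the children the remote
+// reported for it; any tracked local entry the remote no longer lists is
+// pushed to deleted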
+fn get_deleted(source: String, children: Vec<String>, deleted: &mut Vec<PathBuf>) {
+    let root = path::repo_root();
+    let abs_p = root.join(PathBuf::from(source.clone()));
+    let folder_read = read::read_folder(abs_p.clone());
+    if let Ok(mut local_objs) = folder_read {
+        // make paths relative to the repo root instead of absolute
+        local_objs.iter_mut().for_each(|e| {
+            *e = e.strip_prefix(path_buf_to_string(root.clone())).unwrap().to_path_buf();
+        });
+
+        let mut iter = local_objs.iter();
+        let mut local_element = get_non_new_local_element(&mut iter);
+        while let Some(local) = local_element {
+            if let None = children.iter().position(|child| {
+                let child_compared = {
+                    // remove trailing / of directory
+                    if child.ends_with("/") {
+                        let t = child.clone();
+                        let mut ts = t.chars();
+                        ts.next_back();
+                        ts.as_str().to_owned()
+                    } else {
+                        child.clone()
+                    }
+                };
+                child_compared == path_buf_to_string(local.clone())
+            }) {
+                deleted.push(local.clone());
+            }
+            local_element = get_non_new_local_element(&mut iter);
+        }
+    }
+}