Compare commits

...

12 Commits

Author      SHA1          Message                                         Date
grimhilt    498fada9ec    push modification                               2023-08-24 20:59:41 +02:00
grimhilt    f64d719b31    find modified, copied, moved file in staged     2023-08-23 12:52:45 +02:00
grimhilt    dcf137667b    clean code                                      2023-08-11 22:09:34 +02:00
grimhilt    5b46b1e2f1    not cleaned status with moved and copied        2023-08-11 18:21:29 +02:00
grimhilt    4b12edbe5c    add refs to blob                                2023-08-11 18:21:00 +02:00
grimhilt    16dbd25168    add modified files in status                    2023-08-04 19:17:21 +02:00
grimhilt    91a29480df    fix blob creation and get changes               2023-08-04 19:17:04 +02:00
grimhilt    ce047eba12    prevent crash of downloader when not logging    2023-08-04 19:16:44 +02:00
grimhilt    94220be935    add timestamp and hash in blob                  2023-08-04 16:01:24 +02:00
grimhilt    d5097727cb    create impl for blob                            2023-08-04 15:25:51 +02:00
grimhilt    cb43a46456    draft of pull                                   2023-07-28 13:22:55 +02:00
grimhilt    4c34df7cfe    remote-diff only a directory                    2023-07-28 13:22:13 +02:00
22 changed files with 724 additions and 169 deletions

View File

@ -2,8 +2,10 @@
## Blob object ## Blob object
``` ```
file_name timestamp size hash file_name timestamp1 size timestamp2 hash
``` ```
timestamp1: timestamp of the file on the server, used to know whether the server has an update
timestamp2: timestamp of the file locally, used to know when the file has changed on the system
## Tree object ## Tree object
``` ```
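
The blob object line documented above packs five space-separated fields. A minimal parsing sketch under that assumption (the `BlobRecord` struct and `parse_blob_line` function are illustrative and not part of this change; file names containing spaces would need extra care):

```rust
// Illustrative only: field order follows the documented blob format above.
struct BlobRecord {
    file_name: String,
    remote_ts: i64, // timestamp1: last known modification time on the server
    size: u64,      // file size recorded at the last sync
    local_ts: i64,  // timestamp2: local modification time recorded at the last sync
    hash: String,   // content hash of the file
}

fn parse_blob_line(line: &str) -> Option<BlobRecord> {
    let mut f = line.split_whitespace();
    Some(BlobRecord {
        file_name: f.next()?.to_string(),
        remote_ts: f.next()?.parse().ok()?,
        size: f.next()?.parse().ok()?,
        local_ts: f.next()?.parse().ok()?,
        hash: f.next()?.to_string(),
    })
}
```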

View File

@ -6,3 +6,4 @@ pub mod clone;
pub mod push; pub mod push;
pub mod config; pub mod config;
pub mod remote_diff; pub mod remote_diff;
pub mod pull;

View File

@ -13,6 +13,7 @@ pub struct AddArgs<'a> {
// todo match deleted files // todo match deleted files
// todo match weird reg expression // todo match weird reg expression
// todo -A == .
pub fn add(args: AddArgs) { pub fn add(args: AddArgs) {
let mut index_file = store::index::open(); let mut index_file = store::index::open();
let mut added_files: Vec<String> = vec![]; let mut added_files: Vec<String> = vec![];
@ -43,11 +44,9 @@ pub fn add(args: AddArgs) {
match path.exists() { match path.exists() {
true => { true => {
if path.is_dir() { if path.is_dir() {
added_files.push(f);
add_folder_content(path.to_path_buf(), &mut added_files); add_folder_content(path.to_path_buf(), &mut added_files);
} else {
added_files.push(String::from(path.to_str().unwrap()));
} }
added_files.push(f);
}, },
false => { false => {
if Object::new(path.to_str().unwrap()).exists() { if Object::new(path.to_str().unwrap()).exists() {
@ -79,6 +78,7 @@ pub fn add(args: AddArgs) {
} }
fn add_folder_content(path: PathBuf, added_files: &mut Vec<String>) { fn add_folder_content(path: PathBuf, added_files: &mut Vec<String>) {
// todo check for changes
let mut folders: Vec<PathBuf> = vec![]; let mut folders: Vec<PathBuf> = vec![];
folders.push(path); folders.push(path);

View File

@ -6,11 +6,12 @@ use clap::Values;
use regex::Regex; use regex::Regex;
use crate::services::downloader::Downloader; use crate::services::downloader::Downloader;
use crate::utils::api::ApiProps; use crate::utils::api::ApiProps;
use crate::utils::path::path_buf_to_string;
use crate::utils::remote::{enumerate_remote, EnumerateOptions}; use crate::utils::remote::{enumerate_remote, EnumerateOptions};
use crate::global::global::{DIR_PATH, set_dir_path}; use crate::global::global::{DIR_PATH, set_dir_path};
use crate::services::api::ApiError; use crate::services::api::ApiError;
use crate::services::req_props::{ReqProps, ObjProps}; use crate::services::req_props::{ReqProps, ObjProps};
use crate::store::object::{tree, blob}; use crate::store::object::{tree, blob::Blob};
use crate::commands::config; use crate::commands::config;
use crate::commands::init; use crate::commands::init;
@ -46,7 +47,7 @@ pub fn clone(args: CloneArgs) {
let iter = Path::new(dist_path_str).iter(); let iter = Path::new(dist_path_str).iter();
let dest_dir = iter.last().unwrap(); let dest_dir = iter.last().unwrap();
let lp = std::env::current_dir().unwrap().join(dest_dir); let lp = std::env::current_dir().unwrap().join(dest_dir);
set_dir_path(lp.to_str().unwrap().to_string()); set_dir_path(path_buf_to_string(lp.clone()));
lp lp
}, },
}; };
@ -93,7 +94,7 @@ pub fn clone(args: CloneArgs) {
let downloader = Downloader::new() let downloader = Downloader::new()
.set_api_props(api_props.clone()) .set_api_props(api_props.clone())
.set_files(files) .set_files(files)
.should_log() //.should_log()
.download(ref_path.clone(), Some(&save_blob)); .download(ref_path.clone(), Some(&save_blob));
} }
@ -101,7 +102,7 @@ fn save_blob(obj: ObjProps) {
let relative_s = &obj.clone().relative_s.unwrap(); let relative_s = &obj.clone().relative_s.unwrap();
let relative_p = PathBuf::from(&relative_s); let relative_p = PathBuf::from(&relative_s);
let lastmodified = obj.clone().lastmodified.unwrap().timestamp_millis(); let lastmodified = obj.clone().lastmodified.unwrap().timestamp_millis();
if let Err(err) = blob::add(relative_p, &lastmodified.to_string(), false) { if let Err(err) = Blob::new(relative_p).create(&lastmodified.to_string(), false) {
eprintln!("err: saving ref of {} ({})", relative_s.clone(), err); eprintln!("err: saving ref of {} ({})", relative_s.clone(), err);
} }
} }

View File

@ -38,6 +38,13 @@ pub fn init() {
}; };
path.pop(); path.pop();
path.push("refs");
match builder.create(path.clone()) {
Ok(()) => (),
Err(_) => println!("Error: cannot create refs"),
};
path.pop();
path.push("HEAD"); path.push("HEAD");
match File::create(path.clone()) { match File::create(path.clone()) {
Ok(_) => (), Ok(_) => (),

src/commands/pull.rs (new file, 56 lines)
View File

@ -0,0 +1,56 @@
use std::path::PathBuf;
use std::fs::DirBuilder;
use crate::services::downloader::Downloader;
use crate::services::req_props::ObjProps;
use crate::store::object::blob::Blob;
use crate::store::object::tree;
use crate::utils::api::get_api_props;
use crate::utils::path;
use crate::commands::remote_diff::get_diff;
pub fn pull() {
let relative_p = path::current()
.unwrap()
.strip_prefix(path::repo_root()).unwrap().to_path_buf();
let (folders, files) = get_diff(relative_p);
let ref_p = path::nextsync();
for folder in folders {
let p = ref_p.clone().join(PathBuf::from(folder.relative_s.unwrap()));
if !p.exists() {
// create folder
if let Err(err) = DirBuilder::new().recursive(true).create(p.clone()) {
eprintln!("err: cannot create directory {} ({})", p.display(), err);
}
// add tree
let path_folder = p.strip_prefix(ref_p.clone()).unwrap();
let lastmodified = folder.lastmodified.unwrap().timestamp_millis();
if let Err(err) = tree::add(path_folder.to_path_buf(), &lastmodified.to_string(), false) {
eprintln!("err: saving ref of {} ({})", path_folder.display(), err);
}
}
}
let downloader = Downloader::new()
.set_api_props(get_api_props())
.set_files(files)
.should_log()
.download(ref_p.clone(), Some(&update_blob));
// todo look if need to download or update
}
fn update_blob(obj: ObjProps) {
// todo update blob
return;
let relative_s = &obj.clone().relative_s.unwrap();
let relative_p = PathBuf::from(&relative_s);
let lastmodified = obj.clone().lastmodified.unwrap().timestamp_millis();
// todo update function
if let Err(err) = Blob::new(relative_p).create(&lastmodified.to_string(), false) {
eprintln!("err: saving ref of {} ({})", relative_s.clone(), err);
}
}

View File

@ -1,12 +1,16 @@
use std::path::PathBuf; use std::path::PathBuf;
use crate::commands::{status, config}; use crate::commands::{status, config};
use crate::commands::push::push_factory::{PushFactory, PushState}; use crate::commands::push::push_factory::{PushFactory, PushState};
use crate::store::index;
use super::status::LocalObj;
pub mod push_factory; pub mod push_factory;
pub mod new; pub mod new;
pub mod new_dir; pub mod new_dir;
pub mod rm_dir; pub mod rm_dir;
pub mod deleted; pub mod deleted;
pub mod modified;
pub fn push() { pub fn push() {
// todo err when pushing new folder // todo err when pushing new folder
@ -16,7 +20,7 @@ pub fn push() {
None => { None => {
eprintln!("fatal: no remote set in configuration"); eprintln!("fatal: no remote set in configuration");
//std::process::exit(1); //std::process::exit(1);
String::from("") String::new()
} }
}; };
@ -52,20 +56,24 @@ pub fn push() {
match push_factory.can_push(&mut whitelist) { match push_factory.can_push(&mut whitelist) {
PushState::Valid => { PushState::Valid => {
match push_factory.push() { match push_factory.push() {
Ok(()) => (), Ok(()) => remove_obj_from_index(obj.clone()),
Err(err) => { Err(err) => {
eprintln!("err: pushing {}: {}", obj.name, err); eprintln!("err: pushing {}: {}", obj.name, err);
} }
} }
}, },
PushState::Done => (), PushState::Done => remove_obj_from_index(obj.clone()),
PushState::Conflict => { PushState::Conflict => {
// download file // download file
} }
_ => todo!(), PushState::Error => (),
} }
} }
} }
// read index }
// if dir upload dir
fn remove_obj_from_index(obj: LocalObj) {
if let Err(err) = index::rm_line(obj.path.to_str().unwrap()) {
eprintln!("err: removing {} from index: {}", obj.name, err);
}
} }

View File

@ -3,7 +3,7 @@ use std::io;
use crate::services::api::ApiError; use crate::services::api::ApiError;
use crate::services::delete_path::DeletePath; use crate::services::delete_path::DeletePath;
use crate::store::index; use crate::store::index;
use crate::store::object::blob; use crate::store::object::blob::Blob;
use crate::commands::status::LocalObj; use crate::commands::status::LocalObj;
use crate::commands::push::push_factory::{PushState, PushChange, PushFlowState}; use crate::commands::push::push_factory::{PushState, PushChange, PushFlowState};
@ -42,7 +42,7 @@ impl PushChange for Deleted {
// update tree // update tree
// todo date // todo date
blob::rm(obj.path.clone())?; Blob::new(obj.path.clone()).rm()?;
// remove index // remove index
index::rm_line(obj.path.to_str().unwrap())?; index::rm_line(obj.path.to_str().unwrap())?;

View File

@ -0,0 +1,79 @@
use std::path::PathBuf;
use std::io;
use crate::services::api::ApiError;
use crate::services::req_props::ReqProps;
use crate::services::upload_file::UploadFile;
use crate::commands::status::LocalObj;
use crate::commands::push::push_factory::{PushState, PushChange, PushFlowState};
use crate::store::object::blob::Blob;
pub struct Modified {
pub obj: LocalObj,
}
impl PushChange for Modified {
fn can_push(&self, whitelist: &mut Option<PathBuf>) -> PushState {
match self.flow(&self.obj, whitelist.clone()) {
PushFlowState::Whitelisted => PushState::Done,
PushFlowState::NotOnRemote => PushState::Valid,
PushFlowState::RemoteIsNewer => PushState::Conflict,
PushFlowState::LocalIsNewer => PushState::Valid,
PushFlowState::Error => PushState::Error,
}
}
fn push(&self) -> io::Result<()> {
let obj = &self.obj;
let res = UploadFile::new()
.set_url(obj.path.to_str().unwrap())
.set_file(obj.path.clone())
.send_with_err();
match res {
Err(ApiError::IncorrectRequest(err)) => {
eprintln!("fatal: error pushing file {}: {}", obj.name, err.status());
std::process::exit(1);
},
Err(ApiError::RequestError(_)) => {
eprintln!("fatal: request error pushing file {}", obj.name);
std::process::exit(1);
}
_ => (),
}
// get lastmodified props to update it
let props = ReqProps::new()
.set_url(obj.path.to_str().unwrap())
.getlastmodified()
.send_req_single();
let prop = match props {
Ok(o) => o,
Err(ApiError::IncorrectRequest(err)) => {
eprintln!("fatal: {}", err.status());
std::process::exit(1);
},
Err(ApiError::EmptyError(_)) => {
eprintln!("Failed to get body");
std::process::exit(1);
}
Err(ApiError::RequestError(err)) => {
eprintln!("fatal: {}", err);
std::process::exit(1);
},
Err(ApiError::Unexpected(_)) => todo!()
};
let lastmodified = prop.lastmodified.unwrap().timestamp_millis();
// update blob
Blob::new(obj.path.clone()).update(&lastmodified.to_string())?;
Ok(())
}
// download file with .distant at the end
fn conflict(&self) {
todo!()
}
}

View File

@ -3,8 +3,7 @@ use std::io;
use crate::services::api::ApiError; use crate::services::api::ApiError;
use crate::services::req_props::ReqProps; use crate::services::req_props::ReqProps;
use crate::services::upload_file::UploadFile; use crate::services::upload_file::UploadFile;
use crate::store::index; use crate::store::object::blob::Blob;
use crate::store::object::blob;
use crate::commands::status::LocalObj; use crate::commands::status::LocalObj;
use crate::commands::push::push_factory::{PushState, PushChange, PushFlowState}; use crate::commands::push::push_factory::{PushState, PushChange, PushFlowState};
@ -67,11 +66,8 @@ impl PushChange for New {
let lastmodified = prop.lastmodified.unwrap().timestamp_millis(); let lastmodified = prop.lastmodified.unwrap().timestamp_millis();
// update blob // create new blob
blob::add(obj.path.clone(), &lastmodified.to_string(), true)?; Blob::new(obj.path.clone()).create(&lastmodified.to_string(), true)?;
// remove index
index::rm_line(obj.path.to_str().unwrap())?;
Ok(()) Ok(())
} }

View File

@ -8,6 +8,7 @@ use crate::commands::push::new::New;
use crate::commands::push::new_dir::NewDir; use crate::commands::push::new_dir::NewDir;
use crate::commands::push::rm_dir::RmDir; use crate::commands::push::rm_dir::RmDir;
use crate::commands::push::deleted::Deleted; use crate::commands::push::deleted::Deleted;
use crate::commands::push::modified::Modified;
#[derive(Debug)] #[derive(Debug)]
pub enum PushState { pub enum PushState {
@ -54,6 +55,7 @@ pub trait PushChange {
if err.status() == 404 { if err.status() == 404 {
Ok(None) Ok(None)
} else { } else {
eprintln!("err: when requesting properties of {} ({})", obj.name, err.status());
Err(()) Err(())
} }
}, },
@ -84,20 +86,20 @@ impl PushFactory {
pub fn new(&self, obj: LocalObj) -> Box<dyn PushChange> { pub fn new(&self, obj: LocalObj) -> Box<dyn PushChange> {
match obj.state { match obj.state {
State::New => Box::new(New { obj }), State::New => Box::new(New { obj }),
State::Renamed => todo!(), State::Modified => Box::new(Modified { obj }),
State::Modified => todo!(),
State::Deleted => Box::new(Deleted { obj }), State::Deleted => Box::new(Deleted { obj }),
State::Default => todo!(), State::Default => todo!(),
_ => todo!(),
} }
} }
pub fn new_dir(&self, obj: LocalObj) -> Box<dyn PushChange> { pub fn new_dir(&self, obj: LocalObj) -> Box<dyn PushChange> {
match obj.state { match obj.state {
State::New => Box::new(NewDir { obj }), State::New => Box::new(NewDir { obj }),
State::Renamed => todo!(),
State::Modified => todo!(), State::Modified => todo!(),
State::Deleted => Box::new(RmDir { obj }), State::Deleted => Box::new(RmDir { obj }),
State::Default => todo!(), State::Default => todo!(),
_ => todo!(),
} }
} }
} }

View File

@ -1,52 +1,17 @@
use crate::services::api::ApiError; use crate::services::api::ApiError;
use crate::services::req_props::{ReqProps, ObjProps}; use crate::services::req_props::{ReqProps, ObjProps};
use crate::store::object::{Object, self}; use crate::store::object::Object;
use crate::utils::api::{ApiProps, get_api_props}; use crate::utils::api::{ApiProps, get_api_props};
use crate::utils::path; use crate::utils::path;
use crate::utils::remote::{enumerate_remote, EnumerateOptions}; use crate::utils::remote::{enumerate_remote, EnumerateOptions};
use std::fs::canonicalize;
use std::path::PathBuf; use std::path::PathBuf;
pub struct RemoteDiffArgs { // todo deletion
pub path: Option<String>, pub fn remote_diff() {
} let relative_p = path::current()
.unwrap()
pub fn remote_diff(args: RemoteDiffArgs) { .strip_prefix(path::repo_root()).unwrap().to_path_buf();
let path = { let (folders, files) = get_diff(relative_p);
if let Some(path) = args.path {
let mut cur = path::current().unwrap();
cur.push(path);
let canonic = canonicalize(cur).ok().unwrap();
dbg!(&canonic);
dbg!(path::repo_root());
let ok = canonic.strip_prefix(path::repo_root());
dbg!(&ok);
// todo
PathBuf::from("/")
} else {
PathBuf::from("/")
}
};
let mut folders: Vec<ObjProps> = vec![ObjProps {
contentlength: None,
href: None,
lastmodified: None,
relative_s: Some(path.to_str().unwrap().to_owned()),
}];
let mut files: Vec<ObjProps> = vec![];
let depth = "2"; // todo
// todo origin
let api_props = get_api_props();
let (folders, files) = enumerate_remote(
|a| req(&api_props, depth, a),
&should_skip,
EnumerateOptions {
depth: Some(depth.to_owned()),
relative_s: Some(path.to_str().unwrap().to_owned())
});
for folder in folders { for folder in folders {
println!("should pull {}", folder.clone().relative_s.unwrap()); println!("should pull {}", folder.clone().relative_s.unwrap());
@ -54,7 +19,20 @@ pub fn remote_diff(args: RemoteDiffArgs) {
for file in files { for file in files {
println!("should pull {}", file.clone().relative_s.unwrap()); println!("should pull {}", file.clone().relative_s.unwrap());
} }
}
pub fn get_diff(path: PathBuf) -> (Vec<ObjProps>, Vec<ObjProps>) {
let depth = "2"; // todo
let api_props = get_api_props();
enumerate_remote(
|a| req(&api_props, depth, a),
&should_skip,
EnumerateOptions {
depth: Some(depth.to_owned()),
relative_s: Some(path.to_str().unwrap().to_owned())
})
} }
fn should_skip(obj: ObjProps) -> bool { fn should_skip(obj: ObjProps) -> bool {

View File

@ -5,8 +5,9 @@ use std::collections::HashMap;
use crypto::digest::Digest; use crypto::digest::Digest;
use crypto::sha1::Sha1; use crypto::sha1::Sha1;
use colored::Colorize; use colored::Colorize;
use crate::utils::path; use crate::utils::path::{self, path_buf_to_string};
use crate::store::head; use crate::store::head;
use crate::store::object::blob::Blob;
use crate::utils::read::{read_folder, read_lines}; use crate::utils::read::{read_folder, read_lines};
use crate::store::object::tree; use crate::store::object::tree;
use crate::store::index; use crate::store::index;
@ -22,47 +23,117 @@ enum RemoveSide {
pub enum State { pub enum State {
Default, Default,
New, New,
Renamed, Moved,
Copied,
Modified, Modified,
Deleted, Deleted,
} }
// todo: relative path, filename, get modified // todo: relative path, filename
// todo: not catch added empty folder // todo: not catch added empty folder
pub fn status() { pub fn status() {
let (mut new_objs_hashes, mut del_objs_hashes) = get_diff(); let (mut new_objs_hashes, mut del_objs_hashes, objs_modified) = get_diff();
// get copy, modified let move_copy_hashes = get_move_copy_objs(&mut new_objs_hashes, &mut del_objs_hashes);
let staged_objs = get_staged(&mut new_objs_hashes, &mut del_objs_hashes);
let mut objs: Vec<LocalObj> = del_objs_hashes.iter().map(|x| { let mut hasher = Sha1::new();
let mut modified_objs_hashes = HashMap::new();
for obj in objs_modified {
hasher.input_str(&obj);
let hash = hasher.result_str();
hasher.reset();
modified_objs_hashes.insert(hash, LocalObj {
// todo otype
otype: get_otype(PathBuf::from(obj.clone())),
name: obj.clone().to_string(),
path: PathBuf::from(obj),
path_from: None,
state: State::Modified
});
}
let mut all_hashes = HashMap::new();
all_hashes.extend(move_copy_hashes);
all_hashes.extend(del_objs_hashes);
all_hashes.extend(new_objs_hashes);
all_hashes.extend(modified_objs_hashes);
let staged_objs = get_staged(&mut all_hashes);
let objs: Vec<LocalObj> = all_hashes.iter().map(|x| {
x.1.clone() x.1.clone()
}).collect(); }).collect();
for (_, elt) in new_objs_hashes {
objs.push(elt.clone());
}
print_status(staged_objs, objs); print_status(staged_objs, objs);
} }
fn should_retain(hasher: &mut Sha1, key: String, obj: LocalObj, move_copy_hashes: &mut HashMap<String, LocalObj>, del_objs_h: &mut HashMap<String, LocalObj>) -> bool {
// todo prevent copied or moved if file empty
let mut blob = Blob::new(obj.path.clone());
let mut flag = true;
let identical_blobs = blob.get_all_identical_blobs();
// try to find an identical blob among the deleted files (=moved)
for obj_s in identical_blobs.clone() {
if !flag { break; }
hasher.input_str(&obj_s);
let hash = hasher.result_str();
hasher.reset();
if del_objs_h.contains_key(&hash) {
let mut new_move = obj.clone();
let deleted = del_objs_h.get(&hash).unwrap().clone();
del_objs_h.remove(&hash);
new_move.path_from = Some(deleted.path);
new_move.state = State::Moved;
move_copy_hashes.insert(key.clone(), new_move.clone());
flag = false;
}
}
// if did not find anything before try to find a file with the same content (=copy)
if flag {
if let Some(rel_s) = identical_blobs.first() {
let root = path::repo_root();
let rel_p = PathBuf::from(rel_s.clone());
let abs_p = root.join(rel_p.clone());
if abs_p.exists() {
let mut new_copy = obj.clone();
new_copy.path_from = Some(rel_p);
new_copy.state = State::Copied;
move_copy_hashes.insert(key, new_copy.clone());
flag = false;
}
}
}
flag
}
fn get_move_copy_objs(new_objs_h: &mut HashMap<String, LocalObj>, del_objs_h: &mut HashMap<String, LocalObj>) -> HashMap<String, LocalObj> {
let mut hasher = Sha1::new();
let mut move_copy_hashes = HashMap::new();
new_objs_h.retain(|key, obj| {
should_retain(&mut hasher, key.to_owned(), obj.clone(), &mut move_copy_hashes, del_objs_h)
});
move_copy_hashes
}
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
pub struct LocalObj { pub struct LocalObj {
pub otype: String, pub otype: String,
pub name: String, pub name: String,
pub path: PathBuf, pub path: PathBuf,
pub path_from: Option<PathBuf>, // origin path when state is move or copy
pub state: State, pub state: State,
} }
pub fn get_all_staged() -> Vec<LocalObj> { pub fn get_all_staged() -> Vec<LocalObj> {
let (mut new_objs_hashes, mut del_objs_hashes) = get_diff();
// get copy, modified
let staged_objs = get_staged(&mut new_objs_hashes, &mut del_objs_hashes);
staged_objs.clone()
// todo opti getting staged and then finding differences ?
}
fn get_staged(new_objs_h: &mut HashMap<String, LocalObj>, del_objs_h: &mut HashMap<String, LocalObj>) -> Vec<LocalObj> {
let mut lines: Vec<String> = vec![]; let mut lines: Vec<String> = vec![];
if let Ok(entries) = index::read_line() { if let Ok(entries) = index::read_line() {
@ -71,6 +142,26 @@ fn get_staged(new_objs_h: &mut HashMap<String, LocalObj>, del_objs_h: &mut HashM
} }
} }
let mut staged_objs = vec![];
for line in lines {
let obj = Blob::new(PathBuf::from(line)).get_local_obj();
if obj.state != State::Default {
staged_objs.push(obj);
}
}
staged_objs
}
fn get_staged(hashes: &mut HashMap<String, LocalObj>) -> Vec<LocalObj> {
let mut lines: Vec<String> = vec![];
if let Ok(entries) = index::read_line() {
for entry in entries {
lines.push(entry.unwrap());
}
}
let mut hasher = Sha1::new(); let mut hasher = Sha1::new();
let mut staged_objs: Vec<LocalObj> = vec![]; let mut staged_objs: Vec<LocalObj> = vec![];
@ -83,12 +174,9 @@ fn get_staged(new_objs_h: &mut HashMap<String, LocalObj>, del_objs_h: &mut HashM
hasher.reset(); hasher.reset();
// find it on the list of hashes // find it on the list of hashes
if new_objs_h.contains_key(&hash) { if hashes.contains_key(&hash) {
staged_objs.push(new_objs_h.get(&hash).unwrap().clone()); staged_objs.push(hashes.get(&hash).unwrap().clone());
new_objs_h.remove(&hash); hashes.remove(&hash);
} else if del_objs_h.contains_key(&hash) {
staged_objs.push(del_objs_h.get(&hash).unwrap().clone());
del_objs_h.remove(&hash);
}else { }else {
let mut t_path = ref_p.clone(); let mut t_path = ref_p.clone();
let relative_p = PathBuf::from(obj.clone()); let relative_p = PathBuf::from(obj.clone());
@ -97,6 +185,7 @@ fn get_staged(new_objs_h: &mut HashMap<String, LocalObj>, del_objs_h: &mut HashM
otype: get_otype(t_path.clone()), otype: get_otype(t_path.clone()),
name: obj.to_string(), name: obj.to_string(),
path: relative_p.clone(), path: relative_p.clone(),
path_from: None,
state: { state: {
if t_path.exists() { if t_path.exists() {
State::New State::New
@ -111,9 +200,10 @@ fn get_staged(new_objs_h: &mut HashMap<String, LocalObj>, del_objs_h: &mut HashM
staged_objs staged_objs
} }
fn get_diff() -> (HashMap<String, LocalObj>, HashMap<String, LocalObj>) { fn get_diff() -> (HashMap<String, LocalObj>, HashMap<String, LocalObj>, Vec<String>) {
let mut hashes = HashMap::new(); let mut hashes = HashMap::new();
let mut objs: Vec<String> = vec![]; let mut objs: Vec<String> = vec![];
let mut objs_modified: Vec<String> = vec![];
let root = path::repo_root(); let root = path::repo_root();
@ -149,7 +239,9 @@ fn get_diff() -> (HashMap<String, LocalObj>, HashMap<String, LocalObj>) {
let diff = remove_duplicate(&mut hashes, &mut objs, RemoveSide::Both); let diff = remove_duplicate(&mut hashes, &mut objs, RemoveSide::Both);
obj_to_analyse.append(&mut diff.clone()); obj_to_analyse.append(&mut diff.clone());
} else { } else {
// todo look for change if Blob::new(cur_path).has_change() {
objs_modified.push(cur_obj);
}
} }
} }
@ -173,11 +265,12 @@ fn get_diff() -> (HashMap<String, LocalObj>, HashMap<String, LocalObj>) {
otype: get_otype(abs_p), otype: get_otype(abs_p),
name: obj.to_string(), name: obj.to_string(),
path: p, path: p,
path_from: None,
state: State::New state: State::New
}); });
} }
(new_objs_hashes, hashes) (new_objs_hashes, hashes, objs_modified)
} }
fn get_otype(p: PathBuf) -> String { fn get_otype(p: PathBuf) -> String {
@ -199,6 +292,7 @@ fn add_to_hashmap(lines: Lines<BufReader<File>>, hashes: &mut HashMap<String, Lo
otype: String::from(ftype), otype: String::from(ftype),
name: String::from(name), name: String::from(name),
path: p, path: p,
path_from: None,
state: State::Default, state: State::Default,
}); });
} }
@ -245,24 +339,29 @@ fn print_status(staged_objs: Vec<LocalObj>, objs: Vec<LocalObj>) {
fn print_object(obj: LocalObj) { fn print_object(obj: LocalObj) {
if obj.state == State::Deleted { if obj.state == State::Deleted {
println!(" {} {}", String::from("deleted:").red(), obj.name.red()); println!(" {} {}", String::from("deleted:").red(), obj.name.red());
} else if obj.state == State::Renamed {
println!(" {} {}", String::from("renamed:").red(), obj.name.red());
} else if obj.state == State::New { } else if obj.state == State::New {
println!(" {} {}", String::from("new:").red(), obj.name.red()); println!(" {} {}", String::from("new:").red(), obj.name.red());
} else if obj.state == State::Modified { } else if obj.state == State::Modified {
println!(" {} {}", String::from("modified:").red(), obj.name.red()); println!(" {} {}", String::from("modified:").red(), obj.name.red());
} else if obj.state == State::Moved {
println!(" {} {} => {}", String::from("moved:").red(), path_buf_to_string(obj.path_from.unwrap()).red(), path_buf_to_string(obj.path).red());
} else if obj.state == State::Copied {
println!(" {} {} => {}", String::from("copied:").red(), path_buf_to_string(obj.path_from.unwrap()), path_buf_to_string(obj.path).red());
} }
} }
fn print_staged_object(obj: LocalObj) { fn print_staged_object(obj: LocalObj) {
if obj.state == State::Deleted { if obj.state == State::Deleted {
println!(" {} {}", String::from("deleted:").green(), obj.name.green()); println!(" {} {}", String::from("deleted:").green(), obj.name.green());
} else if obj.state == State::Renamed {
println!(" {} {}", String::from("renamed:").green(), obj.name.green());
} else if obj.state == State::New { } else if obj.state == State::New {
println!(" {} {}", String::from("new:").green(), obj.name.green()); println!(" {} {}", String::from("new:").green(), obj.name.green());
} else if obj.state == State::Modified { } else if obj.state == State::Modified {
println!(" {} {}", String::from("modified:").green(), obj.name.green()); println!(" {} {}", String::from("modified:").green(), obj.name.green());
} else if obj.state == State::Moved {
println!(" {} {} => {}", String::from("moved:").green(), path_buf_to_string(obj.path_from.unwrap()).green(), path_buf_to_string(obj.path).green());
} else if obj.state == State::Copied {
println!(" {} {} => {}", String::from("copied:"), path_buf_to_string(obj.path_from.unwrap()).green(), path_buf_to_string(obj.path).green());
} }
} }
@ -299,11 +398,6 @@ fn is_nextsync_config(path: PathBuf) -> bool {
path.ends_with(".nextsync") path.ends_with(".nextsync")
} }
fn read_head(mut path: PathBuf) -> io::Result<io::Lines<io::BufReader<File>>> {
path.push("HEAD");
read_lines(path)
}
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use super::*;
@ -328,6 +422,7 @@ mod tests {
otype: String::from("tree"), otype: String::from("tree"),
name: String::from("test"), name: String::from("test"),
path: PathBuf::from(""), path: PathBuf::from(""),
path_from: None,
state: State::Default, state: State::Default,
}; };
hashes.insert(hash1.clone(), default_obj.clone()); hashes.insert(hash1.clone(), default_obj.clone());
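
The moved/copied classification added in this file is easier to follow in isolation. Below is a condensed sketch of the decision flow, keyed directly by relative path instead of by path hash for brevity; the standalone `classify_new_file` function is illustrative, not part of this diff:

```rust
use std::collections::HashSet;
use std::path::Path;

// `identical` plays the role of Blob::get_all_identical_blobs(): relative paths
// whose recorded content hash matches the candidate file's content hash.
// `deleted` is the set of paths that status currently considers deleted.
fn classify_new_file(identical: &[String], deleted: &HashSet<String>, repo_root: &Path) -> &'static str {
    // same content and the old path is gone: treat it as a move
    if identical.iter().any(|p| deleted.contains(p)) {
        return "moved";
    }
    // same content and another path with it still exists on disk: treat it as a copy
    if identical.iter().any(|p| repo_root.join(p).exists()) {
        return "copied";
    }
    "new"
}
```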

View File

@ -2,7 +2,6 @@ use clap::{App, Arg, SubCommand};
use textwrap::{fill, Options}; use textwrap::{fill, Options};
use crate::commands::add::AddArgs; use crate::commands::add::AddArgs;
use crate::commands::remote_diff::RemoteDiffArgs;
use crate::commands::clone::{self, CloneArgs}; use crate::commands::clone::{self, CloneArgs};
mod commands; mod commands;
@ -168,18 +167,15 @@ fn main() {
} }
} }
} else if let Some(matches) = matches.subcommand_matches("remote-diff") { } else if let Some(matches) = matches.subcommand_matches("remote-diff") {
commands::remote_diff::remote_diff(RemoteDiffArgs { if let Some(val) = matches.values_of("path") {
path: { global::global::set_dir_path(String::from(val.clone().next().unwrap()));
if let Some(mut path) = matches.values_of("path") {
match path.next() {
Some(p) => Some(String::from(p)),
None => None,
} }
} else { commands::remote_diff::remote_diff();
None } else if let Some(matches) = matches.subcommand_matches("pull") {
if let Some(val) = matches.values_of("path") {
global::global::set_dir_path(String::from(val.clone().next().unwrap()));
} }
}, commands::pull::pull();
});
} else if let Some(_) = matches.subcommand_matches("test") { } else if let Some(_) = matches.subcommand_matches("test") {
} }

View File

@ -16,7 +16,7 @@ impl DownloadFiles {
pub fn new() -> Self { pub fn new() -> Self {
DownloadFiles { DownloadFiles {
api_builder: ApiBuilder::new(), api_builder: ApiBuilder::new(),
relative_ps: String::from(""), relative_ps: String::new(),
} }
} }
@ -74,7 +74,7 @@ impl DownloadFiles {
if res.status().is_success() { if res.status().is_success() {
let body = res.bytes().await.map_err(ApiError::EmptyError)?; let body = res.bytes().await.map_err(ApiError::EmptyError)?;
match Self::write_file(p, &body.to_vec()) { match Self::write_file(p, &body.to_vec()) {
Err(_) => Err(ApiError::Unexpected(String::from(""))), Err(_) => Err(ApiError::Unexpected(String::new())),
Ok(_) => Ok(()), Ok(_) => Ok(()),
} }
} else { } else {

View File

@ -123,7 +123,7 @@ impl Downloader {
// download // download
let res = { let res = {
if should_use_stream { if should_use_stream {
download.save_stream(ref_p.clone(), Some(|a| self.update_bytes_bar(a))) download.save_stream(ref_p.clone(), if self.should_log { Some(|a| self.update_bytes_bar(a)) } else { None })
} else { } else {
download.save(ref_p.clone()) download.save(ref_p.clone())
} }

View File

@ -195,7 +195,7 @@ impl ReqProps {
let mut values: Vec<ObjProps> = vec![]; let mut values: Vec<ObjProps> = vec![];
let mut should_get = false; let mut should_get = false;
let mut val: String = String::from(""); let mut val: String = String::new();
let mut content = ObjProps::new(); let mut content = ObjProps::new();
for event in parser { for event in parser {

View File

@ -30,7 +30,7 @@ impl Object {
if path == "" { if path == "" {
return Object { return Object {
path: PathBuf::from("/"), path: PathBuf::from("/"),
hash: String::from(""), hash: String::new(),
obj_p: head::path(), obj_p: head::path(),
ts: None, ts: None,
} }
@ -222,6 +222,7 @@ fn create_obj(name: String, content: &str) -> io::Result<()> {
Ok(()) Ok(())
} }
// get the last time a blob synced with remote
pub fn get_timestamp(path_s: String) -> Option<i64> { pub fn get_timestamp(path_s: String) -> Option<i64> {
let mut obj_p = path::objects(); let mut obj_p = path::objects();
@ -235,7 +236,7 @@ pub fn get_timestamp(path_s: String) -> Option<i64> {
Some(Ok(line)) => { Some(Ok(line)) => {
let mut data = line.rsplit(' '); let mut data = line.rsplit(' ');
if data.clone().count() >= 2 { if data.clone().count() >= 2 {
Some(data.next().unwrap().parse::<i64>().unwrap()) Some(data.nth_back(1).unwrap().parse::<i64>().unwrap())
} else { } else {
None None
} }

View File

@ -1,52 +1,367 @@
use std::io; use std::io::{self, Read};
use std::fs::{self, File};
use std::io::Write;
use std::fs::OpenOptions;
use std::path::PathBuf; use std::path::PathBuf;
use std::fs; use std::time::SystemTime;
use crate::utils::path; use crypto::sha1::Sha1;
use crypto::digest::Digest;
use crate::commands::status::{LocalObj, State};
use crate::utils::path::path_buf_to_string;
use crate::utils::{path, read};
use crate::store::head; use crate::store::head;
use crate::store::object::{update_dates, parse_path, add_node, create_obj, rm_node}; use crate::store::object::{update_dates, add_node, rm_node};
pub fn add(path: PathBuf, date: &str, up_parent: bool) -> io::Result<()> { pub struct Blob {
let (line, hash, name) = parse_path(path.clone(), true); r_path: PathBuf, // relative path
// add blob reference to parent a_path: PathBuf, // absolute path
if path.iter().count() == 1 { hash: String, // hash of relative path
head::add_line(line)?; file_hash: Option<String>,
} else { obj_p: PathBuf, // path of the object file
add_node(path.parent().unwrap(), &line)?; data: Vec<String>, // content of the blob
} }
let mut content = name.clone().to_owned(); impl Blob {
content.push_str(" "); pub fn new(r_path: PathBuf) -> Blob {
content.push_str(date); let mut hasher = Sha1::new();
hasher.input_str(r_path.to_str().unwrap());
let hash = hasher.result_str();
// create blob object let (dir, res) = hash.split_at(2);
create_obj(hash, &content)?;
let mut obj_p = path::objects();
obj_p.push(dir);
obj_p.push(res);
let root = path::repo_root();
let a_path = root.join(r_path.clone());
Blob {
r_path,
a_path,
hash,
file_hash: None,
obj_p,
data: vec![],
}
}
fn get_line_filename(&mut self) -> (String, String) {
let file_name = self.r_path.file_name().unwrap().to_str().unwrap().to_owned();
let mut line = String::from("blob");
line.push_str(" ");
line.push_str(&self.hash);
line.push_str(" ");
line.push_str(&file_name);
(line, file_name)
}
fn get_file_hash(&mut self) -> String {
if self.file_hash.is_none() {
let bytes = std::fs::read(self.a_path.clone()).unwrap();
let hash = md5::compute(&bytes);
self.file_hash = Some(format!("{:x}", hash))
}
self.file_hash.clone().unwrap()
}
fn create_blob_ref(&mut self, file_name: String, ts_remote: &str) -> io::Result<()> {
let metadata = fs::metadata(self.a_path.clone())?;
let secs = metadata
.modified()
.unwrap()
.duration_since(SystemTime::UNIX_EPOCH)
.unwrap()
.as_secs();
let mut content = file_name.clone();
content.push_str(" ");
content.push_str(ts_remote);
content.push_str(" ");
content.push_str(&metadata.len().to_string());
content.push_str(" ");
content.push_str(&secs.to_string());
content.push_str(" ");
content.push_str(&self.get_file_hash());
let binding = self.obj_p.clone();
let child = binding.file_name();
self.obj_p.pop();
if !self.obj_p.clone().exists() {
fs::create_dir_all(self.obj_p.clone())?;
}
self.obj_p.push(child.unwrap().to_str().unwrap());
let mut file = OpenOptions::new()
.create_new(true)
.write(true)
.open(self.obj_p.clone())?;
writeln!(file, "{}", &content)?;
Ok(())
}
fn get_file_ref(&mut self) -> PathBuf {
let mut refs_p = path::refs();
let file_hash = self.get_file_hash().clone();
let (dir, res) = file_hash.split_at(2);
refs_p.push(dir);
if !refs_p.exists() {
fs::create_dir_all(refs_p.clone());
}
refs_p.push(res);
refs_p
}
// create a file in .nextsync/refs with the hash of this blob that
// redirect to the relative path
fn create_hash_ref(&mut self) -> io::Result<()> {
// todo check if the file has been modified for moved and copy
let refs_p = self.get_file_ref();
let mut file = OpenOptions::new()
.create(true)
.write(true)
.open(refs_p)?;
// todo deal with duplicate content
writeln!(file, "{}", self.r_path.clone().to_str().unwrap())?;
Ok(())
}
pub fn get_all_identical_blobs(&mut self) -> Vec<String> {
let refs_p = self.get_file_ref();
let mut blobs: Vec<String> = vec![];
if let Ok(lines) = read::read_lines(refs_p) {
for line in lines {
if let Ok(l) = line {
blobs.push(l.clone());
}
}
}
blobs
}
pub fn create(&mut self, ts_remote: &str, up_parent: bool) -> io::Result<()> {
let (line, file_name) = self.get_line_filename();
// add blob reference to parent
if self.r_path.iter().count() == 1 {
head::add_line(line)?;
} else {
add_node(self.r_path.parent().unwrap(), &line)?;
}
if let Err(err) = self.create_blob_ref(file_name.clone(), ts_remote.clone()) {
eprintln!("err: saving blob ref of {}: {}", self.obj_p.clone().display(), err);
}
if let Err(err) = self.create_hash_ref() {
eprintln!("err: saving hash ref of {}: {}", self.obj_p.clone().display(), err);
}
// update date for all parent // update date for all parent
if up_parent { if up_parent {
update_dates(path, date)?; update_dates(self.r_path.clone(), ts_remote)?;
} }
Ok(()) Ok(())
} }
pub fn rm(path: PathBuf) -> io::Result<()> { pub fn rm(&mut self) -> io::Result<()> {
let (line, hash, _) = parse_path(path.clone(), true); let (line, _) = self.get_line_filename();
// remove blob reference to parent // remove blob reference to parent
if path.iter().count() == 1 { if self.r_path.iter().count() == 1 {
head::rm_line(&line)?; head::rm_line(&line)?;
} else { } else {
rm_node(path.parent().unwrap(), &line)?; rm_node(self.r_path.parent().unwrap(), &line)?;
} }
// remove blob object // remove blob object
let mut root = path::objects(); fs::remove_file(self.obj_p.clone())?;
let c = hash.clone();
let (dir, rest) = c.split_at(2);
root.push(dir);
root.push(rest);
fs::remove_file(root)?;
Ok(()) Ok(())
} }
pub fn update(&mut self, ts_remote: &str) -> io::Result<()> {
// remove old hash ref
let mut refs_p = path::refs();
let binding = self.saved_hash();
let (dir, res) = binding.split_at(2);
refs_p.push(dir);
refs_p.push(res);
if let Err(err) = fs::remove_file(refs_p) {
eprintln!("err: removing hash ref of {}: {}", self.r_path.clone().display(), err);
}
// creating new hash ref
if let Err(err) = self.create_hash_ref() {
eprintln!("err: saving hash ref of {}: {}", self.r_path.clone().display(), err);
}
// updating content of blob's ref
let metadata = fs::metadata(self.a_path.clone())?;
let secs = metadata
.modified()
.unwrap()
.duration_since(SystemTime::UNIX_EPOCH)
.unwrap()
.as_secs();
let mut content = self.saved_filename();
content.push_str(" ");
content.push_str(ts_remote);
content.push_str(" ");
content.push_str(&metadata.len().to_string());
content.push_str(" ");
content.push_str(&secs.to_string());
content.push_str(" ");
content.push_str(&self.get_file_hash());
let mut file = OpenOptions::new()
.write(true)
.open(self.obj_p.clone())?;
writeln!(file, "{}", &content)?;
Ok(())
}
pub fn read_data(&mut self) {
if self.data.len() == 0 {
if let Ok(mut file) = File::open(self.obj_p.clone()) {
let mut buffer = String::new();
let _ = file.read_to_string(&mut buffer);
let data = buffer.rsplit(' ').collect::<Vec<_>>();
for e in data {
self.data.push(String::from(e));
}
self.data.reverse();
if let Some(last) = self.data.last_mut() {
if last.ends_with("\n") {
last.pop();
}
}
}
}
}
fn saved_filename(&mut self) -> String {
self.read_data();
if self.data.len() >= 1 {
self.data[0].clone()
} else {
String::new()
}
}
fn saved_remote_ts(&mut self) -> String {
self.read_data();
if self.data.len() >= 2 {
self.data[1].clone()
} else {
String::new()
}
}
fn saved_local_size(&mut self) -> String {
self.read_data();
if self.data.len() >= 3 {
self.data[2].clone()
} else {
String::new()
}
}
fn saved_local_ts(&mut self) -> u64 {
self.read_data();
if self.data.len() >= 4 {
self.data[3].parse::<u64>().unwrap()
} else {
0
}
}
fn saved_hash(&mut self) -> String {
self.read_data();
if self.data.len() >= 5 {
self.data[4].clone()
} else {
String::new()
}
}
fn has_same_size(&mut self) -> bool {
let metadata = match fs::metadata(self.a_path.clone()) {
Ok(m) => m,
Err(_) => return true,
};
if self.saved_local_size() == String::new() { return true; }
metadata.len().to_string() == self.saved_local_size()
}
fn is_newer(&mut self) -> bool {
let metadata = match fs::metadata(self.a_path.clone()) {
Ok(m) => m,
Err(_) => return true,
};
let secs = metadata
.modified()
.unwrap()
.duration_since(SystemTime::UNIX_EPOCH)
.unwrap()
.as_secs();
if self.saved_local_ts() == 0 { return true; }
secs > self.saved_local_ts()
}
fn has_same_hash(&mut self) -> bool {
self.read_data();
if self.saved_hash() == String::new() { return false; }
let file_hash = self.get_file_hash().clone();
self.saved_hash() == file_hash
}
pub fn has_change(&mut self) -> bool {
!self.has_same_size() || (self.is_newer() && !self.has_same_hash())
}
pub fn get_local_obj(&mut self) -> LocalObj {
let state = {
let has_obj_ref = self.obj_p.clone().exists();
let blob_exists = self.a_path.clone().exists();
if has_obj_ref && !blob_exists {
State::Deleted
} else if !has_obj_ref && blob_exists {
State::New
} else if !has_obj_ref && !blob_exists {
State::Default
} else if self.has_change() {
// todo
if false {
State::Moved
} else if false {
State::Copied
} else {
State::Modified
}
} else {
State::Default
}
};
LocalObj {
otype: String::from("blob"),
name: path_buf_to_string(self.r_path.clone()),
path: self.r_path.clone(),
path_from: None,
state
}
}
}
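
For reviewers, a short usage sketch of the `Blob` API added in this file, using the method signatures as they appear above (the path and the timestamp literal are made up; error handling is reduced to `?`):

```rust
use std::path::PathBuf;

fn example() -> std::io::Result<()> {
    // relative path inside the repository; purely illustrative
    let mut blob = Blob::new(PathBuf::from("notes/todo.txt"));

    // clone/pull side: record a freshly downloaded file
    // (second argument `false`: do not propagate the date to parent trees)
    blob.create("1692873581000", false)?;

    // status side: size, then mtime, then content hash decide whether it changed
    if blob.has_change() {
        // push side: after a successful upload, refresh the stored metadata
        blob.update("1692873581000")?;
    }
    Ok(())
}
```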

View File

@ -1,10 +1,13 @@
use std::fs::File; use std::fs::File;
use std::io; use std::io;
use std::path::PathBuf; use std::path::PathBuf;
use crate::utils::path::path_buf_to_string;
use crate::utils::{read, path}; use crate::utils::{read, path};
use crate::store::head; use crate::store::head;
use crate::store::object::{self, update_dates, parse_path, hash_obj, add_node, create_obj}; use crate::store::object::{self, update_dates, parse_path, hash_obj, add_node, create_obj};
pub fn add(path: PathBuf, date: &str, up_parent: bool) -> io::Result<()> { pub fn add(path: PathBuf, date: &str, up_parent: bool) -> io::Result<()> {
let (line, hash, name) = parse_path(path.clone(), false); let (line, hash, name) = parse_path(path.clone(), false);
@ -30,7 +33,7 @@ pub fn add(path: PathBuf, date: &str, up_parent: bool) -> io::Result<()> {
} }
pub fn rm(path: PathBuf) -> io::Result<()> { pub fn rm(path: PathBuf) -> io::Result<()> {
let (_, lines) = read(path.to_path_buf().to_str().unwrap().to_string()).unwrap(); let (_, lines) = read(path_buf_to_string(path.to_path_buf())).unwrap();
for line in lines { for line in lines {
let (ftype, hash, _) = parse_line(line.unwrap()); let (ftype, hash, _) = parse_line(line.unwrap());
if ftype == String::from("blob") { if ftype == String::from("blob") {
@ -78,7 +81,7 @@ pub fn read(tree: String) -> Option<(String, io::Lines<io::BufReader<File>>)> {
Ok(mut reader) => { Ok(mut reader) => {
let name = match reader.next() { let name = match reader.next() {
Some(Ok(line)) => line, Some(Ok(line)) => line,
_ => String::from(""), _ => String::new(),
}; };
Some((name, reader)) Some((name, reader))
}, },

View File

@ -117,6 +117,13 @@ pub fn objects() -> PathBuf {
path path
} }
pub fn refs() -> PathBuf {
let mut path = repo_root();
path.push(".nextsync");
path.push("refs");
path
}
pub fn nextsyncignore() -> Option<PathBuf> { pub fn nextsyncignore() -> Option<PathBuf> {
let mut path = repo_root(); let mut path = repo_root();
path.push(".nextsyncignore"); path.push(".nextsyncignore");
@ -126,3 +133,11 @@ pub fn nextsyncignore() -> Option<PathBuf> {
None None
} }
} }
pub fn path_buf_to_string(p: PathBuf) -> String {
if let Some(str) = p.to_str() {
str.to_string()
} else {
String::new()
}
}

View File

@ -20,7 +20,7 @@ pub fn enumerate_remote(
let relative_s = match folder.relative_s { let relative_s = match folder.relative_s {
Some(relative_s) => relative_s, Some(relative_s) => relative_s,
None => options.relative_s.clone().unwrap_or(String::from("")), None => options.relative_s.clone().unwrap_or(String::new())
}; };
// request folder content // request folder content
@ -88,6 +88,6 @@ pub fn enumerate_remote(
} }
fn calc_depth(obj: &ObjProps) -> u16 { fn calc_depth(obj: &ObjProps) -> u16 {
obj.relative_s.clone().unwrap_or(String::from("")).split("/").count() as u16 obj.relative_s.clone().unwrap_or(String::new()).split("/").count() as u16
} }