Compare commits

...

34 Commits

Author SHA1 Message Date
grimhilt 4533b9a72d refactor(tests): use init_test and clean_test 2024-05-07 18:20:32 +02:00
grimhilt 980d2d9a5d feat(add): prevent adding a file without changes 2024-05-07 18:12:05 +02:00
grimhilt 939b6f2fe3 feat: push deletion 2024-05-02 18:36:09 +02:00
grimhilt 4504b98112 fix(push): push deletion 2024-04-18 15:19:35 +02:00
grimhilt e8c8ab9dfe fix(add): add deleted file 2024-04-16 17:54:25 +02:00
grimhilt 3420634bea chore: update clap 2024-03-31 22:17:26 +02:00
grimhilt 1aa02a24af test(push): add push remove test 2024-03-31 19:23:32 +02:00
grimhilt 5e43800d6c chore: update libraries 2024-03-31 19:19:10 +02:00
grimhilt dc7df00ac9 chore: cleaning code 2024-03-17 00:20:58 +01:00
grimhilt a1b9cde71a fix(tests): fix testsuite allow to pass push 2024-03-16 23:57:01 +01:00
grimhilt 7180647d26 test(pull): add test for pull 2024-03-11 14:47:05 +01:00
grimhilt d5891a1a93 feat(push): add object when pushing directory 2024-03-11 14:16:41 +01:00
grimhilt 3207391fdb test(push): check that object are locally created when pushed 2024-03-10 23:06:32 +01:00
grimhilt fa65b6b071 test(add): implicit dir 2024-03-10 17:29:50 +01:00
grimhilt 34dee1ceb6 fix(add): add directory implicitly 2024-03-10 17:29:37 +01:00
grimhilt fe628ffc9f test(add): first tests 2024-03-10 16:49:21 +01:00
grimhilt 6b7a82bec6 fix: prevent adding nextsync config files 2024-03-10 16:49:06 +01:00
grimhilt fdcd4633e5 fix: allow to push explicit directory 2024-03-10 16:19:23 +01:00
grimhilt 06bb51476b fix(push): push folder and return error when tcp fail 2024-03-01 17:56:52 +01:00
grimhilt d8b2116aeb feat(remote): list remote with verbose option 2024-03-01 15:35:38 +01:00
grimhilt 8ed86a05ea style(obj): minor fixes 2024-02-29 09:36:52 +01:00
grimhilt 7951ad0520 refactor(tree): create impl Tree 2024-02-25 17:34:16 +01:00
grimhilt faf7341525 refactor(blob): use object trait to create blob 2024-02-24 18:52:00 +01:00
grimhilt 642c358737 feat(test): allow multiple tests 2024-02-22 14:00:13 +01:00
grimhilt e67082b85a refactor(test): use subdir 2024-02-22 13:02:22 +01:00
grimhilt 211e3702a3 refactor(test): remove old tests 2024-02-21 17:03:21 +01:00
grimhilt a2f746d7f6 test: create first real test 2024-02-21 17:01:16 +01:00
grimhilt 69614b0c9f fix(token): allow to get and store token in local config 2024-02-21 17:01:14 +01:00
grimhilt a5c5f4a713 fix(config): add option to last category 2024-02-21 17:01:10 +01:00
grimhilt eaacff0e55 fix: minor warnings 2024-02-21 17:01:08 +01:00
grimhilt 287953c086 feat(config): create a proper config file with proper settings manipulation 2024-02-21 17:01:06 +01:00
grimhilt 6a11bb494b feat(credential): allow to add credential 2024-02-21 17:01:04 +01:00
grimhilt 1c60560c6e refactor(clone): set remote in config 2024-02-21 17:01:02 +01:00
grimhilt c6534cfd40 feat(remote): add new remote 2024-02-21 17:00:43 +01:00
64 changed files with 2791 additions and 1210 deletions

15
.gitignore vendored
View File

@@ -1,7 +1,10 @@
*
!/**/
!*.rs
!.gitignore
!README.md
!LICENSE
target
*.test
.env
todo
.nextsync
.nextsyncignore
test
tests/nextcloud-docker-dev
tests/data

731
Cargo.lock generated

File diff suppressed because it is too large

View File

@@ -6,22 +6,25 @@ edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
reqwest = { version = "0.11", features = ["stream", "json", "multipart"] }
tokio = { version = "1", features = ["full"] }
rustc-serialize="0.3.25"
reqwest = { version = "0.12", features = ["stream", "json", "multipart"] }
tokio = { version = "1.37", features = ["full"] }
dotenv ="0.15.0"
clap = "2.33"
clap = "4.5.4"
rust-crypto = "0.2.36"
colored = "2.0.0"
xml-rs = "0.8.0"
regex = "1.8.3"
colored = "2.1.0"
xml-rs = "0.8.19"
regex = "1.10.4"
lazy_static = "1.4.0"
glob = "0.3.1"
textwrap = "0.13"
chrono = "0.4.26"
indicatif = "0.17.5"
textwrap = "0.16.1"
chrono = "0.4.37"
indicatif = "0.17.8"
md5 = "0.7.0"
futures-util = "0.3.28"
rpassword = "7.2"
futures-util = "0.3.30"
rpassword = "7.3.1"
rand = "0.8.5"
tempfile = "3.10.1"
[profile.release]
debug = true

View File

@@ -12,4 +12,4 @@ timestamp2: timestamp of file locally to know when the file has changed on the s
folder_name timestamp
tree hash_path folder_name
blob hash_path file_name
```
```
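
Going by the format notes above (and the `get_line` helper added later in this diff), a tree object stored under `.nextsync/objects/` would look roughly like the following; the folder name, child names, and hashes are purely illustrative:

```
docs 1714480532000
tree 356a192b7913b04c54574d18c28d46e6395428ab guide
blob da4b9237bacccdf19c0760cab7aec4a8359010b0 README.md
```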

View File

@@ -6,4 +6,6 @@ pub mod clone;
pub mod push;
pub mod config;
pub mod remote_diff;
pub mod remote;
pub mod pull;
pub mod credential;

View File

@@ -1,18 +1,14 @@
use std::io::Write;
use std::fs::OpenOptions;
use std::path::{Path, PathBuf};
use clap::Values;
use glob::glob;
use crate::store::index;
use crate::store::{self, object::Object};
use crate::utils::{self, path};
use crate::store::object::object::{Obj, ObjMethods};
use crate::utils::nextsyncignore::{self, ignore_file};
use crate::utils::path::{normalize_relative, repo_root, path_buf_to_string};
use super::status::get_all_objs;
pub struct AddArgs<'a> {
pub files: Option<Values<'a>>,
pub struct AddArgs {
pub files: Vec<String>,
pub force: bool,
pub all: bool,
}
@@ -21,13 +17,13 @@ pub struct AddArgs<'a> {
pub fn add(args: AddArgs) {
let mut pattern: String;
let file_vec: Vec<&str> = match args.all {
let file_vec: Vec<String> = match args.all {
true => {
pattern = path_buf_to_string(repo_root());
pattern.push_str("/*");
vec![&pattern]
vec![pattern]
},
false => args.files.unwrap().collect(),
false => args.files,
};
let mut added_files: Vec<String> = vec![];
@@ -35,7 +31,7 @@ pub fn add(args: AddArgs) {
let rules = nextsyncignore::get_rules();
for file in file_vec {
let f = match normalize_relative(file) {
let f = match normalize_relative(&file) {
Ok(f) => f,
Err(err) => {
eprintln!("err: {} {}", file, err);
@@ -43,35 +39,22 @@ pub fn add(args: AddArgs) {
}
};
// check if the file must be ignored
if !args.force && ignore_file(&f, rules.clone(), &mut ignored_f) {
continue;
}
let path = repo_root().join(Path::new(&f));
match path.exists() {
true => {
if path.is_dir() {
add_folder_content(path.to_path_buf(), &mut added_files);
let mut obj = Obj::from_path(f.clone());
if obj.has_changes() {
add_entry(path, args.force, &mut added_files, rules.clone(), &mut ignored_f);
}
added_files.push(f);
},
false => {
if Object::new(path.to_str().unwrap()).exists() {
// object is deleted so not a present file but can still be added
if Obj::from_path(file.clone()).exists_on_remote() {
// object is deleted so not present but can still be added for deletion
added_files.push(String::from(f));
} else {
// try globbing if nothing has been found
for entry in try_globbing(path) {
if path::is_nextsync_config(entry.clone()) {
continue;
}
if !args.force && ignore_file(&path_buf_to_string(entry.clone()), rules.clone(), &mut ignored_f) {
continue;
}
if entry.is_dir() {
add_folder_content(entry.to_path_buf(), &mut added_files);
}
added_files.push(path_buf_to_string(entry.strip_prefix(repo_root()).unwrap().to_path_buf()));
add_entry(entry, args.force, &mut added_files, rules.clone(), &mut ignored_f);
}
}
}
@@ -82,6 +65,41 @@ pub fn add(args: AddArgs) {
write_added_files(added_files);
}
fn add_entry(entry: PathBuf, force: bool, added_files: &mut Vec<String>, rules: Vec<String>, ignored_f: &mut Vec<String>) {
// ignore nextsync config files
if path::is_nextsync_config(entry.clone()) {
return;
}
// check if the file must be ignored
if !force && ignore_file(&path_buf_to_string(entry.clone()), rules, ignored_f) {
return;
}
// add the parent if there is one and it is not already created
add_parent(entry.clone(), added_files);
added_files.push(path_buf_to_string(entry.strip_prefix(repo_root()).unwrap().to_path_buf()));
if entry.is_dir() {
add_folder_content(entry.to_path_buf(), added_files);
}
}
fn add_parent(entry: PathBuf, added_files: &mut Vec<String>) {
let test_parent = entry.strip_prefix(repo_root()).unwrap().parent();
if test_parent.is_none() || test_parent.unwrap() == PathBuf::new() {
return;
}
let parent = entry.parent().unwrap();
if !Obj::from_path(parent).exists_on_remote() {
add_parent(parent.to_path_buf(), added_files);
added_files.push(path_buf_to_string(parent.strip_prefix(repo_root()).unwrap().to_path_buf()));
}
}
fn print_ignored_files(ignored_files: Vec<String>) {
if ignored_files.len() > 0 {
// todo multiple nextsyncignore
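
With the new `add_parent` recursion above, untracked ancestors are staged before the file itself. As a purely hypothetical illustration, adding `docs/guide/intro.md` when neither `docs` nor `docs/guide` exists on the remote would append three entries to the index, parents first:

```
docs
docs/guide
docs/guide/intro.md
```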

View File

@@ -2,7 +2,6 @@ use std::io;
use std::io::prelude::*;
use std::fs::DirBuilder;
use std::path::{Path, PathBuf};
use clap::Values;
use regex::Regex;
use crate::services::downloader::Downloader;
use crate::utils::api::ApiProps;
@@ -12,22 +11,22 @@ use crate::global::global::{DIR_PATH, set_dir_path};
use crate::services::api::ApiError;
use crate::services::api_call::ApiCall;
use crate::services::req_props::{ReqProps, ObjProps};
use crate::store::object::{tree, blob::Blob};
use crate::store::object::{tree::Tree, blob::Blob};
use crate::commands::config;
use crate::commands::init;
pub const DEPTH: &str = "3";
pub struct CloneArgs<'a> {
pub remote: Values<'a>,
pub struct CloneArgs {
pub remote: String,
pub depth: Option<String>,
}
pub fn clone(args: CloneArgs) {
let d = DIR_PATH.lock().unwrap().clone();
let url = args.remote.clone().next().unwrap();
let (host, tmp_user, dist_path_str) = get_url_props(url);
let url = args.remote.clone();
let (host, tmp_user, dist_path_str) = get_url_props(&url);
let username = match tmp_user {
Some(u) => u.to_string(),
None => {
@@ -59,11 +58,15 @@ pub fn clone(args: CloneArgs) {
std::process::exit(1);
} else {
init::init();
let mut remote_config = api_props.username.clone();
remote_config.push_str("@");
remote_config.push_str(api_props.host.strip_prefix("https://").unwrap());
remote_config.push_str(&api_props.root);
if config::set("remote", &remote_config).is_err() {
// set remote origin in config file
let mut remote_url = api_props.username.clone();
remote_url.push_str("@");
remote_url.push_str(api_props.host.strip_prefix("https://").unwrap());
remote_url.push_str(&api_props.root);
if config::add_remote("origin", &remote_url).is_err()
{
eprintln!("err: not able to save remote");
}
}
@@ -87,7 +90,7 @@ pub fn clone(args: CloneArgs) {
// add tree
let path_folder = p.strip_prefix(ref_path.clone()).unwrap();
let lastmodified = folder.lastmodified.unwrap().timestamp_millis();
if let Err(err) = tree::add(path_folder.to_path_buf(), &lastmodified.to_string(), false) {
if let Err(err) = Tree::from_path(path_folder.to_path_buf()).create(&lastmodified.to_string(), false) {
eprintln!("err: saving ref of {} ({})", path_folder.display(), err);
}
}
@@ -103,7 +106,7 @@ fn save_blob(obj: ObjProps) {
let relative_s = &obj.clone().relative_s.unwrap();
let relative_p = PathBuf::from(&relative_s);
let lastmodified = obj.clone().lastmodified.unwrap().timestamp_millis();
if let Err(err) = Blob::new(relative_p).create(&lastmodified.to_string(), false) {
if let Err(err) = Blob::from_path(relative_p).create(&lastmodified.to_string(), false) {
eprintln!("err: saving ref of {} ({})", relative_s.clone(), err);
}
}
@@ -174,7 +177,7 @@ mod tests {
fn test_get_url_props() {
let p = "/foo/bar";
let u = Some("user");
let d = String::from("http://nextcloud.com");
// let d = String::from("http://nextcloud.com");
let sd = String::from("https://nextcloud.com");
let sld = String::from("https://nextcloud.example.com");
let ld = String::from("http://nextcloud.example.com");

View File

@@ -1,39 +1,190 @@
use std::fs::OpenOptions;
use std::io::{self, Write};
use std::io::{self, Write, BufRead, Seek, SeekFrom};
use crate::utils::{path, read};
use std::collections::HashMap;
pub struct ConfigSetArgs {
pub name: String,
pub value: String,
}
pub fn config_set(args: ConfigSetArgs) {
// configure possible options and their associated category
let mut option_categories: HashMap<&str, &str> = HashMap::new();
option_categories.insert("force_insecure", "core");
option_categories.insert("token", "core");
// get category of option
let category = option_categories.get(args.name.as_str());
if category.is_none() {
eprintln!("fatal: '{}' is not a valid option.", args.name.clone());
std::process::exit(1);
}
let _ = write_option_in_cat(category.unwrap(), &args.name, &args.value);
}
pub fn find_option_in_cat(category: &str, option: &str) -> Option<String> {
let mut config = path::nextsync();
config.push("config");
let mut in_target_category = false;
if let Ok(lines) = read::read_lines(config) {
for line in lines {
if let Ok(line) = line {
let trimmed_line = line.trim();
if trimmed_line.starts_with('[') && trimmed_line.ends_with(']') {
in_target_category = trimmed_line == format!("[{}]", category);
} else if in_target_category {
let parts: Vec<&str> = trimmed_line.splitn(2, '=').collect();
if parts.len() == 2 && parts[0].trim() == option {
return Some(parts[1].trim().to_string());
}
}
}
}
}
None
}
pub fn write_option_in_cat(category: &str, option: &str, value: &str) -> io::Result<()> {
let mut config = path::nextsync();
config.push("config");
let mut file = OpenOptions::new()
.read(true)
.write(true)
.create(true)
.open(&config)?;
let mut in_target_category = false;
let mut option_found = false;
// Go to the beginning of the file
file.seek(SeekFrom::Start(0))?;
// Create a temporary file to hold the modified content
let mut tmp_file = tempfile::Builder::new()
.prefix(".nextsyncconfig")
.tempfile()?;
let reader = io::BufReader::new(&file);
for line in reader.lines() {
let line = line?;
let trimmed_line = line.trim();
if trimmed_line.starts_with('[') && trimmed_line.ends_with(']') {
// if we were already in target category we are now leaving it
// add option only if not found before
if in_target_category && !option_found {
writeln!(&mut tmp_file, "\t{} = {}", option, value)?;
} else if !in_target_category {
in_target_category = trimmed_line == format!("[{}]", category);
}
}
if in_target_category && !option_found && trimmed_line.starts_with(&format!("{} =", option)) {
// Option already exists, update its value
writeln!(&mut tmp_file, "\t{} = {}", option, value)?;
option_found = true;
} else {
// Write the original line
writeln!(&mut tmp_file, "{}", line)?;
}
}
// add to last category
if in_target_category && !option_found {
writeln!(&mut tmp_file, "\t{} = {}", option, value)?;
}
// if the category didn't exist create it and add the option
if !in_target_category {
writeln!(&mut tmp_file, "[{}]", category)?;
writeln!(&mut tmp_file, "\t{} = {}", option, value)?;
}
// Flush and sync the temporary file to ensure data is written to disk
tmp_file.flush()?;
// Go back to the beginning of the file
tmp_file.seek(SeekFrom::Start(0))?;
file.seek(SeekFrom::Start(0))?;
// Copy the contents of the temporary file to the original file
io::copy(&mut tmp_file, &mut file)?;
Ok(())
}
pub fn add_remote(name: &str, url: &str) -> io::Result<()> {
let config = path::config();
// check if there is already a remote with this name
if get_remote(name).is_some()
{
eprintln!("error: remote {} already exists.", name);
std::process::exit(3);
}
pub fn set(var: &str, val: &str) -> io::Result<()> {
let mut root = path::nextsync();
root.push("config");
// todo check if exist
let mut file = OpenOptions::new()
.read(true)
.write(true)
.create(true)
.append(true)
.open(root)?;
.open(config)?;
writeln!(file, "[remote \"{}\"]", name)?;
writeln!(file, "\turl = {}", url)?;
let mut line = var.to_owned();
line.push_str(" ");
line.push_str(val);
writeln!(file, "{}", line)?;
Ok(())
}
pub fn get(var: &str) -> Option<String> {
let mut root = path::nextsync();
root.push("config");
if let Ok(lines) = read::read_lines(root) {
for line in lines {
if let Ok(l) = line {
if l.starts_with(var.clone()) {
let (_, val) = l.split_once(" ").unwrap();
return Some(val.to_owned());
}
}
}
}
None
pub fn get_remote(name: &str) -> Option<String> {
find_option_in_cat(&format!("remote \"{}\"", name), "url")
}
/// return a vector of remote found in config file (e.g: ("origin", "https://example.com"))
pub fn get_all_remote() -> Vec<(String, String)> {
let config = path::config();
let mut remotes: Vec<(String, String)> = vec![];
let mut in_remote = false;
let mut remote_name = String::new();
if let Ok(lines) = read::read_lines(config) {
for line in lines {
if let Ok(line) = line {
let trimmed_line = line.trim();
if trimmed_line.starts_with("[remote ") {
in_remote = true;
remote_name = trimmed_line.strip_prefix("[remote \"").unwrap().strip_suffix("\"]").unwrap().to_string();
}
else if trimmed_line.starts_with('[')
{
in_remote = false;
}
else if in_remote {
let parts: Vec<&str> = trimmed_line.splitn(2, '=').collect();
if parts.len() == 2 {
remotes.push((remote_name.to_string(), parts[1].trim().to_string()))
}
}
}
}
}
remotes
}
pub fn get_core(name: &str) -> Option<String> {
find_option_in_cat("core", name)
}
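
Taken together, `add_remote`, `write_option_in_cat`, and `find_option_in_cat` above read and write a git-style INI layout in `.nextsync/config`. A hypothetical file after adding a remote and setting a couple of core options (all values below are illustrative):

```
[core]
	force_insecure = true
	token = AbCdEf123456
[remote "origin"]
	url = alice@nextcloud.example.com/remote.php/dav/files/alice
```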

View File

@@ -0,0 +1,54 @@
use crate::commands::clone::get_url_props;
use crate::services::api::ApiError::RequestError;
use crate::services::login::Login;
use crate::services::api_call::ApiCall;
use crate::commands::config;
pub struct CredentialArgs {
pub username: String,
pub password: Option<String>,
}
pub fn credential_add(args: CredentialArgs) {
// get remote if exists
let remote = match config::get_remote("origin") {
None => {
eprintln!("fatal: No remote origin, impossible to send request to get token");
std::process::exit(1);
},
Some(remote) => remote
};
let (host, _, _) = get_url_props(&remote);
// get username and password
let username = args.username.to_owned();
let password = match args.password {
Some(mut pwd) => pwd.to_owned(),
None => {
println!("Please enter the password for {}: ", username);
rpassword::read_password().unwrap()
}
};
// get token
let get_token = Login::new()
.set_auth(&username, &password)
.set_host(Some(host))
.send_login();
// deal with error
if let Err(err) = get_token {
if let RequestError(err) = err {
eprintln!("fatal: Failed to get token for these credential. ({})", err);
}
else {
eprintln!("fatal: Failed to get token for these credential.");
}
std::process::exit(1);
}
// save token
let _ = config::write_option_in_cat("core", "token", get_token.unwrap().as_str());
}

View File

@@ -1,7 +1,6 @@
use std::env;
use std::fs::{DirBuilder, File};
use std::path::PathBuf;
use crate::utils::read::read_folder;
use crate::global::global::DIR_PATH;
pub fn init() {
@@ -12,23 +11,24 @@ pub fn init() {
None => env::current_dir().unwrap(),
};
// todo
// check if dir is empty
if let Ok(entries) = read_folder(path.clone()) {
if entries.len() != 0 {
eprintln!("fatal: destination path '{}' already exists and is not an empty directory.", path.display());
std::process::exit(1);
}
} else {
eprintln!("fatal: cannot open the destination directory");
std::process::exit(1);
}
// if let Ok(entries) = read_folder(path.clone()) {
// if entries.len() != 0 {
// eprintln!("fatal: destination path '{}' already exists and is not an empty directory.", path.display());
// std::process::exit(1);
// }
// } else {
// eprintln!("fatal: cannot open the destination directory");
// std::process::exit(1);
// }
let builder = DirBuilder::new();
path.push(".nextsync");
match builder.create(path.clone()) {
Ok(()) => (),
Err(_) => println!("Error: cannot create .nextsync"),
Err(err) => println!("Error: cannot create .nextsync ({})", err),
};
path.push("objects");
@@ -58,12 +58,13 @@ pub fn init() {
Err(_) => println!("Error: cannot create index"),
}
path.pop();
path.pop();
path.push(".nextsyncignore");
match File::create(path) {
Ok(_) => (),
Err(_) => println!("Error: cannot create .nextsyncignore"),
}
// todo
// path.pop();
// path.pop();
// path.push(".nextsyncignore");
//
// match File::create(path) {
// Ok(_) => (),
// Err(_) => println!("Error: cannot create .nextsyncignore"),
// }
}

View File

@@ -4,7 +4,7 @@ use std::fs::DirBuilder;
use crate::services::downloader::Downloader;
use crate::services::req_props::ObjProps;
use crate::store::object::blob::Blob;
use crate::store::object::tree;
use crate::store::object::tree::Tree;
use crate::utils::api::get_api_props;
use crate::utils::path;
use crate::commands::remote_diff::get_diff;
@@ -16,10 +16,10 @@ pub fn pull() {
.strip_prefix(path::repo_root()).unwrap().to_path_buf();
let (folders, files) = get_diff(relative_p);
let ref_p = path::nextsync();
let root = path::repo_root();
for folder in folders {
let p = ref_p.clone().join(PathBuf::from(folder.relative_s.unwrap()));
let p = root.clone().join(PathBuf::from(folder.relative_s.unwrap()));
if !p.exists() {
// create folder
if let Err(err) = DirBuilder::new().recursive(true).create(p.clone()) {
@@ -27,9 +27,9 @@ pub fn pull() {
}
// add tree
let path_folder = p.strip_prefix(ref_p.clone()).unwrap();
let path_folder = p.strip_prefix(root.clone()).unwrap();
let lastmodified = folder.lastmodified.unwrap().timestamp_millis();
if let Err(err) = tree::add(path_folder.to_path_buf(), &lastmodified.to_string(), false) {
if let Err(err) = Tree::from_path(path_folder).create(&lastmodified.to_string(), false) {
eprintln!("err: saving ref of {} ({})", path_folder.display(), err);
}
}
@@ -39,18 +39,16 @@ pub fn pull() {
.set_api_props(get_api_props())
.set_files(files)
.should_log()
.download(ref_p.clone(), Some(&update_blob));
.download(root, Some(&update_blob));
// todo look if need to download or update
}
fn update_blob(obj: ObjProps) {
// todo update blob
return;
let relative_s = &obj.clone().relative_s.unwrap();
let relative_p = PathBuf::from(&relative_s);
let lastmodified = obj.clone().lastmodified.unwrap().timestamp_millis();
// todo update function
if let Err(err) = Blob::new(relative_p).create(&lastmodified.to_string(), false) {
if let Err(err) = Blob::from_path(relative_p).create(&lastmodified.to_string(), false) {
eprintln!("err: saving ref of {} ({})", relative_s.clone(), err);
}
}

View File

@@ -15,8 +15,7 @@ pub mod moved;
pub mod copied;
pub fn push() {
// todo err when pushing new folder
let _remote = match config::get("remote") {
let _remote = match config::get_remote("origin") {
Some(r) => r,
None => {
eprintln!("fatal: no remote set in configuration");
@@ -72,9 +71,10 @@ pub fn push() {
},
PushState::Done => remove_obj_from_index(obj.clone()),
PushState::Conflict => {
eprintln!("conflict when pushing blob");
// download file
}
PushState::Error => (),
PushState::Error => (eprintln!("error when pushing changes blob")),
}
}
}

View File

@@ -70,7 +70,7 @@ impl PushChange for Copied {
let lastmodified = prop.lastmodified.unwrap().timestamp_millis();
// create destination blob
if let Err(err) = Blob::new(obj.path.clone()).create(&lastmodified.to_string(), false) {
if let Err(err) = Blob::from_path(obj.path.clone()).create(&lastmodified.to_string(), false) {
eprintln!("err: creating ref of {}: {}", obj.name.clone(), err);
}

View File

@@ -7,6 +7,7 @@ use crate::store::index;
use crate::store::object::blob::Blob;
use crate::commands::status::LocalObj;
use crate::commands::push::push_factory::{PushState, PushChange, PushFlowState};
use crate::store::object::object::ObjMethods;
pub struct Deleted {
pub obj: LocalObj
@@ -43,7 +44,7 @@ impl PushChange for Deleted {
// update tree
// todo date
Blob::new(obj.path.clone()).rm()?;
Blob::from_path(obj.path.clone()).rm_node()?;
// remove index
index::rm_line(obj.path.to_str().unwrap())?;

View File

@@ -68,7 +68,7 @@ impl PushChange for Modified {
let lastmodified = prop.lastmodified.unwrap().timestamp_millis();
// update blob
Blob::new(obj.path.clone()).update(&lastmodified.to_string())?;
Blob::from_path(obj.path.clone()).update(&lastmodified.to_string())?;
Ok(())
}

View File

@@ -8,6 +8,7 @@ use crate::commands::status::LocalObj;
use crate::commands::push::push_factory::{PushState, PushChange, PushFlowState};
use crate::store::object::blob::Blob;
use crate::utils::path::path_buf_to_string;
use crate::store::object::object::ObjMethods;
pub struct Moved {
pub obj: LocalObj,
@@ -70,10 +71,10 @@ impl PushChange for Moved {
let lastmodified = prop.lastmodified.unwrap().timestamp_millis();
// delete source and create destination blob
if let Err(err) = Blob::new(obj.path.clone()).create(&lastmodified.to_string(), false) {
if let Err(err) = Blob::from_path(obj.path.clone()).create(&lastmodified.to_string(), false) {
eprintln!("err: creating ref of {}: {}", obj.name.clone(), err);
}
if let Err(err) = Blob::new(obj.path_from.clone().unwrap()).rm() {
if let Err(err) = Blob::from_path(obj.path_from.clone().unwrap()).rm() {
eprintln!("err: removing ref of {}: {}", obj.name.clone(), err);
}

View File

@@ -32,11 +32,12 @@ impl PushChange for New {
match res {
Err(ApiError::IncorrectRequest(err)) => {
eprintln!("fatal: error pushing file {}: {}", obj.name, err.status());
dbg!(&err);
eprintln!("fatal: error pushing file '{}': {}", obj.name, err.status());
std::process::exit(1);
},
Err(ApiError::RequestError(_)) => {
eprintln!("fatal: request error pushing file {}", obj.name);
eprintln!("fatal: request error pushing file '{}'", obj.name);
std::process::exit(1);
}
_ => (),
@@ -68,7 +69,7 @@ impl PushChange for New {
let lastmodified = prop.lastmodified.unwrap().timestamp_millis();
// create new blob
Blob::new(obj.path.clone()).create(&lastmodified.to_string(), false)?;
Blob::from_path(obj.path.clone()).create(&lastmodified.to_string(), false)?;
Ok(())
}

View File

@@ -5,7 +5,7 @@ use crate::services::api_call::ApiCall;
use crate::services::req_props::ReqProps;
use crate::services::create_folder::CreateFolder;
use crate::store::index;
use crate::store::object::tree;
use crate::store::object::tree::Tree;
use crate::commands::status::LocalObj;
use crate::commands::push::push_factory::{PushState, PushChange, PushFlowState};
@@ -75,7 +75,7 @@ impl PushChange for NewDir {
let lastmodified = prop.lastmodified.unwrap().timestamp_millis();
// update tree
tree::add(obj.path.clone(), &lastmodified.to_string(), true)?;
Tree::from_path(obj.path.clone()).create(&lastmodified.to_string(), true)?;
// remove index
index::rm_line(obj.path.to_str().unwrap())?;

View File

@@ -73,9 +73,16 @@ pub trait PushChange {
};
// check if remote is newest
let last_sync_ts = Blob::new(obj.path.clone())
.saved_remote_ts()
.parse::<i64>().unwrap();
let last_sync_ts = {
if obj.otype == String::from("blob") {
Blob::from_path(obj.path.clone())
.saved_remote_ts()
.parse::<i64>().unwrap()
} else {
// todo timestamp on tree
99999999999999
}
};
let remote_ts = obj_data.lastmodified.unwrap().timestamp_millis();
if last_sync_ts < remote_ts {

View File

@@ -4,9 +4,10 @@ use crate::services::api::ApiError;
use crate::services::api_call::ApiCall;
use crate::services::delete_path::DeletePath;
use crate::store::index;
use crate::store::object::tree;
use crate::store::object::tree::Tree;
use crate::commands::status::LocalObj;
use crate::commands::push::push_factory::{PushState, PushChange, PushFlowState};
use crate::store::object::object::ObjMethods;
pub struct RmDir {
pub obj: LocalObj
@@ -49,7 +50,7 @@ impl PushChange for RmDir {
// update tree
// todo update date
tree::rm(obj.path.clone())?;
Tree::from_path(obj.path.clone()).rm()?;
// remove index
index::rm_line(obj.path.to_str().unwrap())?;

27
src/commands/remote.rs Normal file
View File

@@ -0,0 +1,27 @@
use crate::commands::config;
use super::config::get_all_remote;
pub struct RemoteArgs {
pub name: String,
pub url: String,
}
pub fn remote_add(args: RemoteArgs) {
let _ = config::add_remote(&args.name, &args.url);
}
pub fn remote_list(verbose: bool) {
let remotes = get_all_remote();
for remote in remotes {
if verbose
{
println!("{} {}", remote.0, remote.1);
}
else
{
println!("{}", remote.0);
}
}
}

View File

@@ -24,7 +24,7 @@ pub fn remote_diff() {
pub fn get_diff(path: PathBuf) -> (Vec<ObjProps>, Vec<ObjProps>) {
let depth = "2"; // todo opti
let depth = "6"; // todo opti
let api_props = get_api_props();
enumerate_remote(
@@ -55,6 +55,7 @@ fn req(api_props: &ApiProps, depth: &str, relative_s: &str) -> Result<Vec<ObjPro
.set_request(relative_s, &api_props)
.set_depth(depth)
.gethref()
.getcontentlength() // todo opti
.getlastmodified()
.send_req_multiple()
}

View File

@@ -1,16 +1,15 @@
use std::fs::File;
use std::path::PathBuf;
use std::io::{Lines, BufReader};
use std::collections::HashMap;
use crypto::digest::Digest;
use crypto::sha1::Sha1;
use colored::Colorize;
use crate::utils::path::{self, path_buf_to_string};
use crate::store::head;
use crate::store::object::blob::Blob;
use crate::utils::read::{read_folder, read_lines};
use crate::store::object::tree;
use crate::store::object::object::Obj;
use crate::store::object::tree::Tree;
use crate::utils::read::read_folder;
use crate::store::index;
use crate::store::object::object::ObjMethods;
pub struct StatusArgs {
pub nostyle: bool,
@@ -97,7 +96,7 @@ fn should_retain(hasher: &mut Sha1, key: String, obj: LocalObj, move_copy_hashes
{
return true;
}
let mut blob = Blob::new(obj.path.clone());
let mut blob = Blob::from_path(obj.path.clone());
let mut flag = true;
let identical_blobs = blob.get_all_identical_blobs();
@@ -161,24 +160,8 @@ pub struct LocalObj {
}
pub fn get_all_staged() -> Vec<LocalObj> {
let mut lines: Vec<String> = vec![];
if let Ok(entries) = index::read_line() {
for entry in entries {
lines.push(entry.unwrap());
}
}
let mut staged_objs = vec![];
for line in lines {
let obj = Blob::new(line).get_local_obj();
if obj.state != State::Default {
staged_objs.push(obj);
}
}
staged_objs
let mut all_hashes = get_all_objs_hashes();
get_staged(&mut all_hashes)
}
fn get_staged(hashes: &mut HashMap<String, LocalObj>) -> Vec<LocalObj> {
@@ -227,20 +210,27 @@ fn get_staged(hashes: &mut HashMap<String, LocalObj>) -> Vec<LocalObj> {
staged_objs
}
fn read_tree_to_hashmap(tree: &mut Tree, hashes: &mut HashMap<String, LocalObj>, path: PathBuf) {
while let Some(child) = tree.next() {
hashes.insert(String::from(child.get_hash_path()), child.get_local_obj());
};
}
fn get_diff() -> (HashMap<String, LocalObj>, HashMap<String, LocalObj>, Vec<String>) {
let mut hashes = HashMap::new();
let mut objs: Vec<String> = vec![];
let mut objs_modified: Vec<String> = vec![];
let root = path::repo_root();
let current_p = path::current().unwrap();
// todo use repo_root instead of current
let dist_path = current_p.strip_prefix(root.clone()).unwrap().to_path_buf();
if let Ok(lines) = read_lines(head::path()) {
add_to_hashmap(lines, &mut hashes, dist_path.clone());
}
read_tree_to_hashmap(&mut Tree::from_head(), &mut hashes, dist_path.clone());
//if let Ok(lines) = read_lines(head::path()) {
// add_to_hashmap(lines, &mut hashes, dist_path.clone());
//}
if let Ok(entries) = read_folder(root.clone()) {
add_to_vec(entries, &mut objs, root.clone());
@@ -255,22 +245,27 @@ fn get_diff() -> (HashMap<String, LocalObj>, HashMap<String, LocalObj>, Vec<Stri
let obj_path = root.clone().join(cur_path.clone());
if obj_path.is_dir() {
if let Some((_, lines)) = tree::read(cur_obj.clone()) {
add_to_hashmap(lines, &mut hashes, cur_path.clone());
}
// read virtual tree
read_tree_to_hashmap(&mut Tree::from_path(cur_obj.clone()), &mut hashes, dist_path.clone());
//let mut tree = Tree::from_path(cur_obj.clone());
//if let Some(lines) = tree.get_children() {
//add_to_hashmap(lines, &mut hashes, cur_path.clone());
//}
// read physical tree
if let Ok(entries) = read_folder(obj_path.clone()) {
add_to_vec(entries, &mut objs, root.clone());
}
// remove duplicate
let diff = remove_duplicate(&mut hashes, &mut objs, RemoveSide::Both);
obj_to_analyse.append(&mut diff.clone());
} else {
if Blob::new(cur_path).has_change() {
if Blob::from_path(cur_path).has_changes() {
objs_modified.push(cur_obj);
}
}
}
for (_, elt) in &mut hashes {
@@ -308,24 +303,24 @@ fn get_otype(p: PathBuf) -> String {
}
}
fn add_to_hashmap(lines: Lines<BufReader<File>>, hashes: &mut HashMap<String, LocalObj>, path: PathBuf) {
for line in lines {
if let Ok(ip) = line {
if ip.clone().len() > 5 {
let (ftype, hash, name) = tree::parse_line(ip);
let mut p = path.clone();
p.push(name.clone());
hashes.insert(String::from(hash), LocalObj{
otype: String::from(ftype),
name: String::from(name),
path: p,
path_from: None,
state: State::Default,
});
}
}
}
}
//fn add_to_hashmap(lines: Lines<BufReader<File>>, hashes: &mut HashMap<String, LocalObj>, path: PathBuf) {
// for line in lines {
// if let Ok(ip) = line {
// if ip.clone().len() > 5 {
// let (ftype, hash, name) = tree::parse_line(ip);
// let mut p = path.clone();
// p.push(name.clone());
// hashes.insert(String::from(hash), LocalObj{
// otype: String::from(ftype),
// name: String::from(name),
// path: p,
// path_from: None,
// state: State::Default,
// });
// }
// }
// }
//}
fn add_to_vec(entries: Vec<PathBuf>, objects: &mut Vec<String>, root: PathBuf) {
for entry in entries {
@@ -355,7 +350,7 @@ fn print_status(staged_objs: Vec<LocalObj>, objs: Vec<LocalObj>) {
// not staged files
if objs.len() != 0 {
println!("Changes not staged for push:");
println!(" (Use\"nextsync add <file>...\" to update what will be pushed)");
println!(" (Use \"nextsync add <file>...\" to update what will be pushed)");
for object in objs {
print_object(object);

View File

@@ -1,4 +1,4 @@
use clap::{App, SubCommand};
use clap::Command;
mod subcommands;
@@ -9,38 +9,41 @@ mod global;
mod store;
fn main() {
let app = App::new("Nextsync")
let app = Command::new("Nextsync")
.version("1.0")
.author("grimhilt")
.about("A git-line command line tool to interact with nextcloud")
.setting(clap::AppSettings::SubcommandRequiredElseHelp)
.subcommand(subcommands::clone::create())
.subcommand(subcommands::init::create())
.subcommand(subcommands::status::create())
.subcommand(subcommands::add::create())
.subcommand(subcommands::push::create())
.subcommand(subcommands::reset::create())
.subcommand(subcommands::config::create())
.subcommand(subcommands::remote_diff::create())
.subcommand(subcommands::pull::create())
.subcommand(
SubCommand::with_name("test")
);
.subcommands([
subcommands::clone::create(),
subcommands::init::create(),
subcommands::status::create(),
subcommands::add::create(),
subcommands::push::create(),
subcommands::reset::create(),
subcommands::remote::create(),
subcommands::config::create(),
subcommands::remote_diff::create(),
subcommands::pull::create(),
subcommands::credential::create(),
]);
// .setting(clap::AppSettings::SubcommandRequiredElseHelp);
let matches = app.get_matches();
match matches.subcommand() {
("init", Some(args)) => subcommands::init::handler(args),
("status", Some(args)) => subcommands::status::handler(args),
("add", Some(args)) => subcommands::add::handler(args),
("reset", Some(_)) => commands::reset::reset(),
("clone", Some(args)) => subcommands::clone::handler(args),
("push", Some(_)) => commands::push::push(),
("config", Some(args)) => subcommands::config::handler(args),
("remote-diff", Some(args)) => subcommands::remote_diff::handler(args),
("pull", Some(args)) => subcommands::pull::handler(args),
(_, _) => {},
Some(("init", args)) => subcommands::init::handler(args),
Some(("status", args)) => subcommands::status::handler(args),
Some(("add", args)) => subcommands::add::handler(args),
Some(("reset", _)) => commands::reset::reset(),
Some(("clone", args)) => subcommands::clone::handler(args),
Some(("push", _)) => commands::push::push(),
Some(("config", args)) => subcommands::config::handler(args),
Some(("remote-diff", args)) => subcommands::remote_diff::handler(args),
Some(("pull", args)) => subcommands::pull::handler(args),
Some(("remote", args)) => subcommands::remote::handler(args),
Some(("credential", args)) => subcommands::credential::handler(args),
Some((_, _)) => {},
None => {},
};
}
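
The hunk above is the clap 2 → 4 migration: `App`/`SubCommand` become `Command`, and `matches.subcommand()` now yields `Option<(&str, &ArgMatches)>`. A minimal sketch of what one of the `create()` helpers could look like under clap 4 (the actual `subcommands::add::create` is not shown in this diff, so the names and arguments below are assumptions):

```rust
use clap::{Arg, ArgAction, Command};

// Hypothetical shape of subcommands::add::create() under clap 4.
pub fn create() -> Command {
    Command::new("add")
        .about("Stage files to be pushed to the remote")
        .arg(Arg::new("files").action(ArgAction::Append))
        .arg(
            Arg::new("force")
                .short('f')
                .long("force")
                .action(ArgAction::SetTrue),
        )
}
```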

View File

@@ -1,9 +1,10 @@
use std::error::Error;
use lazy_static::lazy_static;
use std::sync::Mutex;
use reqwest::Client;
use reqwest::RequestBuilder;
use reqwest::multipart::Form;
use reqwest::{Response, Error, Method};
use reqwest::{Response, Method};
use reqwest::header::{HeaderValue, CONTENT_TYPE, HeaderMap, IntoHeaderName};
use crate::utils::api::ApiProps;
use crate::commands::config;
@@ -42,12 +43,19 @@ impl ApiBuilder {
}
pub fn set_url(&mut self, method: Method, url: &str) -> &mut ApiBuilder {
self.request = Some(self.client.request(method, url));
let mut new_url = url.to_owned();
if let Some(active) = config::get_core("force_insecure") {
if active == "true" {
new_url = url.replace("https", "http");
}
}
self.request = Some(self.client.request(method, new_url));
self
}
pub fn build_request(&mut self, method: Method, path: &str) -> &mut ApiBuilder {
let remote = match config::get("remote") {
let remote = match config::get_remote("origin") {
Some(r) => r,
None => {
eprintln!("fatal: unable to find a remote");
@@ -64,8 +72,8 @@ impl ApiBuilder {
if path != "/" {
url.push_str(path);
}
self.request = Some(self.client.request(method, url));
self
self.set_url(method, &url)
}
pub fn set_req(&mut self, meth: Method, p: &str, api_props: &ApiProps) -> &mut ApiBuilder {
@@ -79,8 +87,8 @@ impl ApiBuilder {
if p != "/" {
url.push_str(p);
}
self.request = Some(self.client.request(meth, url));
self
self.set_url(meth, &url)
}
pub fn set_basic_auth(&mut self, login: String, pwd: String) -> &mut ApiBuilder {
@@ -177,7 +185,7 @@ impl ApiBuilder {
self.set_request_manager();
}
let res = tokio::runtime::Runtime::new().unwrap().block_on(async {
let res_req = tokio::runtime::Runtime::new().unwrap().block_on(async {
match self.request.take() {
None => {
eprintln!("fatal: incorrect request");
@@ -192,7 +200,16 @@ impl ApiBuilder {
}
},
}
}).map_err(ApiError::RequestError)?;
});
// handle request error
let res = match res_req {
Err(err) => {
eprintln!("fatal: {}", err.source().unwrap());
std::process::exit(1);
},
Ok(res) => res,
};
if res.status().is_success() {
if need_text {
@@ -208,7 +225,7 @@ impl ApiBuilder {
}
}
pub async fn old_send(&mut self) -> Result<Response, Error> {
pub async fn old_send(&mut self) -> Result<Response, reqwest::Error> {
let mut request_manager = get_request_manager().lock().unwrap();
let request_manager = request_manager.as_mut().unwrap();
if !self.host.is_none()
@@ -230,9 +247,9 @@ impl ApiBuilder {
Some(req) => {
if let Some(headers) = &self.headers {
req.headers(headers.clone())
.send().await.map_err(Error::from)
.send().await.map_err(reqwest::Error::from)
} else {
req.send().await.map_err(Error::from)
req.send().await.map_err(reqwest::Error::from)
}
},
}

View File

@@ -24,7 +24,7 @@ impl Copy {
pub fn set_url_copy(&mut self, url: &str, destination: &str) -> &mut Copy {
self.api_builder.build_request(Method::from_bytes(b"COPY").unwrap(), url);
let remote = match config::get("remote") {
let remote = match config::get_remote("origin") {
Some(r) => r,
None => {
eprintln!("fatal: unable to find a remote");

View File

@@ -93,6 +93,7 @@ impl Downloader {
let mut total_size = 0;
let nb_objs = self.files.len();
// set the full size of the download
self.files
.iter()
.for_each(|f|
@@ -111,11 +112,7 @@ impl Downloader {
let should_use_stream = {
if let Some(size) = file.contentlength {
if size > SIZE_TO_STREAM {
true
} else {
false
}
size > SIZE_TO_STREAM
} else {
false
}

View File

@@ -28,7 +28,11 @@ impl ApiCall for Login {
let url = match self.host.clone() {
Some(h) => {
let mut u = String::from("https://");
let mut u = if &h[0..8] == "https://" || &h[0..7] == "http://" {
String::new()
} else {
String::from("https://")
};
u.push_str(&h);
u.push_str("/ocs/v2.php/core/getapppassword");
u
@@ -53,6 +57,12 @@ impl Login {
self
}
pub fn set_auth(&mut self, username: &str, password: &str) -> &mut Login {
self.login = username.to_owned();
self.password = password.to_owned();
self
}
pub fn set_host(&mut self, host: Option<String>) -> &mut Login {
self.host = host;
self

View File

@@ -25,7 +25,7 @@ impl Move {
pub fn set_url_move(&mut self, url: &str, destination: &str) -> &mut Move {
self.api_builder.build_request(Method::from_bytes(b"MOVE").unwrap(), url);
let remote = match config::get("remote") {
let remote = match config::get_remote("origin") {
Some(r) => r,
None => {
eprintln!("fatal: unable to find a remote");

View File

@@ -67,7 +67,7 @@ impl ApiCall for ReqProps {
}
fn set_url(&mut self, url: &str) -> &mut ReqProps {
let remote = match config::get("remote") {
let remote = match config::get_remote("origin") {
Some(r) => r,
None => {
eprintln!("fatal: unable to find a remote");

View File

@@ -39,7 +39,7 @@ impl RequestManager {
{
if self.host.is_none()
{
let remote = match config::get("remote") {
let remote = match config::get_remote("origin") {
Some(r) => r,
None => {
// todo ask user instead
@@ -56,6 +56,7 @@ impl RequestManager {
pub fn get_token(&mut self) -> String {
if self.token.is_none() {
// look in global config
if let Some(token) = gconfig::read_token() {
if !token.is_empty() {
self.token = Some(token);
@@ -63,10 +64,21 @@ impl RequestManager {
}
}
// look in local config
if let Some(token) = config::find_option_in_cat("core", "token")
{
if !token.is_empty() {
self.token = Some(token);
return self.token.clone().unwrap();
}
}
// ask for a token
let get_token = Login::new()
.ask_auth()
.set_host(Some(self.get_host()))
.send_login();
// todo deal with error cases
self.token = Some(get_token.unwrap());
if let Err(err) = gconfig::write_token(&self.token.clone().unwrap()) {

View File

@@ -9,6 +9,7 @@ use crate::utils::{read, path};
pub mod tree;
pub mod blob;
pub mod object;
pub struct Object {
path: PathBuf,
@@ -131,7 +132,7 @@ fn rm(hash: &str) -> io::Result<()> {
fn rm_node(path: &Path, node: &str) -> io::Result<()> {
let mut root = path::objects();
let (dir, rest) = hash_obj(path.clone().to_str().unwrap());
let (dir, rest) = hash_obj(path.to_str().unwrap());
root.push(dir);
root.push(rest);
@@ -143,11 +144,11 @@ fn rm_node(path: &Path, node: &str) -> io::Result<()> {
fn add_node(path: &Path, node: &str) -> io::Result<()> {
let mut root = path::objects();
let (dir, rest) = hash_obj(path.clone().to_str().unwrap());
let (dir, rest) = hash_obj(path.to_str().unwrap());
root.push(dir);
if !root.exists() {
todo!();
//todo!();
}
root.push(rest);
@@ -168,7 +169,7 @@ fn update_dates(mut path: PathBuf, date: &str) -> io::Result<()> {
let (dir, res) = hash_obj(path.to_str().unwrap());
obj_p.push(dir);
obj_p.push(res);
update_date(obj_p.clone(), date.clone())?;
update_date(obj_p.clone(), date)?;
obj_p.pop();
obj_p.pop();
}

View File

@@ -4,76 +4,201 @@ use std::io::Write;
use std::fs::OpenOptions;
use std::path::PathBuf;
use std::time::SystemTime;
use crypto::sha1::Sha1;
use crypto::digest::Digest;
use crate::commands::status::{LocalObj, State};
use crate::commands::status::State;
use crate::utils::into::IntoPathBuf;
use crate::utils::path::path_buf_to_string;
use crate::utils::{path, read};
use crate::store::head;
use crate::store::object::{update_dates, add_node, rm_node};
use crate::store::object::update_dates;
use crate::store::object::object::ObjMethods;
use crate::store::object::object::Obj;
const HASH_EMPTY: &str = "d41d8cd98f00b204e9800998ecf8427e";
pub struct Blob {
r_path: PathBuf, // relative path
a_path: PathBuf, // absolute path
hash: String, // hash of relative path
file_hash: Option<String>,
obj_p: PathBuf, // path of the object file
data: Vec<String>, // content of the blob
pub obj: Obj,
data: Vec<String>, // content of the ref file
file_hash: Option<String>, // hash of the file's content
}
//pub struct Blob {
// r_path: PathBuf, // relative path
// a_path: PathBuf, // absolute path
// hash: String, // hash of relative path
// file_hash: Option<String>,
// obj_p: PathBuf, // path of the object file
// data: Vec<String>, // content of the blob
//}
impl Blob {
pub fn new<S>(r_path: S) -> Blob where S: IntoPathBuf {
let r_path = r_path.into();
if r_path.is_dir() {
eprintln!("{}: is a directory not a blob", r_path.display());
}
let mut hasher = Sha1::new();
hasher.input_str(r_path.to_str().unwrap());
let hash = hasher.result_str();
let (dir, res) = hash.split_at(2);
let mut obj_p = path::objects();
obj_p.push(dir);
obj_p.push(res);
let root = path::repo_root();
let a_path = root.join(r_path.clone());
Blob {
r_path,
a_path,
hash,
file_hash: None,
obj_p,
pub fn new(obj: Obj) -> Self {
Self {
obj,
data: vec![],
}
file_hash: None,
}
}
fn get_line_filename(&mut self) -> (String, String) {
let file_name = self.r_path.file_name().unwrap().to_str().unwrap().to_owned();
let mut line = String::from("blob");
line.push_str(" ");
line.push_str(&self.hash);
line.push_str(" ");
line.push_str(&file_name);
(line, file_name)
pub fn from_path<S>(r_path: S) -> Blob where S: IntoPathBuf {
let r_path = r_path.into();
Self {
obj: Obj::from_path(r_path),
data: vec![],
file_hash: None,
}
}
fn get_file_hash(&mut self) -> String {
if self.file_hash.is_none() {
let bytes = std::fs::read(self.a_path.clone()).unwrap();
let bytes = std::fs::read(self.get_file_path()).unwrap();
let hash = md5::compute(&bytes);
self.file_hash = Some(format!("{:x}", hash))
}
self.file_hash.clone().unwrap()
}
fn create_blob_ref(&mut self, file_name: String, ts_remote: &str) -> io::Result<()> {
let metadata = fs::metadata(self.a_path.clone())?;
/// read line of blob to get all informations and store them in self.data
pub fn read_data(&mut self) {
if self.data.len() == 0 {
if let Ok(mut file) = File::open(self.get_obj_path()) {
let mut buffer = String::new();
let _ = file.read_to_string(&mut buffer);
let data = buffer.rsplit(' ').collect::<Vec<_>>();
for e in data {
self.data.push(String::from(e));
}
self.data.reverse();
// remove \n of last element
if let Some(last) = self.data.last_mut() {
if last.ends_with("\n") {
last.pop();
}
}
}
}
}
fn get_data_index(&mut self, index: usize) -> String {
self.read_data();
if self.data.len() >= index + 1 {
self.data[index].clone()
} else {
String::new()
}
}
fn saved_filename(&mut self) -> String {
self.get_data_index(0)
}
pub fn saved_remote_ts(&mut self) -> String {
self.get_data_index(1)
}
fn saved_local_size(&mut self) -> String {
self.get_data_index(2)
}
fn saved_local_ts(&mut self) -> u64 {
match self.get_data_index(3).as_str() {
"" => 0,
str => str.parse::<u64>().unwrap()
}
}
fn saved_hash(&mut self) -> String {
self.get_data_index(4)
}
fn has_same_size(&mut self) -> bool {
let metadata = match fs::metadata(self.get_file_path()) {
Ok(m) => m,
Err(_) => return true,
};
if self.saved_local_size() == String::new() { return true; }
metadata.len().to_string() == self.saved_local_size()
}
fn is_newer(&mut self) -> bool {
let metadata = match fs::metadata(self.get_file_path()) {
Ok(m) => m,
Err(_) => return true,
};
let secs = metadata
.modified()
.unwrap()
.duration_since(SystemTime::UNIX_EPOCH)
.unwrap()
.as_secs();
if self.saved_local_ts() == 0 { return true; }
secs > self.saved_local_ts()
}
fn has_same_hash(&mut self) -> bool {
if self.saved_hash() == String::new() { return false; }
let file_hash = self.get_file_hash().clone();
self.saved_hash() == file_hash
}
pub fn has_changes(&mut self) -> bool {
!self.has_same_size() || (self.is_newer() && !self.has_same_hash())
}
pub fn get_all_identical_blobs(&mut self) -> Vec<String> {
// an empty file is a new file not the copy of another empty file
if self.get_file_hash() == HASH_EMPTY {
return vec![];
}
let refs_p = self.get_obj_path();
let mut blobs: Vec<String> = vec![];
if let Ok(lines) = read::read_lines(refs_p) {
for line in lines {
if let Ok(l) = line {
blobs.push(l.clone());
}
}
}
blobs
}
pub fn status(&mut self, path_from: &mut Option<PathBuf>) -> State {
let has_obj_ref = self.get_obj_path().exists();
let blob_exists = self.get_file_path().exists();
if has_obj_ref && !blob_exists {
State::Deleted
} else if !has_obj_ref && blob_exists {
let identical_blobs = self.get_all_identical_blobs();
if identical_blobs.len() != 0 {
let identical_blob = Blob::from_path(identical_blobs[0].clone()).get_local_obj();
if identical_blob.state == State::Deleted {
*path_from = Some(identical_blob.path);
State::Moved
} else if identical_blob.state == State::Default {
*path_from = Some(identical_blob.path);
State::Copied
} else {
State::New
}
} else {
State::New
}
} else if !has_obj_ref && !blob_exists {
State::Default
} else if self.has_changes() {
State::Modified
} else {
State::Default
}
}
fn create_blob_ref(&mut self, ts_remote: &str) -> io::Result<()> {
let metadata = fs::metadata(self.get_file_path())?;
let secs = metadata
.modified()
.unwrap()
@@ -81,30 +206,28 @@ impl Blob {
.unwrap()
.as_secs();
let mut content = file_name.clone();
content.push_str(" ");
content.push_str(ts_remote);
content.push_str(" ");
content.push_str(&metadata.len().to_string());
content.push_str(" ");
content.push_str(&secs.to_string());
content.push_str(" ");
content.push_str(&self.get_file_hash());
// build line with all needed properties
let content = format!("{} {} {} {} {}",
self.get_name(),
ts_remote,
metadata.len().to_string(),
secs.to_string(),
self.get_file_hash());
let binding = self.obj_p.clone();
let child = binding.file_name();
self.obj_p.pop();
if !self.obj_p.clone().exists() {
fs::create_dir_all(self.obj_p.clone())?;
// create parent dir if needed
let mut obj_path = self.get_obj_path();
obj_path.pop();
if !obj_path.exists() {
fs::create_dir_all(obj_path)?;
}
self.obj_p.push(child.unwrap().to_str().unwrap());
// open ref file
let mut file = OpenOptions::new()
.create_new(true)
.write(true)
.open(self.obj_p.clone())?;
.open(self.get_obj_path())?;
writeln!(file, "{}", &content)?;
writeln!(file, "{}", content)?;
Ok(())
}
@@ -134,261 +257,74 @@ impl Blob {
.open(refs_p)?;
// todo deal with duplicate content
writeln!(file, "{}", self.r_path.clone().to_str().unwrap())?;
writeln!(file, "{}", self.get_relative_file_path().to_str().unwrap())?;
Ok(())
}
pub fn get_all_identical_blobs(&mut self) -> Vec<String> {
// an empty file is a new file not the copy of another empty file
if self.get_file_hash() == HASH_EMPTY {
return vec![];
}
let refs_p = self.get_file_ref();
let mut blobs: Vec<String> = vec![];
if let Ok(lines) = read::read_lines(refs_p) {
for line in lines {
if let Ok(l) = line {
blobs.push(l.clone());
}
}
}
blobs
}
pub fn create(&mut self, ts_remote: &str, up_parent: bool) -> io::Result<()> {
let (line, file_name) = self.get_line_filename();
// add blob reference to parent
if self.r_path.iter().count() == 1 {
head::add_line(line)?;
} else {
add_node(self.r_path.parent().unwrap(), &line)?;
}
let _ = self.add_ref_to_parent();
if let Err(err) = self.create_blob_ref(file_name.clone(), ts_remote.clone()) {
eprintln!("err: saving blob ref of {}: {}", self.r_path.clone().display(), err);
if let Err(err) = self.create_blob_ref(ts_remote.clone()) {
eprintln!("err: saving blob ref of {}: {}", self.get_relative_file_path().display(), err);
}
if let Err(err) = self.create_hash_ref() {
eprintln!("err: saving hash ref of {}: {}", self.r_path.clone().display(), err);
eprintln!("err: saving hash ref of {}: {}", self.get_relative_file_path().display(), err);
}
// update date for all parent
if up_parent {
if let Err(err) = update_dates(self.r_path.clone(), ts_remote) {
eprintln!("err: updating parent date of {}: {}", self.r_path.clone().display(), err);
if let Err(err) = update_dates(self.get_relative_file_path(), ts_remote) {
eprintln!("err: updating parent date of {}: {}", self.get_relative_file_path().display(), err);
}
}
Ok(())
}
pub fn rm(&mut self) -> io::Result<()> {
let (line, _) = self.get_line_filename();
// remove blob reference to parent
if self.r_path.iter().count() == 1 {
head::rm_line(&line)?;
} else {
rm_node(self.r_path.parent().unwrap(), &line)?;
}
// remove blob object
fs::remove_file(self.obj_p.clone())?;
Ok(())
}
pub fn update(&mut self, ts_remote: &str) -> io::Result<()> {
// remove old hash ref
let mut refs_p = path::refs();
let binding = self.saved_hash();
let (dir, res) = binding.split_at(2);
refs_p.push(dir);
refs_p.push(res);
if let Err(err) = fs::remove_file(refs_p) {
eprintln!("err: removing hash ref of {}: {}", self.r_path.clone().display(), err);
}
// creating new hash ref
if let Err(err) = self.create_hash_ref() {
eprintln!("err: saving hash ref of {}: {}", self.r_path.clone().display(), err);
}
// updating content of blob's ref
let metadata = fs::metadata(self.a_path.clone())?;
let secs = metadata
.modified()
.unwrap()
.duration_since(SystemTime::UNIX_EPOCH)
.unwrap()
.as_secs();
let mut content = self.saved_filename();
content.push_str(" ");
content.push_str(ts_remote);
content.push_str(" ");
content.push_str(&metadata.len().to_string());
content.push_str(" ");
content.push_str(&secs.to_string());
content.push_str(" ");
content.push_str(&self.get_file_hash());
let mut file = OpenOptions::new()
.write(true)
.open(self.obj_p.clone())?;
writeln!(file, "{}", &content)?;
// // remove old hash ref
// let mut refs_p = path::refs();
// let binding = self.saved_hash();
// let (dir, res) = binding.split_at(2);
// refs_p.push(dir);
// refs_p.push(res);
// if let Err(err) = fs::remove_file(refs_p) {
// eprintln!("err: removing hash ref of {}: {}", self.r_path.clone().display(), err);
// }
//
// // creating new hash ref
// if let Err(err) = self.create_hash_ref() {
// eprintln!("err: saving hash ref of {}: {}", self.r_path.clone().display(), err);
// }
//
// // updating content of blob's ref
// let metadata = fs::metadata(self.a_path.clone())?;
// let secs = metadata
// .modified()
// .unwrap()
// .duration_since(SystemTime::UNIX_EPOCH)
// .unwrap()
// .as_secs();
//
// let mut content = self.saved_filename();
// content.push_str(" ");
// content.push_str(ts_remote);
// content.push_str(" ");
// content.push_str(&metadata.len().to_string());
// content.push_str(" ");
// content.push_str(&secs.to_string());
// content.push_str(" ");
// content.push_str(&self.get_file_hash());
//
// let mut file = OpenOptions::new()
// .write(true)
// .open(self.obj_p.clone())?;
//
// writeln!(file, "{}", &content)?;
Ok(())
}
pub fn read_data(&mut self) {
if self.data.len() == 0 {
if let Ok(mut file) = File::open(self.obj_p.clone()) {
let mut buffer = String::new();
let _ = file.read_to_string(&mut buffer);
let data = buffer.rsplit(' ').collect::<Vec<_>>();
for e in data {
self.data.push(String::from(e));
}
self.data.reverse();
// remove \n of last element
if let Some(last) = self.data.last_mut() {
if last.ends_with("\n") {
last.pop();
}
}
}
}
}
fn saved_filename(&mut self) -> String {
self.read_data();
if self.data.len() >= 1 {
self.data[0].clone()
} else {
String::new()
}
}
pub fn saved_remote_ts(&mut self) -> String {
self.read_data();
if self.data.len() >= 2 {
self.data[1].clone()
} else {
String::new()
}
}
fn saved_local_size(&mut self) -> String {
self.read_data();
if self.data.len() >= 3 {
self.data[2].clone()
} else {
String::new()
}
}
fn saved_local_ts(&mut self) -> u64 {
self.read_data();
if self.data.len() >= 4 {
self.data[3].parse::<u64>().unwrap()
} else {
0
}
}
fn saved_hash(&mut self) -> String {
self.read_data();
if self.data.len() >= 5 {
self.data[4].clone()
} else {
String::new()
}
}
fn has_same_size(&mut self) -> bool {
let metadata = match fs::metadata(self.a_path.clone()) {
Ok(m) => m,
Err(_) => return true,
};
if self.saved_local_size() == String::new() { return true; }
metadata.len().to_string() == self.saved_local_size()
}
fn is_newer(&mut self) -> bool {
let metadata = match fs::metadata(self.a_path.clone()) {
Ok(m) => m,
Err(_) => return true,
};
let secs = metadata
.modified()
.unwrap()
.duration_since(SystemTime::UNIX_EPOCH)
.unwrap()
.as_secs();
if self.saved_local_ts() == 0 { return true; }
secs > self.saved_local_ts()
}
fn has_same_hash(&mut self) -> bool {
if self.saved_hash() == String::new() { return false; }
let file_hash = self.get_file_hash().clone();
self.saved_hash() == file_hash
}
pub fn has_change(&mut self) -> bool {
!self.has_same_size() || (self.is_newer() && !self.has_same_hash())
}
pub fn status(&mut self, path_from: &mut Option<PathBuf>) -> State {
let has_obj_ref = self.obj_p.clone().exists();
let blob_exists = self.a_path.clone().exists();
if has_obj_ref && !blob_exists {
State::Deleted
} else if !has_obj_ref && blob_exists {
let identical_blobs = self.get_all_identical_blobs();
if identical_blobs.len() != 0 {
let identical_blob = Blob::new(identical_blobs[0].clone())
.get_local_obj();
if identical_blob.state == State::Deleted {
*path_from = Some(identical_blob.path);
State::Moved
} else if identical_blob.state == State::Default {
*path_from = Some(identical_blob.path);
State::Copied
} else {
State::New
}
} else {
State::New
}
} else if !has_obj_ref && !blob_exists {
State::Default
} else if self.has_change() {
State::Modified
} else {
State::Default
}
}
pub fn get_local_obj(&mut self) -> LocalObj {
let mut path_from = None;
let state = self.status(&mut path_from);
LocalObj {
otype: String::from("blob"),
name: path_buf_to_string(self.r_path.clone()),
path: self.r_path.clone(),
path_from,
state
}
}
}
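
For reference, `create_blob_ref` above serializes five space-separated fields onto a single line of the blob's object file, which is what `saved_filename`, `saved_remote_ts`, `saved_local_size`, `saved_local_ts`, and `saved_hash` read back by index. A purely illustrative line (all values made up):

```
notes.txt 1714480532000 2048 1714480512 9e107d9d372bb6826bd81d3542a419d6
```

That is: file name, remote timestamp in milliseconds, local size in bytes, local mtime in seconds, and the MD5 of the file's content.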

415
src/store/object/object.rs Normal file
View File

@@ -0,0 +1,415 @@
use std::io;
use std::fs;
use std::path::PathBuf;
use crate::utils::path;
use crate::store::head;
use crate::store::object::{add_node, rm_node};
use crypto::sha1::Sha1;
use crypto::digest::Digest;
use crate::utils::into::IntoPathBuf;
use crate::store::object::{blob::Blob, tree::Tree};
use crate::commands::status::{State, LocalObj};
#[derive(Clone, Copy)]
pub enum ObjType {
TREE,
BLOB,
DEFAULT
}
pub trait ObjMethods {
fn get_type(&self) -> ObjType;
fn get_obj_path(&self) -> PathBuf;
fn get_file_path(&self) -> PathBuf;
fn get_relative_file_path(&self) -> PathBuf;
fn get_repo_file_path(&self) -> PathBuf;
fn get_name(&self) -> String;
fn get_hash_path(&self) -> String;
fn get_local_obj(&self) -> LocalObj;
fn get_line(&self, obj_type: ObjType) -> String;
fn add_ref_to_parent(&self) -> io::Result<()>;
fn rm(&mut self) -> io::Result<()>;
fn rm_node(&mut self) -> io::Result<()>;
fn rm_node_down(&mut self) -> io::Result<()>;
fn exists_on_remote(&mut self) -> bool;
fn has_changes(&mut self) -> bool;
}
pub struct Obj {
name: String,
obj_path: PathBuf,
obj_type: ObjType,
file_path: PathBuf, // file here is used as both file and directory
relative_file_path: PathBuf,
repo_file_path: PathBuf,
hash_path: String, // hash of the relative path of the file
}
impl ObjMethods for Obj {
fn get_type(&self) -> ObjType {
self.obj_type
}
fn get_obj_path(&self) -> PathBuf {
self.obj_path.clone()
}
fn get_file_path(&self) -> PathBuf {
self.file_path.clone()
}
fn get_relative_file_path(&self) -> PathBuf {
self.relative_file_path.clone()
}
fn get_repo_file_path(&self) -> PathBuf {
self.repo_file_path.clone()
}
fn get_local_obj(&self) -> LocalObj {
LocalObj {
otype: match self.obj_type {
ObjType::BLOB => String::from("blob"),
ObjType::TREE => String::from("tree"),
ObjType::DEFAULT => String::from("default"),
},
name: self.get_name(),
path: self.get_repo_file_path(),
path_from: None,
state: State::New
}
}
fn get_name(&self) -> String {
self.name.clone()
}
fn get_hash_path(&self) -> String {
self.hash_path.clone()
}
// build line for parent reference
fn get_line(&self, obj_type: ObjType) -> String {
let type_str = match obj_type {
ObjType::BLOB => "blob",
ObjType::TREE => "tree",
ObjType::DEFAULT => "default",
};
format!("{} {} {}", type_str, self.get_hash_path(), self.get_name())
}
fn add_ref_to_parent(&self) -> io::Result<()> {
let line = self.get_line(self.obj_type);
if self.get_relative_file_path().iter().count() == 1 {
head::add_line(line)?;
} else {
add_node(self.get_relative_file_path().parent().unwrap(), &line)?;
}
Ok(())
}
fn rm_node(&mut self) -> io::Result<()> {
// remove parent reference to self
let line = self.get_line(self.obj_type);
if self.get_relative_file_path().iter().count() == 1 {
head::rm_line(&line)?;
} else {
rm_node(self.get_relative_file_path().parent().unwrap(), &line)?;
}
Ok(())
}
fn rm_node_down(&mut self) -> io::Result<()> {
eprintln!("rm_node_down: tried to do this on Obj");
Ok(())
}
fn rm(&mut self) -> io::Result<()> {
eprintln!("rm: tried to do this on Obj");
Ok(())
}
fn exists_on_remote(&mut self) -> bool {
self.obj_path.exists()
}
fn has_changes(&mut self) -> bool {
if !self.obj_path.exists() {
return true;
}
match self.obj_type {
ObjType::BLOB => Blob::from_path(self.relative_file_path.clone()).has_changes(),
ObjType::TREE => Tree::from_path(self.relative_file_path.clone()).has_changes(),
ObjType::DEFAULT => {
unreachable!();
}
}
}
}
impl ObjMethods for Blob {
fn get_type(&self) -> ObjType {
self.obj.get_type()
}
fn get_obj_path(&self) -> PathBuf {
self.obj.get_obj_path()
}
fn get_file_path(&self) -> PathBuf {
self.obj.get_file_path()
}
fn get_relative_file_path(&self) -> PathBuf {
self.obj.get_relative_file_path()
}
fn get_repo_file_path(&self) -> PathBuf {
self.obj.get_repo_file_path()
}
fn get_local_obj(&self) -> LocalObj {
self.obj.get_local_obj()
}
fn get_name(&self) -> String {
self.obj.get_name()
}
fn get_hash_path(&self) -> String {
self.obj.get_hash_path()
}
fn get_line(&self, _: ObjType) -> String {
self.obj.get_line(ObjType::BLOB)
}
fn add_ref_to_parent(&self) -> io::Result<()> {
self.obj.add_ref_to_parent()
}
fn rm_node(&mut self) -> io::Result<()> {
// remove self object and children objects

let _ = self.rm_node_down();
self.obj.rm_node()
}
fn rm_node_down(&mut self) -> io::Result<()> {
// remove reference to self
fs::remove_file(self.get_obj_path())?;
Ok(())
}
fn rm(&mut self) -> io::Result<()> {
// remove all references, including children's one
self.rm_node()?;
// remove file
fs::remove_file(self.get_file_path())?;
Ok(())
}
fn exists_on_remote(&mut self) -> bool {
self.obj.exists_on_remote()
}
fn has_changes(&mut self) -> bool {
self.obj.has_changes()
}
}
impl ObjMethods for Tree {
fn get_type(&self) -> ObjType {
self.obj.get_type()
}
fn get_obj_path(&self) -> PathBuf {
self.obj.get_obj_path()
}
fn get_file_path(&self) -> PathBuf {
self.obj.get_file_path()
}
fn get_relative_file_path(&self) -> PathBuf {
self.obj.get_relative_file_path()
}
fn get_repo_file_path(&self) -> PathBuf {
self.obj.get_repo_file_path()
}
fn get_local_obj(&self) -> LocalObj {
self.obj.get_local_obj()
}
fn get_name(&self) -> String {
self.obj.get_name()
}
fn get_hash_path(&self) -> String {
self.obj.get_hash_path()
}
fn get_line(&self, _: ObjType) -> String {
self.obj.get_line(ObjType::TREE)
}
fn add_ref_to_parent(&self) -> io::Result<()> {
self.obj.add_ref_to_parent()
}
fn rm_node(&mut self) -> io::Result<()> {
// remove self object and children objects
let _ = self.rm_node_down();
self.obj.rm_node()
}
/// remove objects and children but not parent reference to self
fn rm_node_down(&mut self) -> io::Result<()> {
// remove children
while let Some(mut child) = self.next() {
match child.get_type() {
ObjType::TREE => child.rm_node_down(),
ObjType::BLOB => child.rm_node_down(),
_ => Ok(())
}?;
};
// remove reference to self
fs::remove_file(self.get_obj_path())?;
Ok(())
}
fn rm(&mut self) -> io::Result<()> {
// remove all references, including children's one
self.rm_node()?;
// remove directory and all subfiles
fs::remove_dir_all(self.get_file_path())?;
Ok(())
}
fn exists_on_remote(&mut self) -> bool {
self.obj.exists_on_remote()
}
fn has_changes(&mut self) -> bool {
self.obj.has_changes()
}
}
impl Obj {
fn new() -> Self {
Obj {
name: String::new(),
obj_path: PathBuf::new(),
file_path: PathBuf::new(),
obj_type: ObjType::DEFAULT,
hash_path: String::new(),
relative_file_path: PathBuf::new(),
repo_file_path: PathBuf::new()
}
}
pub fn from_path<S>(path: S) -> Obj where S: IntoPathBuf {
let path = path.into();
let mut hasher = Sha1::new();
hasher.input_str(path.to_str().unwrap());
let hash = hasher.result_str();
let (dir, res) = hash.split_at(2);
let mut obj_path = path::objects();
obj_path.push(dir);
obj_path.push(res);
// set to absolute path if not already
let root = path::repo_root();
let abs_path = match path.clone().starts_with(root.clone()) {
true => path.clone(),
false => root.join(path.clone())
};
Obj {
name: match abs_path.file_name() {
None => String::new(),
Some(name) => name.to_str().unwrap().to_owned()
},
obj_path,
obj_type: match path.exists() {
true => match path.is_dir() {
true => ObjType::TREE,
false => ObjType::BLOB
},
false => ObjType::DEFAULT
},
file_path: abs_path,
relative_file_path: path.clone(),
repo_file_path: path,
hash_path: hash,
}
}
/// load from the information line stored in the object
pub fn from_line(line: String, base_dir: Option<PathBuf>) -> Box<dyn ObjMethods> {
let mut split = line.trim().rsplit(' ');
if split.clone().count() != 3 {
eprintln!("fatal: invalid object(s) ({})", line.trim());
std::process::exit(1);
}
let name = split.next().unwrap();
let hash_path = split.next().unwrap();
let obj_type = split.next().unwrap();
let (dir, res) = hash_path.split_at(2);
let mut obj_path = path::objects();
obj_path.push(dir);
obj_path.push(res);
let path = match base_dir {
Some(dir) => dir.join(name),
None => PathBuf::from(name),
};
let root = path::repo_root();
let abs_path = root.join(path.clone());
let obj = Obj {
name: String::from(name),
obj_path,
obj_type: match obj_type {
"tree" => ObjType::TREE,
"blob" => ObjType::BLOB,
_ => ObjType::DEFAULT
},
file_path: abs_path,
relative_file_path: path.clone(),
repo_file_path: path,
hash_path: String::from(hash_path),
};
match obj.obj_type {
ObjType::TREE => Box::new(Tree::new(obj)),
ObjType::BLOB => Box::new(Blob::new(obj)),
ObjType::DEFAULT => Box::new(Tree::new(obj))
}
}
pub fn from_head() -> Self {
Obj {
name: String::new(),
obj_path: head::path(),
obj_type: ObjType::TREE,
file_path: PathBuf::new(),
relative_file_path: PathBuf::new(),
repo_file_path: PathBuf::new(),
hash_path: String::new(),
}
}
}
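For reference, the layout produced by Obj::from_path above: the SHA-1 of the repository-relative path is split after its first two characters to form a fan-out directory under the object store. A standalone sketch of that mapping, with objects_root standing in for path::objects():

use crypto::digest::Digest;
use crypto::sha1::Sha1;
use std::path::{Path, PathBuf};

// e.g. a path hashing to "3ab4..." would be stored at <objects_root>/3a/b4...
fn object_path(objects_root: &Path, relative_path: &str) -> PathBuf {
    let mut hasher = Sha1::new();
    hasher.input_str(relative_path);
    let hash = hasher.result_str();
    let (dir, rest) = hash.split_at(2);
    objects_root.join(dir).join(rest)
}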

View File

@@ -1,106 +1,119 @@
use std::fs::File;
use std::io;
use std::path::PathBuf;
use crate::utils::path::path_buf_to_string;
use crate::utils::{read, path};
use crate::store::head;
use crate::store::object::{self, update_dates, parse_path, hash_obj, add_node, create_obj};
use crate::utils::into::IntoPathBuf;
use crate::store::object::object::Obj;
use crate::store::object::update_dates;
use crate::store::object::object::ObjMethods;
use std::fs::{self, File, OpenOptions};
use std::io::{self, BufRead, BufReader, Write};
pub fn add(path: PathBuf, date: &str, up_parent: bool) -> io::Result<()> {
let (line, hash, name) = parse_path(path.clone(), false);
// add tree reference to parent
if path.iter().count() == 1 {
head::add_line(line)?;
} else {
add_node(path.parent().unwrap(), &line)?;
}
// create tree object
let mut content = name;
content.push_str(" ");
content.push_str(date);
create_obj(hash, &content)?;
// update date for all parent
if up_parent {
update_dates(path, date)?;
}
Ok(())
pub struct Tree {
pub obj: Obj,
pub buf_reader: Option<BufReader<File>>,
is_head: bool,
}
pub fn rm(path: PathBuf) -> io::Result<()> {
let (_, lines) = read(path_buf_to_string(path.to_path_buf())).unwrap();
for line in lines {
let (ftype, hash, _) = parse_line(line.unwrap());
if ftype == String::from("blob") {
object::rm(&hash)?;
} else {
rm_hash(hash)?;
impl Tree {
pub fn new(obj: Obj) -> Self {
Tree {
obj,
buf_reader: None,
is_head: false,
}
}
Ok(())
}
fn rm_hash(hash: String) -> io::Result<()> {
let mut obj_p = path::objects();
let (dir, res) = hash.split_at(2);
obj_p.push(dir);
obj_p.push(res);
pub fn from_head() -> Self {
Tree {
obj: Obj::from_head(),
buf_reader: None,
is_head: true,
}
}
match read::read_lines(obj_p) {
Ok(mut reader) => {
reader.next();
for line in reader {
let (ftype, hash, _) = parse_line(line.unwrap());
if ftype == String::from("blob") {
object::rm(&hash)?;
} else {
rm_hash(hash)?;
pub fn from_path<S>(r_path: S) -> Tree where S: IntoPathBuf {
Tree {
obj: Obj::from_path(r_path.into()),
buf_reader: None,
is_head: false,
}
}
pub fn read(&mut self) {
if self.buf_reader.is_none() {
if let Ok(file) = File::open(self.get_obj_path()) {
self.buf_reader = Some(BufReader::new(file));
// skip the first line (the declaration) if this is not head
if !self.is_head {
let mut line = String::new();
let _ = self.buf_reader.as_mut().unwrap().read_line(&mut line);
}
}
},
Err(err) => {
eprintln!("error reading tree: {}", err);
},
}
Ok(())
}
pub fn read(tree: String) -> Option<(String, io::Lines<io::BufReader<File>>)> {
let mut obj_p = path::objects();
let (dir, res) = hash_obj(&tree);
obj_p.push(dir);
obj_p.push(res);
match read::read_lines(obj_p) {
Ok(mut reader) => {
let name = match reader.next() {
Some(Ok(line)) => line,
_ => String::new(),
};
Some((name, reader))
},
Err(err) => {
eprintln!("error reading tree: {}", err);
None
},
}
}
pub fn parse_line(line: String) -> (String, String, String) {
let mut split = line.rsplit(' ');
if split.clone().count() != 3 {
eprintln!("fatal: invalid object(s)");
std::process::exit(1);
}
}
pub fn has_changes(&mut self) -> bool {
todo!();
return true;
}
pub fn next(&mut self) -> Option<Box<dyn ObjMethods>> {
self.read();
//if let Some(ref mut file) = self.buf_reader {
// let mut line = String::new();
// match file.read_line(&mut line) {
// Ok(0) => Ok(None), // End of file
// Ok(_) => Ok(Some(line.trim_end().len())), // Return length of line
// Err(e) => Err(e),
// }
//} else {
// Ok(None) // If file is None, return None
//}
match self.buf_reader {
Some(ref mut file) => {
let mut line = String::new();
match file.read_line(&mut line) {
Ok(0) => None,
Ok(_) => Some(Obj::from_line(line, Some(self.get_relative_file_path()))),
Err(e) => {
eprintln!("tree::next: failed to read next line: {}", e);
None
}
}
},
None => None
}
}
pub fn create(&self, date: &str, up_parent: bool) -> io::Result<()> {
// add tree reference to parent
let _ = self.add_ref_to_parent();
// create tree object
let content = format!("{} {}", self.get_name(), date);
// create parent dir if needed
let mut obj_path = self.get_obj_path();
obj_path.pop();
if !obj_path.exists() {
fs::create_dir_all(obj_path)?;
}
// open ref file
let mut file = OpenOptions::new()
.create_new(true)
.write(true)
.open(self.get_obj_path())?;
// update date for all parent
// if up_parent {
// if let Err(err) = update_dates(self.get_relative_file_path(), date) {
// eprintln!("err: updating parent date of {}: {}", self.get_relative_file_path().display(), err);
// }
// }
writeln!(file, "{}", content)?;
Ok(())
}
let name = split.next().unwrap();
let hash = split.next().unwrap();
let ftype = split.next().unwrap();
(String::from(ftype), String::from(hash), String::from(name))
}
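A small usage sketch of the iterator-style API above, assuming the module paths used elsewhere in this diff: Tree::next hands back boxed ObjMethods, so listing one level of the tree referenced by HEAD could look like this.

use crate::store::object::object::{ObjMethods, ObjType};
use crate::store::object::tree::Tree;

// print the direct children recorded under HEAD (one level only)
fn list_head_entries() {
    let mut head = Tree::from_head();
    while let Some(child) = head.next() {
        match child.get_type() {
            ObjType::TREE => println!("tree {}", child.get_name()),
            ObjType::BLOB => println!("blob {}", child.get_name()),
            ObjType::DEFAULT => println!("?    {}", child.get_name()),
        }
    }
}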

View File

@@ -7,3 +7,5 @@ pub mod push;
pub mod config;
pub mod remote_diff;
pub mod pull;
pub mod remote;
pub mod credential;

View File

@@ -1,38 +1,42 @@
use clap::{App, Arg, SubCommand, ArgMatches};
use clap::{Arg, ArgMatches, Command, ArgAction};
use crate::commands;
use crate::commands::add::AddArgs;
pub fn create() -> App<'static, 'static> {
SubCommand::with_name("add")
pub fn create() -> Command {
Command::new("add")
.arg(
Arg::with_name("files")
.required(true)
Arg::new("files")
.required_unless_present("all")
.conflicts_with("all")
.multiple(true)
.takes_value(true)
.num_args(1..)
.value_name("FILE")
.help("Files to add"),
)
.arg(
Arg::with_name("force")
.short("f")
Arg::new("force")
.short('f')
.long("force")
.action(ArgAction::SetTrue)
.help("Allow adding otherwise ignored files."),
)
.arg(
Arg::with_name("all")
.short("A")
Arg::new("all")
.short('A')
.long("all")
.action(ArgAction::SetTrue)
.help("This adds, modifies, and removes index entries to match the working tree"),
)
.about("Add changes to the index")
}
pub fn handler(args: &ArgMatches<'_>) {
pub fn handler(args: &ArgMatches) {
commands::add::add(AddArgs {
files: args.values_of("files"),
force: args.is_present("force"),
all: args.is_present("all"),
files: match args.get_many::<String>("files") {
None => vec![],
Some(vals) => vals.map(|s| s.to_string()).collect(),
},
force: *args.get_one::<bool>("force").unwrap(),
all: *args.get_one::<bool>("all").unwrap(),
});
}
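The create()/handler() pairs in these subcommand modules presumably get wired together in main.rs, which is not part of this section; with clap 4 that composition would typically look like the sketch below, where the subcommands module path and the exact set of registered commands are assumptions.

// hypothetical wiring; module paths and the registered subcommands are assumptions
use clap::Command;
use crate::subcommands;

fn run() {
    let matches = Command::new("nextsync")
        .subcommand(subcommands::add::create())
        .subcommand(subcommands::status::create())
        .subcommand(subcommands::push::create())
        .get_matches();

    match matches.subcommand() {
        Some(("add", m)) => subcommands::add::handler(m),
        Some(("status", m)) => subcommands::status::handler(m),
        Some(("push", _)) => { /* push handling lives elsewhere in the crate */ }
        _ => {}
    }
}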

View File

@@ -1,59 +1,52 @@
use clap::{App, Arg, SubCommand, ArgMatches};
use std::borrow::Cow;
use textwrap::{fill, Options};
use clap::{Arg, Command, ArgMatches};
// use textwrap::{fill, Options};
use crate::commands::clone::{self, CloneArgs};
use crate::commands::clone::CloneArgs;
use crate::global;
use crate::commands;
fn sized_str<'a>(content: &'a str) -> &'a str {
fill(content, Options::new(70).width).as_str();
"ok"
}
// fn sized_str<'a>(content: &'a str) -> &'a str {
// fill(content, Options::new(70).width).as_str();
// "ok"
// }
fn test() -> String {
String::from("ok")
}
pub fn create() -> App<'static, 'static> {
let remote_desc = sized_str(&format!("The repository to clone from. See the NEXTSYNC URLS section below for more information on specifying repositories."));
let depth_desc = sized_str(&format!("Depth of the recursive fetch of object properties. This value should be lower when there are a lot of files per directory and higher when there are a lot of subdirectories with fewer files. (Default: {})", clone::DEPTH));
SubCommand::with_name("clone")
pub fn create() -> Command {
// let remote_desc = sized_str(&format!("The repository to clone from. See the NEXTSYNC URLS section below for more information on specifying repositories."));
// let depth_desc = sized_str(&format!("Depth of the recursive fetch of object properties. This value should be lower when there are a lot of files per directory and higher when there are a lot of subdirectories with fewer files. (Default: {})", clone::DEPTH));
Command::new("clone")
.arg(
Arg::with_name("remote")
Arg::new("remote")
.required(true)
.takes_value(true)
.num_args(1)
.value_name("REMOTE")
//.help(_desc)
)
.arg(
Arg::with_name("depth")
.short("d")
Arg::new("depth")
.short('d')
.long("depth")
.required(false)
.takes_value(true)
.num_args(1)
//.help(&depth_desc)
)
.arg(
Arg::with_name("directory")
Arg::new("directory")
.required(false)
.takes_value(true)
.num_args(1)
.value_name("DIRECTORY")
)
.about("Clone a repository into a new directory")
.after_help("NEXTSYNC URLS\nThe following syntaxes may be used:\n\t- user@host.xz/path/to/repo\n\t- http[s]://host.xz/apps/files/?dir=/path/to/repo&fileid=111111\n\t- [http[s]://]host.xz/remote.php/dav/files/user/path/to/repo\n")
}
pub fn handler(args: &ArgMatches<'_>) {
if let Some(val) = args.values_of("directory") {
global::global::set_dir_path(String::from(val.clone().next().unwrap()));
pub fn handler(args: &ArgMatches) {
if let Some(val) = args.get_one::<String>("directory") {
global::global::set_dir_path(String::from(val.to_string()));
}
if let Some(remote) = args.values_of("remote") {
if let Some(remote) = args.get_one::<String>("remote") {
commands::clone::clone(CloneArgs {
remote,
depth: args.values_of("depth").map(
|mut val| val.next().unwrap().to_owned()
),
remote: remote.to_string(),
depth: args.get_one::<String>("depth").cloned(),
});
}
}

View File

@@ -1,29 +1,48 @@
use clap::{App, Arg, SubCommand, ArgMatches};
use clap::{Arg, Command, ArgMatches};
use crate::commands::config::ConfigSetArgs;
use crate::commands;
pub fn create() -> App<'static, 'static> {
SubCommand::with_name("config")
.arg(
Arg::with_name("variable")
.required(true)
.takes_value(true)
.value_name("VARIABLE")
pub fn create() -> Command {
Command::new("config")
.about("Get and set repository or global options")
.subcommand(
Command::new("get")
.about("Get the value of a configuration variable")
.arg(
Arg::new("name")
.help("The name of the configuration variable")
.required(true)
.index(1)
)
)
.arg(
Arg::with_name("value")
.required(true)
.takes_value(true)
.value_name("VALUE")
.subcommand(
Command::new("set")
.about("Set a configuration variable")
.arg(
Arg::new("name")
.help("The name of the configuration variable")
.required(true)
.index(1)
)
.arg(
Arg::new("value")
.help("The value to set")
.required(true)
.index(2)
)
)
}
pub fn handler(args: &ArgMatches<'_>) {
if let Some(mut var) = args.values_of("variable") {
if let Some(mut val) = args.values_of("value") {
if commands::config::set(var.next().unwrap(), val.next().unwrap()).is_err() {
eprintln!("fatal: cannot save the value");
}
pub fn handler(args: &ArgMatches) {
match args.subcommand() {
Some(("set", set_matches)) => {
commands::config::config_set(ConfigSetArgs {
name: set_matches.get_one::<String>("name").unwrap().to_string(),
value: set_matches.get_one::<String>("value").unwrap().to_string(),
});
}
_ => println!("Invalid or missing subcommand for 'config'"),
}
}

View File

@@ -0,0 +1,39 @@
use clap::{Arg, Command, ArgMatches};
use crate::commands;
use crate::commands::credential::CredentialArgs;
pub fn create() -> Command {
Command::new("credential")
.about("Manage set of credentials")
.subcommand(
Command::new("add")
.arg(
Arg::new("username")
.required(true)
.num_args(1)
.value_name("NAME")
.help("The username used to connect to nextcloud"),
)
.arg(
Arg::new("password")
.required(false)
.num_args(1)
.value_name("PASSWORD")
.help("The passowd used to connect to nextcloud (optional)"),
)
.about("Add a new set of credential")
)
}
pub fn handler(args: &ArgMatches) {
match args.subcommand() {
Some(("add", add_matches)) => {
commands::credential::credential_add(CredentialArgs {
username: add_matches.get_one::<String>("username").unwrap().to_string(),
password: add_matches.get_one::<String>("password").cloned(),
});
}
_ => println!("Invalid or missing subcommand for 'credential'"),
}
}

View File

@@ -1,23 +1,23 @@
use clap::{App, Arg, SubCommand, ArgMatches};
use clap::{Arg, Command, ArgMatches};
use crate::global;
use crate::commands;
pub fn create() -> App<'static, 'static> {
SubCommand::with_name("init")
pub fn create() -> Command {
Command::new("init")
.arg(
Arg::with_name("directory")
Arg::new("directory")
.required(false)
.takes_value(true)
.num_args(1)
.value_name("DIRECTORY")
)
.about("Create an empty Nextsync repository")
// Create an empty nextsync repository or reinitialize an existing one
}
pub fn handler(args: &ArgMatches<'_>) {
if let Some(val) = args.values_of("directory") {
global::global::set_dir_path(String::from(val.clone().next().unwrap()));
pub fn handler(args: &ArgMatches) {
if let Some(val) = args.get_one::<String>("directory") {
global::global::set_dir_path(val.to_string());
}
commands::init::init();
}

View File

@@ -1,23 +1,23 @@
use clap::{App, Arg, SubCommand, ArgMatches};
use clap::{Arg, Command, ArgMatches};
use crate::global;
use crate::commands;
pub fn create() -> App<'static, 'static> {
SubCommand::with_name("pull")
pub fn create() -> Command {
Command::new("pull")
.arg(
Arg::with_name("path")
Arg::new("path")
.required(false)
.takes_value(true)
.num_args(1)
.value_name("PATH")
.help("The path to pull."),
)
.about("Fetch and integrate changes from the nextcloud server.")
}
pub fn handler(args: &ArgMatches<'_>) {
if let Some(val) = args.values_of("path") {
global::global::set_dir_path(String::from(val.clone().next().unwrap()));
pub fn handler(args: &ArgMatches) {
if let Some(val) = args.get_one::<String>("path") {
global::global::set_dir_path(val.to_string());
}
commands::pull::pull();
}

View File

@@ -1,6 +1,6 @@
use clap::{App, Arg, SubCommand};
use clap::Command;
pub fn create() -> App<'static, 'static> {
SubCommand::with_name("push")
pub fn create() -> Command {
Command::new("push")
.about("Push changes on nextcloud")
}

46
src/subcommands/remote.rs Normal file
View File

@@ -0,0 +1,46 @@
use clap::{Arg, Command, ArgMatches, ArgAction};
use crate::commands;
use crate::commands::remote::RemoteArgs;
pub fn create() -> Command {
Command::new("remote")
.about("Manage set of tracked repositories")
.subcommand(
Command::new("add")
.arg(
Arg::new("name")
.required(true)
.index(1)
.help("The name of the remote"),
)
.arg(
Arg::new("url")
.required(true)
.index(2)
.help("The url of the remote"),
)
.about("Add a new remote to this repository")
)
.arg(
Arg::new("verbose")
.short('v')
.long("verbose")
.action(ArgAction::SetTrue)
.help("Be a little more verbose and show remote url after name.")
)
}
pub fn handler(args: &ArgMatches) {
match args.subcommand() {
Some(("add", add_matches)) => {
commands::remote::remote_add(RemoteArgs {
name: add_matches.get_one::<String>("name").unwrap().to_string(),
url: add_matches.get_one::<String>("url").unwrap().to_string(),
});
}
_ => {
commands::remote::remote_list(*args.get_one::<bool>("verbose").unwrap());
}
}
}

View File

@@ -1,14 +1,14 @@
use clap::{App, Arg, SubCommand, ArgMatches};
use clap::{Arg, Command, ArgMatches};
use crate::global;
use crate::commands;
pub fn create() -> App<'static, 'static> {
SubCommand::with_name("remote-diff")
pub fn create() -> Command {
Command::new("remote-diff")
.arg(
Arg::with_name("path")
Arg::new("path")
.required(false)
.takes_value(true)
.num_args(1)
.value_name("PATH")
.help("The path to pull."),
)
@@ -16,9 +16,9 @@ pub fn create() -> App<'static, 'static> {
}
pub fn handler(args: &ArgMatches<'_>) {
if let Some(val) = args.values_of("path") {
global::global::set_dir_path(String::from(val.clone().next().unwrap()));
pub fn handler(args: &ArgMatches) {
if let Some(val) = args.get_one::<String>("path") {
global::global::set_dir_path(val.to_string());
}
commands::remote_diff::remote_diff();
}

View File

@@ -1,6 +1,6 @@
use clap::{App, Arg, SubCommand};
use clap::Command;
pub fn create() -> App<'static, 'static> {
SubCommand::with_name("reset")
pub fn create() -> Command {
Command::new("reset")
.about("Clear the index")
}

View File

@@ -1,30 +1,30 @@
use clap::{App, Arg, SubCommand, ArgMatches};
use clap::{Arg, Command, ArgMatches};
use crate::global;
use crate::commands;
use crate::commands::status::StatusArgs;
pub fn create() -> App<'static, 'static> {
SubCommand::with_name("status")
pub fn create() -> Command {
Command::new("status")
.arg(
Arg::with_name("directory")
.required(false)
.takes_value(true)
Arg::new("directory")
.num_args(1)
.value_name("DIRECTORY")
)
.arg(
Arg::with_name("nostyle")
Arg::new("nostyle")
.long("nostyle")
.help("Status with minium information and style"),
)
.about("Show the working tree status")
}
pub fn handler(args: &ArgMatches<'_>) {
if let Some(val) = args.values_of("directory") {
global::global::set_dir_path(String::from(val.clone().next().unwrap()));
pub fn handler(args: &ArgMatches) {
if let Some(val) = args.get_one::<String>("directory") {
global::global::set_dir_path(val.to_string());
}
commands::status::status(StatusArgs {
nostyle: args.is_present("nostyle"),
nostyle: args.contains_id("nostyle"),
});
}

View File

@@ -18,7 +18,7 @@ impl Clone for ApiProps {
}
pub fn get_api_props() -> ApiProps {
let remote = match config::get("remote") {
let remote = match config::get_remote("origin") {
Some(r) => r,
None => {
eprintln!("fatal: unable to find a remote");
@@ -39,6 +39,8 @@ pub fn get_relative_s(p: String, api_props: &ApiProps) -> String {
final_p = final_p.strip_prefix("/remote.php/dav/files/").unwrap().to_string();
final_p = final_p.strip_prefix(&api_props.username).unwrap().to_string();
final_p = final_p.strip_prefix(&api_props.root).unwrap().to_string();
final_p = final_p.strip_prefix("/").unwrap().to_string();
if final_p.starts_with("/") {
final_p = final_p.strip_prefix("/").unwrap().to_string();
}
final_p
}
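Concretely, the successive strip_prefix calls above turn a WebDAV href into a repository-relative path. With illustrative values of admin for the username and /myrepo for the root:

// href as returned by PROPFIND (illustrative values)
// "/remote.php/dav/files/admin/myrepo/dir/file1"
//   -> strip "/remote.php/dav/files/"  => "admin/myrepo/dir/file1"
//   -> strip the username "admin"      => "/myrepo/dir/file1"
//   -> strip the root "/myrepo"        => "/dir/file1"
//   -> strip the optional leading "/"  => "dir/file1"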

View File

@@ -1,4 +1,4 @@
use std::path::PathBuf;
use std::path::{PathBuf, Path};
pub trait IntoPathBuf {
fn into(self) -> PathBuf;
@@ -10,9 +10,21 @@ impl IntoPathBuf for PathBuf {
}
}
impl IntoPathBuf for &Path {
fn into(self) -> PathBuf {
PathBuf::from(self)
}
}
impl IntoPathBuf for String {
fn into(self) -> PathBuf {
PathBuf::from(self)
}
}
impl IntoPathBuf for &str {
fn into(self) -> PathBuf {
PathBuf::from(self)
}
}

View File

@@ -114,6 +114,13 @@ pub fn nextsync() -> PathBuf {
path
}
pub fn config() -> PathBuf {
let mut path = repo_root();
path.push(".nextsync");
path.push("config");
path
}
pub fn objects() -> PathBuf {
let mut path = repo_root();
path.push(".nextsync");

View File

@@ -20,6 +20,7 @@ pub fn enumerate_remote(
let mut deleted: Vec<PathBuf> = vec![];
let mut files: Vec<ObjProps> = vec![];
let mut objs_hashmap: HashMap<String, Vec<String>> = HashMap::new();
objs_hashmap.insert(
options.relative_s.clone().unwrap_or(String::new()),
Vec::new());
@@ -53,7 +54,11 @@ pub fn enumerate_remote(
};
// separate folders and files in response
let d = options.depth.clone().unwrap_or("0".to_owned()).parse::<u16>().unwrap();
let d = options.depth.clone()
.unwrap_or("0".to_owned())
.parse::<u16>()
.unwrap();
// first element is not used as it is the fetched folder
if let Some(should_skip_fct) = should_skip.clone() {
iter_with_skip_fct(
@@ -82,6 +87,7 @@ pub fn enumerate_remote(
&mut all_folders);
}
}
// go through all folders not checked for deletion before
// as they were empty
if let Some(_) = should_skip.clone() {
@@ -90,18 +96,13 @@ pub fn enumerate_remote(
objs_hashmap.remove(&key);
}
}
dbg!(deleted);
dbg!(objs_hashmap);
(all_folders, files)
}
fn calc_depth(obj: &ObjProps) -> u16 {
calc_depth_string(obj.relative_s.clone().unwrap_or(String::new()))
}
fn calc_depth_string(s: String) -> u16 {
s.split("/").count() as u16
let path = obj.relative_s.clone().unwrap_or(String::new());
path.split("/").count() as u16
}
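Depth here is simply the number of "/"-separated segments in the relative path; a couple of illustrative values:

// "dir/sub/file1" has three segments; the fetched root (empty string) counts as one
assert_eq!("dir/sub/file1".split("/").count(), 3);
assert_eq!("".split("/").count(), 1);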
fn iter_with_skip_fct(
@@ -121,7 +122,6 @@ fn iter_with_skip_fct(
let current_depth = calc_depth(object);
if object.is_dir() {
// add folder to parent folder only if exists
let mut r_path = PathBuf::from(object.relative_s.clone().unwrap());
r_path.pop();
@@ -208,7 +208,7 @@ fn get_non_new_local_element(iter: &mut dyn Iterator<Item = &PathBuf>) -> Option
!Object::new(el.unwrap().clone().to_str().unwrap()).exists()
} else {
// ignore newly created file (not sync)
Blob::new(el.unwrap().clone()).status(&mut None) == State::New
Blob::from_path(el.unwrap().clone()).status(&mut None) == State::New
}
} {
el = iter.next();

116
tests/add.rs Normal file
View File

@@ -0,0 +1,116 @@
use std::str;
mod utils;
use utils::{utils::*, client::ClientTest};
fn line_should_contains(lines: &Vec<String>, nb: usize, str: &str) {
if lines[nb].find(str).is_none()
{
eprintln!("'{}' not found in '{}'", str, lines[nb]);
dbg!(lines);
}
assert!(lines[nb].find(str).is_some());
}
fn lines_should_not_contains(lines: Vec<String>, str: &str) {
for line in lines {
if line.find("Changes not staged for push").is_some() {
return;
}
if line.find(str).is_some() {
eprintln!("'{}' found in '{}'", str, line);
}
assert!(line.find(str).is_none());
}
}
fn collect_status_lines(client: &mut ClientTest) -> Vec<String> {
let out = client.run_cmd("status");
str::from_utf8(&out.stdout)
.unwrap()
.split("\n")
.map(|s| s.to_owned())
.collect()
}
#[cfg(test)]
mod add_tests {
use crate::utils::{server::ServerTest, status_utils::status_should_be_empty};
use super::*;
#[test]
fn simple_add() {
let id = get_random_test_id();
let mut client = ClientTest::new(id).init();
let _ = client.add_file("file1", "foo");
client.run_cmd_ok("add file1");
let lines = collect_status_lines(&mut client);
// test
line_should_contains(&lines, 2, "file1");
client.clean();
}
#[test]
fn add_config_file() {
let id = get_random_test_id();
let mut client = ClientTest::new(id).init();
let _ = client.add_file("file1", "foo");
client.run_cmd_ok("add .nextsync -f");
let lines = collect_status_lines(&mut client);
// test
lines_should_not_contains(lines, ".nextsync");
client.clean();
}
#[test]
fn add_dir_implicit() {
let id = get_random_test_id();
let mut client = ClientTest::new(id).init();
let _ = client.add_dir("dir");
let _ = client.add_file("dir/file1", "foo");
// adding the file should add the dir
client.run_cmd_ok("add dir/file1");
let lines = collect_status_lines(&mut client);
// tests
line_should_contains(&lines, 2, "dir");
line_should_contains(&lines, 3, "dir/file1");
client.clean();
}
#[test]
fn add_file_no_changes() {
// add a file, push it, and add it again
let (mut client, mut server) = init_test();
let _ = client.add_file("file1", "foo");
client.run_cmd_ok("add file1");
client.run_cmd_ok("push");
status_should_be_empty(&mut client);
client.run_cmd_ok("add file1");
status_should_be_empty(&mut client);
clean_test(client, &mut server)
}
}

View File

@@ -1,45 +0,0 @@
#!/bin/sh
source ./utils.sh
nb_tests=0
TEST_SUITE_NAME="add/directory/"
add_test_no_env() {
touch $2
$exe add $3
status_cmp "$1" "$4"
}
add_test() {
nb_tests=$((nb_tests + 1))
setup_env
$exe init
add_test_no_env "$1" "$2" "$3" "$4"
}
add_dir() {
nb_tests=$((nb_tests + 1))
setup_env
$exe init
mkdir dir
$exe add "dir"
res=$($exe status --nostyle)
status_cmp "dir" "new: dir"
}
add_subdir() {
nb_tests=$((nb_tests + 1))
setup_env
$exe init
mkdir foo foo/bar
$exe add "foo"
res=$($exe status --nostyle)
status_cmp "dir" "new: foo/bar\nnew: foo"
}
add_dir
add_subdir
echo $nb_tests
exit 0

View File

@@ -1,114 +0,0 @@
#!/bin/sh
source ./utils.sh
nb_tests=0
TEST_SUITE_NAME="add/file/"
add_test_no_env() {
touch $2
$exe add $3
status_cmp "$1" "$4"
}
add_test() {
nb_tests=$((nb_tests + 1))
setup_env
$exe init
add_test_no_env "$1" "$2" "$3" "$4"
}
add_basics() {
add_test "basic" "toto" "toto" "new: toto"
}
add_space() {
nb_tests=$((nb_tests + 1))
setup_env
$exe init
touch 'to to'
$exe add 'to to'
res=$($exe status --nostyle)
status_cmp "space" "new: to to"
}
add_multiple() {
add_test "multiple" "titi riri" "titi riri" "new: titi\nnew: riri"
}
add_regex() {
add_test "regex" "titi riri" "./*" "new: riri\nnew: titi"
}
add_file_subdir() {
nb_tests=$((nb_tests + 1))
setup_env
$exe init
mkdir dir
touch dir/toto
$exe add "./dir/toto"
res=$($exe status --nostyle)
status_cmp "file_subdir" "new: dir/toto"
}
add_whole_subdir() {
nb_tests=$((nb_tests + 1))
setup_env
$exe init
mkdir dir
touch dir/toto
touch dir/roro
$exe add "dir"
res=$($exe status --nostyle)
status_cmp "whole_subdir" "new: dir/roro\nnew: dir/toto\nnew: dir"
}
add_subdir_regex() {
nb_tests=$((nb_tests + 1))
setup_env
$exe init
mkdir dir
touch dir/toto dir/roro
$exe add "./dir/*"
res=$($exe status --nostyle)
status_cmp "subdir_regex" "new: dir/roro\nnew: dir/toto"
}
add_duplication() {
add_test "duplication" "toto" "toto toto" "new: toto"
}
add_duplication_subdir() {
nb_tests=$((nb_tests + 1))
setup_env
$exe init
mkdir dir
add_test_no_env "duplication_subdir" "dir/toto" "dir/toto dir/toto" "new: dir/toto"
}
add_all() {
nb_tests=$((nb_tests + 1))
setup_env
$exe init
mkdir dir
touch dir/toto dir/roro lolo
$exe add -A
res=$($exe status --nostyle)
status_cmp "all" "new: .nextsyncignore\nnew: dir/roro\nnew: dir/toto\nnew: dir\nnew: lolo"
}
#test add file without changes
add_basics
add_space
add_multiple
add_regex
add_file_subdir
add_whole_subdir
add_subdir_regex
add_duplication
add_duplication_subdir
add_all
echo $nb_tests
exit 0

View File

@@ -1,45 +0,0 @@
#!/bin/sh
source ./utils.sh
# Getting all tests
TESTS=$(find -mindepth 2 -name "*.sh")
if [ $# -ne 0 ]; then
TESTS=$(find -mindepth 2 -path "*$1*")
fi
# Executing tests
nb_tests=0
nb_success=0
for test in $TESTS; do
#nb_tests=$((nb_tests + 1))
# run file
tmp_stderr=$(mktf)
nb_tests_tmp=$($test 2>"$tmp_stderr")
exit_code=$?
capture_stderr=$(<"$tmp_stderr")
[ "$capture_stderr" != "" ] && echo -e "$capture_stderr"
rm $tmp_stderr
# add nb_tests from executed test_suite to global nb_test
[ "$nb_tests_tmp" != "" ] &&
[ $nb_tests_tmp -gt 0 ] &&
nb_tests=$((nb_tests + nb_tests_tmp))
# deal with the result of the test
if [ $exit_code -eq 0 ]; then
nb_success=$((nb_success + nb_tests_tmp))
echo "$test ran successfully"
elif [ $exit_code -eq 4 ]; then
# not executable (nextsync) found, not need to try other tests
exit 1
else
nb_success=$((nb_success + nb_tests_tmp - 1))
echo "$test failed with exit code $exit_code"
fi
done;
rm -rf /tmp/*_nextsync
echo -e "\nRan $nb_tests tests ($((nb_tests - nb_success)) Failed)"

36
tests/pull.rs Normal file
View File

@@ -0,0 +1,36 @@
mod utils;
use utils::{utils::*};
#[cfg(test)]
mod pull_tests {
use super::*;
#[test]
fn simple_pull() {
let (mut client, mut server) = init_test();
let _ = server.add_file("file1", "foo");
client.run_cmd_ok("pull");
// tests
assert!(client.has_file("file1", "foo"));
clean_test(client, &mut server);
}
#[test]
fn simple_pull_directory() {
let (mut client, mut server) = init_test();
let _ = server.add_dir("dir");
let _ = server.add_file("dir/file1", "foo");
client.run_cmd_ok("pull");
// tests
assert!(client.has_file("dir/file1", "foo"));
clean_test(client, &mut server);
}
}

168
tests/push.rs Normal file
View File

@@ -0,0 +1,168 @@
mod utils;
use utils::{utils::*, status_utils::*};
#[cfg(test)]
mod push_tests {
use super::*;
#[test]
fn simple_push() {
let (mut client, mut server) = init_test();
let _ = client.add_file("file1", "foo");
client.run_cmd_ok("add file1");
client.run_cmd_ok("push");
// tests
assert!(server.has_file("file1", "foo"));
let (staged, not_staged) = client.get_status();
lines_should_not_contains(staged, "file1");
lines_should_not_contains(not_staged, "file1");
clean_test(client, &mut server);
}
#[test]
fn push_update() {
let (mut client, mut server) = init_test();
// init content of file1
let _ = client.add_file("file1", "foo");
client.run_cmd_ok("add file1");
client.run_cmd_ok("push");
// tests
assert!(server.has_file("file1", "foo"));
let (staged, not_staged) = client.get_status();
lines_should_not_contains(staged, "file1");
lines_should_not_contains(not_staged, "file1");
// change content of file1
let _ = client.add_file("file1", "bar");
client.run_cmd_ok("add file1");
client.run_cmd_ok("push");
// tests
assert!(server.has_file("file1", "bar"));
let (staged, not_staged) = client.get_status();
lines_should_not_contains(staged, "file1");
lines_should_not_contains(not_staged, "file1");
clean_test(client, &mut server);
}
#[test]
fn push_dir_explicit() {
let (mut client, mut server) = init_test();
let _ = client.add_dir("dir");
let _ = client.add_file("dir/file2", "bar");
// push dir and file2
client.run_cmd_ok("add dir");
client.run_cmd_ok("push");
// tests
assert!(server.has_file("dir/file2", "bar"));
let (staged, not_staged) = client.get_status();
lines_should_not_contains(staged.clone(), "file2");
lines_should_not_contains(staged, "foo");
lines_should_not_contains(not_staged.clone(), "file2");
lines_should_not_contains(not_staged, "foo");
clean_test(client, &mut server);
}
#[test]
fn push_dir_implicit() {
let (mut client, mut server) = init_test();
let _ = client.add_dir("dir");
let _ = client.add_file("dir/file2", "bar");
// push dir and file2
client.run_cmd_ok("add dir/file2");
client.run_cmd_ok("push");
// tests
assert!(server.has_file("dir/file2", "bar"));
let (staged, not_staged) = client.get_status();
lines_should_not_contains(staged.clone(), "file2");
lines_should_not_contains(staged, "foo");
lines_should_not_contains(not_staged.clone(), "file2");
lines_should_not_contains(not_staged, "foo");
clean_test(client, &mut server);
}
#[test]
fn push_all() {
let (mut client, mut server) = init_test();
let _ = client.add_file("file1", "foo");
let _ = client.add_dir("dir");
let _ = client.add_file("dir/file2", "bar");
// push dir and file2
client.run_cmd_ok("add *");
client.run_cmd_ok("push");
// tests
assert!(server.has_file("file1", "foo"));
assert!(server.has_file("dir/file2", "bar"));
let (staged, not_staged) = client.get_status();
assert!(staged.len() == 0);
assert!(not_staged.len() == 0);
clean_test(client, &mut server);
}
#[test]
fn push_file_deletion() {
let (mut client, mut server) = init_test();
let _ = client.add_file("file1", "foo");
// push file1
client.run_cmd_ok("add file1");
client.run_cmd_ok("push");
// tests
assert!(server.has_file("file1", "foo"));
status_should_be_empty(&mut client);
// remove it
let _ = client.remove_file("file1");
client.run_cmd_ok("add file1");
client.run_cmd_ok("push");
// tests
assert!(server.has_not_file("file1"));
status_should_be_empty(&mut client);
clean_test(client, &mut server);
}
#[test]
fn push_dir_deletion() {
let (mut client, mut server) = init_test();
// push dir and file2
let _ = client.add_dir("dir");
let _ = client.add_file("dir/file2", "bar");
client.run_cmd_ok("add dir");
client.run_cmd_ok("push");
// tests
assert!(server.has_file("dir/file2", "bar"));
// push deletion
let _ = client.remove_dir("dir");
client.run_cmd_ok("add dir");
client.run_cmd_ok("push");
assert!(server.has_not_dir("dir"));
clean_test(client, &mut server);
}
}

14
tests/utils.rs Normal file
View File

@@ -0,0 +1,14 @@
#[path = "utils/server.rs"]
pub mod server;
#[path = "utils/client.rs"]
pub mod client;
#[path = "utils/utils.rs"]
pub mod utils;
#[path = "utils/status_utils.rs"]
pub mod status_utils;
#[path = "utils/files_utils.rs"]
pub mod files_utils;

View File

@@ -1,38 +0,0 @@
#!/bin/sh
mktd()
{
echo $(mktemp -d --suffix=_nextsync)
}
mktf()
{
echo $(mktemp --suffix=_nextsync)
}
get_exe() {
exe=$(pwd)
exe+="/../target/debug/nextsync"
if [ ! -f $exe ]; then
echo "No executable found, try to compile first" >&2
exit 4
fi
}
setup_env() {
[ ! -v exe ] && get_exe
path=$(mktd)
cd $path
}
# test_name expected_output
status_cmp() {
res=$($exe status --nostyle)
diff <(echo -e "$2" ) <(echo -e "$res") 2> /dev/null > /dev/null
if [ $? -ne 0 ]; then
echo -e "$TEST_SUITE_NAME$1: Output differ:" >&2
diff -u <(echo -e "$2" ) <(echo -e "$res") | grep "^[-\+\ ][^-\+]" >&2
echo -e "\nMore in $path" >&2
echo $nb_tests
exit 1
fi
}

162
tests/utils/client.rs Normal file
View File

@@ -0,0 +1,162 @@
use std::str;
use std::process::{Command, Output};
use std::fs::{self, File};
use std::io::Write;
use std::env;
use std::path::PathBuf;
use super::files_utils::has_files;
#[cfg(test)]
pub struct ClientTest {
user: String, // the nextcloud user
volume: String, // temp dir for the test
pub test_id: String, // name of the test (e.g nextsync_rand)
exe_path: PathBuf, // absolute path of nextsync executable
}
#[cfg(test)]
impl ClientTest {
pub fn new(id: String) -> Self {
// create a directory in /tmp with the given id
let mut vol = String::from("/tmp/");
vol.push_str(&id);
let _ = fs::create_dir(vol.clone());
// get nextsync path
let mut exe_path = env::current_dir().unwrap();
exe_path = exe_path.join("target/debug/nextsync");
// build the client
ClientTest {
user: String::from("admin"),
volume: vol,
test_id: id,
exe_path
}
}
pub fn init(mut self) -> Self {
self.run_cmd_ok("init");
// set remote url
let url = String::from(format!("{}@nextcloud.local/{}", self.user, self.test_id));
self.run_cmd_ok(&format!("remote add origin {}", url));
// set force_insecure as the debug server has no certificate
self.run_cmd_ok("config set force_insecure true");
// set token for request
self.run_cmd_ok(&format!("credential add {} {}", self.user, self.user));
self
}
pub fn clean(self) -> Self {
let _ = fs::remove_dir_all(&self.volume);
self
}
pub fn run_cmd_ok(&mut self, args: &str) -> Output {
let output = self.run_cmd(args);
if !output.status.success() {
println!("id: {}", self.test_id.clone());
println!("Failed to execute: '{}'", args);
println!("stderr: {}", String::from_utf8_lossy(&output.stderr));
println!("stdout: {}", String::from_utf8_lossy(&output.stdout));
}
assert!(output.status.success());
output
}
pub fn run_cmd(&mut self, args: &str) -> Output {
let output = Command::new(self.exe_path.to_str().unwrap())
.current_dir(self.volume.clone())
.args(args.split(" "))
.output()
.expect("Could not execute nextsync command");
return output;
}
pub fn add_dir(&mut self, name: &str) -> std::io::Result<()> {
let mut path = self.volume.clone();
path.push_str("/");
path.push_str(name);
let _ = fs::create_dir_all(path)?;
Ok(())
}
pub fn add_file(&mut self, name: &str, content: &str) -> std::io::Result<()> {
let mut path = self.volume.clone();
path.push_str("/");
path.push_str(name);
let mut file = File::create(path)?;
file.write_all(content.as_bytes())?;
Ok(())
}
pub fn remove_file(&mut self, name: &str) -> std::io::Result<()> {
let mut path = self.volume.clone();
path.push_str("/");
path.push_str(name);
fs::remove_file(path)?;
Ok(())
}
pub fn remove_dir(&mut self, name: &str) -> std::io::Result<()> {
let mut path = self.volume.clone();
path.push_str("/");
path.push_str(name);
fs::remove_dir_all(path)?;
Ok(())
}
pub fn has_file(&mut self, file: &str, content: &str) -> bool {
let full_path = PathBuf::from(self.volume.clone()).join(file);
has_files(full_path, file, content, self.test_id.clone())
}
/// get the files reported by the status command as two vectors (staged and not staged)
pub fn get_status(&mut self) -> (Vec<String>, Vec<String>) {
let out = self.run_cmd("status");
let lines: Vec<String> = str::from_utf8(&out.stdout)
.unwrap()
.split("\n")
.map(|s| s.to_owned())
.collect();
let mut staged = vec![];
let mut not_staged = vec![];
let mut in_staged = true;
let mut counter = 0;
for line in lines {
if line.find("not staged").is_some() {
in_staged = false;
counter = 1;
continue;
}
// skip the first two lines as they are not files
if counter < 2 {
counter += 1;
continue;
}
if line == String::from("") {
continue;
}
if in_staged {
staged.push(line);
} else {
not_staged.push(line);
}
}
return (staged, not_staged);
}
}

View File

@@ -0,0 +1,50 @@
use std::io::{BufReader, BufRead};
use std::fs::File;
use std::path::PathBuf;
#[cfg(test)]
pub fn has_files(full_path: PathBuf, file: &str, content: &str, test_id: String) -> bool
{
if !full_path.exists() {
println!("id: {}", test_id.clone());
eprintln!("File '{}' doesn't exists", file);
return false;
}
let f = File::open(full_path).unwrap();
for line in BufReader::new(f).lines(){
if let Ok(line) = line {
if line != content {
println!("id: {}", test_id);
eprintln!("File '{}' is not equal, {} != {}", file, line, content);
return false;
}
return line == content;
}
}
return true;
}
#[cfg(test)]
pub fn has_not_file(full_path: PathBuf, file: &str, test_id: String) -> bool
{
if full_path.exists() {
println!("id: {}", test_id.clone());
eprintln!("File '{}' exists but it shouldn't", file);
return false;
}
return true;
}
#[cfg(test)]
pub fn has_not_dir(full_path: PathBuf, dir: &str, test_id: String) -> bool
{
if full_path.exists() {
println!("id: {}", test_id.clone());
eprintln!("Dir '{}' exists but it shouldn't", dir);
return false;
}
return true;
}

123
tests/utils/server.rs Normal file
View File

@@ -0,0 +1,123 @@
use std::process::Command;
use std::os::unix::fs::PermissionsExt;
use std::fs::{self, File, Permissions};
use std::io::Write;
use std::env;
use std::path::PathBuf;
use super::files_utils::{self, has_files};
#[cfg(test)]
pub struct ServerTest {
user: String,
volume: PathBuf,
pub test_id: String
}
#[cfg(test)]
impl ServerTest {
pub fn new(id: String) -> Self {
let mut volume = env::current_dir().unwrap();
volume = volume.join("tests/data/admin/files");
ServerTest {
user: String::from("admin"),
volume,
test_id: id
}
}
pub fn init(&mut self) -> &mut ServerTest{
self.add_dir(&self.test_id.clone());
self.volume = self.volume.join(self.test_id.clone());
self.sync_root();
self
}
pub fn clean(&mut self) -> &mut ServerTest{
self.remove_dir(self.test_id.clone());
self.sync_root();
self
}
pub fn add_dir(&mut self, path: &str) -> &mut ServerTest {
let mut full_path = self.volume.clone();
full_path.push(path);
match fs::create_dir(&full_path) {
Ok(_) => {
// Set permissions to 777 so nextcloud can access it (a workaround that avoids
// setting group and owner to www-data)
if let Err(e) = fs::set_permissions(&full_path, Permissions::from_mode(0o777)) {
eprintln!("Error setting permissions: {}", e);
}
},
Err(e) => eprintln!("Error creating directory: {}", e),
}
// do not sync test directory when creating it
if !path.ends_with("_nextsync")
{
self.sync_test();
}
self
}
pub fn add_file(&mut self, name: &str, content: &str) -> std::io::Result<()> {
let mut full_path = self.volume.clone();
full_path.push(name);
let mut file = File::create(full_path)?;
file.write_all(content.as_bytes())?;
self.sync_test();
Ok(())
}
pub fn remove_dir(&mut self, path: String) -> &mut ServerTest {
let mut full_path = self.volume.clone();
full_path.push(path);
let _ = fs::remove_dir_all(&full_path);
self.sync_test();
self
}
fn sync_root(&self) -> &Self {
self.sync("")
}
fn sync_test(&self) -> &Self {
let test_id = self.test_id.clone();
self.sync(&test_id)
}
fn sync(&self, path: &str) -> &Self {
// perform the occ files:scan command inside the nextcloud docker container
let nextcloud_docker = "master-nextcloud-1";
let args = format!("exec -t --user www-data {} /var/www/html/occ files:scan --path=/{}/files/{}", nextcloud_docker, &self.user, path);
let _output = Command::new("docker")
.args(args.split(" "))
.output()
.expect("Could not execute docker exec command");
self
}
pub fn has_file(&mut self, file: &str, content: &str) -> bool {
let full_path = self.volume.clone().join(file);
has_files(full_path, file, content, self.test_id.clone())
}
pub fn has_not_file(&mut self, file: &str) -> bool {
let full_path = self.volume.clone().join(file);
files_utils::has_not_file(full_path, file, self.test_id.clone())
}
pub fn has_not_dir(&mut self, dir: &str) -> bool {
let full_path = self.volume.clone().join(dir);
dbg!(full_path.clone());
files_utils::has_not_file(full_path, dir, self.test_id.clone())
}
}

View File

@@ -0,0 +1,27 @@
use super::client::ClientTest;
#[cfg(test)]
pub fn lines_should_not_contains(lines: Vec<String>, str: &str) {
for line in lines {
if line.find(str).is_some() {
eprintln!("'{}' found in '{}'", str, line);
}
assert!(line.find(str).is_none());
}
}
#[cfg(test)]
pub fn status_should_be_empty(client: &mut ClientTest) {
let (staged, not_staged) = client.get_status();
if staged.len() != 0 {
eprintln!("id: {}", client.test_id.clone());
eprintln!("Staged should be empty but has '{}' line(s)", staged.len());
assert!(staged.len() == 0);
}
if not_staged.len() != 0 {
eprintln!("id: {}", client.test_id.clone());
eprintln!("Not Staged should be empty but has '{}' line(s)", not_staged.len());
assert!(not_staged.len() == 0);
}
}

31
tests/utils/utils.rs Normal file
View File

@@ -0,0 +1,31 @@
use rand::{distributions::Alphanumeric, Rng};
use super::client::ClientTest;
use super::server::ServerTest;
#[cfg(test)]
pub fn get_random_test_id() -> String {
let mut id: String = rand::thread_rng()
.sample_iter(&Alphanumeric)
.take(7)
.map(char::from)
.collect();
id.push_str("_nextsync");
id.to_owned()
}
#[cfg(test)]
pub fn init_test() -> (ClientTest, ServerTest) {
let id = get_random_test_id();
let mut server = ServerTest::new(id.clone());
server.init();
let client = ClientTest::new(id).init();
(client, server)
}
#[cfg(test)]
pub fn clean_test(client: ClientTest, server: &mut ServerTest) {
client.clean();
server.clean();
}