Compare commits


72 Commits

Author SHA1 Message Date
grimhilt
4533b9a72d refactor(tests): use init_test and clean_test 2024-05-07 18:20:32 +02:00
grimhilt
980d2d9a5d feat(add): prevent adding a file without changes 2024-05-07 18:12:05 +02:00
grimhilt
939b6f2fe3 feat: push deletion 2024-05-02 18:36:09 +02:00
grimhilt
4504b98112 fix(push): push deletion 2024-04-18 15:19:35 +02:00
grimhilt
e8c8ab9dfe fix(add): add deleted file 2024-04-16 17:54:25 +02:00
grimhilt
3420634bea chore: update clap 2024-03-31 22:17:26 +02:00
grimhilt
1aa02a24af test(push): add push remove test 2024-03-31 19:23:32 +02:00
grimhilt
5e43800d6c chore: update libraries 2024-03-31 19:19:10 +02:00
grimhilt
dc7df00ac9 chore: cleaning code 2024-03-17 00:20:58 +01:00
grimhilt
a1b9cde71a fix(tests): fix testsuite allow to pass push 2024-03-16 23:57:01 +01:00
grimhilt
7180647d26 test(pull): add test for pull 2024-03-11 14:47:05 +01:00
grimhilt
d5891a1a93 feat(push): add object when pushing directory 2024-03-11 14:16:41 +01:00
grimhilt
3207391fdb test(push): check that object are locally created when pushed 2024-03-10 23:06:32 +01:00
grimhilt
fa65b6b071 test(add): implicit dir 2024-03-10 17:29:50 +01:00
grimhilt
34dee1ceb6 fix(add): add directory implicitly 2024-03-10 17:29:37 +01:00
grimhilt
fe628ffc9f test(add): first tests 2024-03-10 16:49:21 +01:00
grimhilt
6b7a82bec6 fix: prevent adding nextsync config files 2024-03-10 16:49:06 +01:00
grimhilt
fdcd4633e5 fix: allow to push explicit directory 2024-03-10 16:19:23 +01:00
grimhilt
06bb51476b fix(push): push folder and return error when tcp fail 2024-03-01 17:56:52 +01:00
grimhilt
d8b2116aeb feat(remote): list remote with verbose option 2024-03-01 15:35:38 +01:00
grimhilt
8ed86a05ea style(obj): minor fixes 2024-02-29 09:36:52 +01:00
grimhilt
7951ad0520 refactor(tree): create impl Tree 2024-02-25 17:34:16 +01:00
grimhilt
faf7341525 refactor(blob): use object trait to create blob 2024-02-24 18:52:00 +01:00
grimhilt
642c358737 feat(test): allow multiple tests 2024-02-22 14:00:13 +01:00
grimhilt
e67082b85a refactor(test): use subdir 2024-02-22 13:02:22 +01:00
grimhilt
211e3702a3 refactor(test): remove old tests 2024-02-21 17:03:21 +01:00
grimhilt
a2f746d7f6 test: create first real test 2024-02-21 17:01:16 +01:00
grimhilt
69614b0c9f fix(token): allow to get and store token in local config 2024-02-21 17:01:14 +01:00
grimhilt
a5c5f4a713 fix(config): add option to last category 2024-02-21 17:01:10 +01:00
grimhilt
eaacff0e55 fix: minor warnings 2024-02-21 17:01:08 +01:00
grimhilt
287953c086 feat(config): create a proper config file with proper settings manipulation 2024-02-21 17:01:06 +01:00
grimhilt
6a11bb494b feat(credential): allow to add credential 2024-02-21 17:01:04 +01:00
grimhilt
1c60560c6e refactor(clone): set remote in config 2024-02-21 17:01:02 +01:00
grimhilt
c6534cfd40 feat(remote): add new remote 2024-02-21 17:00:43 +01:00
grimhilt
7719e27fe8 clean main: divide clap config into multiple files, broke clone 70 lines width 2023-10-28 23:46:12 +02:00
grimhilt
fc8e976c9c add add/directory tests 2023-10-28 22:23:48 +02:00
grimhilt
53b103af9e fix add -A 2023-10-28 22:12:27 +02:00
grimhilt
81c24b5e3c fix multiples warnings 2023-10-28 15:49:16 +02:00
grimhilt
22b9351862 add multiple test for the add command 2023-10-28 15:45:35 +02:00
grimhilt
0c925bc4f4 count global number of tests 2023-10-28 15:45:06 +02:00
grimhilt
d34b9bab5e globbing in add and clean the function 2023-10-28 15:44:53 +02:00
grimhilt
56234eaa3d add todos and fix some bugs on add and status 2023-10-28 00:15:47 +02:00
grimhilt
fd477a8139 start some tests on add command 2023-10-28 00:14:14 +02:00
grimhilt
559316e756 add user agent on login request 2023-10-27 23:04:01 +02:00
grimhilt
f4a905c57f store token 2023-10-24 15:32:51 +02:00
grimhilt
c6cf8a9730 update readme to show auth 2023-10-21 22:29:00 +02:00
grimhilt
f6db6992a0 working login system 2023-10-21 22:27:34 +02:00
grimhilt
908ead5b11 change name of functions 2023-10-21 21:48:21 +02:00
grimhilt
9ea1d01c27 add trait ApiCall 2023-10-21 21:47:48 +02:00
grimhilt
07f6405b26 test login 2023-10-21 19:54:11 +02:00
grimhilt
dadf00f4a5 add import necessary for test 2023-09-12 15:48:37 +02:00
grimhilt
a35c7b20d8 cleaning warnings 2023-08-27 22:57:05 +02:00
grimhilt
863e3bd68a find deletion on pull 2023-08-27 22:50:51 +02:00
grimhilt
57647e5df2 implement -all option to add 2023-08-25 18:52:29 +02:00
grimhilt
41c4796555 push copy file 2023-08-25 16:34:16 +02:00
grimhilt
aced8b992a create IntoPathBuf 2023-08-25 16:25:29 +02:00
grimhilt
d323ae3070 push move file 2023-08-25 16:09:28 +02:00
grimhilt
d476622305 prevent copy or move of empty file 2023-08-24 22:19:11 +02:00
grimhilt
498fada9ec push modification 2023-08-24 20:59:41 +02:00
grimhilt
f64d719b31 find modified, copied, moved file in staged 2023-08-23 12:52:45 +02:00
grimhilt
dcf137667b clean code 2023-08-11 22:09:34 +02:00
grimhilt
5b46b1e2f1 not cleaned status with moved and copied 2023-08-11 18:21:29 +02:00
grimhilt
4b12edbe5c add refs to blob 2023-08-11 18:21:00 +02:00
grimhilt
16dbd25168 add modified files in status 2023-08-04 19:17:21 +02:00
grimhilt
91a29480df fix blob creation and get changes 2023-08-04 19:17:04 +02:00
grimhilt
ce047eba12 prevent crash of downloader when not logging 2023-08-04 19:16:44 +02:00
grimhilt
94220be935 add timestamp and hash in blob 2023-08-04 16:01:24 +02:00
grimhilt
d5097727cb create impl for blob 2023-08-04 15:25:51 +02:00
grimhilt
cb43a46456 draft of pull 2023-07-28 13:22:55 +02:00
grimhilt
4c34df7cfe remote-diff only a directory 2023-07-28 13:22:13 +02:00
grimhilt
29def4967c normalize path in add and check if path correspond to deleted object 2023-07-24 00:49:08 +02:00
grimhilt
2775c77c55 improve status and minor fixes 2023-07-24 00:48:22 +02:00
73 changed files with 4499 additions and 1025 deletions

.gitignore vendored (15 lines changed)

@@ -1,7 +1,10 @@
+*
+!/**/
+!*.rs
+!.gitignore
+!README.md
+!LICENSE
 target
-*.test
-.env
-todo
-.nextsync
-.nextsyncignore
-test
+tests/nextcloud-docker-dev
+tests/data

Cargo.lock generated (761 lines changed)

File diff suppressed because it is too large

Cargo.toml

@@ -6,21 +6,25 @@ edition = "2021"
 # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

 [dependencies]
-reqwest = { version = "0.11", features = ["stream", "json", "multipart"] }
-tokio = { version = "1", features = ["full"] }
+rustc-serialize="0.3.25"
+reqwest = { version = "0.12", features = ["stream", "json", "multipart"] }
+tokio = { version = "1.37", features = ["full"] }
 dotenv ="0.15.0"
-clap = "2.33"
+clap = "4.5.4"
 rust-crypto = "0.2.36"
-colored = "2.0.0"
+colored = "2.1.0"
-xml-rs = "0.8.0"
+xml-rs = "0.8.19"
-regex = "1.8.3"
+regex = "1.10.4"
 lazy_static = "1.4.0"
 glob = "0.3.1"
-textwrap = "0.13"
+textwrap = "0.16.1"
-chrono = "0.4.26"
+chrono = "0.4.37"
-indicatif = "0.17.5"
+indicatif = "0.17.8"
 md5 = "0.7.0"
-futures-util = "0.3.28"
+futures-util = "0.3.30"
+rpassword = "7.3.1"
+rand = "0.8.5"
+tempfile = "3.10.1"

 [profile.release]
 debug = true

README.md

@@ -9,18 +9,16 @@ This should work pretty much like git with some adaptations to be more debuggabl
 ## Features
 - [x] Cloning
-- [x] Status (only for new and deleted files/folders)
+- [x] Status (new, deleted, modified, copied, moved)
-- [x] Pushing updates (only deletion and addition no changes)
+- [x] Pushing updates (new, deleted, modified)
 - [x] Using a .nextsyncignore to ignore files
 - [ ] Pulling changes
-- [ ] Auth without using env variables
+- [x] Auth with a token
-- [ ] Detecting local changes
+- [ ] Remember token
 - [ ] Various optimisation
 ## Usage
-For the authentification, I use env variables (USERNAME and PASSWORD), this is temporary.
 ```
 USAGE:
     nextsync [SUBCOMMAND]


@@ -2,8 +2,10 @@
 ## Blob object
 ```
-file_name timestamp size hash
+file_name timestamp1 size timestamp2 hash
 ```
+timestamp1: timestamp of file on server to know if the server has an update
+timestamp2: timestamp of file locally to know when the file has changed on the system

 ## Tree object
 ```
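(For illustration only: a blob object line in the new format might look like the sample below. The path, size, timestamps and hash are invented; the surrounding diffs suggest millisecond timestamps taken from `timestamp_millis()` and a SHA-1 content hash.)

```
src/main.rs 1714066832000 2048 1714066845000 a94a8fe5ccb19ba61c4c0873d391e987982fbbd3
```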

src/commands/mod.rs

@@ -6,3 +6,6 @@ pub mod clone;
 pub mod push;
 pub mod config;
 pub mod remote_diff;
+pub mod remote;
+pub mod pull;
+pub mod credential;

src/commands/add.rs

@@ -1,59 +1,121 @@
use std::io::Write; use std::io::Write;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use clap::Values; use glob::glob;
use crate::store; use crate::store::{self, object::Object};
use crate::utils::{self}; use crate::utils::{self, path};
use crate::store::object::object::{Obj, ObjMethods};
use crate::utils::nextsyncignore::{self, ignore_file}; use crate::utils::nextsyncignore::{self, ignore_file};
use crate::utils::path::{normalize_relative, repo_root, path_buf_to_string};
pub struct AddArgs<'a> { pub struct AddArgs {
pub files: Values<'a>, pub files: Vec<String>,
pub force: bool, pub force: bool,
pub all: bool,
} }
// todo match deleted files // todo match deleted files
// todo match weird reg expression
// todo normalize path
pub fn add(args: AddArgs) { pub fn add(args: AddArgs) {
let mut index_file = store::index::open();
let mut added_files: Vec<String> = vec![]; let mut pattern: String;
let rules = match nextsyncignore::read_lines() { let file_vec: Vec<String> = match args.all {
Ok(r) => r, true => {
Err(_) => vec![], pattern = path_buf_to_string(repo_root());
pattern.push_str("/*");
vec![pattern]
},
false => args.files,
}; };
let mut added_files: Vec<String> = vec![];
let mut ignored_f = vec![]; let mut ignored_f = vec![];
let file_vec: Vec<&str> = args.files.collect(); let rules = nextsyncignore::get_rules();
for file in file_vec { for file in file_vec {
if !args.force && ignore_file(&file.to_string(), rules.clone(), &mut ignored_f) { let f = match normalize_relative(&file) {
continue; Ok(f) => f,
} Err(err) => {
let path = Path::new(file); eprintln!("err: {} {}", file, err);
continue;
}
};
let path = repo_root().join(Path::new(&f));
match path.exists() { match path.exists() {
true => { true => {
if path.is_dir() { let mut obj = Obj::from_path(f.clone());
added_files.push(String::from(path.to_str().unwrap())); if obj.has_changes() {
add_folder_content(path.to_path_buf(), &mut added_files); add_entry(path, args.force, &mut added_files, rules.clone(), &mut ignored_f);
} else {
added_files.push(String::from(path.to_str().unwrap()));
} }
}, },
false => { false => {
// todo deleted file/folder verif if exists if Obj::from_path(file.clone()).exists_on_remote() {
added_files.push(String::from(path.to_str().unwrap())); // object is deleted so not present but can still be added for deletion
added_files.push(String::from(f));
} else {
// try globbing if nothing has been found
for entry in try_globbing(path) {
add_entry(entry, args.force, &mut added_files, rules.clone(), &mut ignored_f);
}
}
} }
} }
} }
if ignored_f.len() > 0 { print_ignored_files(ignored_f);
write_added_files(added_files);
}
fn add_entry(entry: PathBuf, force: bool, added_files: &mut Vec<String>, rules: Vec<String>, ignored_f: &mut Vec<String>) {
// ignore nextsync config files
if path::is_nextsync_config(entry.clone()) {
return;
}
// check if the file must be ignored
if !force && ignore_file(&path_buf_to_string(entry.clone()), rules, ignored_f) {
return;
}
// add the parent if there is one and it is not already created
add_parent(entry.clone(), added_files);
added_files.push(path_buf_to_string(entry.strip_prefix(repo_root()).unwrap().to_path_buf()));
if entry.is_dir() {
add_folder_content(entry.to_path_buf(), added_files);
}
}
fn add_parent(entry: PathBuf, added_files: &mut Vec<String>) {
let test_parent = entry.strip_prefix(repo_root()).unwrap().parent();
if test_parent.is_none() || test_parent.unwrap() == PathBuf::new() {
return;
}
let parent = entry.parent().unwrap();
if !Obj::from_path(parent).exists_on_remote() {
add_parent(parent.to_path_buf(), added_files);
added_files.push(path_buf_to_string(parent.strip_prefix(repo_root()).unwrap().to_path_buf()));
}
}
fn print_ignored_files(ignored_files: Vec<String>) {
if ignored_files.len() > 0 {
// todo multiple nextsyncignore // todo multiple nextsyncignore
println!("The following paths are ignored by your .nextsyncignore file:"); println!("The following paths are ignored by your .nextsyncignore file:");
for file in ignored_f { for file in ignored_files {
println!("{}", file); println!("{}", file);
} }
} }
}
// save all added_files in index fn write_added_files(added_files: Vec<String>) {
// todo avoid duplication let mut index_file = store::index::open();
for file in added_files { for file in added_files {
if store::index::alread_added(file.clone()) {
continue;
}
match writeln!(index_file, "{}", file) { match writeln!(index_file, "{}", file) {
Ok(()) => (), Ok(()) => (),
Err(err) => eprintln!("{}", err), Err(err) => eprintln!("{}", err),
@@ -62,7 +124,25 @@ pub fn add(args: AddArgs) {
drop(index_file); drop(index_file);
} }
fn try_globbing(path: PathBuf) -> Vec<PathBuf> {
let mut paths: Vec<PathBuf> = vec![];
if let Ok(entries) = glob(path.to_str().unwrap()) {
for entry in entries {
match entry {
Ok(ppath) => paths.push(ppath),
Err(e) => {
eprintln!("err: {} incorrect pattern ({})", path.display(), e);
}
}
}
} else {
eprintln!("err: {} is not something you can add.", path.to_str().unwrap());
}
return paths;
}
fn add_folder_content(path: PathBuf, added_files: &mut Vec<String>) { fn add_folder_content(path: PathBuf, added_files: &mut Vec<String>) {
// todo check for changes
let mut folders: Vec<PathBuf> = vec![]; let mut folders: Vec<PathBuf> = vec![];
folders.push(path); folders.push(path);
@@ -70,12 +150,16 @@ fn add_folder_content(path: PathBuf, added_files: &mut Vec<String>) {
if let Ok(entries) = utils::read::read_folder(folder.clone()) { if let Ok(entries) = utils::read::read_folder(folder.clone()) {
for entry in entries { for entry in entries {
let path_entry = PathBuf::from(entry); let path_entry = PathBuf::from(entry);
if path_entry.is_dir() { if !path::is_nextsync_config(path_entry.clone())
folders.push(path_entry.clone()); {
if path_entry.is_dir() {
folders.push(path_entry.clone());
}
added_files.push(path_buf_to_string(path_entry.strip_prefix(repo_root()).unwrap().to_path_buf()));
} }
added_files.push(String::from(path_entry.to_str().unwrap()));
} }
} }
} }
} }

src/commands/clone.rs

@@ -2,30 +2,31 @@ use std::io;
use std::io::prelude::*; use std::io::prelude::*;
use std::fs::DirBuilder; use std::fs::DirBuilder;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
use clap::Values;
use regex::Regex; use regex::Regex;
use crate::services::downloader::Downloader; use crate::services::downloader::Downloader;
use crate::utils::api::ApiProps; use crate::utils::api::ApiProps;
use crate::utils::path::path_buf_to_string;
use crate::utils::remote::{enumerate_remote, EnumerateOptions}; use crate::utils::remote::{enumerate_remote, EnumerateOptions};
use crate::global::global::{DIR_PATH, set_dir_path}; use crate::global::global::{DIR_PATH, set_dir_path};
use crate::services::api::ApiError; use crate::services::api::ApiError;
use crate::services::api_call::ApiCall;
use crate::services::req_props::{ReqProps, ObjProps}; use crate::services::req_props::{ReqProps, ObjProps};
use crate::store::object::{tree, blob}; use crate::store::object::{tree::Tree, blob::Blob};
use crate::commands::config; use crate::commands::config;
use crate::commands::init; use crate::commands::init;
pub const DEPTH: &str = "3"; pub const DEPTH: &str = "3";
pub struct CloneArgs<'a> { pub struct CloneArgs {
pub remote: Values<'a>, pub remote: String,
pub depth: Option<String>, pub depth: Option<String>,
} }
pub fn clone(args: CloneArgs) { pub fn clone(args: CloneArgs) {
let d = DIR_PATH.lock().unwrap().clone(); let d = DIR_PATH.lock().unwrap().clone();
let url = args.remote.clone().next().unwrap(); let url = args.remote.clone();
let (host, tmp_user, dist_path_str) = get_url_props(url); let (host, tmp_user, dist_path_str) = get_url_props(&url);
let username = match tmp_user { let username = match tmp_user {
Some(u) => u.to_string(), Some(u) => u.to_string(),
None => { None => {
@@ -46,7 +47,7 @@ pub fn clone(args: CloneArgs) {
let iter = Path::new(dist_path_str).iter(); let iter = Path::new(dist_path_str).iter();
let dest_dir = iter.last().unwrap(); let dest_dir = iter.last().unwrap();
let lp = std::env::current_dir().unwrap().join(dest_dir); let lp = std::env::current_dir().unwrap().join(dest_dir);
set_dir_path(lp.to_str().unwrap().to_string()); set_dir_path(path_buf_to_string(lp.clone()));
lp lp
}, },
}; };
@@ -57,11 +58,15 @@ pub fn clone(args: CloneArgs) {
std::process::exit(1); std::process::exit(1);
} else { } else {
init::init(); init::init();
let mut remote_config = api_props.username.clone();
remote_config.push_str("@"); // set remote origin in config file
remote_config.push_str(api_props.host.strip_prefix("https://").unwrap()); let mut remote_url = api_props.username.clone();
remote_config.push_str(&api_props.root); remote_url.push_str("@");
if config::set("remote", &remote_config).is_err() { remote_url.push_str(api_props.host.strip_prefix("https://").unwrap());
remote_url.push_str(&api_props.root);
if config::add_remote("origin", &remote_url).is_err()
{
eprintln!("err: not able to save remote"); eprintln!("err: not able to save remote");
} }
} }
@@ -69,7 +74,7 @@ pub fn clone(args: CloneArgs) {
let depth = &args.depth.clone().unwrap_or(DEPTH.to_string()); let depth = &args.depth.clone().unwrap_or(DEPTH.to_string());
let (folders, files) = enumerate_remote( let (folders, files) = enumerate_remote(
|a| req(&api_props, depth, a), |a| req(&api_props, depth, a),
&should_skip, None,
EnumerateOptions { EnumerateOptions {
depth: Some(depth.to_owned()), depth: Some(depth.to_owned()),
relative_s: None relative_s: None
@@ -85,12 +90,12 @@ pub fn clone(args: CloneArgs) {
// add tree // add tree
let path_folder = p.strip_prefix(ref_path.clone()).unwrap(); let path_folder = p.strip_prefix(ref_path.clone()).unwrap();
let lastmodified = folder.lastmodified.unwrap().timestamp_millis(); let lastmodified = folder.lastmodified.unwrap().timestamp_millis();
if let Err(err) = tree::add(path_folder.to_path_buf(), &lastmodified.to_string(), false) { if let Err(err) = Tree::from_path(path_folder.to_path_buf()).create(&lastmodified.to_string(), false) {
eprintln!("err: saving ref of {} ({})", path_folder.display(), err); eprintln!("err: saving ref of {} ({})", path_folder.display(), err);
} }
} }
let downloader = Downloader::new() Downloader::new()
.set_api_props(api_props.clone()) .set_api_props(api_props.clone())
.set_files(files) .set_files(files)
.should_log() .should_log()
@@ -101,15 +106,11 @@ fn save_blob(obj: ObjProps) {
let relative_s = &obj.clone().relative_s.unwrap(); let relative_s = &obj.clone().relative_s.unwrap();
let relative_p = PathBuf::from(&relative_s); let relative_p = PathBuf::from(&relative_s);
let lastmodified = obj.clone().lastmodified.unwrap().timestamp_millis(); let lastmodified = obj.clone().lastmodified.unwrap().timestamp_millis();
if let Err(err) = blob::add(relative_p, &lastmodified.to_string(), false) { if let Err(err) = Blob::from_path(relative_p).create(&lastmodified.to_string(), false) {
eprintln!("err: saving ref of {} ({})", relative_s.clone(), err); eprintln!("err: saving ref of {} ({})", relative_s.clone(), err);
} }
} }
fn should_skip(_: ObjProps) -> bool {
return false;
}
fn req(api_props: &ApiProps, depth: &str, relative_s: &str) -> Result<Vec<ObjProps>, ApiError> { fn req(api_props: &ApiProps, depth: &str, relative_s: &str) -> Result<Vec<ObjProps>, ApiError> {
ReqProps::new() ReqProps::new()
.set_request(relative_s, &api_props) .set_request(relative_s, &api_props)
@@ -176,7 +177,7 @@ mod tests {
fn test_get_url_props() { fn test_get_url_props() {
let p = "/foo/bar"; let p = "/foo/bar";
let u = Some("user"); let u = Some("user");
let d = String::from("http://nextcloud.com"); // let d = String::from("http://nextcloud.com");
let sd = String::from("https://nextcloud.com"); let sd = String::from("https://nextcloud.com");
let sld = String::from("https://nextcloud.example.com"); let sld = String::from("https://nextcloud.example.com");
let ld = String::from("http://nextcloud.example.com"); let ld = String::from("http://nextcloud.example.com");

src/commands/config.rs

@@ -1,39 +1,190 @@
use std::fs::OpenOptions; use std::fs::OpenOptions;
use std::io::{self, Write}; use std::io::{self, Write, BufRead, Seek, SeekFrom};
use crate::utils::{path, read}; use crate::utils::{path, read};
use std::collections::HashMap;
pub fn set(var: &str, val: &str) -> io::Result<()> { pub struct ConfigSetArgs {
let mut root = path::nextsync(); pub name: String,
root.push("config"); pub value: String,
}
pub fn config_set(args: ConfigSetArgs) {
// configure possible options and their associated category
let mut option_categories: HashMap<&str, &str> = HashMap::new();
option_categories.insert("force_insecure", "core");
option_categories.insert("token", "core");
// get category of option
let category = option_categories.get(args.name.as_str());
if category.is_none() {
eprintln!("fatal: '{}' is not a valid option.", args.name.clone());
std::process::exit(1);
}
let _ = write_option_in_cat(category.unwrap(), &args.name, &args.value);
}
pub fn find_option_in_cat(category: &str, option: &str) -> Option<String> {
let mut config = path::nextsync();
config.push("config");
let mut in_target_category = false;
if let Ok(lines) = read::read_lines(config) {
for line in lines {
if let Ok(line) = line {
let trimmed_line = line.trim();
if trimmed_line.starts_with('[') && trimmed_line.ends_with(']') {
in_target_category = trimmed_line == format!("[{}]", category);
} else if in_target_category {
let parts: Vec<&str> = trimmed_line.splitn(2, '=').collect();
if parts.len() == 2 && parts[0].trim() == option {
return Some(parts[1].trim().to_string());
}
}
}
}
}
None
}
pub fn write_option_in_cat(category: &str, option: &str, value: &str) -> io::Result<()> {
let mut config = path::nextsync();
config.push("config");
let mut file = OpenOptions::new()
.read(true)
.write(true)
.create(true)
.open(&config)?;
let mut in_target_category = false;
let mut option_found = false;
// Go to the beginning of the file
file.seek(SeekFrom::Start(0))?;
// Create a temporary file to hold the modified content
let mut tmp_file = tempfile::Builder::new()
.prefix(".nextsyncconfig")
.tempfile()?;
let reader = io::BufReader::new(&file);
for line in reader.lines() {
let line = line?;
let trimmed_line = line.trim();
if trimmed_line.starts_with('[') && trimmed_line.ends_with(']') {
// if we were already in target category we are now leaving it
// add option only if not found before
if in_target_category && !option_found {
writeln!(&mut tmp_file, "\t{} = {}", option, value)?;
} else if !in_target_category {
in_target_category = trimmed_line == format!("[{}]", category);
}
}
if in_target_category && !option_found && trimmed_line.starts_with(&format!("{} =", option)) {
// Option already exists, update its value
writeln!(&mut tmp_file, "\t{} = {}", option, value)?;
option_found = true;
} else {
// Write the original line
writeln!(&mut tmp_file, "{}", line)?;
}
}
// add to last category
if in_target_category && !option_found {
writeln!(&mut tmp_file, "\t{} = {}", option, value)?;
}
// if the category didn't exist create it and add the option
if !in_target_category {
writeln!(&mut tmp_file, "[{}]", category)?;
writeln!(&mut tmp_file, "\t{} = {}", option, value)?;
}
// Flush and sync the temporary file to ensure data is written to disk
tmp_file.flush()?;
// Go back to the beginning of the file
tmp_file.seek(SeekFrom::Start(0))?;
file.seek(SeekFrom::Start(0))?;
// Copy the contents of the temporary file to the original file
io::copy(&mut tmp_file, &mut file)?;
Ok(())
}
pub fn add_remote(name: &str, url: &str) -> io::Result<()> {
let config = path::config();
// check if there is already a remote with this name
if get_remote(name).is_some()
{
eprintln!("error: remote {} already exists.", name);
std::process::exit(3);
}
// todo check if exist
let mut file = OpenOptions::new() let mut file = OpenOptions::new()
.read(true) .read(true)
.write(true) .write(true)
.create(true) .create(true)
.append(true) .append(true)
.open(root)?; .open(config)?;
writeln!(file, "[remote \"{}\"]", name)?;
writeln!(file, "\turl = {}", url)?;
let mut line = var.to_owned();
line.push_str(" ");
line.push_str(val);
writeln!(file, "{}", line)?;
Ok(()) Ok(())
} }
pub fn get(var: &str) -> Option<String> { pub fn get_remote(name: &str) -> Option<String> {
let mut root = path::nextsync(); find_option_in_cat(&format!("remote \"{}\"", name), "url")
root.push("config"); }
/// return a vector of remote found in config file (e.g: ("origin", "https://example.com"))
pub fn get_all_remote() -> Vec<(String, String)> {
let config = path::config();
let mut remotes: Vec<(String, String)> = vec![];
let mut in_remote = false;
let mut remote_name = String::new();
if let Ok(lines) = read::read_lines(config) {
if let Ok(lines) = read::read_lines(root) {
for line in lines { for line in lines {
if let Ok(l) = line { if let Ok(line) = line {
if l.starts_with(var.clone()) { let trimmed_line = line.trim();
let (_, val) = l.split_once(" ").unwrap();
return Some(val.to_owned()); if trimmed_line.starts_with("[remote ") {
in_remote = true;
remote_name = trimmed_line.strip_prefix("[remote \"").unwrap().strip_suffix("\"]").unwrap().to_string();
} }
else if trimmed_line.starts_with('[')
{
in_remote = false;
}
else if in_remote {
let parts: Vec<&str> = trimmed_line.splitn(2, '=').collect();
if parts.len() == 2 {
remotes.push((remote_name.to_string(), parts[1].trim().to_string()))
}
}
} }
} }
} }
None remotes
}
pub fn get_core(name: &str) -> Option<String> {
find_option_in_cat("core", name)
} }
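(For illustration only: based on the helpers added above (`write_option_in_cat`, `add_remote`, `find_option_in_cat`, `get_core`), the nextsync config file appears to use an INI-like layout with a tab-indented `option = value` line under each bracketed category. The values below are invented.)

```
[core]
	token = abc123exampletoken
[remote "origin"]
	url = user@nextcloud.example.com/foo/bar
```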

src/commands/credential.rs (new file)

@@ -0,0 +1,54 @@
use crate::commands::clone::get_url_props;
use crate::services::api::ApiError::RequestError;
use crate::services::login::Login;
use crate::services::api_call::ApiCall;
use crate::commands::config;
pub struct CredentialArgs {
pub username: String,
pub password: Option<String>,
}
pub fn credential_add(args: CredentialArgs) {
// get remote if exists
let remote = match config::get_remote("origin") {
None => {
eprintln!("fatal: No remote origin, impossible to send request to get token");
std::process::exit(1);
},
Some(remote) => remote
};
let (host, _, _) = get_url_props(&remote);
// get username and password
let username = args.username.to_owned();
let password = match args.password {
Some(mut pwd) => pwd.to_owned(),
None => {
println!("Please enter the password for {}: ", username);
rpassword::read_password().unwrap()
}
};
// get token
let get_token = Login::new()
.set_auth(&username, &password)
.set_host(Some(host))
.send_login();
// deal with error
if let Err(err) = get_token {
if let RequestError(err) = err {
eprintln!("fatal: Failed to get token for these credential. ({})", err);
}
else {
eprintln!("fatal: Failed to get token for these credential.");
}
std::process::exit(1);
}
// save token
let _ = config::write_option_in_cat("core", "token", get_token.unwrap().as_str());
}

src/commands/init.rs

@@ -1,7 +1,6 @@
 use std::env;
 use std::fs::{DirBuilder, File};
 use std::path::PathBuf;
-use crate::utils::read::read_folder;
 use crate::global::global::DIR_PATH;

 pub fn init() {
@@ -12,23 +11,24 @@ pub fn init() {
         None => env::current_dir().unwrap(),
     };

+    // todo
     // check if dir is empty
-    if let Ok(entries) = read_folder(path.clone()) {
-        if entries.len() != 0 {
-            eprintln!("fatal: destination path '{}' already exists and is not an empty directory.", path.display());
-            std::process::exit(1);
-        }
-    } else {
-        eprintln!("fatal: cannot open the destination directory");
-        std::process::exit(1);
-    }
+    // if let Ok(entries) = read_folder(path.clone()) {
+    //     if entries.len() != 0 {
+    //         eprintln!("fatal: destination path '{}' already exists and is not an empty directory.", path.display());
+    //         std::process::exit(1);
+    //     }
+    // } else {
+    //     eprintln!("fatal: cannot open the destination directory");
+    //     std::process::exit(1);
+    // }

     let builder = DirBuilder::new();
     path.push(".nextsync");
     match builder.create(path.clone()) {
         Ok(()) => (),
-        Err(_) => println!("Error: cannot create .nextsync"),
+        Err(err) => println!("Error: cannot create .nextsync ({})", err),
     };

     path.push("objects");
@@ -38,6 +38,13 @@ pub fn init() {
     };
     path.pop();

+    path.push("refs");
+    match builder.create(path.clone()) {
+        Ok(()) => (),
+        Err(_) => println!("Error: cannot create refs"),
+    };
+    path.pop();
+
     path.push("HEAD");
     match File::create(path.clone()) {
         Ok(_) => (),
@@ -51,12 +58,13 @@ pub fn init() {
         Err(_) => println!("Error: cannot create index"),
     }

-    path.pop();
-    path.pop();
-    path.push(".nextsyncignore");
-
-    match File::create(path) {
-        Ok(_) => (),
-        Err(_) => println!("Error: cannot create .nextsyncignore"),
-    }
+    // todo
+    // path.pop();
+    // path.pop();
+    // path.push(".nextsyncignore");
+    //
+    // match File::create(path) {
+    //     Ok(_) => (),
+    //     Err(_) => println!("Error: cannot create .nextsyncignore"),
+    // }
 }

src/commands/pull.rs (new file, 54 lines)

@@ -0,0 +1,54 @@
use std::path::PathBuf;
use std::fs::DirBuilder;
use crate::services::downloader::Downloader;
use crate::services::req_props::ObjProps;
use crate::store::object::blob::Blob;
use crate::store::object::tree::Tree;
use crate::utils::api::get_api_props;
use crate::utils::path;
use crate::commands::remote_diff::get_diff;
pub fn pull() {
let relative_p = path::current()
.unwrap()
.strip_prefix(path::repo_root()).unwrap().to_path_buf();
let (folders, files) = get_diff(relative_p);
let root = path::repo_root();
for folder in folders {
let p = root.clone().join(PathBuf::from(folder.relative_s.unwrap()));
if !p.exists() {
// create folder
if let Err(err) = DirBuilder::new().recursive(true).create(p.clone()) {
eprintln!("err: cannot create directory {} ({})", p.display(), err);
}
// add tree
let path_folder = p.strip_prefix(root.clone()).unwrap();
let lastmodified = folder.lastmodified.unwrap().timestamp_millis();
if let Err(err) = Tree::from_path(path_folder).create(&lastmodified.to_string(), false) {
eprintln!("err: saving ref of {} ({})", path_folder.display(), err);
}
}
}
Downloader::new()
.set_api_props(get_api_props())
.set_files(files)
.should_log()
.download(root, Some(&update_blob));
// todo look if need to download or update
}
fn update_blob(obj: ObjProps) {
let relative_s = &obj.clone().relative_s.unwrap();
let relative_p = PathBuf::from(&relative_s);
let lastmodified = obj.clone().lastmodified.unwrap().timestamp_millis();
// todo update function
if let Err(err) = Blob::from_path(relative_p).create(&lastmodified.to_string(), false) {
eprintln!("err: saving ref of {} ({})", relative_s.clone(), err);
}
}


@@ -1,26 +1,38 @@
use std::path::PathBuf; use std::path::PathBuf;
use crate::commands::{status, config}; use crate::commands::{status, config};
use crate::commands::push::push_factory::{PushFactory, PushState}; use crate::commands::push::push_factory::{PushFactory, PushState};
use crate::store::index;
use super::status::LocalObj;
pub mod push_factory; pub mod push_factory;
pub mod new; pub mod new;
pub mod new_dir; pub mod new_dir;
pub mod rm_dir; pub mod rm_dir;
pub mod deleted; pub mod deleted;
pub mod modified;
pub mod moved;
pub mod copied;
pub fn push() { pub fn push() {
// todo let _remote = match config::get_remote("origin") {
let _remote = match config::get("remote") {
Some(r) => r, Some(r) => r,
None => { None => {
eprintln!("fatal: no remote set in configuration"); eprintln!("fatal: no remote set in configuration");
// todo debug
//std::process::exit(1); //std::process::exit(1);
String::from("") String::new()
} }
}; };
let staged_objs = status::get_all_staged(); let staged_objs = status::get_all_staged();
// exit if there is nothing to push
if staged_objs.len() == 0 {
println!("Everything up-to-date");
std::process::exit(0);
}
// path that certify that all its children can be push whithout hesistation // path that certify that all its children can be push whithout hesistation
// (e.g. if remote dir has no changes since last sync all children // (e.g. if remote dir has no changes since last sync all children
// can be pushed without verification) // can be pushed without verification)
@@ -51,20 +63,25 @@ pub fn push() {
match push_factory.can_push(&mut whitelist) { match push_factory.can_push(&mut whitelist) {
PushState::Valid => { PushState::Valid => {
match push_factory.push() { match push_factory.push() {
Ok(()) => (), Ok(()) => remove_obj_from_index(obj.clone()),
Err(err) => { Err(err) => {
eprintln!("err: pushing {}: {}", obj.name, err); eprintln!("err: pushing {}: {}", obj.name, err);
} }
} }
}, },
PushState::Done => (), PushState::Done => remove_obj_from_index(obj.clone()),
PushState::Conflict => { PushState::Conflict => {
eprintln!("conflict when pushing blob");
// download file // download file
} }
_ => todo!(), PushState::Error => (eprintln!("error when pushing changes blob")),
} }
} }
} }
// read index }
// if dir upload dir
fn remove_obj_from_index(obj: LocalObj) {
if let Err(err) = index::rm_line(obj.path.to_str().unwrap()) {
eprintln!("err: removing {} from index: {}", obj.name, err);
}
} }

src/commands/push/copied.rs (new file)

@@ -0,0 +1,84 @@
use std::path::PathBuf;
use std::io;
use crate::services::api::ApiError;
use crate::services::r#copy::Copy;
use crate::services::api_call::ApiCall;
use crate::services::req_props::ReqProps;
use crate::commands::status::LocalObj;
use crate::commands::push::push_factory::{PushState, PushChange, PushFlowState};
use crate::store::object::blob::Blob;
use crate::utils::path::path_buf_to_string;
pub struct Copied {
pub obj: LocalObj,
}
impl PushChange for Copied {
fn can_push(&self, whitelist: &mut Option<PathBuf>) -> PushState {
match self.flow(&self.obj, whitelist.clone()) {
PushFlowState::Whitelisted => PushState::Done,
PushFlowState::NotOnRemote => PushState::Valid,
PushFlowState::RemoteIsNewer => PushState::Conflict,
PushFlowState::LocalIsNewer => PushState::Conflict,
PushFlowState::Error => PushState::Error,
}
}
fn push(&self) -> io::Result<()> {
let obj = &self.obj;
let res = Copy::new()
.set_url_copy(
&path_buf_to_string(obj.path_from.clone().unwrap()),
obj.path.to_str().unwrap())
.send();
match res {
Err(ApiError::IncorrectRequest(err)) => {
eprintln!("fatal: error copying file {}: {}", obj.name, err.status());
std::process::exit(1);
},
Err(ApiError::RequestError(_)) => {
eprintln!("fatal: request error copying file {}", obj.name);
std::process::exit(1);
}
_ => (),
}
// get lastmodified props to update it
let props = ReqProps::new()
.set_url(obj.path.to_str().unwrap())
.getlastmodified()
.send_req_single();
let prop = match props {
Ok(o) => o,
Err(ApiError::IncorrectRequest(err)) => {
eprintln!("fatal: {}", err.status());
std::process::exit(1);
},
Err(ApiError::EmptyError(_)) => {
eprintln!("Failed to get body");
std::process::exit(1);
}
Err(ApiError::RequestError(err)) => {
eprintln!("fatal: {}", err);
std::process::exit(1);
},
Err(ApiError::Unexpected(_)) => todo!()
};
let lastmodified = prop.lastmodified.unwrap().timestamp_millis();
// create destination blob
if let Err(err) = Blob::from_path(obj.path.clone()).create(&lastmodified.to_string(), false) {
eprintln!("err: creating ref of {}: {}", obj.name.clone(), err);
}
Ok(())
}
// download file with .distant at the end
fn conflict(&self) {
todo!()
}
}

src/commands/push/deleted.rs

@@ -1,11 +1,13 @@
 use std::path::PathBuf;
 use std::io;
 use crate::services::api::ApiError;
+use crate::services::api_call::ApiCall;
 use crate::services::delete_path::DeletePath;
 use crate::store::index;
-use crate::store::object::blob;
+use crate::store::object::blob::Blob;
 use crate::commands::status::LocalObj;
 use crate::commands::push::push_factory::{PushState, PushChange, PushFlowState};
+use crate::store::object::object::ObjMethods;

 pub struct Deleted {
     pub obj: LocalObj
@@ -26,7 +28,7 @@ impl PushChange for Deleted {
         let obj = &self.obj;
         let res = DeletePath::new()
             .set_url(obj.path.to_str().unwrap())
-            .send_with_err();
+            .send();

         match res {
             Err(ApiError::IncorrectRequest(err)) => {
@@ -42,7 +44,7 @@ impl PushChange for Deleted {
         // update tree
         // todo date
-        blob::rm(obj.path.clone())?;
+        Blob::from_path(obj.path.clone()).rm_node()?;

         // remove index
         index::rm_line(obj.path.to_str().unwrap())?;

src/commands/push/modified.rs (new file)

@@ -0,0 +1,80 @@
use std::path::PathBuf;
use std::io;
use crate::services::api::ApiError;
use crate::services::api_call::ApiCall;
use crate::services::req_props::ReqProps;
use crate::services::upload_file::UploadFile;
use crate::commands::status::LocalObj;
use crate::commands::push::push_factory::{PushState, PushChange, PushFlowState};
use crate::store::object::blob::Blob;
pub struct Modified {
pub obj: LocalObj,
}
impl PushChange for Modified {
fn can_push(&self, whitelist: &mut Option<PathBuf>) -> PushState {
match self.flow(&self.obj, whitelist.clone()) {
PushFlowState::Whitelisted => PushState::Done,
PushFlowState::NotOnRemote => PushState::Valid,
PushFlowState::RemoteIsNewer => PushState::Conflict,
PushFlowState::LocalIsNewer => PushState::Valid,
PushFlowState::Error => PushState::Error,
}
}
fn push(&self) -> io::Result<()> {
let obj = &self.obj;
let res = UploadFile::new()
.set_url(obj.path.to_str().unwrap())
.set_file(obj.path.clone())
.send();
match res {
Err(ApiError::IncorrectRequest(err)) => {
eprintln!("fatal: error pushing file {}: {}", obj.name, err.status());
std::process::exit(1);
},
Err(ApiError::RequestError(_)) => {
eprintln!("fatal: request error pushing file {}", obj.name);
std::process::exit(1);
}
_ => (),
}
// get lastmodified props to update it
let props = ReqProps::new()
.set_url(obj.path.to_str().unwrap())
.getlastmodified()
.send_req_single();
let prop = match props {
Ok(o) => o,
Err(ApiError::IncorrectRequest(err)) => {
eprintln!("fatal: {}", err.status());
std::process::exit(1);
},
Err(ApiError::EmptyError(_)) => {
eprintln!("Failed to get body");
std::process::exit(1);
}
Err(ApiError::RequestError(err)) => {
eprintln!("fatal: {}", err);
std::process::exit(1);
},
Err(ApiError::Unexpected(_)) => todo!()
};
let lastmodified = prop.lastmodified.unwrap().timestamp_millis();
// update blob
Blob::from_path(obj.path.clone()).update(&lastmodified.to_string())?;
Ok(())
}
// download file with .distant at the end
fn conflict(&self) {
todo!()
}
}

src/commands/push/moved.rs (new file)

@@ -0,0 +1,88 @@
use std::path::PathBuf;
use std::io;
use crate::services::api::ApiError;
use crate::services::api_call::ApiCall;
use crate::services::r#move::Move;
use crate::services::req_props::ReqProps;
use crate::commands::status::LocalObj;
use crate::commands::push::push_factory::{PushState, PushChange, PushFlowState};
use crate::store::object::blob::Blob;
use crate::utils::path::path_buf_to_string;
use crate::store::object::object::ObjMethods;
pub struct Moved {
pub obj: LocalObj,
}
impl PushChange for Moved {
fn can_push(&self, whitelist: &mut Option<PathBuf>) -> PushState {
match self.flow(&self.obj, whitelist.clone()) {
PushFlowState::Whitelisted => PushState::Done,
PushFlowState::NotOnRemote => PushState::Valid,
PushFlowState::RemoteIsNewer => PushState::Conflict,
PushFlowState::LocalIsNewer => PushState::Conflict,
PushFlowState::Error => PushState::Error,
}
}
fn push(&self) -> io::Result<()> {
let obj = &self.obj;
let res = Move::new()
.set_url_move(
&path_buf_to_string(obj.path_from.clone().unwrap()),
obj.path.to_str().unwrap())
.send();
match res {
Err(ApiError::IncorrectRequest(err)) => {
eprintln!("fatal: error moving file {}: {}", obj.name, err.status());
std::process::exit(1);
},
Err(ApiError::RequestError(_)) => {
eprintln!("fatal: request error moving file {}", obj.name);
std::process::exit(1);
}
_ => (),
}
// get lastmodified props to update it
let props = ReqProps::new()
.set_url(obj.path.to_str().unwrap())
.getlastmodified()
.send_req_single();
let prop = match props {
Ok(o) => o,
Err(ApiError::IncorrectRequest(err)) => {
eprintln!("fatal: {}", err.status());
std::process::exit(1);
},
Err(ApiError::EmptyError(_)) => {
eprintln!("Failed to get body");
std::process::exit(1);
}
Err(ApiError::RequestError(err)) => {
eprintln!("fatal: {}", err);
std::process::exit(1);
},
Err(ApiError::Unexpected(_)) => todo!()
};
let lastmodified = prop.lastmodified.unwrap().timestamp_millis();
// delete source and create destination blob
if let Err(err) = Blob::from_path(obj.path.clone()).create(&lastmodified.to_string(), false) {
eprintln!("err: creating ref of {}: {}", obj.name.clone(), err);
}
if let Err(err) = Blob::from_path(obj.path_from.clone().unwrap()).rm() {
eprintln!("err: removing ref of {}: {}", obj.name.clone(), err);
}
Ok(())
}
// download file with .distant at the end
fn conflict(&self) {
todo!()
}
}

src/commands/push/new.rs

@@ -1,10 +1,10 @@
 use std::path::PathBuf;
 use std::io;
 use crate::services::api::ApiError;
+use crate::services::api_call::ApiCall;
 use crate::services::req_props::ReqProps;
 use crate::services::upload_file::UploadFile;
-use crate::store::index;
-use crate::store::object::blob;
+use crate::store::object::blob::Blob;
 use crate::commands::status::LocalObj;
 use crate::commands::push::push_factory::{PushState, PushChange, PushFlowState};
@@ -28,15 +28,16 @@ impl PushChange for New {
         let res = UploadFile::new()
             .set_url(obj.path.to_str().unwrap())
             .set_file(obj.path.clone())
-            .send_with_err();
+            .send();

         match res {
             Err(ApiError::IncorrectRequest(err)) => {
-                eprintln!("fatal: error pushing file {}: {}", obj.name, err.status());
+                dbg!(&err);
+                eprintln!("fatal: error pushing file '{}': {}", obj.name, err.status());
                 std::process::exit(1);
             },
             Err(ApiError::RequestError(_)) => {
-                eprintln!("fatal: request error pushing file {}", obj.name);
+                eprintln!("fatal: request error pushing file '{}'", obj.name);
                 std::process::exit(1);
             }
             _ => (),
@@ -67,11 +68,8 @@ impl PushChange for New {
         let lastmodified = prop.lastmodified.unwrap().timestamp_millis();

-        // update blob
-        blob::add(obj.path.clone(), &lastmodified.to_string(), true)?;
+        // create new blob
+        Blob::from_path(obj.path.clone()).create(&lastmodified.to_string(), false)?;

-        // remove index
-        index::rm_line(obj.path.to_str().unwrap())?;

         Ok(())
     }

src/commands/push/new_dir.rs

@@ -1,10 +1,11 @@
 use std::path::PathBuf;
 use std::io;
 use crate::services::api::ApiError;
+use crate::services::api_call::ApiCall;
 use crate::services::req_props::ReqProps;
 use crate::services::create_folder::CreateFolder;
 use crate::store::index;
-use crate::store::object::tree;
+use crate::store::object::tree::Tree;
 use crate::commands::status::LocalObj;
 use crate::commands::push::push_factory::{PushState, PushChange, PushFlowState};
@@ -33,7 +34,7 @@ impl PushChange for NewDir {
         let obj = &self.obj;
         let res = CreateFolder::new()
             .set_url(obj.path.to_str().unwrap())
-            .send_with_err();
+            .send();

         match res {
             Err(ApiError::IncorrectRequest(err)) => {
@@ -74,7 +75,7 @@ impl PushChange for NewDir {
         let lastmodified = prop.lastmodified.unwrap().timestamp_millis();

         // update tree
-        tree::add(obj.path.clone(), &lastmodified.to_string(), true)?;
+        Tree::from_path(obj.path.clone()).create(&lastmodified.to_string(), true)?;

         // remove index
         index::rm_line(obj.path.to_str().unwrap())?;

src/commands/push/push_factory.rs

@@ -2,12 +2,16 @@ use std::path::PathBuf;
 use std::io;
 use crate::commands::status::{State, LocalObj};
 use crate::services::api::ApiError;
-use crate::store::object;
+use crate::services::api_call::ApiCall;
 use crate::services::req_props::ReqProps;
 use crate::commands::push::new::New;
 use crate::commands::push::new_dir::NewDir;
 use crate::commands::push::rm_dir::RmDir;
 use crate::commands::push::deleted::Deleted;
+use crate::commands::push::modified::Modified;
+use crate::commands::push::moved::Moved;
+use crate::commands::push::copied::Copied;
+use crate::store::object::blob::Blob;

 #[derive(Debug)]
 pub enum PushState {
@@ -38,6 +42,7 @@ pub trait PushChange {
     }

     fn flow(&self, obj: &LocalObj, whitelist: Option<PathBuf>) -> PushFlowState {
+        // todo moved: from same file, destination doesn't exist but parent do
         if self.is_whitelisted(obj, whitelist) {
             return PushFlowState::Whitelisted;
         }
@@ -54,6 +59,7 @@ pub trait PushChange {
                 if err.status() == 404 {
                     Ok(None)
                 } else {
+                    eprintln!("err: when requesting properties of {} ({})", obj.name, err.status());
                     Err(())
                 }
             },
@@ -67,7 +73,16 @@ pub trait PushChange {
         };

         // check if remote is newest
-        let last_sync_ts = object::get_timestamp(obj.path.to_str().unwrap().to_string()).unwrap();
+        let last_sync_ts = {
+            if obj.otype == String::from("blob") {
+                Blob::from_path(obj.path.clone())
+                    .saved_remote_ts()
+                    .parse::<i64>().unwrap()
+            } else {
+                // todo timestamp on tree
+                99999999999999
+            }
+        };
         let remote_ts = obj_data.lastmodified.unwrap().timestamp_millis();

         if last_sync_ts < remote_ts {
@@ -84,9 +99,10 @@ impl PushFactory {
     pub fn new(&self, obj: LocalObj) -> Box<dyn PushChange> {
         match obj.state {
             State::New => Box::new(New { obj }),
-            State::Renamed => todo!(),
-            State::Modified => todo!(),
+            State::Modified => Box::new(Modified { obj }),
             State::Deleted => Box::new(Deleted { obj }),
+            State::Moved => Box::new(Moved { obj }),
+            State::Copied => Box::new(Copied { obj }),
             State::Default => todo!(),
         }
     }
@@ -94,10 +110,10 @@ impl PushFactory {
     pub fn new_dir(&self, obj: LocalObj) -> Box<dyn PushChange> {
         match obj.state {
             State::New => Box::new(NewDir { obj }),
-            State::Renamed => todo!(),
             State::Modified => todo!(),
             State::Deleted => Box::new(RmDir { obj }),
             State::Default => todo!(),
+            _ => todo!(),
         }
     }
 }

src/commands/push/rm_dir.rs

@@ -1,11 +1,13 @@
 use std::path::PathBuf;
 use std::io;
 use crate::services::api::ApiError;
+use crate::services::api_call::ApiCall;
 use crate::services::delete_path::DeletePath;
 use crate::store::index;
-use crate::store::object::tree;
+use crate::store::object::tree::Tree;
 use crate::commands::status::LocalObj;
 use crate::commands::push::push_factory::{PushState, PushChange, PushFlowState};
+use crate::store::object::object::ObjMethods;

 pub struct RmDir {
     pub obj: LocalObj
@@ -32,7 +34,7 @@ impl PushChange for RmDir {
         let obj = &self.obj;
         let res = DeletePath::new()
             .set_url(obj.path.to_str().unwrap())
-            .send_with_err();
+            .send();

         match res {
             Err(ApiError::IncorrectRequest(err)) => {
@@ -48,7 +50,7 @@ impl PushChange for RmDir {
         // update tree
         // todo update date
-        tree::rm(obj.path.clone())?;
+        Tree::from_path(obj.path.clone()).rm()?;

         // remove index
         index::rm_line(obj.path.to_str().unwrap())?;

src/commands/remote.rs (new file, 27 lines)

@@ -0,0 +1,27 @@
use crate::commands::config;
use super::config::get_all_remote;
pub struct RemoteArgs {
pub name: String,
pub url: String,
}
pub fn remote_add(args: RemoteArgs) {
let _ = config::add_remote(&args.name, &args.url);
}
pub fn remote_list(verbose: bool) {
let remotes = get_all_remote();
for remote in remotes {
if verbose
{
println!("{} {}", remote.0, remote.1);
}
else
{
println!("{}", remote.0);
}
}
}

src/commands/remote_diff.rs

@@ -1,60 +1,39 @@
use crate::services::api::ApiError; use crate::services::api::ApiError;
use crate::services::api_call::ApiCall;
use crate::services::req_props::{ReqProps, ObjProps}; use crate::services::req_props::{ReqProps, ObjProps};
use crate::store::object::{Object, self}; use crate::store::object::Object;
use crate::utils::api::{ApiProps, get_api_props}; use crate::utils::api::{ApiProps, get_api_props};
use crate::utils::path; use crate::utils::path;
use crate::utils::remote::{enumerate_remote, EnumerateOptions}; use crate::utils::remote::{enumerate_remote, EnumerateOptions};
use std::fs::canonicalize;
use std::path::PathBuf; use std::path::PathBuf;
pub struct RemoteDiffArgs { // todo deletion
pub path: Option<String>, pub fn remote_diff() {
let relative_p = path::current()
.unwrap()
.strip_prefix(path::repo_root()).unwrap().to_path_buf();
let (folders, files) = get_diff(relative_p);
for folder in folders {
println!("should pull {}", folder.clone().relative_s.unwrap());
}
for file in files {
println!("should pull {}", file.clone().relative_s.unwrap());
}
} }
pub fn remote_diff(args: RemoteDiffArgs) { pub fn get_diff(path: PathBuf) -> (Vec<ObjProps>, Vec<ObjProps>) {
let path = {
if let Some(path) = args.path {
let mut cur = path::current().unwrap();
cur.push(path);
let canonic = canonicalize(cur).ok().unwrap();
dbg!(&canonic);
dbg!(path::repo_root());
let ok = canonic.strip_prefix(path::repo_root());
dbg!(&ok);
// todo let depth = "6"; // todo opti
PathBuf::from("/")
} else {
PathBuf::from("/")
}
};
let mut folders: Vec<ObjProps> = vec![ObjProps {
contentlength: None,
href: None,
lastmodified: None,
relative_s: Some(path.to_str().unwrap().to_owned()),
}];
let mut files: Vec<ObjProps> = vec![];
let depth = "2"; // todo
// todo origin
let api_props = get_api_props(); let api_props = get_api_props();
let (folders, files) = enumerate_remote(
enumerate_remote(
|a| req(&api_props, depth, a), |a| req(&api_props, depth, a),
&should_skip, Some(&should_skip),
EnumerateOptions { EnumerateOptions {
depth: Some(depth.to_owned()), depth: Some(depth.to_owned()),
relative_s: Some(path.to_str().unwrap().to_owned()) relative_s: Some(path.to_str().unwrap().to_owned())
}); })
for folder in folders {
println!("should pull {}", folder.clone().relative_s.unwrap());
}
for file in files {
println!("should pull {}", file.clone().relative_s.unwrap());
}
} }
fn should_skip(obj: ObjProps) -> bool { fn should_skip(obj: ObjProps) -> bool {
@@ -76,6 +55,7 @@ fn req(api_props: &ApiProps, depth: &str, relative_s: &str) -> Result<Vec<ObjPro
.set_request(relative_s, &api_props) .set_request(relative_s, &api_props)
.set_depth(depth) .set_depth(depth)
.gethref() .gethref()
.getcontentlength() // todo opti
.getlastmodified() .getlastmodified()
.send_req_multiple() .send_req_multiple()
} }

src/commands/status.rs

@@ -1,14 +1,19 @@
use std::fs::File;
use std::path::PathBuf; use std::path::PathBuf;
use std::io::{self, Lines, BufReader};
use std::collections::HashMap; use std::collections::HashMap;
use crypto::digest::Digest; use crypto::digest::Digest;
use crypto::sha1::Sha1; use crypto::sha1::Sha1;
use colored::Colorize; use colored::Colorize;
use crate::utils::path; use crate::utils::path::{self, path_buf_to_string};
use crate::utils::read::{read_folder, read_lines}; use crate::store::object::blob::Blob;
use crate::store::object::tree; use crate::store::object::object::Obj;
use crate::store::object::tree::Tree;
use crate::utils::read::read_folder;
use crate::store::index; use crate::store::index;
use crate::store::object::object::ObjMethods;
pub struct StatusArgs {
pub nostyle: bool,
}
#[derive(PartialEq)] #[derive(PartialEq)]
enum RemoveSide { enum RemoveSide {
@@ -21,27 +26,128 @@ enum RemoveSide {
pub enum State { pub enum State {
Default, Default,
New, New,
Renamed, Moved,
Copied,
Modified, Modified,
Deleted, Deleted,
} }
// todo: relative path, filename, get modified // todo: relative path, filename
// todo: not catch added empty folder // todo: not catch added empty folder
pub fn status() { pub fn status(args: StatusArgs) {
let (mut new_objs_hashes, mut del_objs_hashes) = get_diff(); let mut all_hashes = get_all_objs_hashes();
// get copy, modified let staged_objs = get_staged(&mut all_hashes);
let staged_objs = get_staged(&mut new_objs_hashes, &mut del_objs_hashes);
let mut objs: Vec<LocalObj> = del_objs_hashes.iter().map(|x| { let objs: Vec<LocalObj> = all_hashes.iter().map(|x| {
x.1.clone() x.1.clone()
}).collect(); }).collect();
for (_, elt) in new_objs_hashes { if args.nostyle
objs.push(elt.clone()); {
print_status_nostyle(staged_objs, objs);
}
else
{
print_status(staged_objs, objs);
}
}
pub fn get_all_objs() -> Vec<LocalObj> {
let all_hashes = get_all_objs_hashes();
all_hashes.iter().map(|x| {
x.1.clone()
}).collect()
}
fn get_all_objs_hashes() -> HashMap<String, LocalObj> {
let (mut new_objs_hashes, mut del_objs_hashes, objs_modified) = get_diff();
let move_copy_hashes = get_move_copy_objs(&mut new_objs_hashes, &mut del_objs_hashes);
let mut hasher = Sha1::new();
let mut modified_objs_hashes = HashMap::new();
for obj in objs_modified {
hasher.input_str(&obj);
let hash = hasher.result_str();
hasher.reset();
modified_objs_hashes.insert(hash, LocalObj {
// todo otype
otype: get_otype(PathBuf::from(obj.clone())),
name: obj.clone().to_string(),
path: PathBuf::from(obj),
path_from: None,
state: State::Modified
});
} }
print_status(staged_objs, objs); let mut all_hashes = HashMap::new();
all_hashes.extend(move_copy_hashes);
all_hashes.extend(del_objs_hashes);
all_hashes.extend(new_objs_hashes);
all_hashes.extend(modified_objs_hashes);
all_hashes
}
fn should_retain(hasher: &mut Sha1, key: String, obj: LocalObj, move_copy_hashes: &mut HashMap<String, LocalObj>, del_objs_h: &mut HashMap<String, LocalObj>) -> bool {
// todo prevent copied or moved if file empty
// todo deal with directories
if obj.path.is_dir()
{
return true;
}
let mut blob = Blob::from_path(obj.path.clone());
let mut flag = true;
let identical_blobs = blob.get_all_identical_blobs();
// try to find an identical blob among the deleted files (=moved)
for obj_s in identical_blobs.clone() {
if !flag { break; }
hasher.input_str(&obj_s);
let hash = hasher.result_str();
hasher.reset();
if del_objs_h.contains_key(&hash) {
let mut new_move = obj.clone();
let deleted = del_objs_h.get(&hash).unwrap().clone();
del_objs_h.remove(&hash);
new_move.path_from = Some(deleted.path);
new_move.state = State::Moved;
move_copy_hashes.insert(key.clone(), new_move.clone());
flag = false;
}
}
// if no moved file was found, try to find an existing file with the same content (=copy)
if flag {
if let Some(rel_s) = identical_blobs.first() {
let root = path::repo_root();
let rel_p = PathBuf::from(rel_s.clone());
let abs_p = root.join(rel_p.clone());
if abs_p.exists() {
let mut new_copy = obj.clone();
new_copy.path_from = Some(rel_p);
new_copy.state = State::Copied;
move_copy_hashes.insert(key, new_copy.clone());
flag = false;
}
}
}
flag
}
fn get_move_copy_objs(new_objs_h: &mut HashMap<String, LocalObj>, del_objs_h: &mut HashMap<String, LocalObj>) -> HashMap<String, LocalObj> {
let mut hasher = Sha1::new();
let mut move_copy_hashes = HashMap::new();
new_objs_h.retain(|key, obj| {
should_retain(&mut hasher, key.to_owned(), obj.clone(), &mut move_copy_hashes, del_objs_h)
});
move_copy_hashes
} }
#[derive(Debug, Clone)] #[derive(Debug, Clone)]
@@ -49,19 +155,16 @@ pub struct LocalObj {
pub otype: String, pub otype: String,
pub name: String, pub name: String,
pub path: PathBuf, pub path: PathBuf,
pub path_from: Option<PathBuf>, // origin path when state is move or copy
pub state: State, pub state: State,
} }
pub fn get_all_staged() -> Vec<LocalObj> { pub fn get_all_staged() -> Vec<LocalObj> {
let (mut new_objs_hashes, mut del_objs_hashes) = get_diff(); let mut all_hashes = get_all_objs_hashes();
// get copy, modified get_staged(&mut all_hashes)
let staged_objs = get_staged(&mut new_objs_hashes, &mut del_objs_hashes);
staged_objs.clone()
// todo opti getting staged and then finding differences ?
} }
fn get_staged(new_objs_h: &mut HashMap<String, LocalObj>, del_objs_h: &mut HashMap<String, LocalObj>) -> Vec<LocalObj> { fn get_staged(hashes: &mut HashMap<String, LocalObj>) -> Vec<LocalObj> {
let mut lines: Vec<String> = vec![]; let mut lines: Vec<String> = vec![];
if let Ok(entries) = index::read_line() { if let Ok(entries) = index::read_line() {
@@ -70,7 +173,6 @@ fn get_staged(new_objs_h: &mut HashMap<String, LocalObj>, del_objs_h: &mut HashM
} }
} }
let mut hasher = Sha1::new(); let mut hasher = Sha1::new();
let mut staged_objs: Vec<LocalObj> = vec![]; let mut staged_objs: Vec<LocalObj> = vec![];
@@ -82,12 +184,9 @@ fn get_staged(new_objs_h: &mut HashMap<String, LocalObj>, del_objs_h: &mut HashM
hasher.reset(); hasher.reset();
// find it on the list of hashes // find it on the list of hashes
if new_objs_h.contains_key(&hash) { if hashes.contains_key(&hash) {
staged_objs.push(new_objs_h.get(&hash).unwrap().clone()); staged_objs.push(hashes.get(&hash).unwrap().clone());
new_objs_h.remove(&hash); hashes.remove(&hash);
} else if del_objs_h.contains_key(&hash) {
staged_objs.push(del_objs_h.get(&hash).unwrap().clone());
del_objs_h.remove(&hash);
}else { }else {
let mut t_path = ref_p.clone(); let mut t_path = ref_p.clone();
let relative_p = PathBuf::from(obj.clone()); let relative_p = PathBuf::from(obj.clone());
@@ -96,6 +195,7 @@ fn get_staged(new_objs_h: &mut HashMap<String, LocalObj>, del_objs_h: &mut HashM
otype: get_otype(t_path.clone()), otype: get_otype(t_path.clone()),
name: obj.to_string(), name: obj.to_string(),
path: relative_p.clone(), path: relative_p.clone(),
path_from: None,
state: { state: {
if t_path.exists() { if t_path.exists() {
State::New State::New
@@ -110,19 +210,27 @@ fn get_staged(new_objs_h: &mut HashMap<String, LocalObj>, del_objs_h: &mut HashM
staged_objs staged_objs
} }
fn get_diff() -> (HashMap<String, LocalObj>, HashMap<String, LocalObj>) { fn read_tree_to_hashmap(tree: &mut Tree, hashes: &mut HashMap<String, LocalObj>, path: PathBuf) {
while let Some(child) = tree.next() {
hashes.insert(String::from(child.get_hash_path()), child.get_local_obj());
};
}
fn get_diff() -> (HashMap<String, LocalObj>, HashMap<String, LocalObj>, Vec<String>) {
let mut hashes = HashMap::new(); let mut hashes = HashMap::new();
let mut objs: Vec<String> = vec![]; let mut objs: Vec<String> = vec![];
let mut objs_modified: Vec<String> = vec![];
let root = path::repo_root(); let root = path::repo_root();
let nextsync_path = path::nextsync();
let current_p = path::current().unwrap(); let current_p = path::current().unwrap();
// todo use repo_root instead of current
let dist_path = current_p.strip_prefix(root.clone()).unwrap().to_path_buf(); let dist_path = current_p.strip_prefix(root.clone()).unwrap().to_path_buf();
if let Ok(lines) = read_head(nextsync_path.clone()) { read_tree_to_hashmap(&mut Tree::from_head(), &mut hashes, dist_path.clone());
add_to_hashmap(lines, &mut hashes, dist_path.clone()); //if let Ok(lines) = read_lines(head::path()) {
} // add_to_hashmap(lines, &mut hashes, dist_path.clone());
//}
if let Ok(entries) = read_folder(root.clone()) { if let Ok(entries) = read_folder(root.clone()) {
add_to_vec(entries, &mut objs, root.clone()); add_to_vec(entries, &mut objs, root.clone());
@@ -137,18 +245,25 @@ fn get_diff() -> (HashMap<String, LocalObj>, HashMap<String, LocalObj>) {
let obj_path = root.clone().join(cur_path.clone()); let obj_path = root.clone().join(cur_path.clone());
if obj_path.is_dir() { if obj_path.is_dir() {
if let Some((_, lines)) = tree::read(cur_obj.clone()) { // read virtual tree
add_to_hashmap(lines, &mut hashes, cur_path.clone()); read_tree_to_hashmap(&mut Tree::from_path(cur_obj.clone()), &mut hashes, dist_path.clone());
} //let mut tree = Tree::from_path(cur_obj.clone());
//if let Some(lines) = tree.get_children() {
//add_to_hashmap(lines, &mut hashes, cur_path.clone());
//}
// read physical tree
if let Ok(entries) = read_folder(obj_path.clone()) { if let Ok(entries) = read_folder(obj_path.clone()) {
add_to_vec(entries, &mut objs, root.clone()); add_to_vec(entries, &mut objs, root.clone());
} }
// remove duplicate
let diff = remove_duplicate(&mut hashes, &mut objs, RemoveSide::Both); let diff = remove_duplicate(&mut hashes, &mut objs, RemoveSide::Both);
obj_to_analyse.append(&mut diff.clone()); obj_to_analyse.append(&mut diff.clone());
} else { } else {
// todo look for change if Blob::from_path(cur_path).has_changes() {
objs_modified.push(cur_obj);
}
} }
} }
@@ -164,17 +279,20 @@ fn get_diff() -> (HashMap<String, LocalObj>, HashMap<String, LocalObj>) {
hasher.input_str(&obj); hasher.input_str(&obj);
let hash = hasher.result_str(); let hash = hasher.result_str();
hasher.reset(); hasher.reset();
let p = PathBuf::from(obj.to_string()); let p = PathBuf::from(obj.to_string());
let abs_p = path::repo_root().join(p.clone());
// todo name // todo name
new_objs_hashes.insert(String::from(hash), LocalObj { new_objs_hashes.insert(String::from(hash), LocalObj {
otype: get_otype(p.clone()), otype: get_otype(abs_p),
name: obj.to_string(), name: obj.to_string(),
path: p, path: p,
path_from: None,
state: State::New state: State::New
}); });
} }
(new_objs_hashes, hashes) (new_objs_hashes, hashes, objs_modified)
} }
fn get_otype(p: PathBuf) -> String { fn get_otype(p: PathBuf) -> String {
@@ -185,27 +303,28 @@ fn get_otype(p: PathBuf) -> String {
} }
} }
fn add_to_hashmap(lines: Lines<BufReader<File>>, hashes: &mut HashMap<String, LocalObj>, path: PathBuf) { //fn add_to_hashmap(lines: Lines<BufReader<File>>, hashes: &mut HashMap<String, LocalObj>, path: PathBuf) {
for line in lines { // for line in lines {
if let Ok(ip) = line { // if let Ok(ip) = line {
if ip.clone().len() > 5 { // if ip.clone().len() > 5 {
let (ftype, hash, name) = tree::parse_line(ip); // let (ftype, hash, name) = tree::parse_line(ip);
let mut p = path.clone(); // let mut p = path.clone();
p.push(name.clone()); // p.push(name.clone());
hashes.insert(String::from(hash), LocalObj{ // hashes.insert(String::from(hash), LocalObj{
otype: String::from(ftype), // otype: String::from(ftype),
name: String::from(name), // name: String::from(name),
path: p, // path: p,
state: State::Default, // path_from: None,
}); // state: State::Default,
} // });
} // }
} // }
} // }
//}
fn add_to_vec(entries: Vec<PathBuf>, objects: &mut Vec<String>, root: PathBuf) { fn add_to_vec(entries: Vec<PathBuf>, objects: &mut Vec<String>, root: PathBuf) {
for entry in entries { for entry in entries {
if !is_nextsync_config(entry.clone()) { if !path::is_nextsync_config(entry.clone()) {
let object_path = entry.strip_prefix(root.clone()).unwrap(); let object_path = entry.strip_prefix(root.clone()).unwrap();
objects.push(String::from(object_path.to_str().unwrap())); objects.push(String::from(object_path.to_str().unwrap()));
} }
@@ -231,7 +350,7 @@ fn print_status(staged_objs: Vec<LocalObj>, objs: Vec<LocalObj>) {
// not staged files // not staged files
if objs.len() != 0 { if objs.len() != 0 {
println!("Changes not staged for push:"); println!("Changes not staged for push:");
println!(" (Use\"nextsync add <file>...\" to update what will be pushed)"); println!(" (Use \"nextsync add <file>...\" to update what will be pushed)");
for object in objs { for object in objs {
print_object(object); print_object(object);
@@ -239,27 +358,52 @@ fn print_status(staged_objs: Vec<LocalObj>, objs: Vec<LocalObj>) {
} }
} }
fn print_status_nostyle(staged_objs: Vec<LocalObj>, objs: Vec<LocalObj>) {
// todo sort
if staged_objs.len() == 0 && objs.len() == 0 {
return;
}
for obj in staged_objs {
if obj.state == State::Deleted {
println!("deleted: {}", obj.name);
} else if obj.state == State::New {
println!("new: {}", obj.name);
} else if obj.state == State::Modified {
println!("modified: {}", obj.name);
} else if obj.state == State::Moved {
println!("moved: {} => {}", path_buf_to_string(obj.path_from.unwrap()), path_buf_to_string(obj.path));
} else if obj.state == State::Copied {
println!("copied: {} => {}", path_buf_to_string(obj.path_from.unwrap()), path_buf_to_string(obj.path));
}
}
}
fn print_object(obj: LocalObj) { fn print_object(obj: LocalObj) {
if obj.state == State::Deleted { if obj.state == State::Deleted {
println!(" {} {}", String::from("deleted:").red(), obj.name.red()); println!(" {} {}", String::from("deleted:").red(), obj.name.red());
} else if obj.state == State::Renamed {
println!(" {} {}", String::from("renamed:").red(), obj.name.red());
} else if obj.state == State::New { } else if obj.state == State::New {
println!(" {} {}", String::from("new:").red(), obj.name.red()); println!(" {} {}", String::from("new:").red(), obj.name.red());
} else if obj.state == State::Modified { } else if obj.state == State::Modified {
println!(" {} {}", String::from("modified:").red(), obj.name.red()); println!(" {} {}", String::from("modified:").red(), obj.name.red());
} else if obj.state == State::Moved {
println!(" {} {} => {}", String::from("moved:").red(), path_buf_to_string(obj.path_from.unwrap()).red(), path_buf_to_string(obj.path).red());
} else if obj.state == State::Copied {
println!(" {} {} => {}", String::from("copied:").red(), path_buf_to_string(obj.path_from.unwrap()), path_buf_to_string(obj.path).red());
} }
} }
fn print_staged_object(obj: LocalObj) { fn print_staged_object(obj: LocalObj) {
if obj.state == State::Deleted { if obj.state == State::Deleted {
println!(" {} {}", String::from("deleted:").green(), obj.name.green()); println!(" {} {}", String::from("deleted:").green(), obj.name.green());
} else if obj.state == State::Renamed {
println!(" {} {}", String::from("renamed:").green(), obj.name.green());
} else if obj.state == State::New { } else if obj.state == State::New {
println!(" {} {}", String::from("new:").green(), obj.name.green()); println!(" {} {}", String::from("new:").green(), obj.name.green());
} else if obj.state == State::Modified { } else if obj.state == State::Modified {
println!(" {} {}", String::from("modified:").green(), obj.name.green()); println!(" {} {}", String::from("modified:").green(), obj.name.green());
} else if obj.state == State::Moved {
println!(" {} {} => {}", String::from("moved:").green(), path_buf_to_string(obj.path_from.unwrap()).green(), path_buf_to_string(obj.path).green());
} else if obj.state == State::Copied {
println!(" {} {} => {}", String::from("copied:"), path_buf_to_string(obj.path_from.unwrap()).green(), path_buf_to_string(obj.path).green());
} }
} }
@@ -292,15 +436,6 @@ fn remove_duplicate(hashes: &mut HashMap<String, LocalObj>, objects: &mut Vec<St
duplicate duplicate
} }
fn is_nextsync_config(path: PathBuf) -> bool {
path.ends_with(".nextsync")
}
fn read_head(mut path: PathBuf) -> io::Result<io::Lines<io::BufReader<File>>> {
path.push("HEAD");
read_lines(path)
}
#[cfg(test)] #[cfg(test)]
mod tests { mod tests {
use super::*; use super::*;
@@ -325,6 +460,7 @@ mod tests {
otype: String::from("tree"), otype: String::from("tree"),
name: String::from("test"), name: String::from("test"),
path: PathBuf::from(""), path: PathBuf::from(""),
path_from: None,
state: State::Default, state: State::Default,
}; };
hashes.insert(hash1.clone(), default_obj.clone()); hashes.insert(hash1.clone(), default_obj.clone());
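The status code above keys every LocalObj by the SHA-1 of its relative path and reuses a single rust-crypto hasher, resetting it between inputs. A minimal standalone sketch of that hashing pattern (assuming the `rust-crypto` crate exposed as `crypto`, as in the imports above):

use crypto::digest::Digest;
use crypto::sha1::Sha1;

// Hash two relative paths with one reusable hasher, as status() does.
fn main() {
    let mut hasher = Sha1::new();
    for path in ["dir/file.txt", "dir/other.txt"] {
        hasher.input_str(path);          // feed the relative path
        let key = hasher.result_str();   // 40-character hex digest used as the HashMap key
        hasher.reset();                  // reset before hashing the next path
        println!("{}  {}", key, path);
    }
}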

View File

@@ -1,9 +1,6 @@
use clap::{App, Arg, SubCommand}; use clap::Command;
use textwrap::{fill, Options};
use crate::commands::add::AddArgs; mod subcommands;
use crate::commands::remote_diff::RemoteDiffArgs;
use crate::commands::clone::{self, CloneArgs};
mod commands; mod commands;
mod utils; mod utils;
@@ -12,176 +9,41 @@ mod global;
mod store; mod store;
fn main() { fn main() {
let matches = App::new("Nextsync") let app = Command::new("Nextsync")
.version("1.0") .version("1.0")
.author("grimhilt") .author("grimhilt")
.about("A git-line command line tool to interact with nextcloud") .about("A git-line command line tool to interact with nextcloud")
.setting(clap::AppSettings::SubcommandRequiredElseHelp) .subcommands([
.subcommand( subcommands::clone::create(),
SubCommand::with_name("clone") subcommands::init::create(),
.arg( subcommands::status::create(),
Arg::with_name("remote") subcommands::add::create(),
.required(true) subcommands::push::create(),
.takes_value(true) subcommands::reset::create(),
.value_name("REMOTE") subcommands::remote::create(),
.help(&fill( subcommands::config::create(),
"The repository to clone from. See the NEXTSYNC URLS section below for more information on specifying repositories.", subcommands::remote_diff::create(),
Options::new(70).width, subcommands::pull::create(),
)) subcommands::credential::create(),
) ]);
.arg( // .setting(clap::AppSettings::SubcommandRequiredElseHelp);
Arg::with_name("depth")
.short("d")
.long("depth")
.required(false)
.takes_value(true)
.help(&fill(
&format!("Depth of the recursive fetch of object properties. This value should be lower when there are a lot of files per directory and higher when there are a lot of subdirectories with fewer files. (Default: {})", clone::DEPTH),
Options::new(70).width,
))
)
.arg(
Arg::with_name("directory")
.required(false)
.takes_value(true)
.value_name("DIRECTORY")
)
.about("Clone a repository into a new directory")
.after_help("NEXTSYNC URLS\nThe following syntaxes may be used:\n\t- user@host.xz/path/to/repo\n\t- http[s]://host.xz/apps/files/?dir=/path/to/repo&fileid=111111\n\t- [http[s]://]host.xz/remote.php/dav/files/user/path/to/repo\n")
)
.subcommand(
SubCommand::with_name("init")
.arg(
Arg::with_name("directory")
.required(false)
.takes_value(true)
.value_name("DIRECTORY")
)
.about("Create an empty Nextsync repository") // Create an empty Git repository or reinitialize an existing one
)
.subcommand(
SubCommand::with_name("status")
.arg(
Arg::with_name("directory")
.required(false)
.takes_value(true)
.value_name("DIRECTORY")
)
.about("Show the working tree status")
)
.subcommand(
SubCommand::with_name("reset")
.about("Clear the index")
)
.subcommand(
SubCommand::with_name("push")
.about("Push changes on nextcloud")
)
.subcommand(
SubCommand::with_name("add")
.arg(
Arg::with_name("files")
.required(true)
.multiple(true)
.takes_value(true)
.value_name("FILE")
.help("Files to add"),
)
.arg(
Arg::with_name("force")
.short("f")
.long("force")
.help("Allow adding otherwise ignored files."),
)
.about("Add changes to the index")
)
.subcommand(
SubCommand::with_name("config")
.arg(
Arg::with_name("variable")
.required(true)
.takes_value(true)
.value_name("VARIABLE")
)
.arg(
Arg::with_name("value")
.required(true)
.takes_value(true)
.value_name("VALUE")
)
)
.subcommand(
SubCommand::with_name("remote-diff")
.arg(
Arg::with_name("path")
.required(false)
.takes_value(true)
.value_name("PATH")
.help("The path to pull."),
)
.about("Fetch new and modifed files from the nextcloud server.")
)
.subcommand(
SubCommand::with_name("test")
)
.get_matches();
if let Some(matches) = matches.subcommand_matches("init") { let matches = app.get_matches();
if let Some(val) = matches.values_of("directory") {
global::global::set_dir_path(String::from(val.clone().next().unwrap()));
}
commands::init::init();
} else if let Some(matches) = matches.subcommand_matches("status") {
if let Some(val) = matches.values_of("directory") {
global::global::set_dir_path(String::from(val.clone().next().unwrap()));
}
commands::status::status();
} else if let Some(matches) = matches.subcommand_matches("add") {
if let Some(files) = matches.values_of("files") {
commands::add::add(AddArgs {
files,
force: matches.is_present("force"),
});
}
} else if let Some(_) = matches.subcommand_matches("reset") {
commands::reset::reset();
} else if let Some(matches) = matches.subcommand_matches("clone") {
if let Some(val) = matches.values_of("directory") {
global::global::set_dir_path(String::from(val.clone().next().unwrap()));
}
if let Some(remote) = matches.values_of("remote") {
commands::clone::clone(CloneArgs {
remote,
depth: matches.values_of("depth").map(
|mut val| val.next().unwrap().to_owned()
),
});
}
} else if let Some(_matches) = matches.subcommand_matches("push") {
commands::push::push();
} else if let Some(matches) = matches.subcommand_matches("config") {
if let Some(mut var) = matches.values_of("variable") {
if let Some(mut val) = matches.values_of("value") {
if commands::config::set(var.next().unwrap(), val.next().unwrap()).is_err() {
eprintln!("fatal: cannot save the value");
}
}
}
} else if let Some(matches) = matches.subcommand_matches("remote-diff") {
commands::remote_diff::remote_diff(RemoteDiffArgs {
path: {
if let Some(mut path) = matches.values_of("path") {
match path.next() {
Some(p) => Some(String::from(p)),
None => None,
}
} else {
None
}
},
});
} else if let Some(_) = matches.subcommand_matches("test") {
} match matches.subcommand() {
Some(("init", args)) => subcommands::init::handler(args),
Some(("status", args)) => subcommands::status::handler(args),
Some(("add", args)) => subcommands::add::handler(args),
Some(("reset", _)) => commands::reset::reset(),
Some(("clone", args)) => subcommands::clone::handler(args),
Some(("push", _)) => commands::push::push(),
Some(("config", args)) => subcommands::config::handler(args),
Some(("remote-diff", args)) => subcommands::remote_diff::handler(args),
Some(("pull", args)) => subcommands::pull::handler(args),
Some(("remote", args)) => subcommands::remote::handler(args),
Some(("credential", args)) => subcommands::credential::handler(args),
Some((_, _)) => {},
None => {},
};
} }
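The rewritten main builds one clap Command, registers every subcommand, and dispatches on matches.subcommand(). A reduced, self-contained sketch of that dispatch pattern (assuming clap 3/4's builder API; the println! bodies are placeholders for the real handlers in the new subcommands module, and subcommand_required/arg_required_else_help is one builder-method way to get the behaviour of the commented-out AppSettings line):

use clap::Command;

fn main() {
    let matches = Command::new("nextsync")
        .subcommand_required(true)
        .arg_required_else_help(true)
        .subcommands([
            Command::new("init").about("Create an empty repository"),
            Command::new("status").about("Show the working tree status"),
        ])
        .get_matches();

    // Dispatch on the selected subcommand, as main() does above.
    match matches.subcommand() {
        Some(("init", _args)) => println!("init handler"),
        Some(("status", _args)) => println!("status handler"),
        _ => {}
    }
}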

View File

@@ -5,3 +5,10 @@ pub mod req_props;
pub mod upload_file; pub mod upload_file;
pub mod delete_path; pub mod delete_path;
pub mod downloader; pub mod downloader;
pub mod r#move;
pub mod r#copy;
pub mod login;
pub mod request_manager;
pub mod api_call;
//pub mod auth;
//pub mod bulk_upload;

View File

@@ -1,13 +1,19 @@
use std::env; use std::error::Error;
use dotenv::dotenv; use lazy_static::lazy_static;
use std::sync::Mutex;
use reqwest::Client; use reqwest::Client;
use reqwest::RequestBuilder; use reqwest::RequestBuilder;
use reqwest::multipart::Form; use reqwest::multipart::Form;
use reqwest::{Response, Error, Method}; use reqwest::{Response, Method};
use reqwest::header::{HeaderValue, CONTENT_TYPE, HeaderMap, IntoHeaderName}; use reqwest::header::{HeaderValue, CONTENT_TYPE, HeaderMap, IntoHeaderName};
use crate::utils::api::ApiProps; use crate::utils::api::ApiProps;
use crate::commands::config; use crate::commands::config;
use crate::commands::clone::get_url_props; use crate::commands::clone::get_url_props;
use crate::services::request_manager::get_request_manager;
lazy_static! {
static ref HTTP_TOKEN: Mutex<String> = Mutex::new(String::new());
}
#[derive(Debug)] #[derive(Debug)]
pub enum ApiError { pub enum ApiError {
@@ -20,7 +26,9 @@ pub enum ApiError {
pub struct ApiBuilder { pub struct ApiBuilder {
client: Client, client: Client,
request: Option<RequestBuilder>, request: Option<RequestBuilder>,
headers: Option<HeaderMap> headers: Option<HeaderMap>,
auth_set: bool,
host: Option<String>,
} }
impl ApiBuilder { impl ApiBuilder {
@@ -29,28 +37,25 @@ impl ApiBuilder {
client: Client::new(), client: Client::new(),
request: None, request: None,
headers: None, headers: None,
auth_set: false,
host: None,
} }
} }
pub fn set_url(&mut self, method: Method, url: &str) -> &mut ApiBuilder { pub fn set_url(&mut self, method: Method, url: &str) -> &mut ApiBuilder {
let remote = match config::get("remote") { let mut new_url = url.to_owned();
Some(r) => r, if let Some(active) = config::get_core("force_insecure") {
None => { if active == "true" {
eprintln!("fatal: unable to find a remote"); new_url = url.replace("https", "http");
std::process::exit(1);
} }
}; }
let (host, _, _) = get_url_props(&remote); self.request = Some(self.client.request(method, new_url));
let mut u = String::from(host);
u.push_str(url);
self.request = Some(self.client.request(method, u));
self self
} }
pub fn build_request(&mut self, method: Method, path: &str) -> &mut ApiBuilder { pub fn build_request(&mut self, method: Method, path: &str) -> &mut ApiBuilder {
let remote = match config::get("remote") { let remote = match config::get_remote("origin") {
Some(r) => r, Some(r) => r,
None => { None => {
eprintln!("fatal: unable to find a remote"); eprintln!("fatal: unable to find a remote");
@@ -58,6 +63,7 @@ impl ApiBuilder {
} }
}; };
let (host, username, root) = get_url_props(&remote); let (host, username, root) = get_url_props(&remote);
self.host = Some(host.clone());
let mut url = String::from(host); let mut url = String::from(host);
url.push_str("/remote.php/dav/files/"); url.push_str("/remote.php/dav/files/");
url.push_str(username.unwrap()); url.push_str(username.unwrap());
@@ -66,11 +72,12 @@ impl ApiBuilder {
if path != "/" { if path != "/" {
url.push_str(path); url.push_str(path);
} }
self.request = Some(self.client.request(method, url));
self self.set_url(method, &url)
} }
pub fn set_req(&mut self, meth: Method, p: &str, api_props: &ApiProps) -> &mut ApiBuilder { pub fn set_req(&mut self, meth: Method, p: &str, api_props: &ApiProps) -> &mut ApiBuilder {
self.host = Some(api_props.clone().host.clone());
let mut url = String::from(&api_props.host); let mut url = String::from(&api_props.host);
url.push_str("/remote.php/dav/files/"); url.push_str("/remote.php/dav/files/");
url.push_str("/"); url.push_str("/");
@@ -80,27 +87,37 @@ impl ApiBuilder {
if p != "/" { if p != "/" {
url.push_str(p); url.push_str(p);
} }
self.request = Some(self.client.request(meth, url));
self self.set_url(meth, &url)
} }
fn set_auth(&mut self) -> &mut ApiBuilder { pub fn set_basic_auth(&mut self, login: String, pwd: String) -> &mut ApiBuilder {
// todo if not exist
dotenv().ok();
let password = env::var("PASSWORD").unwrap();
let username = env::var("USERNAME").unwrap();
match self.request.take() { match self.request.take() {
None => { None => {
eprintln!("fatal: incorrect request"); eprintln!("fatal: incorrect request");
std::process::exit(1); std::process::exit(1);
}, },
Some(req) => { Some(req) => {
self.request = Some(req.basic_auth(username, Some(password))); self.request = Some(req.basic_auth(login, Some(pwd)));
} }
} }
self.auth_set = true;
self self
} }
pub fn set_token(&mut self, token: String) {
match self.request.take() {
None => {
eprintln!("fatal: incorrect request");
std::process::exit(1);
},
Some(req) => {
self.request = Some(req.bearer_auth(token));
}
}
self.auth_set = true;
}
pub fn set_xml(&mut self, xml_payload: String) -> &mut ApiBuilder { pub fn set_xml(&mut self, xml_payload: String) -> &mut ApiBuilder {
match self.request.take() { match self.request.take() {
None => { None => {
@@ -148,8 +165,80 @@ impl ApiBuilder {
self self
} }
pub async fn send(&mut self) -> Result<Response, Error> { fn set_request_manager(&mut self) {
self.set_auth(); let mut request_manager = get_request_manager().lock().unwrap();
let request_manager = request_manager.as_mut().unwrap();
if !self.host.is_none()
{
request_manager.set_host(self.host.clone().unwrap().replace("https://", ""));
}
if !self.auth_set {
self.set_token(request_manager.get_token());
//self.set_auth();
}
}
pub fn send(&mut self, need_text: bool) -> Result<Option<String>, ApiError> {
if !self.host.is_none() || !self.auth_set {
self.set_request_manager();
}
let res_req = tokio::runtime::Runtime::new().unwrap().block_on(async {
match self.request.take() {
None => {
eprintln!("fatal: incorrect request");
std::process::exit(1);
},
Some(req) => {
if let Some(headers) = &self.headers {
req.headers(headers.clone())
.send().await
} else {
req.send().await
}
},
}
});
// handle request error
let res = match res_req {
Err(err) => {
eprintln!("fatal: {}", err.source().unwrap());
std::process::exit(1);
},
Ok(res) => res,
};
if res.status().is_success() {
if need_text {
let body = tokio::runtime::Runtime::new().unwrap().block_on(async {
res.text().await
}).map_err(|err| ApiError::EmptyError(err))?;
Ok(Some(body))
} else {
Ok(None)
}
} else {
Err(ApiError::IncorrectRequest(res))
}
}
pub async fn old_send(&mut self) -> Result<Response, reqwest::Error> {
let mut request_manager = get_request_manager().lock().unwrap();
let request_manager = request_manager.as_mut().unwrap();
if !self.host.is_none()
{
request_manager.set_host(self.host.clone().unwrap());
}
if !self.auth_set {
//self.set_auth();
self.set_token(request_manager.get_token());
}
match self.request.take() { match self.request.take() {
None => { None => {
eprintln!("fatal: incorrect request"); eprintln!("fatal: incorrect request");
@@ -158,9 +247,9 @@ impl ApiBuilder {
Some(req) => { Some(req) => {
if let Some(headers) = &self.headers { if let Some(headers) = &self.headers {
req.headers(headers.clone()) req.headers(headers.clone())
.send().await.map_err(Error::from) .send().await.map_err(reqwest::Error::from)
} else { } else {
req.send().await.map_err(Error::from) req.send().await.map_err(reqwest::Error::from)
} }
}, },
} }
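ApiBuilder::send above turns the async reqwest call into a blocking one by spinning up a Tokio runtime and block_on-ing the request. A standalone sketch of that bridge (a hypothetical helper, assuming reqwest and tokio with the rt-multi-thread feature; auth and headers are omitted):

use reqwest::Method;

fn blocking_get(url: &str) -> Result<String, reqwest::Error> {
    // One-shot runtime, mirroring the block_on pattern used by ApiBuilder::send.
    tokio::runtime::Runtime::new()
        .expect("failed to start tokio runtime")
        .block_on(async {
            let res = reqwest::Client::new()
                .request(Method::GET, url)
                .send()
                .await?;
            res.text().await
        })
}

fn main() {
    match blocking_get("https://example.org/status.php") {
        Ok(body) => println!("{} bytes", body.len()),
        Err(err) => eprintln!("request failed: {}", err),
    }
}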

13 src/services/api_call.rs Normal file
View File

@@ -0,0 +1,13 @@
use crate::services::api::ApiError;
pub trait ApiCall {
fn new() -> Self where Self: Sized {
unimplemented!()
}
fn set_url(&mut self, _url: &str) -> &mut Self {
self
}
fn send(&mut self) -> Result<Option<String>, ApiError> {
unimplemented!()
}
}
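The new ApiCall trait gives every WebDAV service a common constructor/set_url/send surface, with unimplemented!() defaults for methods a service does not need. A minimal sketch of an implementor (the Ping struct is hypothetical, and the stub ApiError stands in for the real enum in services/api.rs):

// Stub standing in for crate::services::api::ApiError.
#[derive(Debug)]
pub enum ApiError {
    Unexpected(String),
}

pub trait ApiCall {
    fn new() -> Self where Self: Sized { unimplemented!() }
    fn set_url(&mut self, _url: &str) -> &mut Self { self }
    fn send(&mut self) -> Result<Option<String>, ApiError> { unimplemented!() }
}

struct Ping { url: String }

impl ApiCall for Ping {
    fn new() -> Self { Ping { url: String::new() } }
    fn set_url(&mut self, url: &str) -> &mut Self {
        self.url = url.to_owned();
        self
    }
    fn send(&mut self) -> Result<Option<String>, ApiError> {
        // A real service would delegate to ApiBuilder here.
        Ok(Some(format!("pinged {}", self.url)))
    }
}

fn main() {
    let body = Ping::new().set_url("/remote.php/dav").send();
    println!("{:?}", body);
}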

View File

@@ -1,2 +0,0 @@

53 src/services/copy.rs Normal file
View File

@@ -0,0 +1,53 @@
use reqwest::{Method, header::HeaderValue};
use crate::services::api::{ApiBuilder, ApiError};
use crate::commands::clone::get_url_props;
use crate::commands::config;
use crate::services::api_call::ApiCall;
pub struct Copy {
api_builder: ApiBuilder,
}
impl ApiCall for Copy {
fn new() -> Self {
Copy {
api_builder: ApiBuilder::new(),
}
}
fn send(&mut self) -> Result<Option<String>, ApiError> {
self.api_builder.send(true)
}
}
impl Copy {
pub fn set_url_copy(&mut self, url: &str, destination: &str) -> &mut Copy {
self.api_builder.build_request(Method::from_bytes(b"COPY").unwrap(), url);
let remote = match config::get_remote("origin") {
Some(r) => r,
None => {
eprintln!("fatal: unable to find a remote");
std::process::exit(1);
}
};
let (host, username, root) = get_url_props(&remote);
let mut url = String::from(host);
url.push_str("/remote.php/dav/files/");
url.push_str(username.unwrap());
url.push_str(&root);
url.push_str("/");
if destination != "/" {
url.push_str(destination);
}
self.api_builder.set_header("Destination", HeaderValue::from_str(&url).unwrap());
self
}
pub fn _overwrite(&mut self, overwrite: bool) -> &mut Copy {
self.api_builder.set_header("Overwrite", HeaderValue::from_str({
if overwrite { "T" } else { "F" }
}).unwrap());
self
}
}

View File

@@ -1,34 +1,24 @@
use reqwest::{Method, Response, Error}; use reqwest::Method;
use crate::services::api::{ApiBuilder, ApiError}; use crate::services::api::{ApiBuilder, ApiError};
use crate::services::api_call::ApiCall;
pub struct CreateFolder { pub struct CreateFolder {
api_builder: ApiBuilder, api_builder: ApiBuilder,
} }
impl CreateFolder { impl ApiCall for CreateFolder {
pub fn new() -> Self { fn new() -> Self {
CreateFolder { CreateFolder {
api_builder: ApiBuilder::new(), api_builder: ApiBuilder::new(),
} }
} }
pub fn set_url(&mut self, url: &str) -> &mut CreateFolder { fn set_url(&mut self, url: &str) -> &mut CreateFolder {
self.api_builder.build_request(Method::from_bytes(b"MKCOL").unwrap(), url); self.api_builder.build_request(Method::from_bytes(b"MKCOL").unwrap(), url);
self self
} }
pub async fn send(&mut self) -> Result<Response, Error> { fn send(&mut self) -> Result<Option<String>, ApiError> {
self.api_builder.send().await self.api_builder.send(false)
}
pub fn send_with_err(&mut self) -> Result<(), ApiError> {
let res = tokio::runtime::Runtime::new().unwrap().block_on(async {
self.send().await
}).map_err(ApiError::RequestError)?;
if res.status().is_success() {
Ok(())
} else {
Err(ApiError::IncorrectRequest(res))
}
} }
} }

View File

@@ -1,38 +1,24 @@
use reqwest::{Method, Response, Error}; use reqwest::Method;
use crate::services::api::{ApiBuilder, ApiError}; use crate::services::api::{ApiBuilder, ApiError};
use crate::services::api_call::ApiCall;
pub struct DeletePath { pub struct DeletePath {
api_builder: ApiBuilder, api_builder: ApiBuilder,
} }
impl DeletePath { impl ApiCall for DeletePath {
pub fn new() -> Self { fn new() -> Self {
DeletePath { DeletePath {
api_builder: ApiBuilder::new(), api_builder: ApiBuilder::new(),
} }
} }
pub fn set_url(&mut self, url: &str) -> &mut DeletePath { fn set_url(&mut self, url: &str) -> &mut DeletePath {
self.api_builder.build_request(Method::DELETE, url); self.api_builder.build_request(Method::DELETE, url);
self self
} }
pub async fn send(&mut self) -> Result<Response, Error> { fn send(&mut self) -> Result<Option<String>, ApiError> {
self.api_builder.send().await self.api_builder.send(true)
}
pub fn send_with_err(&mut self) -> Result<String, ApiError> {
let res = tokio::runtime::Runtime::new().unwrap().block_on(async {
self.send().await
}).map_err(ApiError::RequestError)?;
if res.status().is_success() {
let body = tokio::runtime::Runtime::new().unwrap().block_on(async {
res.text().await
}).map_err(ApiError::EmptyError)?;
Ok(body)
} else {
Err(ApiError::IncorrectRequest(res))
}
} }
} }

View File

@@ -6,38 +6,32 @@ use std::io::{self, Write};
use reqwest::{Method, Response, Error}; use reqwest::{Method, Response, Error};
use crate::utils::api::ApiProps; use crate::utils::api::ApiProps;
use crate::services::api::{ApiBuilder, ApiError}; use crate::services::api::{ApiBuilder, ApiError};
use crate::services::api_call::ApiCall;
pub struct DownloadFiles { pub struct DownloadFiles {
api_builder: ApiBuilder, api_builder: ApiBuilder,
relative_ps: String, relative_ps: String,
} }
impl DownloadFiles { impl ApiCall for DownloadFiles {
pub fn new() -> Self { fn new() -> Self {
DownloadFiles { DownloadFiles {
api_builder: ApiBuilder::new(), api_builder: ApiBuilder::new(),
relative_ps: String::from(""), relative_ps: String::new(),
} }
} }
}
pub fn set_url(&mut self, relative_ps: &str, api_props: &ApiProps) -> &mut DownloadFiles { impl DownloadFiles {
// todo make it beautiful
pub fn set_url_download(&mut self, relative_ps: &str, api_props: &ApiProps) -> &mut DownloadFiles {
self.relative_ps = relative_ps.to_string(); self.relative_ps = relative_ps.to_string();
self.api_builder.set_req(Method::GET, relative_ps, api_props); self.api_builder.set_req(Method::GET, relative_ps, api_props);
self self
} }
pub async fn send(&mut self) -> Result<Response, Error> { pub async fn send_download(&mut self) -> Result<Response, Error> {
self.api_builder.send().await self.api_builder.old_send().await
}
pub async fn _send_with_err(mut self) -> Result<Vec<u8>, ApiError> {
let res = self.send().await.map_err(ApiError::RequestError)?;
if res.status().is_success() {
let body = res.bytes().await.map_err(ApiError::EmptyError)?;
Ok(body.to_vec())
} else {
Err(ApiError::IncorrectRequest(res))
}
} }
pub fn save_stream(&mut self, ref_p: PathBuf, callback: Option<impl Fn(u64)>) -> Result<(), ApiError> { pub fn save_stream(&mut self, ref_p: PathBuf, callback: Option<impl Fn(u64)>) -> Result<(), ApiError> {
@@ -45,7 +39,7 @@ impl DownloadFiles {
let mut file = File::create(abs_p).unwrap(); let mut file = File::create(abs_p).unwrap();
tokio::runtime::Runtime::new().unwrap().block_on(async { tokio::runtime::Runtime::new().unwrap().block_on(async {
let res = self.send().await.map_err(ApiError::RequestError)?; let res = self.send_download().await.map_err(ApiError::RequestError)?;
if res.status().is_success() { if res.status().is_success() {
let mut stream = res.bytes_stream(); let mut stream = res.bytes_stream();
@@ -70,11 +64,11 @@ impl DownloadFiles {
pub fn save(&mut self, ref_p: PathBuf) -> Result<(), ApiError> { pub fn save(&mut self, ref_p: PathBuf) -> Result<(), ApiError> {
tokio::runtime::Runtime::new().unwrap().block_on(async { tokio::runtime::Runtime::new().unwrap().block_on(async {
let p = ref_p.join(PathBuf::from(self.relative_ps.clone())); let p = ref_p.join(PathBuf::from(self.relative_ps.clone()));
let res = self.send().await.map_err(ApiError::RequestError)?; let res = self.send_download().await.map_err(ApiError::RequestError)?;
if res.status().is_success() { if res.status().is_success() {
let body = res.bytes().await.map_err(ApiError::EmptyError)?; let body = res.bytes().await.map_err(ApiError::EmptyError)?;
match Self::write_file(p, &body.to_vec()) { match Self::write_file(p, &body.to_vec()) {
Err(_) => Err(ApiError::Unexpected(String::from(""))), Err(_) => Err(ApiError::Unexpected(String::new())),
Ok(_) => Ok(()), Ok(_) => Ok(()),
} }
} else { } else {

View File

@@ -2,6 +2,7 @@ use std::path::PathBuf;
use indicatif::{ProgressBar, MultiProgress, ProgressStyle, HumanBytes}; use indicatif::{ProgressBar, MultiProgress, ProgressStyle, HumanBytes};
use crate::utils::api::ApiProps; use crate::utils::api::ApiProps;
use crate::services::api_call::ApiCall;
use crate::services::api::ApiError; use crate::services::api::ApiError;
use crate::services::download_files::DownloadFiles; use crate::services::download_files::DownloadFiles;
use crate::services::req_props::ObjProps; use crate::services::req_props::ObjProps;
@@ -42,7 +43,7 @@ impl Downloader {
self self
} }
pub fn add_file(&mut self, file: ObjProps) -> &mut Downloader { pub fn _add_file(&mut self, file: ObjProps) -> &mut Downloader {
self.files.push(file); self.files.push(file);
self self
} }
@@ -92,6 +93,7 @@ impl Downloader {
let mut total_size = 0; let mut total_size = 0;
let nb_objs = self.files.len(); let nb_objs = self.files.len();
// set the full size of the download
self.files self.files
.iter() .iter()
.for_each(|f| .for_each(|f|
@@ -106,15 +108,11 @@ impl Downloader {
for file in self.files.clone() { for file in self.files.clone() {
let relative_s = &file.clone().relative_s.unwrap(); let relative_s = &file.clone().relative_s.unwrap();
let mut download = DownloadFiles::new(); let mut download = DownloadFiles::new();
download.set_url(&relative_s, &self.api_props.clone().unwrap()); download.set_url_download(&relative_s, &self.api_props.clone().unwrap());
let should_use_stream = { let should_use_stream = {
if let Some(size) = file.contentlength { if let Some(size) = file.contentlength {
if size > SIZE_TO_STREAM { size > SIZE_TO_STREAM
true
} else {
false
}
} else { } else {
false false
} }
@@ -123,7 +121,7 @@ impl Downloader {
// download // download
let res = { let res = {
if should_use_stream { if should_use_stream {
download.save_stream(ref_p.clone(), Some(|a| self.update_bytes_bar(a))) download.save_stream(ref_p.clone(), if self.should_log { Some(|a| self.update_bytes_bar(a)) } else { None })
} else { } else {
download.save(ref_p.clone()) download.save(ref_p.clone())
} }
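save_stream above writes the response body to disk chunk by chunk instead of buffering it, which is why files larger than SIZE_TO_STREAM take the streaming path. A standalone sketch of that streaming write (a hypothetical helper, assuming reqwest built with its stream feature plus the futures-util StreamExt adapter and a Tokio runtime):

use std::fs::File;
use std::io::Write;
use futures_util::StreamExt;

fn download_streamed(url: &str, dest: &str) -> Result<(), Box<dyn std::error::Error>> {
    tokio::runtime::Runtime::new()?.block_on(async {
        let res = reqwest::Client::new().get(url).send().await?;
        let mut file = File::create(dest)?;
        let mut stream = res.bytes_stream();   // requires reqwest's "stream" feature
        while let Some(chunk) = stream.next().await {
            file.write_all(&chunk?)?;          // flush each chunk to disk as it arrives
        }
        Ok(())
    })
}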

112 src/services/login.rs Normal file
View File

@@ -0,0 +1,112 @@
use std::io;
use std::io::Cursor;
use std::io::prelude::*;
use xml::reader::{EventReader, XmlEvent};
use reqwest::{header::HeaderValue, Method};
use rpassword;
use crate::services::api_call::ApiCall;
use crate::services::api::{ApiBuilder, ApiError};
pub struct Login {
api_builder: ApiBuilder,
login: String,
password: String,
host: Option<String>,
}
impl ApiCall for Login {
fn new() -> Self {
Login {
api_builder: ApiBuilder::new(),
login: String::new(),
password: String::new(),
host: None,
}
}
fn send(&mut self) -> Result<Option<String>, ApiError> {
let url = match self.host.clone() {
Some(h) => {
let mut u = if &h[0..8] == "https://" || &h[0..7] == "http://" {
String::new()
} else {
String::from("https://")
};
u.push_str(&h);
u.push_str("/ocs/v2.php/core/getapppassword");
u
},
None => "/ocs/v2.php/core/getapppassword".to_owned(),
};
self.api_builder.set_url(Method::GET, &url);
self.api_builder.set_header("OCS-APIRequest", HeaderValue::from_str("true").unwrap());
self.api_builder.set_header("USER-AGENT", HeaderValue::from_str("nextsync").unwrap());
self.api_builder.set_basic_auth(self.login.clone(), self.password.clone());
self.api_builder.send(true)
}
}
impl Login {
pub fn ask_auth(&mut self) -> &mut Login {
println!("Please enter your username/email: ");
let stdin = io::stdin();
self.login = stdin.lock().lines().next().unwrap().unwrap();
println!("Please enter your password: ");
self.password = rpassword::read_password().unwrap();
self
}
pub fn set_auth(&mut self, username: &str, password: &str) -> &mut Login {
self.login = username.to_owned();
self.password = password.to_owned();
self
}
pub fn set_host(&mut self, host: Option<String>) -> &mut Login {
self.host = host;
self
}
pub fn send_login(&mut self) -> Result<String, ApiError> {
match self.send() {
Ok(Some(body)) => Ok(self.parse(body)),
Ok(None) => Err(ApiError::Unexpected(String::from("Empty after tested"))),
Err(err) => Err(err),
}
}
fn parse(&self, xml: String) -> String {
let cursor = Cursor::new(xml);
let parser = EventReader::new(cursor);
let mut should_get = false;
for event in parser {
match event {
Ok(XmlEvent::StartElement { name, .. }) => {
should_get = {
if &name.local_name == "apppassword" {
true
} else {
false
}
};
}
Ok(XmlEvent::Characters(text)) => {
if !text.trim().is_empty() && should_get {
return text.clone();
}
}
//Ok(XmlEvent::EndElement { name, .. }) => {
//}
Err(e) => {
eprintln!("err: parsing xml: {}", e);
break;
}
_ => {}
}
}
String::new()
}
}
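Login::parse walks the OCS XML response with xml-rs and returns the text of the first apppassword element. A standalone version of that extraction (assuming the xml-rs crate used in the imports above; the sample body is a made-up minimal response):

use xml::reader::{EventReader, XmlEvent};

fn extract_app_password(xml: &str) -> Option<String> {
    let parser = EventReader::new(xml.as_bytes());
    let mut inside = false;
    for event in parser {
        match event {
            Ok(XmlEvent::StartElement { name, .. }) => {
                // remember whether the cursor is inside <apppassword>
                inside = name.local_name == "apppassword";
            }
            Ok(XmlEvent::Characters(text)) if inside && !text.trim().is_empty() => {
                return Some(text);
            }
            Err(_) => return None,
            _ => {}
        }
    }
    None
}

fn main() {
    let body = "<ocs><data><apppassword>secret-token</apppassword></data></ocs>";
    assert_eq!(extract_app_password(body).as_deref(), Some("secret-token"));
}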

54 src/services/move.rs Normal file
View File

@@ -0,0 +1,54 @@
use reqwest::{Method, header::HeaderValue};
use crate::services::api::{ApiBuilder, ApiError};
use crate::commands::clone::get_url_props;
use crate::commands::config;
use crate::services::api_call::ApiCall;
pub struct Move {
api_builder: ApiBuilder,
}
impl ApiCall for Move {
fn new() -> Self {
Move {
api_builder: ApiBuilder::new(),
}
}
fn send(&mut self) -> Result<Option<String>, ApiError> {
self.api_builder.send(false)
}
}
impl Move {
pub fn set_url_move(&mut self, url: &str, destination: &str) -> &mut Move {
self.api_builder.build_request(Method::from_bytes(b"MOVE").unwrap(), url);
let remote = match config::get_remote("origin") {
Some(r) => r,
None => {
eprintln!("fatal: unable to find a remote");
std::process::exit(1);
}
};
let (host, username, root) = get_url_props(&remote);
let mut url = String::from(host);
url.push_str("/remote.php/dav/files/");
url.push_str(username.unwrap());
url.push_str(&root);
url.push_str("/");
if destination != "/" {
url.push_str(destination);
}
self.api_builder.set_header("Destination", HeaderValue::from_str(&url).unwrap());
self
}
pub fn _overwrite(&mut self, overwrite: bool) -> &mut Move {
self.api_builder.set_header("Overwrite", HeaderValue::from_str({
if overwrite { "T" } else { "F" }
}).unwrap());
self
}
}
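Move (like Copy above) issues a non-standard WebDAV method and points the Destination header at the new location. A reduced sketch of building such a request directly with reqwest (hypothetical helper and URLs; authentication and the Overwrite choice are simplified compared to the ApiBuilder path):

use reqwest::Method;

fn webdav_move(src_url: &str, dest_url: &str) -> Result<u16, reqwest::Error> {
    tokio::runtime::Runtime::new()
        .expect("failed to start tokio runtime")
        .block_on(async {
            let res = reqwest::Client::new()
                .request(Method::from_bytes(b"MOVE").unwrap(), src_url)
                .header("Destination", dest_url) // where the resource should end up
                .header("Overwrite", "F")        // refuse to clobber an existing target
                .send()
                .await?;
            Ok(res.status().as_u16())
        })
}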

View File

@@ -1,6 +1,6 @@
use std::io::Cursor; use std::io::Cursor;
use chrono::{Utc, DateTime}; use chrono::{Utc, DateTime};
use reqwest::{Method, Response, Error}; use reqwest::Method;
use xml::reader::{EventReader, XmlEvent}; use xml::reader::{EventReader, XmlEvent};
use reqwest::header::HeaderValue; use reqwest::header::HeaderValue;
use crate::commands::clone::get_url_props; use crate::commands::clone::get_url_props;
@@ -8,6 +8,7 @@ use crate::commands::config;
use crate::utils::time::parse_timestamp; use crate::utils::time::parse_timestamp;
use crate::utils::api::{get_relative_s, ApiProps}; use crate::utils::api::{get_relative_s, ApiProps};
use crate::services::api::{ApiBuilder, ApiError}; use crate::services::api::{ApiBuilder, ApiError};
use crate::services::api_call::ApiCall;
#[derive(Debug)] #[derive(Debug)]
pub struct ObjProps { pub struct ObjProps {
@@ -55,8 +56,8 @@ pub struct ReqProps {
api_props: Option<ApiProps> api_props: Option<ApiProps>
} }
impl ReqProps { impl ApiCall for ReqProps {
pub fn new() -> Self { fn new() -> Self {
ReqProps { ReqProps {
api_builder: ApiBuilder::new(), api_builder: ApiBuilder::new(),
xml_balises: vec![], xml_balises: vec![],
@@ -65,8 +66,8 @@ impl ReqProps {
} }
} }
pub fn set_url(&mut self, url: &str) -> &mut ReqProps { fn set_url(&mut self, url: &str) -> &mut ReqProps {
let remote = match config::get("remote") { let remote = match config::get_remote("origin") {
Some(r) => r, Some(r) => r,
None => { None => {
eprintln!("fatal: unable to find a remote"); eprintln!("fatal: unable to find a remote");
@@ -83,6 +84,13 @@ impl ReqProps {
self self
} }
fn send(&mut self) -> Result<Option<String>, ApiError> {
self.validate_xml();
self.api_builder.send(true)
}
}
impl ReqProps {
pub fn set_request(&mut self, p: &str, api_props: &ApiProps) -> &mut ReqProps { pub fn set_request(&mut self, p: &str, api_props: &ApiProps) -> &mut ReqProps {
self.api_props = Some(api_props.clone()); self.api_props = Some(api_props.clone());
self.api_builder.set_req(Method::from_bytes(b"PROPFIND").unwrap(), p, api_props); self.api_builder.set_req(Method::from_bytes(b"PROPFIND").unwrap(), p, api_props);
@@ -145,32 +153,10 @@ impl ReqProps {
self self
} }
pub async fn send(&mut self) -> Result<Response, Error> {
self.validate_xml();
self.api_builder.send().await
}
pub fn send_with_err(&mut self) -> Result<String, ApiError> {
tokio::runtime::Runtime::new().unwrap().block_on(async {
match self.send().await {
Err(res) => Err(ApiError::RequestError(res)),
Ok(res) if res.status().is_success() => {
let body = res
.text()
.await
.map_err(|err| ApiError::EmptyError(err))?;
Ok(body)
},
Ok(res) => {
Err(ApiError::IncorrectRequest(res))
}
}
})
}
pub fn send_req_multiple(&mut self) -> Result<Vec<ObjProps>, ApiError> { pub fn send_req_multiple(&mut self) -> Result<Vec<ObjProps>, ApiError> {
match self.send_with_err() { match self.send() {
Ok(body) => Ok(self.parse(body, true)), Ok(Some(body)) => Ok(self.parse(body, true)),
Ok(None) => Err(ApiError::Unexpected(String::from("Empty after tested"))),
Err(err) => Err(err), Err(err) => Err(err),
} }
} }
@@ -178,12 +164,13 @@ impl ReqProps {
pub fn send_req_single(&mut self) -> Result<ObjProps, ApiError> { pub fn send_req_single(&mut self) -> Result<ObjProps, ApiError> {
// set depth to 0 as we only need one element // set depth to 0 as we only need one element
self.set_depth("0"); self.set_depth("0");
match self.send_with_err() { match self.send() {
Ok(body) => { Ok(Some(body)) => {
let objs = self.parse(body, false); let objs = self.parse(body, false);
let obj = objs[0].clone(); let obj = objs[0].clone();
Ok(obj) Ok(obj)
}, },
Ok(None) => Err(ApiError::Unexpected(String::from("Empty after tested"))),
Err(err) => Err(err), Err(err) => Err(err),
} }
} }
@@ -195,7 +182,7 @@ impl ReqProps {
let mut values: Vec<ObjProps> = vec![]; let mut values: Vec<ObjProps> = vec![];
let mut should_get = false; let mut should_get = false;
let mut val: String = String::from(""); let mut val: String = String::new();
let mut content = ObjProps::new(); let mut content = ObjProps::new();
for event in parser { for event in parser {

View File

@@ -0,0 +1,91 @@
use lazy_static::lazy_static;
use std::sync::Mutex;
use crate::services::login::Login;
use crate::commands::config;
use crate::store::gconfig;
use crate::commands::clone::get_url_props;
use crate::services::api_call::ApiCall;
lazy_static! {
static ref REQUEST_MANAGER: Mutex<Option<RequestManager>> = Mutex::new(None);
}
pub fn get_request_manager() -> &'static Mutex<Option<RequestManager>> {
if REQUEST_MANAGER.lock().unwrap().is_none() {
*REQUEST_MANAGER.lock().unwrap() = Some(RequestManager::new());
}
&REQUEST_MANAGER
}
pub struct RequestManager {
token: Option<String>,
host: Option<String>,
}
impl RequestManager {
pub fn new() -> Self {
RequestManager {
token: None,
host: None,
}
}
pub fn set_host(&mut self, host: String) {
self.host = Some(host);
}
pub fn get_host(&mut self) -> String
{
if self.host.is_none()
{
let remote = match config::get_remote("origin") {
Some(r) => r,
None => {
// todo ask user instead
eprintln!("fatal: unable to find a remote");
std::process::exit(1);
}
};
let (host, _, _) = get_url_props(&remote);
self.host = Some(host.clone());
// todo ask user
}
self.host.clone().unwrap()
}
pub fn get_token(&mut self) -> String {
if self.token.is_none() {
// look in global config
if let Some(token) = gconfig::read_token() {
if !token.is_empty() {
self.token = Some(token);
return self.token.clone().unwrap();
}
}
// look in local config
if let Some(token) = config::find_option_in_cat("core", "token")
{
if !token.is_empty() {
self.token = Some(token);
return self.token.clone().unwrap();
}
}
// ask for a token
let get_token = Login::new()
.ask_auth()
.set_host(Some(self.get_host()))
.send_login();
// todo deal with error cases
self.token = Some(get_token.unwrap());
if let Err(err) = gconfig::write_token(&self.token.clone().unwrap()) {
eprintln!("err: failed to write token ({})", err);
}
}
self.token.clone().unwrap()
}
}
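get_request_manager lazily constructs the shared RequestManager behind a lazy_static Mutex<Option<...>>, so every ApiBuilder reuses the same cached host and token. A self-contained sketch of that singleton pattern (a toy Manager stands in for the real struct, assuming the lazy_static crate; unlike the original, this version holds a single lock across the check and the initialisation):

use lazy_static::lazy_static;
use std::sync::Mutex;

struct Manager { calls: u32 }

impl Manager {
    fn new() -> Self { Manager { calls: 0 } }
    fn next(&mut self) -> u32 { self.calls += 1; self.calls }
}

lazy_static! {
    static ref MANAGER: Mutex<Option<Manager>> = Mutex::new(None);
}

fn get_manager() -> &'static Mutex<Option<Manager>> {
    // Initialise on first access, mirroring get_request_manager above.
    let mut guard = MANAGER.lock().unwrap();
    if guard.is_none() {
        *guard = Some(Manager::new());
    }
    drop(guard);
    &MANAGER
}

fn main() {
    let mut guard = get_manager().lock().unwrap();
    let manager = guard.as_mut().unwrap();
    println!("call #{}", manager.next());
}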

View File

@@ -1,25 +1,32 @@
use std::fs::File; use std::fs::File;
use std::io::Read; use std::io::Read;
use std::path::PathBuf; use std::path::PathBuf;
use reqwest::{Method, Response, Error}; use reqwest::Method;
use crate::services::api::{ApiBuilder, ApiError}; use crate::services::api::{ApiBuilder, ApiError};
use crate::services::api_call::ApiCall;
pub struct UploadFile { pub struct UploadFile {
api_builder: ApiBuilder, api_builder: ApiBuilder,
} }
impl UploadFile { impl ApiCall for UploadFile {
pub fn new() -> Self { fn new() -> Self {
UploadFile { UploadFile {
api_builder: ApiBuilder::new(), api_builder: ApiBuilder::new(),
} }
} }
pub fn set_url(&mut self, url: &str) -> &mut UploadFile { fn set_url(&mut self, url: &str) -> &mut UploadFile {
self.api_builder.build_request(Method::PUT, url); self.api_builder.build_request(Method::PUT, url);
self self
} }
fn send(&mut self) -> Result<Option<String>, ApiError> {
self.api_builder.send(true)
}
}
impl UploadFile {
pub fn set_file(&mut self, path: PathBuf) -> &mut UploadFile { pub fn set_file(&mut self, path: PathBuf) -> &mut UploadFile {
// todo large file // todo large file
// todo small files // todo small files
@@ -29,23 +36,4 @@ impl UploadFile {
self.api_builder.set_body(buffer); self.api_builder.set_body(buffer);
self self
} }
pub async fn send(&mut self) -> Result<Response, Error> {
self.api_builder.send().await
}
pub fn send_with_err(&mut self) -> Result<String, ApiError> {
let res = tokio::runtime::Runtime::new().unwrap().block_on(async {
self.send().await
}).map_err(ApiError::RequestError)?;
if res.status().is_success() {
let body = tokio::runtime::Runtime::new().unwrap().block_on(async {
res.text().await
}).map_err(ApiError::EmptyError)?;
Ok(body)
} else {
Err(ApiError::IncorrectRequest(res))
}
}
} }

View File

@@ -1,3 +1,4 @@
pub mod index; pub mod index;
pub mod head; pub mod head;
pub mod object; pub mod object;
pub mod gconfig;

54 src/store/gconfig.rs Normal file
View File

@@ -0,0 +1,54 @@
use std::env;
use std::path::PathBuf;
use std::fs::{self, OpenOptions};
use std::io::{self, Write};
use crate::utils::read;
fn global_path() -> Option<PathBuf> {
if let Some(home_dir) = env::var_os("HOME") {
let mut path = PathBuf::new();
path.push(home_dir);
path.push(".nextsync");
Some(path)
}
else
{
None
}
}
pub fn write_token(token: &str) -> io::Result<()> {
if let Some(mut path_token) = global_path() {
if !path_token.exists() {
fs::create_dir_all(path_token.clone())?;
}
path_token.push("token");
let mut file = OpenOptions::new()
.read(true)
.write(true)
.create(true)
.open(path_token)?;
writeln!(file, "{}", token)?;
}
Ok(())
}
pub fn read_token() -> Option<String> {
if let Some(mut path_token) = global_path() {
if !path_token.exists() {
return None;
}
path_token.push("token");
if let Ok(lines) = read::read_lines(path_token) {
for line in lines {
if let Ok(l) = line {
return Some(l);
}
}
}
}
None
}
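write_token stores the app password under ~/.nextsync/token and read_token returns the first line back, which is what the request manager checks before asking the user. A hedged usage sketch (assuming it is called from inside the crate, so crate::store::gconfig is reachable, and that HOME is set):

use crate::store::gconfig;

fn remember_token(token: &str) {
    if let Err(err) = gconfig::write_token(token) {
        eprintln!("err: failed to write token ({})", err);
    }
    // On the next run the cached value is picked up before prompting again.
    match gconfig::read_token() {
        Some(saved) => println!("token cached ({} chars)", saved.len()),
        None => println!("no token cached"),
    }
}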

View File

@@ -1,12 +1,17 @@
use std::io; use std::io;
use std::path::PathBuf;
use std::fs::File; use std::fs::File;
use std::fs::OpenOptions; use std::fs::OpenOptions;
use crate::utils::{read, path}; use crate::utils::{read, path};
pub fn open() -> File { pub fn path() -> PathBuf {
let mut path = path::nextsync(); let mut path = path::nextsync();
path.push("index"); path.push("index");
path
}
pub fn open() -> File {
let path = path();
OpenOptions::new() OpenOptions::new()
.read(true) .read(true)
.write(true) .write(true)
@@ -27,3 +32,16 @@ pub fn rm_line(line: &str) -> io::Result<()> {
read::rm_line(root, line)?; read::rm_line(root, line)?;
Ok(()) Ok(())
} }
pub fn alread_added(file: String) -> bool {
if let Ok(lines) = read_line() {
for line in lines {
if let Ok(l) = line {
if l == file {
return true;
}
}
}
}
return false;
}

View File

@@ -4,12 +4,12 @@ use std::fs::{self, OpenOptions};
use crypto::sha1::Sha1; use crypto::sha1::Sha1;
use crypto::digest::Digest; use crypto::digest::Digest;
use std::io::{Seek, SeekFrom, Read}; use std::io::{Seek, SeekFrom, Read};
use crate::utils::time::parse_timestamp;
use crate::store::head; use crate::store::head;
use crate::utils::{read, path}; use crate::utils::{read, path};
pub mod tree; pub mod tree;
pub mod blob; pub mod blob;
pub mod object;
pub struct Object { pub struct Object {
path: PathBuf, path: PathBuf,
@@ -31,7 +31,7 @@ impl Object {
if path == "" { if path == "" {
return Object { return Object {
path: PathBuf::from("/"), path: PathBuf::from("/"),
hash: String::from(""), hash: String::new(),
obj_p: head::path(), obj_p: head::path(),
ts: None, ts: None,
} }
@@ -59,9 +59,10 @@ impl Object {
match read::read_lines(&self.obj_p) { match read::read_lines(&self.obj_p) {
Ok(mut reader) => { Ok(mut reader) => {
if let Some(Ok(line)) = reader.next() { if let Some(Ok(line)) = reader.next() {
let mut data = line.rsplit(' '); let mut data = line.rsplit(' ').collect::<Vec<_>>();
if data.clone().count() >= 2 { data.reverse();
self.ts = Some(data.next().unwrap().parse::<i64>().unwrap()) if data.clone().len() >= 2 {
self.ts = Some(data[1].parse::<i64>().unwrap())
} }
} }
}, },
@@ -131,7 +132,7 @@ fn rm(hash: &str) -> io::Result<()> {
fn rm_node(path: &Path, node: &str) -> io::Result<()> { fn rm_node(path: &Path, node: &str) -> io::Result<()> {
let mut root = path::objects(); let mut root = path::objects();
let (dir, rest) = hash_obj(path.clone().to_str().unwrap()); let (dir, rest) = hash_obj(path.to_str().unwrap());
root.push(dir); root.push(dir);
root.push(rest); root.push(rest);
@@ -143,11 +144,11 @@ fn rm_node(path: &Path, node: &str) -> io::Result<()> {
fn add_node(path: &Path, node: &str) -> io::Result<()> { fn add_node(path: &Path, node: &str) -> io::Result<()> {
let mut root = path::objects(); let mut root = path::objects();
let (dir, rest) = hash_obj(path.clone().to_str().unwrap()); let (dir, rest) = hash_obj(path.to_str().unwrap());
root.push(dir); root.push(dir);
if !root.exists() { if !root.exists() {
todo!(); //todo!();
} }
root.push(rest); root.push(rest);
@@ -168,7 +169,7 @@ fn update_dates(mut path: PathBuf, date: &str) -> io::Result<()> {
let (dir, res) = hash_obj(path.to_str().unwrap()); let (dir, res) = hash_obj(path.to_str().unwrap());
obj_p.push(dir); obj_p.push(dir);
obj_p.push(res); obj_p.push(res);
update_date(obj_p.clone(), date.clone())?; update_date(obj_p.clone(), date)?;
obj_p.pop(); obj_p.pop();
obj_p.pop(); obj_p.pop();
} }
@@ -223,31 +224,3 @@ fn create_obj(name: String, content: &str) -> io::Result<()> {
Ok(()) Ok(())
} }
pub fn get_timestamp(path_s: String) -> Option<i64> {
let mut obj_p = path::objects();
let (dir, res) = hash_obj(&path_s);
obj_p.push(dir);
obj_p.push(res);
match read::read_lines(obj_p) {
Ok(mut reader) => {
match reader.next() {
Some(Ok(line)) => {
let mut data = line.rsplit(' ');
if data.clone().count() >= 2 {
Some(data.next().unwrap().parse::<i64>().unwrap())
} else {
None
}
},
_ => None,
}
},
Err(err) => {
eprintln!("error reading object: {}", err);
None
},
}
}
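The timestamp-parsing change above stops taking the last whitespace-separated token and instead reads the second field of the object line, matching the layout written by Blob::create_blob_ref. A small sketch of that parsing (standalone; it assumes the line layout "name remote_ts size local_ts hash" and a name without spaces, and the sample line is made up):

// Extract the remote timestamp (second field) from an object line of the form
// "<name> <remote_ts> <size> <local_ts> <hash>".
fn parse_remote_ts(line: &str) -> Option<i64> {
    line.split_whitespace().nth(1)?.parse::<i64>().ok()
}

fn main() {
    let line = "notes.txt 1709164601 42 1709164999 d41d8cd98f00b204e9800998ecf8427e";
    assert_eq!(parse_remote_ts(line), Some(1709164601));
}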

View File

@@ -1,52 +1,330 @@
use std::io; use std::io::{self, Read};
use std::fs::{self, File};
use std::io::Write;
use std::fs::OpenOptions;
use std::path::PathBuf; use std::path::PathBuf;
use std::fs; use std::time::SystemTime;
use crate::utils::path; use crate::commands::status::State;
use crate::store::head; use crate::utils::into::IntoPathBuf;
use crate::store::object::{update_dates, parse_path, add_node, create_obj, rm_node}; use crate::utils::{path, read};
use crate::store::object::update_dates;
pub fn add(path: PathBuf, date: &str, up_parent: bool) -> io::Result<()> { use crate::store::object::object::ObjMethods;
let (line, hash, name) = parse_path(path.clone(), true); use crate::store::object::object::Obj;
// add blob reference to parent
if path.iter().count() == 1 {
head::add_line(line)?;
} else {
add_node(path.parent().unwrap(), &line)?;
}
let mut content = name.clone().to_owned(); const HASH_EMPTY: &str = "d41d8cd98f00b204e9800998ecf8427e";
content.push_str(" ");
content.push_str(date);
// create blob object pub struct Blob {
create_obj(hash, &content)?; pub obj: Obj,
data: Vec<String>, // content of the ref file
// update date for all parent file_hash: Option<String>, // hash of the file's content
if up_parent {
update_dates(path, date)?;
}
Ok(())
} }
pub fn rm(path: PathBuf) -> io::Result<()> { //pub struct Blob {
let (line, hash, _) = parse_path(path.clone(), true); // r_path: PathBuf, // relative path
// a_path: PathBuf, // absolute path
// hash: String, // hash of relative path
// file_hash: Option<String>,
// obj_p: PathBuf, // path of the object file
// data: Vec<String>, // content of the blob
//}
// remove blob reference to parent
if path.iter().count() == 1 {
head::rm_line(&line)?; impl Blob {
} else { pub fn new(obj: Obj) -> Self {
rm_node(path.parent().unwrap(), &line)?; Self {
obj,
data: vec![],
file_hash: None,
}
}
pub fn from_path<S>(r_path: S) -> Blob where S: IntoPathBuf {
let r_path = r_path.into();
Self {
obj: Obj::from_path(r_path),
data: vec![],
file_hash: None,
}
} }
// remove blob object fn get_file_hash(&mut self) -> String {
let mut root = path::objects(); if self.file_hash.is_none() {
let bytes = std::fs::read(self.get_file_path()).unwrap();
let hash = md5::compute(&bytes);
self.file_hash = Some(format!("{:x}", hash))
}
self.file_hash.clone().unwrap()
}
let c = hash.clone(); /// read the lines of the blob ref to get all information and store it in self.data
let (dir, rest) = c.split_at(2); pub fn read_data(&mut self) {
root.push(dir); if self.data.len() == 0 {
root.push(rest); if let Ok(mut file) = File::open(self.get_obj_path()) {
fs::remove_file(root)?; let mut buffer = String::new();
let _ = file.read_to_string(&mut buffer);
let data = buffer.rsplit(' ').collect::<Vec<_>>();
for e in data {
self.data.push(String::from(e));
}
self.data.reverse();
Ok(()) // remove \n of last element
if let Some(last) = self.data.last_mut() {
if last.ends_with("\n") {
last.pop();
}
}
}
}
}
fn get_data_index(&mut self, index: usize) -> String {
self.read_data();
if self.data.len() >= index + 1 {
self.data[index].clone()
} else {
String::new()
}
}
fn saved_filename(&mut self) -> String {
self.get_data_index(0)
}
pub fn saved_remote_ts(&mut self) -> String {
self.get_data_index(1)
}
fn saved_local_size(&mut self) -> String {
self.get_data_index(2)
}
fn saved_local_ts(&mut self) -> u64 {
match self.get_data_index(3).as_str() {
"" => 0,
str => str.parse::<u64>().unwrap()
}
}
fn saved_hash(&mut self) -> String {
self.get_data_index(4)
}
fn has_same_size(&mut self) -> bool {
let metadata = match fs::metadata(self.get_file_path()) {
Ok(m) => m,
Err(_) => return true,
};
if self.saved_local_size() == String::new() { return true; }
metadata.len().to_string() == self.saved_local_size()
}
fn is_newer(&mut self) -> bool {
let metadata = match fs::metadata(self.get_file_path()) {
Ok(m) => m,
Err(_) => return true,
};
let secs = metadata
.modified()
.unwrap()
.duration_since(SystemTime::UNIX_EPOCH)
.unwrap()
.as_secs();
if self.saved_local_ts() == 0 { return true; }
secs > self.saved_local_ts()
}
fn has_same_hash(&mut self) -> bool {
if self.saved_hash() == String::new() { return false; }
let file_hash = self.get_file_hash().clone();
self.saved_hash() == file_hash
}
pub fn has_changes(&mut self) -> bool {
!self.has_same_size() || (self.is_newer() && !self.has_same_hash())
}
pub fn get_all_identical_blobs(&mut self) -> Vec<String> {
// an empty file is a new file, not the copy of another empty file
if self.get_file_hash() == HASH_EMPTY {
return vec![];
}
let refs_p = self.get_obj_path();
let mut blobs: Vec<String> = vec![];
if let Ok(lines) = read::read_lines(refs_p) {
for line in lines {
if let Ok(l) = line {
blobs.push(l.clone());
}
}
}
blobs
}
pub fn status(&mut self, path_from: &mut Option<PathBuf>) -> State {
let has_obj_ref = self.get_obj_path().exists();
let blob_exists = self.get_file_path().exists();
if has_obj_ref && !blob_exists {
State::Deleted
} else if !has_obj_ref && blob_exists {
let identical_blobs = self.get_all_identical_blobs();
if identical_blobs.len() != 0 {
let identical_blob = Blob::from_path(identical_blobs[0].clone()).get_local_obj();
if identical_blob.state == State::Deleted {
*path_from = Some(identical_blob.path);
State::Moved
} else if identical_blob.state == State::Default {
*path_from = Some(identical_blob.path);
State::Copied
} else {
State::New
}
} else {
State::New
}
} else if !has_obj_ref && !blob_exists {
State::Default
} else if self.has_changes() {
State::Modified
} else {
State::Default
}
}
fn create_blob_ref(&mut self, ts_remote: &str) -> io::Result<()> {
let metadata = fs::metadata(self.get_file_path())?;
let secs = metadata
.modified()
.unwrap()
.duration_since(SystemTime::UNIX_EPOCH)
.unwrap()
.as_secs();
// build line with all needed properties
let content = format!("{} {} {} {} {}",
self.get_name(),
ts_remote,
metadata.len().to_string(),
secs.to_string(),
self.get_file_hash());
// create parent dir if needed
let mut obj_path = self.get_obj_path();
obj_path.pop();
if !obj_path.exists() {
fs::create_dir_all(obj_path)?;
}
// open ref file
let mut file = OpenOptions::new()
.create_new(true)
.write(true)
.open(self.get_obj_path())?;
writeln!(file, "{}", content)?;
Ok(())
}
fn get_file_ref(&mut self) -> PathBuf {
let mut refs_p = path::refs();
let file_hash = self.get_file_hash().clone();
let (dir, res) = file_hash.split_at(2);
refs_p.push(dir);
if !refs_p.exists() {
let _ = fs::create_dir_all(refs_p.clone());
}
refs_p.push(res);
refs_p
}
// create a file in .nextsync/refs named after this blob's content hash that
// redirects to its relative path
fn create_hash_ref(&mut self) -> io::Result<()> {
// todo check if the file has been modified for moved and copy
let refs_p = self.get_file_ref();
let mut file = OpenOptions::new()
.create(true)
.write(true)
.open(refs_p)?;
// todo deal with duplicate content
writeln!(file, "{}", self.get_relative_file_path().to_str().unwrap())?;
Ok(())
}
pub fn create(&mut self, ts_remote: &str, up_parent: bool) -> io::Result<()> {
// add blob reference to parent
let _ = self.add_ref_to_parent();
if let Err(err) = self.create_blob_ref(ts_remote.clone()) {
eprintln!("err: saving blob ref of {}: {}", self.get_relative_file_path().display(), err);
}
if let Err(err) = self.create_hash_ref() {
eprintln!("err: saving hash ref of {}: {}", self.get_relative_file_path().display(), err);
}
// update date for all parents
if up_parent {
if let Err(err) = update_dates(self.get_relative_file_path(), ts_remote) {
eprintln!("err: updating parent date of {}: {}", self.get_relative_file_path().display(), err);
}
}
Ok(())
}
pub fn update(&mut self, ts_remote: &str) -> io::Result<()> {
// // remove old hash ref
// let mut refs_p = path::refs();
// let binding = self.saved_hash();
// let (dir, res) = binding.split_at(2);
// refs_p.push(dir);
// refs_p.push(res);
// if let Err(err) = fs::remove_file(refs_p) {
// eprintln!("err: removing hash ref of {}: {}", self.r_path.clone().display(), err);
// }
//
// // creating new hash ref
// if let Err(err) = self.create_hash_ref() {
// eprintln!("err: saving hash ref of {}: {}", self.r_path.clone().display(), err);
// }
//
// // updating content of blob's ref
// let metadata = fs::metadata(self.a_path.clone())?;
// let secs = metadata
// .modified()
// .unwrap()
// .duration_since(SystemTime::UNIX_EPOCH)
// .unwrap()
// .as_secs();
//
// let mut content = self.saved_filename();
// content.push_str(" ");
// content.push_str(ts_remote);
// content.push_str(" ");
// content.push_str(&metadata.len().to_string());
// content.push_str(" ");
// content.push_str(&secs.to_string());
// content.push_str(" ");
// content.push_str(&self.get_file_hash());
//
// let mut file = OpenOptions::new()
// .write(true)
// .open(self.obj_p.clone())?;
//
// writeln!(file, "{}", &content)?;
Ok(())
}
} }
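For orientation, a hypothetical usage sketch (not part of the diff) of how the new Blob type is driven from the add/push paths; the file path and the ts_remote value below are made up:

use crate::store::object::blob::Blob;
use crate::commands::status::State;

fn sketch_add(path: &str, ts_remote: &str) -> std::io::Result<()> {
    let mut blob = Blob::from_path(path);

    // status() compares the ref under .nextsync/objects with the working file
    let mut path_from = None;
    match blob.status(&mut path_from) {
        State::New => println!("{}: never pushed", path),
        State::Modified => println!("{}: size/mtime/md5 differ from the saved ref", path),
        _ => {}
    }

    // has_changes() is the cheap check used to refuse re-adding an unchanged file
    if blob.has_changes() {
        blob.create(ts_remote, true)?;
    }
    Ok(())
}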

415
src/store/object/object.rs Normal file
View File

@@ -0,0 +1,415 @@
use std::io;
use std::fs;
use std::path::PathBuf;
use crate::utils::path;
use crate::store::head;
use crate::store::object::{add_node, rm_node};
use crypto::sha1::Sha1;
use crypto::digest::Digest;
use crate::utils::into::IntoPathBuf;
use crate::store::object::{blob::Blob, tree::Tree};
use crate::commands::status::{State, LocalObj};
#[derive(Clone, Copy)]
pub enum ObjType {
TREE,
BLOB,
DEFAULT
}
pub trait ObjMethods {
fn get_type(&self) -> ObjType;
fn get_obj_path(&self) -> PathBuf;
fn get_file_path(&self) -> PathBuf;
fn get_relative_file_path(&self) -> PathBuf;
fn get_repo_file_path(&self) -> PathBuf;
fn get_name(&self) -> String;
fn get_hash_path(&self) -> String;
fn get_local_obj(&self) -> LocalObj;
fn get_line(&self, obj_type: ObjType) -> String;
fn add_ref_to_parent(&self) -> io::Result<()>;
fn rm(&mut self) -> io::Result<()>;
fn rm_node(&mut self) -> io::Result<()>;
fn rm_node_down(&mut self) -> io::Result<()>;
fn exists_on_remote(&mut self) -> bool;
fn has_changes(&mut self) -> bool;
}
pub struct Obj {
name: String,
obj_path: PathBuf,
obj_type: ObjType,
file_path: PathBuf, // file here is used as both file and directory
relative_file_path: PathBuf,
repo_file_path: PathBuf,
hash_path: String, // hash of the relative path of the file
}
impl ObjMethods for Obj {
fn get_type(&self) -> ObjType {
self.obj_type
}
fn get_obj_path(&self) -> PathBuf {
self.obj_path.clone()
}
fn get_file_path(&self) -> PathBuf {
self.file_path.clone()
}
fn get_relative_file_path(&self) -> PathBuf {
self.relative_file_path.clone()
}
fn get_repo_file_path(&self) -> PathBuf {
self.repo_file_path.clone()
}
fn get_local_obj(&self) -> LocalObj {
LocalObj {
otype: match self.obj_type {
ObjType::BLOB => String::from("blob"),
ObjType::TREE => String::from("tree"),
ObjType::DEFAULT => String::from("default"),
},
name: self.get_name(),
path: self.get_repo_file_path(),
path_from: None,
state: State::New
}
}
fn get_name(&self) -> String {
self.name.clone()
}
fn get_hash_path(&self) -> String {
self.hash_path.clone()
}
// build line for parent reference
fn get_line(&self, obj_type: ObjType) -> String {
let type_str = match obj_type {
ObjType::BLOB => "blob",
ObjType::TREE => "tree",
ObjType::DEFAULT => "default",
};
format!("{} {} {}", type_str, self.get_hash_path(), self.get_name())
}
fn add_ref_to_parent(&self) -> io::Result<()> {
let line = self.get_line(self.obj_type);
if self.get_relative_file_path().iter().count() == 1 {
head::add_line(line)?;
} else {
add_node(self.get_relative_file_path().parent().unwrap(), &line)?;
}
Ok(())
}
fn rm_node(&mut self) -> io::Result<()> {
// remove parent reference to self
let line = self.get_line(self.obj_type);
if self.get_relative_file_path().iter().count() == 1 {
head::rm_line(&line)?;
} else {
rm_node(self.get_relative_file_path().parent().unwrap(), &line)?;
}
Ok(())
}
fn rm_node_down(&mut self) -> io::Result<()> {
eprintln!("rm_node_down: tried to do this on Obj");
Ok(())
}
fn rm(&mut self) -> io::Result<()> {
eprintln!("rm: tried to do this on Obj");
Ok(())
}
fn exists_on_remote(&mut self) -> bool {
self.obj_path.exists()
}
fn has_changes(&mut self) -> bool {
if !self.obj_path.exists() {
return true;
}
match self.obj_type {
ObjType::BLOB => Blob::from_path(self.relative_file_path.clone()).has_changes(),
ObjType::TREE => Tree::from_path(self.relative_file_path.clone()).has_changes(),
ObjType::DEFAULT => {
unreachable!();
}
}
}
}
impl ObjMethods for Blob {
fn get_type(&self) -> ObjType {
self.obj.get_type()
}
fn get_obj_path(&self) -> PathBuf {
self.obj.get_obj_path()
}
fn get_file_path(&self) -> PathBuf {
self.obj.get_file_path()
}
fn get_relative_file_path(&self) -> PathBuf {
self.obj.get_relative_file_path()
}
fn get_repo_file_path(&self) -> PathBuf {
self.obj.get_repo_file_path()
}
fn get_local_obj(&self) -> LocalObj {
self.obj.get_local_obj()
}
fn get_name(&self) -> String {
self.obj.get_name()
}
fn get_hash_path(&self) -> String {
self.obj.get_hash_path()
}
fn get_line(&self, _: ObjType) -> String {
self.obj.get_line(ObjType::BLOB)
}
fn add_ref_to_parent(&self) -> io::Result<()> {
self.obj.add_ref_to_parent()
}
fn rm_node(&mut self) -> io::Result<()> {
// remove self object and children object
let _ = self.rm_node_down();
self.obj.rm_node()
}
fn rm_node_down(&mut self) -> io::Result<()> {
// remove reference to self
fs::remove_file(self.get_obj_path())?;
Ok(())
}
fn rm(&mut self) -> io::Result<()> {
// remove all references, including children's one
self.rm_node()?;
// remove file
fs::remove_file(self.get_file_path())?;
Ok(())
}
fn exists_on_remote(&mut self) -> bool {
self.obj.exists_on_remote()
}
fn has_changes(&mut self) -> bool {
self.obj.has_changes()
}
}
impl ObjMethods for Tree {
fn get_type(&self) -> ObjType {
self.obj.get_type()
}
fn get_obj_path(&self) -> PathBuf {
self.obj.get_obj_path()
}
fn get_file_path(&self) -> PathBuf {
self.obj.get_file_path()
}
fn get_relative_file_path(&self) -> PathBuf {
self.obj.get_relative_file_path()
}
fn get_repo_file_path(&self) -> PathBuf {
self.obj.get_repo_file_path()
}
fn get_local_obj(&self) -> LocalObj {
self.obj.get_local_obj()
}
fn get_name(&self) -> String {
self.obj.get_name()
}
fn get_hash_path(&self) -> String {
self.obj.get_hash_path()
}
fn get_line(&self, _: ObjType) -> String {
self.obj.get_line(ObjType::TREE)
}
fn add_ref_to_parent(&self) -> io::Result<()> {
self.obj.add_ref_to_parent()
}
fn rm_node(&mut self) -> io::Result<()> {
// remove self object and children object
let _ = self.rm_node_down();
self.obj.rm_node()
}
/// remove objects and children but not parent reference to self
fn rm_node_down(&mut self) -> io::Result<()> {
// remove children
while let Some(mut child) = self.next() {
match child.get_type() {
ObjType::TREE => child.rm_node_down(),
ObjType::BLOB => child.rm_node_down(),
_ => Ok(())
}?;
};
// remove reference to self
fs::remove_file(self.get_obj_path())?;
Ok(())
}
fn rm(&mut self) -> io::Result<()> {
// remove all references, including children's one
self.rm_node()?;
// remove directory and all subfiles
fs::remove_dir_all(self.get_file_path())?;
Ok(())
}
fn exists_on_remote(&mut self) -> bool {
self.obj.exists_on_remote()
}
fn has_changes(&mut self) -> bool {
self.obj.has_changes()
}
}
impl Obj {
fn new() -> Self {
Obj {
name: String::new(),
obj_path: PathBuf::new(),
file_path: PathBuf::new(),
obj_type: ObjType::DEFAULT,
hash_path: String::new(),
relative_file_path: PathBuf::new(),
repo_file_path: PathBuf::new()
}
}
pub fn from_path<S>(path: S) -> Obj where S: IntoPathBuf {
let path = path.into();
let mut hasher = Sha1::new();
hasher.input_str(path.to_str().unwrap());
let hash = hasher.result_str();
let (dir, res) = hash.split_at(2);
let mut obj_path = path::objects();
obj_path.push(dir);
obj_path.push(res);
// set to absolute path if not already
let root = path::repo_root();
let abs_path = match path.clone().starts_with(root.clone()) {
true => path.clone(),
false => root.join(path.clone())
};
Obj {
name: match abs_path.file_name() {
None => String::new(),
Some(name) => name.to_str().unwrap().to_owned()
},
obj_path,
obj_type: match path.exists() {
true => match path.is_dir() {
true => ObjType::TREE,
false => ObjType::BLOB
},
false => ObjType::DEFAULT
},
file_path: abs_path,
relative_file_path: path.clone(),
repo_file_path: path,
hash_path: hash,
}
}
/// load from the information line stored in the object
pub fn from_line(line: String, base_dir: Option<PathBuf>) -> Box<dyn ObjMethods> {
let mut split = line.trim().rsplit(' ');
if split.clone().count() != 3 {
eprintln!("fatal: invalid object(s) ({})", line.trim());
std::process::exit(1);
}
let name = split.next().unwrap();
let hash_path = split.next().unwrap();
let obj_type = split.next().unwrap();
let (dir, res) = hash_path.split_at(2);
let mut obj_path = path::objects();
obj_path.push(dir);
obj_path.push(res);
let path = match base_dir {
Some(dir) => dir.join(name),
None => PathBuf::from(name),
};
let root = path::repo_root();
let abs_path = root.join(path.clone());
let obj = Obj {
name: String::from(name),
obj_path,
obj_type: match obj_type {
"tree" => ObjType::TREE,
"blob" => ObjType::BLOB,
_ => ObjType::DEFAULT
},
file_path: abs_path,
relative_file_path: path.clone(),
repo_file_path: path,
hash_path: String::from(hash_path),
};
match obj.obj_type {
ObjType::TREE => Box::new(Tree::new(obj)),
ObjType::BLOB => Box::new(Blob::new(obj)),
ObjType::DEFAULT => Box::new(Tree::new(obj))
}
}
pub fn from_head() -> Self {
Obj {
name: String::new(),
obj_path: head::path(),
obj_type: ObjType::TREE,
file_path: PathBuf::new(),
relative_file_path: PathBuf::new(),
repo_file_path: PathBuf::new(),
hash_path: String::new(),
}
}
}
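As a reading aid (not part of the diff): get_line() and from_line() round-trip the one-line entry format "<type> <hash of relative path> <name>" that trees and HEAD store for each child. A hypothetical example with a made-up hash:

// Entry as written by get_line():
//   blob 356a192b7913b04c54574d18c28d46e6395428ab file1
// from_line() rsplits it into (name, hash_path, type), rebuilds the object
// path .nextsync/objects/35/6a192b..., and boxes the result as a Blob or Tree.
let entry = String::from("blob 356a192b7913b04c54574d18c28d46e6395428ab file1");
let child = Obj::from_line(entry, Some(PathBuf::from("dir")));
assert_eq!(child.get_name(), "file1");
assert_eq!(child.get_relative_file_path(), PathBuf::from("dir/file1"));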

View File

@@ -1,103 +1,119 @@
use std::fs::File; use crate::utils::into::IntoPathBuf;
use std::io; use crate::store::object::object::Obj;
use std::path::PathBuf; use crate::store::object::update_dates;
use crate::utils::{read, path}; use crate::store::object::object::ObjMethods;
use crate::store::head; use std::fs::{self, File, OpenOptions};
use crate::store::object::{self, update_dates, parse_path, hash_obj, add_node, create_obj}; use std::io::{self, BufRead, BufReader, Write};
pub fn add(path: PathBuf, date: &str, up_parent: bool) -> io::Result<()> { pub struct Tree {
let (line, hash, name) = parse_path(path.clone(), false); pub obj: Obj,
pub buf_reader: Option<BufReader<File>>,
// add tree reference to parent is_head: bool,
if path.iter().count() == 1 {
head::add_line(line)?;
} else {
add_node(path.parent().unwrap(), &line)?;
}
// create tree object
let mut content = name;
content.push_str(" ");
content.push_str(date);
create_obj(hash, &content)?;
// update date for all parent
if up_parent {
update_dates(path, date)?;
}
Ok(())
} }
pub fn rm(path: PathBuf) -> io::Result<()> {
let (_, lines) = read(path.to_path_buf().to_str().unwrap().to_string()).unwrap(); impl Tree {
for line in lines { pub fn new(obj: Obj) -> Self {
let (ftype, hash, _) = parse_line(line.unwrap()); Tree {
if ftype == String::from("blob") { obj,
object::rm(&hash)?; buf_reader: None,
} else { is_head: false,
rm_hash(hash)?;
} }
} }
Ok(())
}
fn rm_hash(hash: String) -> io::Result<()> { pub fn from_head() -> Self {
let mut obj_p = path::objects(); Tree {
let (dir, res) = hash.split_at(2); obj: Obj::from_head(),
obj_p.push(dir); buf_reader: None,
obj_p.push(res); is_head: true,
}
}
match read::read_lines(obj_p) { pub fn from_path<S>(r_path: S) -> Tree where S: IntoPathBuf {
Ok(mut reader) => { Tree {
reader.next(); obj: Obj::from_path(r_path.into()),
for line in reader { buf_reader: None,
let (ftype, hash, _) = parse_line(line.unwrap()); is_head: false,
if ftype == String::from("blob") { }
object::rm(&hash)?; }
} else {
rm_hash(hash)?; pub fn read(&mut self) {
if self.buf_reader.is_none() {
if let Ok(file) = File::open(self.get_obj_path()) {
self.buf_reader = Some(BufReader::new(file));
// skip the first line (the declaration) if this is not the head
if !self.is_head {
let mut line = String::new();
self.buf_reader.as_mut().unwrap().read_line(&mut line);
} }
} }
}, }
Err(err) => { }
eprintln!("error reading tree: {}", err);
}, pub fn has_changes(&mut self) -> bool {
} todo!();
Ok(()) return true;
} }
pub fn read(tree: String) -> Option<(String, io::Lines<io::BufReader<File>>)> { pub fn next(&mut self) -> Option<Box<dyn ObjMethods>> {
let mut obj_p = path::objects(); self.read();
//if let Some(ref mut file) = self.buf_reader {
let (dir, res) = hash_obj(&tree); // let mut line = String::new();
obj_p.push(dir); // match file.read_line(&mut line) {
obj_p.push(res); // Ok(0) => Ok(None), // End of file
// Ok(_) => Ok(Some(line.trim_end().len())), // Return length of line
match read::read_lines(obj_p) { // Err(e) => Err(e),
Ok(mut reader) => { // }
let name = match reader.next() { //} else {
Some(Ok(line)) => line, // Ok(None) // If file is None, return None
_ => String::from(""), //}
}; match self.buf_reader {
Some((name, reader)) Some(ref mut file) => {
}, let mut line = String::new();
Err(err) => { match file.read_line(&mut line) {
eprintln!("error reading tree: {}", err); Ok(0) => None,
None Ok(_) => Some(Obj::from_line(line, Some(self.get_relative_file_path()))),
}, Err(e) => {
} eprintln!("tree::next: failed to read next line: {}", e);
} None
}
pub fn parse_line(line: String) -> (String, String, String) { }
let mut split = line.rsplit(' '); },
if split.clone().count() != 3 { None => None
eprintln!("fatal: invalid object(s)"); }
std::process::exit(1); }
pub fn create(&self, date: &str, up_parent: bool) -> io::Result<()> {
// add tree reference to parent
let _ = self.add_ref_to_parent();
// create tree object
let content = format!("{} {}", self.get_name(), date);
// create parent dir if needed
let mut obj_path = self.get_obj_path();
obj_path.pop();
if !obj_path.exists() {
fs::create_dir_all(obj_path)?;
}
// open ref file
let mut file = OpenOptions::new()
.create_new(true)
.write(true)
.open(self.get_obj_path())?;
// update date for all parent
// if up_parent {
// if let Err(err) = update_dates(self.get_relative_file_path(), date) {
// eprintln!("err: updating parent date of {}: {}", self.get_relative_file_path().display(), err);
// }
// }
writeln!(file, "{}", content)?;
Ok(())
} }
let name = split.next().unwrap();
let hash = split.next().unwrap();
let ftype = split.next().unwrap();
(String::from(ftype), String::from(hash), String::from(name))
} }
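A hypothetical sketch (not in the diff) of walking one level of the store with the new iterator-style next(); Tree::from_head() starts from the HEAD file, which has no declaration line to skip:

use crate::store::object::{tree::Tree, object::{ObjMethods, ObjType}};

fn list_head_entries() {
    let mut head = Tree::from_head();
    // next() lazily opens the object file and yields one boxed child per stored line
    while let Some(child) = head.next() {
        let kind = match child.get_type() {
            ObjType::TREE => "tree",
            ObjType::BLOB => "blob",
            ObjType::DEFAULT => "?",
        };
        println!("{} {}", kind, child.get_repo_file_path().display());
    }
}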

11
src/subcommands.rs Normal file
View File

@@ -0,0 +1,11 @@
pub mod init;
pub mod status;
pub mod add;
pub mod reset;
pub mod clone;
pub mod push;
pub mod config;
pub mod remote_diff;
pub mod pull;
pub mod remote;
pub mod credential;
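The matching main.rs is not part of this diff; presumably each module's create()/handler() pair is wired into clap roughly like this (illustration only):

let matches = clap::Command::new("nextsync")
    .subcommand(subcommands::add::create())
    .subcommand(subcommands::status::create())
    .subcommand(subcommands::push::create())
    .get_matches();

match matches.subcommand() {
    Some(("add", args)) => subcommands::add::handler(args),
    Some(("status", args)) => subcommands::status::handler(args),
    _ => { /* remaining subcommands elided */ }
}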

42
src/subcommands/add.rs Normal file
View File

@@ -0,0 +1,42 @@
use clap::{Arg, ArgMatches, Command, ArgAction};
use crate::commands;
use crate::commands::add::AddArgs;
pub fn create() -> Command {
Command::new("add")
.arg(
Arg::new("files")
.required_unless_present("all")
.conflicts_with("all")
.num_args(1..)
.value_name("FILE")
.help("Files to add"),
)
.arg(
Arg::new("force")
.short('f')
.long("force")
.action(ArgAction::SetTrue)
.help("Allow adding otherwise ignored files."),
)
.arg(
Arg::new("all")
.short('A')
.long("all")
.action(ArgAction::SetTrue)
.help("This adds, modifies, and removes index entries to match the working tree"),
)
.about("Add changes to the index")
}
pub fn handler(args: &ArgMatches) {
commands::add::add(AddArgs {
files: match args.get_many::<String>("files") {
None => vec![],
Some(vals) => vals.map(|s| s.to_string()).collect(),
},
force: *args.get_one::<bool>("force").unwrap(),
all: *args.get_one::<bool>("all").unwrap(),
});
}
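The required_unless_present/conflicts_with pair above makes FILE and --all mutually exclusive while still requiring one of them; a quick sketch of the expected parses (assuming current clap behaviour):

#[test]
fn add_arg_constraints() {
    assert!(create().try_get_matches_from(["add", "file1", "-f"]).is_ok());
    assert!(create().try_get_matches_from(["add", "-A"]).is_ok());
    // neither FILE nor --all
    assert!(create().try_get_matches_from(["add"]).is_err());
    // FILE and --all together
    assert!(create().try_get_matches_from(["add", "file1", "--all"]).is_err());
}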

52
src/subcommands/clone.rs Normal file
View File

@@ -0,0 +1,52 @@
use clap::{Arg, Command, ArgMatches};
// use textwrap::{fill, Options};
use crate::commands::clone::CloneArgs;
use crate::global;
use crate::commands;
// fn sized_str<'a>(content: &'a str) -> &'a str {
// fill(content, Options::new(70).width).as_str();
// "ok"
// }
pub fn create() -> Command {
// let remote_desc = sized_str(&format!("The repository to clone from. See the NEXTSYNC URLS section below for more information on specifying repositories."));
// let depth_desc = sized_str(&format!("Depth of the recursive fetch of object properties. This value should be lower when there are a lot of files per directory and higher when there are a lot of subdirectories with fewer files. (Default: {})", clone::DEPTH));
Command::new("clone")
.arg(
Arg::new("remote")
.required(true)
.num_args(1)
.value_name("REMOTE")
//.help(_desc)
)
.arg(
Arg::new("depth")
.short('d')
.long("depth")
.required(false)
.num_args(1)
//.help(&depth_desc)
)
.arg(
Arg::new("directory")
.required(false)
.num_args(1)
.value_name("DIRECTORY")
)
.about("Clone a repository into a new directory")
.after_help("NEXTSYNC URLS\nThe following syntaxes may be used:\n\t- user@host.xz/path/to/repo\n\t- http[s]://host.xz/apps/files/?dir=/path/to/repo&fileid=111111\n\t- [http[s]://]host.xz/remote.php/dav/files/user/path/to/repo\n")
}
pub fn handler(args: &ArgMatches) {
if let Some(val) = args.get_one::<String>("directory") {
global::global::set_dir_path(val.to_string());
}
if let Some(remote) = args.get_one::<String>("remote") {
commands::clone::clone(CloneArgs {
remote: remote.to_string(),
depth: args.get_one::<String>("depth").cloned(),
});
}
}

48
src/subcommands/config.rs Normal file
View File

@@ -0,0 +1,48 @@
use clap::{Arg, Command, ArgMatches};
use crate::commands::config::ConfigSetArgs;
use crate::commands;
pub fn create() -> Command {
Command::new("config")
.about("Get and set repository or global options")
.subcommand(
Command::new("get")
.about("Get the value of a configuration variable")
.arg(
Arg::new("name")
.help("The name of the configuration variable")
.required(true)
.index(1)
)
)
.subcommand(
Command::new("set")
.about("Set a configuration variable")
.arg(
Arg::new("name")
.help("The name of the configuration variable")
.required(true)
.index(1)
)
.arg(
Arg::new("value")
.help("The value to set")
.required(true)
.index(2)
)
)
}
pub fn handler(args: &ArgMatches) {
match args.subcommand() {
Some(("set", set_matches)) => {
commands::config::config_set(ConfigSetArgs {
name: set_matches.get_one::<String>("name").unwrap().to_string(),
value: set_matches.get_one::<String>("value").unwrap().to_string(),
});
}
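// note: the "get" subcommand declared in create() is not dispatched here yet,
// so a `config get <name>` call currently falls through to the message below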
_ => println!("Invalid or missing subcommand for 'config'"),
}
}

View File

@@ -0,0 +1,39 @@
use clap::{Arg, Command, ArgMatches};
use crate::commands;
use crate::commands::credential::CredentialArgs;
pub fn create() -> Command {
Command::new("credential")
.about("Manage set of credentials")
.subcommand(
Command::new("add")
.arg(
Arg::new("username")
.required(true)
.num_args(1)
.value_name("NAME")
.help("The username used to connect to nextcloud"),
)
.arg(
Arg::new("password")
.required(false)
.num_args(1)
.value_name("PASSWORD")
.help("The passowd used to connect to nextcloud (optional)"),
)
.about("Add a new set of credential")
)
}
pub fn handler(args: &ArgMatches) {
match args.subcommand() {
Some(("add", add_matches)) => {
commands::credential::credential_add(CredentialArgs {
username: add_matches.get_one::<String>("username").unwrap().to_string(),
password: add_matches.get_one::<String>("password").cloned(),
});
}
_ => println!("Invalid or missing subcommand for 'credential'"),
}
}

23
src/subcommands/init.rs Normal file
View File

@@ -0,0 +1,23 @@
use clap::{Arg, Command, ArgMatches};
use crate::global;
use crate::commands;
pub fn create() -> Command {
Command::new("init")
.arg(
Arg::new("directory")
.required(false)
.num_args(1)
.value_name("DIRECTORY")
)
.about("Create an empty Nextsync repository")
// Create an empty nextsync repository or reinitialize an existing one
}
pub fn handler(args: &ArgMatches) {
if let Some(val) = args.get_one::<String>("directory") {
global::global::set_dir_path(val.to_string());
}
commands::init::init();
}

23
src/subcommands/pull.rs Normal file
View File

@@ -0,0 +1,23 @@
use clap::{Arg, Command, ArgMatches};
use crate::global;
use crate::commands;
pub fn create() -> Command {
Command::new("pull")
.arg(
Arg::new("path")
.required(false)
.num_args(1)
.value_name("PATH")
.help("The path to pull."),
)
.about("Fetch and integrate changes from the nextcloud server.")
}
pub fn handler(args: &ArgMatches) {
if let Some(val) = args.get_one::<String>("path") {
global::global::set_dir_path(val.to_string());
}
commands::pull::pull();
}

6
src/subcommands/push.rs Normal file
View File

@@ -0,0 +1,6 @@
use clap::Command;
pub fn create() -> Command {
Command::new("push")
.about("Push changes on nextcloud")
}

46
src/subcommands/remote.rs Normal file
View File

@@ -0,0 +1,46 @@
use clap::{Arg, Command, ArgMatches, ArgAction};
use crate::commands;
use crate::commands::remote::RemoteArgs;
pub fn create() -> Command {
Command::new("remote")
.about("Manage set of tracked repositories")
.subcommand(
Command::new("add")
.arg(
Arg::new("name")
.required(true)
.index(1)
.help("The name of the remote"),
)
.arg(
Arg::new("url")
.required(true)
.index(2)
.help("The url of the remote"),
)
.about("Add a new remote to this repository")
)
.arg(
Arg::new("verbose")
.short('v')
.long("verbose")
.action(ArgAction::SetTrue)
.help("Be a little more verbose and show remote url after name.")
)
}
pub fn handler(args: &ArgMatches) {
match args.subcommand() {
Some(("add", add_matches)) => {
commands::remote::remote_add(RemoteArgs {
name: add_matches.get_one::<String>("name").unwrap().to_string(),
url: add_matches.get_one::<String>("url").unwrap().to_string(),
});
}
_ => {
commands::remote::remote_list(*args.get_one::<bool>("verbose").unwrap());
}
}
}

View File

@@ -0,0 +1,24 @@
use clap::{Arg, Command, ArgMatches};
use crate::global;
use crate::commands;
pub fn create() -> Command {
Command::new("remote-diff")
.arg(
Arg::new("path")
.required(false)
.num_args(1)
.value_name("PATH")
.help("The path to pull."),
)
.about("Fetch changes from the nextcloud server.")
}
pub fn handler(args: &ArgMatches) {
if let Some(val) = args.get_one::<String>("path") {
global::global::set_dir_path(val.to_string());
}
commands::remote_diff::remote_diff();
}

6
src/subcommands/reset.rs Normal file
View File

@@ -0,0 +1,6 @@
use clap::Command;
pub fn create() -> Command {
Command::new("reset")
.about("Clear the index")
}

30
src/subcommands/status.rs Normal file
View File

@@ -0,0 +1,30 @@
use clap::{Arg, Command, ArgMatches};
use crate::global;
use crate::commands;
use crate::commands::status::StatusArgs;
pub fn create() -> Command {
Command::new("status")
.arg(
Arg::new("directory")
.num_args(1)
.value_name("DIRECTORY")
)
.arg(
Arg::new("nostyle")
.long("nostyle")
.help("Status with minium information and style"),
)
.about("Show the working tree status")
}
pub fn handler(args: &ArgMatches) {
if let Some(val) = args.get_one::<String>("directory") {
global::global::set_dir_path(val.to_string());
}
commands::status::status(StatusArgs {
nostyle: args.contains_id("nostyle"),
});
}

View File

@@ -4,3 +4,4 @@ pub mod nextsyncignore;
pub mod api;
pub mod time;
pub mod remote;
pub mod into;

View File

@@ -1,3 +1,5 @@
use crate::commands::{clone::get_url_props, config};
#[derive(Debug)]
pub struct ApiProps {
pub host: String, // nextcloud.example.com
@@ -15,11 +17,30 @@ impl Clone for ApiProps {
}
}
pub fn get_api_props() -> ApiProps {
let remote = match config::get_remote("origin") {
Some(r) => r,
None => {
eprintln!("fatal: unable to find a remote");
std::process::exit(1);
}
};
let (host, username, root) = get_url_props(&remote);
ApiProps {
host,
username: username.unwrap().to_owned(),
root: root.to_owned(),
}
}
pub fn get_relative_s(p: String, api_props: &ApiProps) -> String {
let mut final_p = p.clone();
final_p = final_p.strip_prefix("/remote.php/dav/files/").unwrap().to_string();
final_p = final_p.strip_prefix(&api_props.username).unwrap().to_string();
final_p = final_p.strip_prefix(&api_props.root).unwrap().to_string();
if final_p.starts_with("/") {
final_p = final_p.strip_prefix("/").unwrap().to_string();
}
final_p
}
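To make the stripping order in get_relative_s concrete, a worked example with made-up values (root is assumed to carry its leading slash, as the unconditional strip_prefix above requires):

let api_props = ApiProps {
    host: "nextcloud.example.com".to_string(),
    username: "admin".to_string(),
    root: "/proj".to_string(),
};
let rel = get_relative_s(
    "/remote.php/dav/files/admin/proj/dir/file1".to_string(),
    &api_props,
);
assert_eq!(rel, "dir/file1");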

30
src/utils/into.rs Normal file
View File

@@ -0,0 +1,30 @@
use std::path::{PathBuf, Path};
pub trait IntoPathBuf {
fn into(self) -> PathBuf;
}
impl IntoPathBuf for PathBuf {
fn into(self) -> PathBuf {
self
}
}
impl IntoPathBuf for &Path {
fn into(self) -> PathBuf {
PathBuf::from(self)
}
}
impl IntoPathBuf for String {
fn into(self) -> PathBuf {
PathBuf::from(self)
}
}
impl IntoPathBuf for &str {
fn into(self) -> PathBuf {
PathBuf::from(self)
}
}
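A short sketch of the call-site flexibility this trait buys; open_blob below is a hypothetical helper, not part of the diff:

use crate::store::object::blob::Blob;

fn open_blob<S: IntoPathBuf>(p: S) -> Blob {
    Blob::from_path(p.into())
}

// All of these compile:
// open_blob("dir/file1");
// open_blob(String::from("dir/file1"));
// open_blob(Path::new("dir").join("file1"));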

View File

@@ -23,6 +23,13 @@ pub fn read_lines() -> Result<Vec<String>, ()> {
Ok(vec![])
}
pub fn get_rules() -> Vec<String> {
match read_lines() {
Ok(r) => r,
Err(_) => vec![],
}
}
pub fn _ignore_files(files: &mut Vec<String>) -> (bool, Vec<String>) {
let mut ignored_f = vec![];
if let Ok(lines) = read_lines() {
@@ -80,6 +87,7 @@ pub fn ignore_file(path: &String, lines: Vec<String>, ignored_f: &mut Vec<String
#[cfg(test)]
mod tests {
use super::*;
use std::io::Cursor;
#[test]
fn test_ignore_files() {

View File

@@ -1,8 +1,61 @@
use std::env;
use std::fs::canonicalize;
use std::path::{PathBuf, Path, Component};
use crate::global::global::DIR_PATH;
/// Improve the path by trying to remove and resolve `..` tokens.
/// Taken from https://stackoverflow.com/questions/68231306/stdfscanonicalize-for-files-that-dont-exist
///
/// This assumes that `a/b/../c` is `a/c` which might be different from
/// what the OS would have chosen when b is a link. This is OK
/// for broot verb arguments but can't be generally used elsewhere
///
/// This function ensures a given path ending with '/' still
/// ends with '/' after normalization.
pub fn normalize_path<P: AsRef<Path>>(path: P) -> PathBuf {
let ends_with_slash = path.as_ref()
.to_str()
.map_or(false, |s| s.ends_with('/'));
let mut normalized = PathBuf::new();
for component in path.as_ref().components() {
match &component {
Component::ParentDir => {
if !normalized.pop() {
normalized.push(component);
}
}
_ => {
normalized.push(component);
}
}
}
if ends_with_slash {
normalized.push("");
}
normalized
}
pub fn normalize_relative(file: &str) -> Result<String, String> {
let current = match current() {
Some(p) => p,
None => {
return Err("cannot find current location".to_owned());
}
};
let p = {
let tmp_p = current.join(PathBuf::from(file));
normalize_path(tmp_p)
};
let relative_p = match p.strip_prefix(repo_root()) {
Ok(p) => p,
Err(_) => return Err("is not in a nextsync repo or doesn't exist".to_owned()),
};
Ok(relative_p.to_str().unwrap().to_owned())
}
pub fn current() -> Option<PathBuf> {
let d = DIR_PATH.lock().unwrap();
@@ -51,12 +104,23 @@ pub fn repo_root() -> PathBuf {
}
}
pub fn is_nextsync_config(path: PathBuf) -> bool {
path.ends_with(".nextsync") || path.starts_with(".nextsync")
}
pub fn nextsync() -> PathBuf {
let mut path = repo_root();
path.push(".nextsync");
path
}
pub fn config() -> PathBuf {
let mut path = repo_root();
path.push(".nextsync");
path.push("config");
path
}
pub fn objects() -> PathBuf {
let mut path = repo_root();
path.push(".nextsync");
@@ -64,6 +128,13 @@ pub fn objects() -> PathBuf {
path
}
pub fn refs() -> PathBuf {
let mut path = repo_root();
path.push(".nextsync");
path.push("refs");
path
}
pub fn nextsyncignore() -> Option<PathBuf> {
let mut path = repo_root();
path.push(".nextsyncignore");
@@ -73,3 +144,11 @@ pub fn nextsyncignore() -> Option<PathBuf> {
None
}
}
pub fn path_buf_to_string(p: PathBuf) -> String {
if let Some(str) = p.to_str() {
str.to_string()
} else {
String::new()
}
}
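Two worked examples of what normalize_path is expected to return (purely lexical, following the component loop above; no filesystem access):

assert_eq!(normalize_path("a/b/../c"), PathBuf::from("a/c"));
// a leading .. has nothing to pop, so it is kept
assert_eq!(normalize_path("../a"), PathBuf::from("../a"));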

View File

@@ -1,4 +1,8 @@
use std::path::PathBuf;
use crate::{services::{req_props::ObjProps, api::ApiError}, store::object::{blob::Blob, Object}, commands::status::State};
use std::collections::HashMap;
use super::{path::{path_buf_to_string, self}, read};
pub struct EnumerateOptions {
pub depth: Option<String>,
@@ -7,20 +11,26 @@ pub struct EnumerateOptions {
pub fn enumerate_remote(
req: impl Fn(&str) -> Result<Vec<ObjProps>, ApiError>,
should_skip: Option<&dyn Fn(ObjProps) -> bool>,
options: EnumerateOptions
) -> (Vec<ObjProps>, Vec<ObjProps>) {
let mut folders: Vec<ObjProps> = vec![ObjProps::new()];
let mut all_folders: Vec<ObjProps> = vec![];
let mut deleted: Vec<PathBuf> = vec![];
let mut files: Vec<ObjProps> = vec![];
let mut objs_hashmap: HashMap<String, Vec<String>> = HashMap::new();
objs_hashmap.insert(
options.relative_s.clone().unwrap_or(String::new()),
Vec::new());
while folders.len() > 0 {
let folder = folders.pop().unwrap();
let relative_s = match folder.relative_s {
Some(relative_s) => relative_s,
None => options.relative_s.clone().unwrap_or(String::new())
};
// request folder content
@@ -44,43 +54,46 @@ pub fn enumerate_remote(
};
// separate folders and files in response
let mut iter = objs.iter(); let d = options.depth.clone()
.unwrap_or("0".to_owned())
.parse::<u16>()
.unwrap();
// first element is not used as it is the fetched folder // first element is not used as it is the fetched folder
let default_depth = calc_depth(iter.next().unwrap()); if let Some(should_skip_fct) = should_skip.clone() {
let d = options.depth.clone().unwrap_or("0".to_owned()).parse::<u16>().unwrap(); iter_with_skip_fct(
let mut skip_depth = 0; objs,
for object in iter { d,
if object.is_dir() { &mut files,
let current_depth = calc_depth(object); &mut folders,
// skip children of skiped folder should_skip_fct,
if skip_depth != 0 && skip_depth < current_depth { &mut objs_hashmap,
continue; &mut all_folders);
}
let should_skip = should_skip(object.clone()); // check for deletion only when folders are not empty
if should_skip { // as the folder's content may not have been fetched yet
skip_depth = current_depth; for (key, children) in objs_hashmap.clone() {
} else { if children.len() != 0 {
skip_depth = 0; get_deleted(key.clone(), children, &mut deleted);
all_folders.push(object.clone()); objs_hashmap.remove(&key);
}
// should get content of this folder if it is not already in this reponse
if current_depth - default_depth == d && !should_skip {
folders.push(object.clone());
}
} else {
let current_depth = calc_depth(object);
// skip children of skiped folder
if skip_depth != 0 && skip_depth < current_depth {
continue;
}
if !should_skip(object.clone()) {
skip_depth = 0;
files.push(object.clone());
} }
} }
} else {
iter_without_skip_fct(
objs,
d,
&mut files,
&mut folders,
&mut all_folders);
}
}
// go through all folders not checked for deletion before
// as they were empty
if let Some(_) = should_skip.clone() {
for (key, children) in objs_hashmap.clone() {
get_deleted(key.clone(), children, &mut deleted);
objs_hashmap.remove(&key);
} }
} }
@@ -88,6 +101,158 @@ pub fn enumerate_remote(
}
fn calc_depth(obj: &ObjProps) -> u16 {
let path = obj.relative_s.clone().unwrap_or(String::new());
path.split("/").count() as u16
}
fn iter_with_skip_fct(
objs: Vec<ObjProps>,
d: u16,
files: &mut Vec<ObjProps>,
folders: &mut Vec<ObjProps>,
should_skip: &dyn Fn(ObjProps) -> bool,
objs_hashmap: &mut HashMap<String, Vec<String>>,
all_folders: &mut Vec<ObjProps>) {
let mut iter = objs.iter();
let default_depth = calc_depth(iter.next().unwrap());
let mut skip_depth = 0;
for object in iter {
let current_depth = calc_depth(object);
if object.is_dir() {
// add this folder to its parent's entry only if the parent exists in the hashmap
let mut r_path = PathBuf::from(object.relative_s.clone().unwrap());
r_path.pop();
let r_ps = path_buf_to_string(r_path);
if let Some(values) = objs_hashmap.get_mut(&r_ps.clone()) {
values.push(object.relative_s.clone().unwrap());
}
// skip children of a skipped folder
if skip_depth != 0 && skip_depth < current_depth {
continue;
}
let should_skip = should_skip(object.clone());
if should_skip {
skip_depth = current_depth;
} else {
// if this folder is not skipped then we initialise its vector
let r_ps_dir = object.relative_s.clone().unwrap();
let mut r_ps_key = r_ps_dir.chars();
r_ps_key.next_back();
objs_hashmap.insert(r_ps_key.as_str().to_owned(), Vec::new());
skip_depth = 0;
all_folders.push(object.clone());
}
// should get content of this folder if it is not already in this response
if current_depth - default_depth == d && !should_skip {
folders.push(object.clone());
}
} else {
// add this file to its parent's entry only if the parent exists in the hashmap
let mut r_path = PathBuf::from(object.relative_s.clone().unwrap());
r_path.pop();
let r_ps = path_buf_to_string(r_path);
if let Some(values) = objs_hashmap.get_mut(&r_ps.clone()) {
values.push(object.relative_s.clone().unwrap());
}
// skip children of a skipped folder
if skip_depth != 0 && skip_depth < current_depth {
continue;
}
if !should_skip(object.clone()) {
skip_depth = 0;
files.push(object.clone());
}
}
}
}
fn iter_without_skip_fct(
objs: Vec<ObjProps>,
d: u16,
files: &mut Vec<ObjProps>,
folders: &mut Vec<ObjProps>,
all_folders: &mut Vec<ObjProps>) {
let mut iter = objs.iter();
let default_depth = calc_depth(iter.next().unwrap());
for object in iter {
if object.is_dir() {
// should get content of this folder if it is not already in this response
let current_depth = calc_depth(object);
if current_depth - default_depth == d {
folders.push(object.clone());
}
all_folders.push(object.clone());
} else {
files.push(object.clone());
}
}
}
fn get_non_new_local_element(iter: &mut dyn Iterator<Item = &PathBuf>) -> Option<PathBuf> {
let mut el = iter.next();
while !el.is_none() && {
if el.unwrap().is_dir() {
// ignore newly created directory (not synced yet)
!Object::new(el.unwrap().clone().to_str().unwrap()).exists()
} else {
// ignore newly created file (not synced yet)
Blob::from_path(el.unwrap().clone()).status(&mut None) == State::New
}
} {
el = iter.next();
}
match el {
Some(e) => Some(e.to_owned()),
None => None
}
}
fn get_deleted(source: String, children: Vec<String>, deleted: &mut Vec<PathBuf>) {
let root = path::repo_root();
let abs_p = root.join(PathBuf::from(source.clone()));
let folder_read = read::read_folder(abs_p.clone());
if let Ok(mut local_objs) = folder_read {
// make the path relative to the repo root instead of absolute
local_objs.iter_mut().for_each(|e| {
*e = e.strip_prefix(path_buf_to_string(root.clone())).unwrap().to_path_buf();
});
let mut iter = local_objs.iter();
let mut local_element = get_non_new_local_element(&mut iter);
while let Some(local) = local_element {
if let None = children.iter().position(|child| {
let child_compared = {
// remove trailing / of directory
if child.ends_with("/") {
let t = child.clone();
let mut ts = t.chars();
ts.next_back();
ts.as_str().to_owned()
} else {
child.clone()
}
};
child_compared == path_buf_to_string(local.clone())
}) {
deleted.push(local.clone());
}
local_element = get_non_new_local_element(&mut iter);
}
}
} }
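For reference, calc_depth just counts '/'-separated components of the WebDAV-relative path; the values it yields for typical relative_s strings (sketch):

assert_eq!("".split("/").count(), 1);             // the folder that was fetched
assert_eq!("dir/".split("/").count(), 2);         // a sub-folder (note trailing slash)
assert_eq!("dir/file1".split("/").count(), 2);    // a file inside it
assert_eq!("dir/sub/file2".split("/").count(), 3);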

116
tests/add.rs Normal file
View File

@@ -0,0 +1,116 @@
use std::str;
mod utils;
use utils::{utils::*, client::ClientTest};
fn line_should_contains(lines: &Vec<String>, nb: usize, str: &str) {
if lines[nb].find(str).is_none()
{
eprintln!("'{}' not found in '{}'", str, lines[nb]);
dbg!(lines);
}
assert!(lines[nb].find(str).is_some());
}
fn lines_should_not_contains(lines: Vec<String>, str: &str) {
for line in lines {
if line.find("Changes not staged for push").is_some() {
return;
}
if line.find(str).is_some() {
eprintln!("'{}' found in '{}'", str, line);
}
assert!(line.find(str).is_none());
}
}
fn collect_status_lines(client: &mut ClientTest) -> Vec<String> {
let out = client.run_cmd("status");
str::from_utf8(&out.stdout)
.unwrap()
.split("\n")
.map(|s| s.to_owned())
.collect()
}
#[cfg(test)]
mod add_tests {
use crate::utils::{server::ServerTest, status_utils::status_should_be_empty};
use super::*;
#[test]
fn simple_add() {
let id = get_random_test_id();
let mut client = ClientTest::new(id).init();
let _ = client.add_file("file1", "foo");
client.run_cmd_ok("add file1");
let lines = collect_status_lines(&mut client);
// test
line_should_contains(&lines, 2, "file1");
client.clean();
}
#[test]
fn add_config_file() {
let id = get_random_test_id();
let mut client = ClientTest::new(id).init();
let _ = client.add_file("file1", "foo");
client.run_cmd_ok("add .nextsync -f");
let lines = collect_status_lines(&mut client);
// test
lines_should_not_contains(lines, ".nextsync");
client.clean();
}
#[test]
fn add_dir_implicit() {
let id = get_random_test_id();
let mut client = ClientTest::new(id).init();
let _ = client.add_dir("dir");
let _ = client.add_file("dir/file1", "foo");
// adding the file should add the dir
client.run_cmd_ok("add dir/file1");
let lines = collect_status_lines(&mut client);
// tests
line_should_contains(&lines, 2, "dir");
line_should_contains(&lines, 3, "dir/file1");
client.clean();
}
#[test]
fn add_file_no_changes() {
// add a file, push it, and add it again
let (mut client, mut server) = init_test();
let _ = client.add_file("file1", "foo");
client.run_cmd_ok("add file1");
client.run_cmd_ok("push");
status_should_be_empty(&mut client);
client.run_cmd_ok("add file1");
status_should_be_empty(&mut client);
clean_test(client, &mut server)
}
}

36
tests/pull.rs Normal file
View File

@@ -0,0 +1,36 @@
mod utils;
use utils::{utils::*};
#[cfg(test)]
mod pull_tests {
use super::*;
#[test]
fn simple_pull() {
let (mut client, mut server) = init_test();
let _ = server.add_file("file1", "foo");
client.run_cmd_ok("pull");
// tests
assert!(client.has_file("file1", "foo"));
clean_test(client, &mut server);
}
#[test]
fn simple_pull_directory() {
let (mut client, mut server) = init_test();
let _ = server.add_dir("dir");
let _ = server.add_file("dir/file1", "foo");
client.run_cmd_ok("pull");
// tests
assert!(client.has_file("dir/file1", "foo"));
clean_test(client, &mut server);
}
}

168
tests/push.rs Normal file
View File

@@ -0,0 +1,168 @@
mod utils;
use utils::{utils::*, status_utils::*};
#[cfg(test)]
mod push_tests {
use super::*;
#[test]
fn simple_push() {
let (mut client, mut server) = init_test();
let _ = client.add_file("file1", "foo");
client.run_cmd_ok("add file1");
client.run_cmd_ok("push");
// tests
assert!(server.has_file("file1", "foo"));
let (staged, not_staged) = client.get_status();
lines_should_not_contains(staged, "file1");
lines_should_not_contains(not_staged, "file1");
clean_test(client, &mut server);
}
#[test]
fn push_update() {
let (mut client, mut server) = init_test();
// init content of file1
let _ = client.add_file("file1", "foo");
client.run_cmd_ok("add file1");
client.run_cmd_ok("push");
// tests
assert!(server.has_file("file1", "foo"));
let (staged, not_staged) = client.get_status();
lines_should_not_contains(staged, "file1");
lines_should_not_contains(not_staged, "file1");
// change content of file1
let _ = client.add_file("file1", "bar");
client.run_cmd_ok("add file1");
client.run_cmd_ok("push");
// tests
assert!(server.has_file("file1", "bar"));
let (staged, not_staged) = client.get_status();
lines_should_not_contains(staged, "file1");
lines_should_not_contains(not_staged, "file1");
clean_test(client, &mut server);
}
#[test]
fn push_dir_explicit() {
let (mut client, mut server) = init_test();
let _ = client.add_dir("dir");
let _ = client.add_file("dir/file2", "bar");
// push dir and file2
client.run_cmd_ok("add dir");
client.run_cmd_ok("push");
// tests
assert!(server.has_file("dir/file2", "bar"));
let (staged, not_staged) = client.get_status();
lines_should_not_contains(staged.clone(), "file2");
lines_should_not_contains(staged, "foo");
lines_should_not_contains(not_staged.clone(), "file2");
lines_should_not_contains(not_staged, "foo");
clean_test(client, &mut server);
}
#[test]
fn push_dir_implicit() {
let (mut client, mut server) = init_test();
let _ = client.add_dir("dir");
let _ = client.add_file("dir/file2", "bar");
// push dir and file2
client.run_cmd_ok("add dir/file2");
client.run_cmd_ok("push");
// tests
assert!(server.has_file("dir/file2", "bar"));
let (staged, not_staged) = client.get_status();
lines_should_not_contains(staged.clone(), "file2");
lines_should_not_contains(staged, "foo");
lines_should_not_contains(not_staged.clone(), "file2");
lines_should_not_contains(not_staged, "foo");
clean_test(client, &mut server);
}
#[test]
fn push_all() {
let (mut client, mut server) = init_test();
let _ = client.add_file("file1", "foo");
let _ = client.add_dir("dir");
let _ = client.add_file("dir/file2", "bar");
// push dir and file2
client.run_cmd_ok("add *");
client.run_cmd_ok("push");
// tests
assert!(server.has_file("file1", "foo"));
assert!(server.has_file("dir/file2", "bar"));
let (staged, not_staged) = client.get_status();
assert!(staged.len() == 0);
assert!(not_staged.len() == 0);
clean_test(client, &mut server);
}
#[test]
fn push_file_deletion() {
let (mut client, mut server) = init_test();
let _ = client.add_file("file1", "foo");
// push file1
client.run_cmd_ok("add file1");
client.run_cmd_ok("push");
// tests
assert!(server.has_file("file1", "foo"));
status_should_be_empty(&mut client);
// remove it
let _ = client.remove_file("file1");
client.run_cmd_ok("add file1");
client.run_cmd_ok("push");
// tests
assert!(server.has_not_file("file1"));
status_should_be_empty(&mut client);
clean_test(client, &mut server);
}
#[test]
fn push_dir_deletion() {
let (mut client, mut server) = init_test();
// push dir and file2
let _ = client.add_dir("dir");
let _ = client.add_file("dir/file2", "bar");
client.run_cmd_ok("add dir");
client.run_cmd_ok("push");
// tests
assert!(server.has_file("dir/file2", "bar"));
// push deletion
let _ = client.remove_dir("dir");
client.run_cmd_ok("add dir");
client.run_cmd_ok("push");
assert!(server.has_not_dir("dir"));
clean_test(client, &mut server);
}
}

14
tests/utils.rs Normal file
View File

@@ -0,0 +1,14 @@
#[path = "utils/server.rs"]
pub mod server;
#[path = "utils/client.rs"]
pub mod client;
#[path = "utils/utils.rs"]
pub mod utils;
#[path = "utils/status_utils.rs"]
pub mod status_utils;
#[path = "utils/files_utils.rs"]
pub mod files_utils;

162
tests/utils/client.rs Normal file
View File

@@ -0,0 +1,162 @@
use std::str;
use std::process::{Command, Output};
use std::fs::{self, File};
use std::io::Write;
use std::env;
use std::path::PathBuf;
use super::files_utils::has_files;
#[cfg(test)]
pub struct ClientTest {
user: String, // the nextcloud user
volume: String, // temp dir for the test
pub test_id: String, // name of the test (e.g nextsync_rand)
exe_path: PathBuf, // absolute path of nextsync executable
}
#[cfg(test)]
impl ClientTest {
pub fn new(id: String) -> Self {
// create a directory in /tmp with the given id
let mut vol = String::from("/tmp/");
vol.push_str(&id);
let _ = fs::create_dir(vol.clone());
// get nextsync path
let mut exe_path = env::current_dir().unwrap();
exe_path = exe_path.join("target/debug/nextsync");
// build the client
ClientTest {
user: String::from("admin"),
volume: vol,
test_id: id,
exe_path
}
}
pub fn init(mut self) -> Self {
self.run_cmd_ok("init");
// set remote url
let url = String::from(format!("{}@nextcloud.local/{}", self.user, self.test_id));
self.run_cmd_ok(&format!("remote add origin {}", url));
// set force_insecure as the debug server has no certificate
self.run_cmd_ok("config set force_insecure true");
// set token for request
self.run_cmd_ok(&format!("credential add {} {}", self.user, self.user));
self
}
pub fn clean(self) -> Self {
let _ = fs::remove_dir_all(&self.volume);
self
}
pub fn run_cmd_ok(&mut self, args: &str) -> Output {
let output = self.run_cmd(args);
if !output.status.success() {
println!("id: {}", self.test_id.clone());
println!("Failed to execute: '{}'", args);
println!("stderr: {}", String::from_utf8_lossy(&output.stderr));
println!("stdout: {}", String::from_utf8_lossy(&output.stdout));
}
assert!(output.status.success());
output
}
pub fn run_cmd(&mut self, args: &str) -> Output {
let output = Command::new(self.exe_path.to_str().unwrap())
.current_dir(self.volume.clone())
.args(args.split(" "))
.output()
.expect("Could not execute nextsync command");
return output;
}
pub fn add_dir(&mut self, name: &str) -> std::io::Result<()> {
let mut path = self.volume.clone();
path.push_str("/");
path.push_str(name);
let _ = fs::create_dir_all(path)?;
Ok(())
}
pub fn add_file(&mut self, name: &str, content: &str) -> std::io::Result<()> {
let mut path = self.volume.clone();
path.push_str("/");
path.push_str(name);
let mut file = File::create(path)?;
file.write_all(content.as_bytes())?;
Ok(())
}
pub fn remove_file(&mut self, name: &str) -> std::io::Result<()> {
let mut path = self.volume.clone();
path.push_str("/");
path.push_str(name);
fs::remove_file(path)?;
Ok(())
}
pub fn remove_dir(&mut self, name: &str) -> std::io::Result<()> {
let mut path = self.volume.clone();
path.push_str("/");
path.push_str(name);
fs::remove_dir_all(path)?;
Ok(())
}
pub fn has_file(&mut self, file: &str, content: &str) -> bool {
let full_path = PathBuf::from(self.volume.clone()).join(file);
has_files(full_path, file, content, self.test_id.clone())
}
/// get the files given by the status command in two vector (staged and not staged)
pub fn get_status(&mut self) -> (Vec<String>, Vec<String>) {
let out = self.run_cmd("status");
let lines: Vec<String> = str::from_utf8(&out.stdout)
.unwrap()
.split("\n")
.map(|s| s.to_owned())
.collect();
let mut staged = vec![];
let mut not_staged = vec![];
let mut in_staged = true;
let mut counter = 0;
for line in lines {
if line.find("not staged").is_some() {
in_staged = false;
counter = 1;
continue;
}
// skip the first two lines as they are not files
if counter < 2 {
counter += 1;
continue;
}
if line == String::from("") {
continue;
}
if in_staged {
staged.push(line);
} else {
not_staged.push(line);
}
}
return (staged, not_staged);
}
}

View File

@@ -0,0 +1,50 @@
use std::io::{BufReader, BufRead};
use std::fs::File;
use std::path::PathBuf;
#[cfg(test)]
pub fn has_files(full_path: PathBuf, file: &str, content: &str, test_id: String) -> bool
{
if !full_path.exists() {
println!("id: {}", test_id.clone());
eprintln!("File '{}' doesn't exists", file);
return false;
}
let f = File::open(full_path).unwrap();
for line in BufReader::new(f).lines(){
if let Ok(line) = line {
if line != content {
println!("id: {}", test_id);
eprintln!("File '{}' is not equal, {} != {}", file, line, content);
return false;
}
return line == content;
}
}
return true;
}
#[cfg(test)]
pub fn has_not_file(full_path: PathBuf, file: &str, test_id: String) -> bool
{
if full_path.exists() {
println!("id: {}", test_id.clone());
eprintln!("File '{}' exists but it shouldn't", file);
return false;
}
return true;
}
#[cfg(test)]
pub fn has_not_dir(full_path: PathBuf, dir: &str, test_id: String) -> bool
{
if full_path.exists() {
println!("id: {}", test_id.clone());
eprintln!("Dir '{}' exists but it shouldn't", dir);
return false;
}
return true;
}

123
tests/utils/server.rs Normal file
View File

@@ -0,0 +1,123 @@
use std::process::Command;
use std::os::unix::fs::PermissionsExt;
use std::fs::{self, File, Permissions};
use std::io::Write;
use std::env;
use std::path::PathBuf;
use super::files_utils::{self, has_files};
#[cfg(test)]
pub struct ServerTest {
user: String,
volume: PathBuf,
pub test_id: String
}
#[cfg(test)]
impl ServerTest {
pub fn new(id: String) -> Self {
let mut volume = env::current_dir().unwrap();
volume = volume.join("tests/data/admin/files");
ServerTest {
user: String::from("admin"),
volume,
test_id: id
}
}
pub fn init(&mut self) -> &mut ServerTest{
self.add_dir(&self.test_id.clone());
self.volume = self.volume.join(self.test_id.clone());
self.sync_root();
self
}
pub fn clean(&mut self) -> &mut ServerTest{
self.remove_dir(self.test_id.clone());
self.sync_root();
self
}
pub fn add_dir(&mut self, path: &str) -> &mut ServerTest {
let mut full_path = self.volume.clone();
full_path.push(path);
match fs::create_dir(&full_path) {
Ok(_) => {
// Set permissions to 777 to allow nextcloud to access it (a workaround that
// avoids setting group and owner to www-data)
if let Err(e) = fs::set_permissions(&full_path, Permissions::from_mode(0o777)) {
eprintln!("Error setting permissions: {}", e);
}
},
Err(e) => eprintln!("Error creating directory: {}", e),
}
// do not sync test directory when creating it
if !path.ends_with("_nextsync")
{
self.sync_test();
}
self
}
pub fn add_file(&mut self, name: &str, content: &str) -> std::io::Result<()> {
let mut full_path = self.volume.clone();
full_path.push(name);
let mut file = File::create(full_path)?;
file.write_all(content.as_bytes())?;
self.sync_test();
Ok(())
}
pub fn remove_dir(&mut self, path: String) -> &mut ServerTest {
let mut full_path = self.volume.clone();
full_path.push(path);
let _ = fs::remove_dir_all(&full_path);
self.sync_test();
self
}
fn sync_root(&self) -> &Self {
self.sync("")
}
fn sync_test(&self) -> &Self {
let test_id = self.test_id.clone();
self.sync(&test_id)
}
fn sync(&self, path: &str) -> &Self {
// perform the occ files:scan command inside the nextcloud docker container
let nextcloud_docker = "master-nextcloud-1";
let args = format!("exec -t --user www-data {} /var/www/html/occ files:scan --path=/{}/files/{}", nextcloud_docker, &self.user, path);
let _output = Command::new("docker")
.args(args.split(" "))
.output()
.expect("Could not execute docker exec command");
self
}
pub fn has_file(&mut self, file: &str, content: &str) -> bool {
let full_path = self.volume.clone().join(file);
has_files(full_path, file, content, self.test_id.clone())
}
pub fn has_not_file(&mut self, file: &str) -> bool {
let full_path = self.volume.clone().join(file);
files_utils::has_not_file(full_path, file, self.test_id.clone())
}
pub fn has_not_dir(&mut self, dir: &str) -> bool {
let full_path = self.volume.clone().join(dir);
dbg!(full_path.clone());
files_utils::has_not_file(full_path, dir, self.test_id.clone())
}
}

View File

@@ -0,0 +1,27 @@
use super::client::ClientTest;
#[cfg(test)]
pub fn lines_should_not_contains(lines: Vec<String>, str: &str) {
for line in lines {
if line.find(str).is_some() {
eprintln!("'{}' found in '{}'", str, line);
}
assert!(line.find(str).is_none());
}
}
#[cfg(test)]
pub fn status_should_be_empty(client: &mut ClientTest) {
let (staged, not_staged) = client.get_status();
if staged.len() != 0 {
eprintln!("id: {}", client.test_id.clone());
eprintln!("Staged should be empty but has '{}' line(s)", staged.len());
assert!(staged.len() == 0);
}
if not_staged.len() != 0 {
eprintln!("id: {}", client.test_id.clone());
eprintln!("Not Staged should be empty but has '{}' line(s)", not_staged.len());
assert!(not_staged.len() == 0);
}
}

31
tests/utils/utils.rs Normal file
View File

@@ -0,0 +1,31 @@
use rand::{distributions::Alphanumeric, Rng};
use super::client::ClientTest;
use super::server::ServerTest;
#[cfg(test)]
pub fn get_random_test_id() -> String {
let mut id: String = rand::thread_rng()
.sample_iter(&Alphanumeric)
.take(7)
.map(char::from)
.collect();
id.push_str("_nextsync");
id.to_owned()
}
#[cfg(test)]
pub fn init_test() -> (ClientTest, ServerTest) {
let id = get_random_test_id();
let mut server = ServerTest::new(id.clone());
server.init();
let client = ClientTest::new(id).init();
(client, server)
}
#[cfg(test)]
pub fn clean_test(client: ClientTest, server: &mut ServerTest) {
client.clean();
server.clean();
}