Compare commits
72 Commits
4e20ec94f9...master
| Author | SHA1 | Date |
|---|---|---|
|  | 4533b9a72d |  |
|  | 980d2d9a5d |  |
|  | 939b6f2fe3 |  |
|  | 4504b98112 |  |
|  | e8c8ab9dfe |  |
|  | 3420634bea |  |
|  | 1aa02a24af |  |
|  | 5e43800d6c |  |
|  | dc7df00ac9 |  |
|  | a1b9cde71a |  |
|  | 7180647d26 |  |
|  | d5891a1a93 |  |
|  | 3207391fdb |  |
|  | fa65b6b071 |  |
|  | 34dee1ceb6 |  |
|  | fe628ffc9f |  |
|  | 6b7a82bec6 |  |
|  | fdcd4633e5 |  |
|  | 06bb51476b |  |
|  | d8b2116aeb |  |
|  | 8ed86a05ea |  |
|  | 7951ad0520 |  |
|  | faf7341525 |  |
|  | 642c358737 |  |
|  | e67082b85a |  |
|  | 211e3702a3 |  |
|  | a2f746d7f6 |  |
|  | 69614b0c9f |  |
|  | a5c5f4a713 |  |
|  | eaacff0e55 |  |
|  | 287953c086 |  |
|  | 6a11bb494b |  |
|  | 1c60560c6e |  |
|  | c6534cfd40 |  |
|  | 7719e27fe8 |  |
|  | fc8e976c9c |  |
|  | 53b103af9e |  |
|  | 81c24b5e3c |  |
|  | 22b9351862 |  |
|  | 0c925bc4f4 |  |
|  | d34b9bab5e |  |
|  | 56234eaa3d |  |
|  | fd477a8139 |  |
|  | 559316e756 |  |
|  | f4a905c57f |  |
|  | c6cf8a9730 |  |
|  | f6db6992a0 |  |
|  | 908ead5b11 |  |
|  | 9ea1d01c27 |  |
|  | 07f6405b26 |  |
|  | dadf00f4a5 |  |
|  | a35c7b20d8 |  |
|  | 863e3bd68a |  |
|  | 57647e5df2 |  |
|  | 41c4796555 |  |
|  | aced8b992a |  |
|  | d323ae3070 |  |
|  | d476622305 |  |
|  | 498fada9ec |  |
|  | f64d719b31 |  |
|  | dcf137667b |  |
|  | 5b46b1e2f1 |  |
|  | 4b12edbe5c |  |
|  | 16dbd25168 |  |
|  | 91a29480df |  |
|  | ce047eba12 |  |
|  | 94220be935 |  |
|  | d5097727cb |  |
|  | cb43a46456 |  |
|  | 4c34df7cfe |  |
|  | 29def4967c |  |
|  | 2775c77c55 |  |
15 .gitignore (vendored)

@@ -1,7 +1,10 @@
*
!/**/
!*.rs
!.gitignore
!README.md
!LICENSE

target
*.test
.env
todo
.nextsync
.nextsyncignore
test
tests/nextcloud-docker-dev
tests/data

761 Cargo.lock (generated)

File diff suppressed because it is too large

24 Cargo.toml

@@ -6,21 +6,25 @@ edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
reqwest = { version = "0.11", features = ["stream", "json", "multipart"] }
tokio = { version = "1", features = ["full"] }
rustc-serialize="0.3.25"
reqwest = { version = "0.12", features = ["stream", "json", "multipart"] }
tokio = { version = "1.37", features = ["full"] }
dotenv ="0.15.0"
clap = "2.33"
clap = "4.5.4"
rust-crypto = "0.2.36"
colored = "2.0.0"
xml-rs = "0.8.0"
regex = "1.8.3"
colored = "2.1.0"
xml-rs = "0.8.19"
regex = "1.10.4"
lazy_static = "1.4.0"
glob = "0.3.1"
textwrap = "0.13"
chrono = "0.4.26"
indicatif = "0.17.5"
textwrap = "0.16.1"
chrono = "0.4.37"
indicatif = "0.17.8"
md5 = "0.7.0"
futures-util = "0.3.28"
futures-util = "0.3.30"
rpassword = "7.3.1"
rand = "0.8.5"
tempfile = "3.10.1"

[profile.release]
debug = true

10 README.md

@@ -9,18 +9,16 @@ This should work pretty much like git with some adaptations to be more debuggabl
## Features

- [x] Cloning
- [x] Status (only for new and deleted files/folders)
- [x] Pushing updates (only deletion and addition no changes)
- [x] Status (new, deleted, modified, copied, moved)
- [x] Pushing updates (new, deleted, modified)
- [x] Using a .nextsyncignore to ignore files
- [ ] Pulling changes
- [ ] Auth without using env variables
- [ ] Detecting local changes
- [x] Auth with a token
- [ ] Remember token
- [ ] Various optimisation

## Usage

For the authentification, I use env variables (USERNAME and PASSWORD), this is temporary.

```
USAGE:
nextsync [SUBCOMMAND]

@@ -2,8 +2,10 @@
## Blob object

```
file_name timestamp size hash
file_name timestamp1 size timestamp2 hash
```
timestamp1: timestamp of file on server to know if the server has an update
timestamp2: timestamp of file locally to know when the file has changed on the system

## Tree object
```
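
The new blob format above keeps two timestamps per file: the server's last-modified time (timestamp1) and the local one (timestamp2). As a rough illustration of how such a line could be read, here is a minimal Rust sketch; the struct and function names are hypothetical, it assumes file names without spaces, and it is not the parser used by nextsync's store module.

```rust
// Hypothetical sketch of parsing a blob entry in the
// `file_name timestamp1 size timestamp2 hash` format described above.
// Not the actual nextsync implementation; assumes file names contain no spaces.
struct BlobEntry {
    file_name: String,
    remote_ts: i64, // timestamp1: last modification known on the server
    size: u64,
    local_ts: i64,  // timestamp2: last modification seen on the local filesystem
    hash: String,
}

fn parse_blob_entry(line: &str) -> Option<BlobEntry> {
    let mut parts = line.split_whitespace();
    Some(BlobEntry {
        file_name: parts.next()?.to_string(),
        remote_ts: parts.next()?.parse().ok()?,
        size: parts.next()?.parse().ok()?,
        local_ts: parts.next()?.parse().ok()?,
        hash: parts.next()?.to_string(),
    })
}
```
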
@@ -6,3 +6,6 @@ pub mod clone;
pub mod push;
pub mod config;
pub mod remote_diff;
pub mod remote;
pub mod pull;
pub mod credential;

@@ -1,59 +1,121 @@
use std::io::Write;
use std::path::{Path, PathBuf};
use clap::Values;
use crate::store;
use crate::utils::{self};
use glob::glob;
use crate::store::{self, object::Object};
use crate::utils::{self, path};
use crate::store::object::object::{Obj, ObjMethods};
use crate::utils::nextsyncignore::{self, ignore_file};
use crate::utils::path::{normalize_relative, repo_root, path_buf_to_string};

pub struct AddArgs<'a> {
pub files: Values<'a>,
pub struct AddArgs {
pub files: Vec<String>,
pub force: bool,
pub all: bool,
}

// todo match deleted files
// todo match weird reg expression
// todo normalize path
pub fn add(args: AddArgs) {
let mut index_file = store::index::open();
let mut added_files: Vec<String> = vec![];
let rules = match nextsyncignore::read_lines() {
Ok(r) => r,
Err(_) => vec![],

let mut pattern: String;
let file_vec: Vec<String> = match args.all {
true => {
pattern = path_buf_to_string(repo_root());
pattern.push_str("/*");
vec![pattern]
},
false => args.files,
};

let mut added_files: Vec<String> = vec![];
let mut ignored_f = vec![];
let file_vec: Vec<&str> = args.files.collect();
let rules = nextsyncignore::get_rules();

for file in file_vec {
if !args.force && ignore_file(&file.to_string(), rules.clone(), &mut ignored_f) {
let f = match normalize_relative(&file) {
Ok(f) => f,
Err(err) => {
eprintln!("err: {} {}", file, err);
continue;
}
let path = Path::new(file);
};

let path = repo_root().join(Path::new(&f));
match path.exists() {
true => {
if path.is_dir() {
added_files.push(String::from(path.to_str().unwrap()));
add_folder_content(path.to_path_buf(), &mut added_files);
} else {
added_files.push(String::from(path.to_str().unwrap()));
let mut obj = Obj::from_path(f.clone());
if obj.has_changes() {
add_entry(path, args.force, &mut added_files, rules.clone(), &mut ignored_f);
}
},
false => {
// todo deleted file/folder verif if exists
added_files.push(String::from(path.to_str().unwrap()));
if Obj::from_path(file.clone()).exists_on_remote() {
// object is deleted so not present but can still be added for deletion
added_files.push(String::from(f));
} else {
// try globbing if nothing has been found
for entry in try_globbing(path) {
add_entry(entry, args.force, &mut added_files, rules.clone(), &mut ignored_f);
}
}
}
}
}

if ignored_f.len() > 0 {
print_ignored_files(ignored_f);
write_added_files(added_files);
}

fn add_entry(entry: PathBuf, force: bool, added_files: &mut Vec<String>, rules: Vec<String>, ignored_f: &mut Vec<String>) {
// ignore nextsync config files
if path::is_nextsync_config(entry.clone()) {
return;
}

// check if the file must be ignored
if !force && ignore_file(&path_buf_to_string(entry.clone()), rules, ignored_f) {
return;
}

// add the parent if there is one and it is not already created
add_parent(entry.clone(), added_files);

added_files.push(path_buf_to_string(entry.strip_prefix(repo_root()).unwrap().to_path_buf()));
if entry.is_dir() {
add_folder_content(entry.to_path_buf(), added_files);
}

}

fn add_parent(entry: PathBuf, added_files: &mut Vec<String>) {
let test_parent = entry.strip_prefix(repo_root()).unwrap().parent();
if test_parent.is_none() || test_parent.unwrap() == PathBuf::new() {
return;
}

let parent = entry.parent().unwrap();

if !Obj::from_path(parent).exists_on_remote() {
add_parent(parent.to_path_buf(), added_files);
added_files.push(path_buf_to_string(parent.strip_prefix(repo_root()).unwrap().to_path_buf()));
}
}

fn print_ignored_files(ignored_files: Vec<String>) {
if ignored_files.len() > 0 {
// todo multiple nextsyncignore
println!("The following paths are ignored by your .nextsyncignore file:");
for file in ignored_f {
for file in ignored_files {
println!("{}", file);
}
}
}

// save all added_files in index
// todo avoid duplication
fn write_added_files(added_files: Vec<String>) {
let mut index_file = store::index::open();
for file in added_files {
if store::index::alread_added(file.clone()) {
continue;
}
match writeln!(index_file, "{}", file) {
Ok(()) => (),
Err(err) => eprintln!("{}", err),
@@ -62,7 +124,25 @@ pub fn add(args: AddArgs) {
drop(index_file);
}

fn try_globbing(path: PathBuf) -> Vec<PathBuf> {
let mut paths: Vec<PathBuf> = vec![];
if let Ok(entries) = glob(path.to_str().unwrap()) {
for entry in entries {
match entry {
Ok(ppath) => paths.push(ppath),
Err(e) => {
eprintln!("err: {} incorrect pattern ({})", path.display(), e);
}
}
}
} else {
eprintln!("err: {} is not something you can add.", path.to_str().unwrap());
}
return paths;
}

fn add_folder_content(path: PathBuf, added_files: &mut Vec<String>) {
// todo check for changes
let mut folders: Vec<PathBuf> = vec![];
folders.push(path);

@@ -70,12 +150,16 @@ fn add_folder_content(path: PathBuf, added_files: &mut Vec<String>) {
if let Ok(entries) = utils::read::read_folder(folder.clone()) {
for entry in entries {
let path_entry = PathBuf::from(entry);
if !path::is_nextsync_config(path_entry.clone())
{
if path_entry.is_dir() {
folders.push(path_entry.clone());
}
added_files.push(String::from(path_entry.to_str().unwrap()));
}
}
}
added_files.push(path_buf_to_string(path_entry.strip_prefix(repo_root()).unwrap().to_path_buf()));

}
}
}
}
}

@@ -2,30 +2,31 @@ use std::io;
use std::io::prelude::*;
use std::fs::DirBuilder;
use std::path::{Path, PathBuf};
use clap::Values;
use regex::Regex;
use crate::services::downloader::Downloader;
use crate::utils::api::ApiProps;
use crate::utils::path::path_buf_to_string;
use crate::utils::remote::{enumerate_remote, EnumerateOptions};
use crate::global::global::{DIR_PATH, set_dir_path};
use crate::services::api::ApiError;
use crate::services::api_call::ApiCall;
use crate::services::req_props::{ReqProps, ObjProps};
use crate::store::object::{tree, blob};
use crate::store::object::{tree::Tree, blob::Blob};
use crate::commands::config;
use crate::commands::init;

pub const DEPTH: &str = "3";

pub struct CloneArgs<'a> {
pub remote: Values<'a>,
pub struct CloneArgs {
pub remote: String,
pub depth: Option<String>,
}

pub fn clone(args: CloneArgs) {
let d = DIR_PATH.lock().unwrap().clone();

let url = args.remote.clone().next().unwrap();
let (host, tmp_user, dist_path_str) = get_url_props(url);
let url = args.remote.clone();
let (host, tmp_user, dist_path_str) = get_url_props(&url);
let username = match tmp_user {
Some(u) => u.to_string(),
None => {
@@ -46,7 +47,7 @@ pub fn clone(args: CloneArgs) {
let iter = Path::new(dist_path_str).iter();
let dest_dir = iter.last().unwrap();
let lp = std::env::current_dir().unwrap().join(dest_dir);
set_dir_path(lp.to_str().unwrap().to_string());
set_dir_path(path_buf_to_string(lp.clone()));
lp
},
};
@@ -57,11 +58,15 @@ pub fn clone(args: CloneArgs) {
std::process::exit(1);
} else {
init::init();
let mut remote_config = api_props.username.clone();
remote_config.push_str("@");
remote_config.push_str(api_props.host.strip_prefix("https://").unwrap());
remote_config.push_str(&api_props.root);
if config::set("remote", &remote_config).is_err() {

// set remote origin in config file
let mut remote_url = api_props.username.clone();
remote_url.push_str("@");
remote_url.push_str(api_props.host.strip_prefix("https://").unwrap());
remote_url.push_str(&api_props.root);

if config::add_remote("origin", &remote_url).is_err()
{
eprintln!("err: not able to save remote");
}
}
@@ -69,7 +74,7 @@ pub fn clone(args: CloneArgs) {
let depth = &args.depth.clone().unwrap_or(DEPTH.to_string());
let (folders, files) = enumerate_remote(
|a| req(&api_props, depth, a),
&should_skip,
None,
EnumerateOptions {
depth: Some(depth.to_owned()),
relative_s: None
@@ -85,12 +90,12 @@ pub fn clone(args: CloneArgs) {
// add tree
let path_folder = p.strip_prefix(ref_path.clone()).unwrap();
let lastmodified = folder.lastmodified.unwrap().timestamp_millis();
if let Err(err) = tree::add(path_folder.to_path_buf(), &lastmodified.to_string(), false) {
if let Err(err) = Tree::from_path(path_folder.to_path_buf()).create(&lastmodified.to_string(), false) {
eprintln!("err: saving ref of {} ({})", path_folder.display(), err);
}
}

let downloader = Downloader::new()
Downloader::new()
.set_api_props(api_props.clone())
.set_files(files)
.should_log()
@@ -101,15 +106,11 @@ fn save_blob(obj: ObjProps) {
let relative_s = &obj.clone().relative_s.unwrap();
let relative_p = PathBuf::from(&relative_s);
let lastmodified = obj.clone().lastmodified.unwrap().timestamp_millis();
if let Err(err) = blob::add(relative_p, &lastmodified.to_string(), false) {
if let Err(err) = Blob::from_path(relative_p).create(&lastmodified.to_string(), false) {
eprintln!("err: saving ref of {} ({})", relative_s.clone(), err);
}
}

fn should_skip(_: ObjProps) -> bool {
return false;
}

fn req(api_props: &ApiProps, depth: &str, relative_s: &str) -> Result<Vec<ObjProps>, ApiError> {
ReqProps::new()
.set_request(relative_s, &api_props)
@@ -176,7 +177,7 @@ mod tests {
fn test_get_url_props() {
let p = "/foo/bar";
let u = Some("user");
let d = String::from("http://nextcloud.com");
// let d = String::from("http://nextcloud.com");
let sd = String::from("https://nextcloud.com");
let sld = String::from("https://nextcloud.example.com");
let ld = String::from("http://nextcloud.example.com");

@@ -1,39 +1,190 @@
use std::fs::OpenOptions;
use std::io::{self, Write};
use std::io::{self, Write, BufRead, Seek, SeekFrom};
use crate::utils::{path, read};
use std::collections::HashMap;

pub fn set(var: &str, val: &str) -> io::Result<()> {
let mut root = path::nextsync();
root.push("config");
pub struct ConfigSetArgs {
pub name: String,
pub value: String,
}

pub fn config_set(args: ConfigSetArgs) {
// configure possible options and their associated category
let mut option_categories: HashMap<&str, &str> = HashMap::new();
option_categories.insert("force_insecure", "core");
option_categories.insert("token", "core");

// get category of option
let category = option_categories.get(args.name.as_str());
if category.is_none() {
eprintln!("fatal: '{}' is not a valid option.", args.name.clone());
std::process::exit(1);
}

let _ = write_option_in_cat(category.unwrap(), &args.name, &args.value);
}

pub fn find_option_in_cat(category: &str, option: &str) -> Option<String> {
let mut config = path::nextsync();
config.push("config");

let mut in_target_category = false;
if let Ok(lines) = read::read_lines(config) {

for line in lines {
if let Ok(line) = line {
let trimmed_line = line.trim();

if trimmed_line.starts_with('[') && trimmed_line.ends_with(']') {
in_target_category = trimmed_line == format!("[{}]", category);
} else if in_target_category {
let parts: Vec<&str> = trimmed_line.splitn(2, '=').collect();
if parts.len() == 2 && parts[0].trim() == option {
return Some(parts[1].trim().to_string());
}
}

}
}
}

None
}

pub fn write_option_in_cat(category: &str, option: &str, value: &str) -> io::Result<()> {
let mut config = path::nextsync();
config.push("config");

let mut file = OpenOptions::new()
.read(true)
.write(true)
.create(true)
.open(&config)?;

let mut in_target_category = false;
let mut option_found = false;

// Go to the beginning of the file
file.seek(SeekFrom::Start(0))?;

// Create a temporary file to hold the modified content
let mut tmp_file = tempfile::Builder::new()
.prefix(".nextsyncconfig")
.tempfile()?;

let reader = io::BufReader::new(&file);
for line in reader.lines() {
let line = line?;
let trimmed_line = line.trim();

if trimmed_line.starts_with('[') && trimmed_line.ends_with(']') {
// if we were already in target category we are now leaving it
// add option only if not found before
if in_target_category && !option_found {
writeln!(&mut tmp_file, "\t{} = {}", option, value)?;
} else if !in_target_category {
in_target_category = trimmed_line == format!("[{}]", category);
}
}

if in_target_category && !option_found && trimmed_line.starts_with(&format!("{} =", option)) {
// Option already exists, update its value
writeln!(&mut tmp_file, "\t{} = {}", option, value)?;
option_found = true;
} else {
// Write the original line
writeln!(&mut tmp_file, "{}", line)?;
}
}

// add to last category
if in_target_category && !option_found {
writeln!(&mut tmp_file, "\t{} = {}", option, value)?;
}

// if the category didn't exist create it and add the option
if !in_target_category {
writeln!(&mut tmp_file, "[{}]", category)?;
writeln!(&mut tmp_file, "\t{} = {}", option, value)?;
}

// Flush and sync the temporary file to ensure data is written to disk
tmp_file.flush()?;

// Go back to the beginning of the file
tmp_file.seek(SeekFrom::Start(0))?;
file.seek(SeekFrom::Start(0))?;

// Copy the contents of the temporary file to the original file
io::copy(&mut tmp_file, &mut file)?;

Ok(())

}

pub fn add_remote(name: &str, url: &str) -> io::Result<()> {
let config = path::config();

// check if there is already a remote with this name
if get_remote(name).is_some()
{
eprintln!("error: remote {} already exists.", name);
std::process::exit(3);
}

// todo check if exist
let mut file = OpenOptions::new()
.read(true)
.write(true)
.create(true)
.append(true)
.open(root)?;
.open(config)?;

writeln!(file, "[remote \"{}\"]", name)?;
writeln!(file, "\turl = {}", url)?;

let mut line = var.to_owned();
line.push_str(" ");
line.push_str(val);
writeln!(file, "{}", line)?;
Ok(())
}

pub fn get(var: &str) -> Option<String> {
let mut root = path::nextsync();
root.push("config");

if let Ok(lines) = read::read_lines(root) {
for line in lines {
if let Ok(l) = line {
if l.starts_with(var.clone()) {
let (_, val) = l.split_once(" ").unwrap();
return Some(val.to_owned());
}
}
}
}
None
pub fn get_remote(name: &str) -> Option<String> {
find_option_in_cat(&format!("remote \"{}\"", name), "url")
}

/// return a vector of remote found in config file (e.g: ("origin", "https://example.com"))
pub fn get_all_remote() -> Vec<(String, String)> {
let config = path::config();

let mut remotes: Vec<(String, String)> = vec![];

let mut in_remote = false;
let mut remote_name = String::new();
if let Ok(lines) = read::read_lines(config) {

for line in lines {
if let Ok(line) = line {
let trimmed_line = line.trim();

if trimmed_line.starts_with("[remote ") {
in_remote = true;
remote_name = trimmed_line.strip_prefix("[remote \"").unwrap().strip_suffix("\"]").unwrap().to_string();
}
else if trimmed_line.starts_with('[')
{
in_remote = false;
}
else if in_remote {
let parts: Vec<&str> = trimmed_line.splitn(2, '=').collect();
if parts.len() == 2 {
remotes.push((remote_name.to_string(), parts[1].trim().to_string()))
}
}

}
}
}
remotes
}

pub fn get_core(name: &str) -> Option<String> {
find_option_in_cat("core", name)
}

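The config helpers above read and write an INI-style `.nextsync/config` file: `find_option_in_cat` scans a `[category]` section for an `option = value` line, `write_option_in_cat` rewrites the file through a temporary file, and `add_remote`/`get_remote`/`get_core` are thin wrappers over them. A minimal sketch of how a caller might chain them, assuming the function names and signatures shown in this diff; the values and the calling function are illustrative only, not an actual call site in the repository:

```rust
use crate::commands::config;

// Illustrative flow only; values are placeholders.
fn configure_example() {
    // Appends a `[remote "origin"]` section with its url (add_remote exits if it already exists).
    if config::add_remote("origin", "user@nextcloud.example.com/files/user").is_err() {
        eprintln!("err: not able to save remote");
    }

    // Reads it back via find_option_in_cat(r#"remote "origin""#, "url") under the hood.
    if let Some(url) = config::get_remote("origin") {
        println!("origin -> {}", url);
    }

    // Updates an existing `token = ...` line in [core], or appends one.
    let _ = config::write_option_in_cat("core", "token", "placeholder-token");

    // get_core("token") resolves through find_option_in_cat("core", "token").
    println!("token -> {:?}", config::get_core("token"));
}
```
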
54 src/commands/credential.rs (Normal file)

@@ -0,0 +1,54 @@
use crate::commands::clone::get_url_props;
use crate::services::api::ApiError::RequestError;

use crate::services::login::Login;
use crate::services::api_call::ApiCall;
use crate::commands::config;

pub struct CredentialArgs {
pub username: String,
pub password: Option<String>,
}

pub fn credential_add(args: CredentialArgs) {
// get remote if exists
let remote = match config::get_remote("origin") {
None => {
eprintln!("fatal: No remote origin, impossible to send request to get token");
std::process::exit(1);
},
Some(remote) => remote
};
let (host, _, _) = get_url_props(&remote);

// get username and password
let username = args.username.to_owned();
let password = match args.password {
Some(mut pwd) => pwd.to_owned(),
None => {
println!("Please enter the password for {}: ", username);
rpassword::read_password().unwrap()
}
};

// get token
let get_token = Login::new()
.set_auth(&username, &password)
.set_host(Some(host))
.send_login();

// deal with error
if let Err(err) = get_token {
if let RequestError(err) = err {
eprintln!("fatal: Failed to get token for these credential. ({})", err);
}
else {
eprintln!("fatal: Failed to get token for these credential.");
}
std::process::exit(1);
}

// save token
let _ = config::write_option_in_cat("core", "token", get_token.unwrap().as_str());
}

@@ -1,7 +1,6 @@
use std::env;
use std::fs::{DirBuilder, File};
use std::path::PathBuf;
use crate::utils::read::read_folder;
use crate::global::global::DIR_PATH;

pub fn init() {
@@ -12,23 +11,24 @@ pub fn init() {
None => env::current_dir().unwrap(),
};

// todo
// check if dir is empty
if let Ok(entries) = read_folder(path.clone()) {
if entries.len() != 0 {
eprintln!("fatal: destination path '{}' already exists and is not an empty directory.", path.display());
std::process::exit(1);
}
} else {
eprintln!("fatal: cannot open the destination directory");
std::process::exit(1);
}
// if let Ok(entries) = read_folder(path.clone()) {
// if entries.len() != 0 {
// eprintln!("fatal: destination path '{}' already exists and is not an empty directory.", path.display());
// std::process::exit(1);
// }
// } else {
// eprintln!("fatal: cannot open the destination directory");
// std::process::exit(1);
// }

let builder = DirBuilder::new();

path.push(".nextsync");
match builder.create(path.clone()) {
Ok(()) => (),
Err(_) => println!("Error: cannot create .nextsync"),
Err(err) => println!("Error: cannot create .nextsync ({})", err),
};

path.push("objects");
@@ -38,6 +38,13 @@ pub fn init() {
};
path.pop();

path.push("refs");
match builder.create(path.clone()) {
Ok(()) => (),
Err(_) => println!("Error: cannot create refs"),
};
path.pop();

path.push("HEAD");
match File::create(path.clone()) {
Ok(_) => (),
@@ -51,12 +58,13 @@ pub fn init() {
Err(_) => println!("Error: cannot create index"),
}

path.pop();
path.pop();
path.push(".nextsyncignore");

match File::create(path) {
Ok(_) => (),
Err(_) => println!("Error: cannot create .nextsyncignore"),
}
// todo
// path.pop();
// path.pop();
// path.push(".nextsyncignore");
//
// match File::create(path) {
// Ok(_) => (),
// Err(_) => println!("Error: cannot create .nextsyncignore"),
// }
}

54 src/commands/pull.rs (Normal file)

@@ -0,0 +1,54 @@
use std::path::PathBuf;
use std::fs::DirBuilder;

use crate::services::downloader::Downloader;
use crate::services::req_props::ObjProps;
use crate::store::object::blob::Blob;
use crate::store::object::tree::Tree;
use crate::utils::api::get_api_props;
use crate::utils::path;
use crate::commands::remote_diff::get_diff;

pub fn pull() {
let relative_p = path::current()
.unwrap()
.strip_prefix(path::repo_root()).unwrap().to_path_buf();
let (folders, files) = get_diff(relative_p);

let root = path::repo_root();

for folder in folders {
let p = root.clone().join(PathBuf::from(folder.relative_s.unwrap()));
if !p.exists() {
// create folder
if let Err(err) = DirBuilder::new().recursive(true).create(p.clone()) {
eprintln!("err: cannot create directory {} ({})", p.display(), err);
}

// add tree
let path_folder = p.strip_prefix(root.clone()).unwrap();
let lastmodified = folder.lastmodified.unwrap().timestamp_millis();
if let Err(err) = Tree::from_path(path_folder).create(&lastmodified.to_string(), false) {
eprintln!("err: saving ref of {} ({})", path_folder.display(), err);
}
}
}

Downloader::new()
.set_api_props(get_api_props())
.set_files(files)
.should_log()
.download(root, Some(&update_blob));
// todo look if need to download or update
}

fn update_blob(obj: ObjProps) {
let relative_s = &obj.clone().relative_s.unwrap();
let relative_p = PathBuf::from(&relative_s);
let lastmodified = obj.clone().lastmodified.unwrap().timestamp_millis();
// todo update function
if let Err(err) = Blob::from_path(relative_p).create(&lastmodified.to_string(), false) {
eprintln!("err: saving ref of {} ({})", relative_s.clone(), err);
}
}

@@ -1,26 +1,38 @@
use std::path::PathBuf;
use crate::commands::{status, config};
use crate::commands::push::push_factory::{PushFactory, PushState};
use crate::store::index;

use super::status::LocalObj;

pub mod push_factory;
pub mod new;
pub mod new_dir;
pub mod rm_dir;
pub mod deleted;
pub mod modified;
pub mod moved;
pub mod copied;

pub fn push() {
// todo
let _remote = match config::get("remote") {
let _remote = match config::get_remote("origin") {
Some(r) => r,
None => {
eprintln!("fatal: no remote set in configuration");
// todo debug
//std::process::exit(1);
String::from("")
String::new()
}
};

let staged_objs = status::get_all_staged();

// exit if there is nothing to push
if staged_objs.len() == 0 {
println!("Everything up-to-date");
std::process::exit(0);
}

// path that certify that all its children can be push whithout hesistation
// (e.g. if remote dir has no changes since last sync all children
// can be pushed without verification)
@@ -51,20 +63,25 @@ pub fn push() {
match push_factory.can_push(&mut whitelist) {
PushState::Valid => {
match push_factory.push() {
Ok(()) => (),
Ok(()) => remove_obj_from_index(obj.clone()),
Err(err) => {
eprintln!("err: pushing {}: {}", obj.name, err);
}
}
},
PushState::Done => (),
PushState::Done => remove_obj_from_index(obj.clone()),
PushState::Conflict => {
eprintln!("conflict when pushing blob");
// download file
}
_ => todo!(),
PushState::Error => (eprintln!("error when pushing changes blob")),
}
}
}
// read index
// if dir upload dir
}

fn remove_obj_from_index(obj: LocalObj) {
if let Err(err) = index::rm_line(obj.path.to_str().unwrap()) {
eprintln!("err: removing {} from index: {}", obj.name, err);
}
}

84 src/commands/push/copied.rs (Normal file)

@@ -0,0 +1,84 @@
use std::path::PathBuf;
use std::io;
use crate::services::api::ApiError;
use crate::services::r#copy::Copy;
use crate::services::api_call::ApiCall;
use crate::services::req_props::ReqProps;
use crate::commands::status::LocalObj;
use crate::commands::push::push_factory::{PushState, PushChange, PushFlowState};
use crate::store::object::blob::Blob;
use crate::utils::path::path_buf_to_string;

pub struct Copied {
pub obj: LocalObj,
}

impl PushChange for Copied {
fn can_push(&self, whitelist: &mut Option<PathBuf>) -> PushState {
match self.flow(&self.obj, whitelist.clone()) {
PushFlowState::Whitelisted => PushState::Done,
PushFlowState::NotOnRemote => PushState::Valid,
PushFlowState::RemoteIsNewer => PushState::Conflict,
PushFlowState::LocalIsNewer => PushState::Conflict,
PushFlowState::Error => PushState::Error,
}
}

fn push(&self) -> io::Result<()> {
let obj = &self.obj;
let res = Copy::new()
.set_url_copy(
&path_buf_to_string(obj.path_from.clone().unwrap()),
obj.path.to_str().unwrap())
.send();

match res {
Err(ApiError::IncorrectRequest(err)) => {
eprintln!("fatal: error copying file {}: {}", obj.name, err.status());
std::process::exit(1);
},
Err(ApiError::RequestError(_)) => {
eprintln!("fatal: request error copying file {}", obj.name);
std::process::exit(1);
}
_ => (),
}

// get lastmodified props to update it
let props = ReqProps::new()
.set_url(obj.path.to_str().unwrap())
.getlastmodified()
.send_req_single();

let prop = match props {
Ok(o) => o,
Err(ApiError::IncorrectRequest(err)) => {
eprintln!("fatal: {}", err.status());
std::process::exit(1);
},
Err(ApiError::EmptyError(_)) => {
eprintln!("Failed to get body");
std::process::exit(1);
}
Err(ApiError::RequestError(err)) => {
eprintln!("fatal: {}", err);
std::process::exit(1);
},
Err(ApiError::Unexpected(_)) => todo!()
};

let lastmodified = prop.lastmodified.unwrap().timestamp_millis();

// create destination blob
if let Err(err) = Blob::from_path(obj.path.clone()).create(&lastmodified.to_string(), false) {
eprintln!("err: creating ref of {}: {}", obj.name.clone(), err);
}

Ok(())
}

// download file with .distant at the end
fn conflict(&self) {
todo!()
}
}

@@ -1,11 +1,13 @@
use std::path::PathBuf;
use std::io;
use crate::services::api::ApiError;
use crate::services::api_call::ApiCall;
use crate::services::delete_path::DeletePath;
use crate::store::index;
use crate::store::object::blob;
use crate::store::object::blob::Blob;
use crate::commands::status::LocalObj;
use crate::commands::push::push_factory::{PushState, PushChange, PushFlowState};
use crate::store::object::object::ObjMethods;

pub struct Deleted {
pub obj: LocalObj
@@ -26,7 +28,7 @@ impl PushChange for Deleted {
let obj = &self.obj;
let res = DeletePath::new()
.set_url(obj.path.to_str().unwrap())
.send_with_err();
.send();

match res {
Err(ApiError::IncorrectRequest(err)) => {
@@ -42,7 +44,7 @@ impl PushChange for Deleted {

// update tree
// todo date
blob::rm(obj.path.clone())?;
Blob::from_path(obj.path.clone()).rm_node()?;

// remove index
index::rm_line(obj.path.to_str().unwrap())?;

80 src/commands/push/modified.rs (Normal file)

@@ -0,0 +1,80 @@
use std::path::PathBuf;
use std::io;
use crate::services::api::ApiError;
use crate::services::api_call::ApiCall;
use crate::services::req_props::ReqProps;
use crate::services::upload_file::UploadFile;
use crate::commands::status::LocalObj;
use crate::commands::push::push_factory::{PushState, PushChange, PushFlowState};
use crate::store::object::blob::Blob;

pub struct Modified {
pub obj: LocalObj,
}

impl PushChange for Modified {
fn can_push(&self, whitelist: &mut Option<PathBuf>) -> PushState {
match self.flow(&self.obj, whitelist.clone()) {
PushFlowState::Whitelisted => PushState::Done,
PushFlowState::NotOnRemote => PushState::Valid,
PushFlowState::RemoteIsNewer => PushState::Conflict,
PushFlowState::LocalIsNewer => PushState::Valid,
PushFlowState::Error => PushState::Error,
}
}

fn push(&self) -> io::Result<()> {
let obj = &self.obj;
let res = UploadFile::new()
.set_url(obj.path.to_str().unwrap())
.set_file(obj.path.clone())
.send();

match res {
Err(ApiError::IncorrectRequest(err)) => {
eprintln!("fatal: error pushing file {}: {}", obj.name, err.status());
std::process::exit(1);
},
Err(ApiError::RequestError(_)) => {
eprintln!("fatal: request error pushing file {}", obj.name);
std::process::exit(1);
}
_ => (),
}

// get lastmodified props to update it
let props = ReqProps::new()
.set_url(obj.path.to_str().unwrap())
.getlastmodified()
.send_req_single();

let prop = match props {
Ok(o) => o,
Err(ApiError::IncorrectRequest(err)) => {
eprintln!("fatal: {}", err.status());
std::process::exit(1);
},
Err(ApiError::EmptyError(_)) => {
eprintln!("Failed to get body");
std::process::exit(1);
}
Err(ApiError::RequestError(err)) => {
eprintln!("fatal: {}", err);
std::process::exit(1);
},
Err(ApiError::Unexpected(_)) => todo!()
};

let lastmodified = prop.lastmodified.unwrap().timestamp_millis();

// update blob
Blob::from_path(obj.path.clone()).update(&lastmodified.to_string())?;

Ok(())
}

// download file with .distant at the end
fn conflict(&self) {
todo!()
}
}

88 src/commands/push/moved.rs (Normal file)

@@ -0,0 +1,88 @@
use std::path::PathBuf;
use std::io;
use crate::services::api::ApiError;
use crate::services::api_call::ApiCall;
use crate::services::r#move::Move;
use crate::services::req_props::ReqProps;
use crate::commands::status::LocalObj;
use crate::commands::push::push_factory::{PushState, PushChange, PushFlowState};
use crate::store::object::blob::Blob;
use crate::utils::path::path_buf_to_string;
use crate::store::object::object::ObjMethods;

pub struct Moved {
pub obj: LocalObj,
}

impl PushChange for Moved {
fn can_push(&self, whitelist: &mut Option<PathBuf>) -> PushState {
match self.flow(&self.obj, whitelist.clone()) {
PushFlowState::Whitelisted => PushState::Done,
PushFlowState::NotOnRemote => PushState::Valid,
PushFlowState::RemoteIsNewer => PushState::Conflict,
PushFlowState::LocalIsNewer => PushState::Conflict,
PushFlowState::Error => PushState::Error,
}
}

fn push(&self) -> io::Result<()> {
let obj = &self.obj;
let res = Move::new()
.set_url_move(
&path_buf_to_string(obj.path_from.clone().unwrap()),
obj.path.to_str().unwrap())
.send();

match res {
Err(ApiError::IncorrectRequest(err)) => {
eprintln!("fatal: error moving file {}: {}", obj.name, err.status());
std::process::exit(1);
},
Err(ApiError::RequestError(_)) => {
eprintln!("fatal: request error moving file {}", obj.name);
std::process::exit(1);
}
_ => (),
}

// get lastmodified props to update it
let props = ReqProps::new()
.set_url(obj.path.to_str().unwrap())
.getlastmodified()
.send_req_single();

let prop = match props {
Ok(o) => o,
Err(ApiError::IncorrectRequest(err)) => {
eprintln!("fatal: {}", err.status());
std::process::exit(1);
},
Err(ApiError::EmptyError(_)) => {
eprintln!("Failed to get body");
std::process::exit(1);
}
Err(ApiError::RequestError(err)) => {
eprintln!("fatal: {}", err);
std::process::exit(1);
},
Err(ApiError::Unexpected(_)) => todo!()
};

let lastmodified = prop.lastmodified.unwrap().timestamp_millis();

// delete source and create destination blob
if let Err(err) = Blob::from_path(obj.path.clone()).create(&lastmodified.to_string(), false) {
eprintln!("err: creating ref of {}: {}", obj.name.clone(), err);
}
if let Err(err) = Blob::from_path(obj.path_from.clone().unwrap()).rm() {
eprintln!("err: removing ref of {}: {}", obj.name.clone(), err);
}

Ok(())
}

// download file with .distant at the end
fn conflict(&self) {
todo!()
}
}

@@ -1,10 +1,10 @@
use std::path::PathBuf;
use std::io;
use crate::services::api::ApiError;
use crate::services::api_call::ApiCall;
use crate::services::req_props::ReqProps;
use crate::services::upload_file::UploadFile;
use crate::store::index;
use crate::store::object::blob;
use crate::store::object::blob::Blob;
use crate::commands::status::LocalObj;
use crate::commands::push::push_factory::{PushState, PushChange, PushFlowState};

@@ -28,15 +28,16 @@ impl PushChange for New {
let res = UploadFile::new()
.set_url(obj.path.to_str().unwrap())
.set_file(obj.path.clone())
.send_with_err();
.send();

match res {
Err(ApiError::IncorrectRequest(err)) => {
eprintln!("fatal: error pushing file {}: {}", obj.name, err.status());
dbg!(&err);
eprintln!("fatal: error pushing file '{}': {}", obj.name, err.status());
std::process::exit(1);
},
Err(ApiError::RequestError(_)) => {
eprintln!("fatal: request error pushing file {}", obj.name);
eprintln!("fatal: request error pushing file '{}'", obj.name);
std::process::exit(1);
}
_ => (),
@@ -67,11 +68,8 @@ impl PushChange for New {

let lastmodified = prop.lastmodified.unwrap().timestamp_millis();

// update blob
blob::add(obj.path.clone(), &lastmodified.to_string(), true)?;

// remove index
index::rm_line(obj.path.to_str().unwrap())?;
// create new blob
Blob::from_path(obj.path.clone()).create(&lastmodified.to_string(), false)?;

Ok(())
}

@@ -1,10 +1,11 @@
use std::path::PathBuf;
use std::io;
use crate::services::api::ApiError;
use crate::services::api_call::ApiCall;
use crate::services::req_props::ReqProps;
use crate::services::create_folder::CreateFolder;
use crate::store::index;
use crate::store::object::tree;
use crate::store::object::tree::Tree;
use crate::commands::status::LocalObj;
use crate::commands::push::push_factory::{PushState, PushChange, PushFlowState};

@@ -33,7 +34,7 @@ impl PushChange for NewDir {
let obj = &self.obj;
let res = CreateFolder::new()
.set_url(obj.path.to_str().unwrap())
.send_with_err();
.send();

match res {
Err(ApiError::IncorrectRequest(err)) => {
@@ -74,7 +75,7 @@ impl PushChange for NewDir {
let lastmodified = prop.lastmodified.unwrap().timestamp_millis();

// update tree
tree::add(obj.path.clone(), &lastmodified.to_string(), true)?;
Tree::from_path(obj.path.clone()).create(&lastmodified.to_string(), true)?;

// remove index
index::rm_line(obj.path.to_str().unwrap())?;

@@ -2,12 +2,16 @@ use std::path::PathBuf;
use std::io;
use crate::commands::status::{State, LocalObj};
use crate::services::api::ApiError;
use crate::store::object;
use crate::services::api_call::ApiCall;
use crate::services::req_props::ReqProps;
use crate::commands::push::new::New;
use crate::commands::push::new_dir::NewDir;
use crate::commands::push::rm_dir::RmDir;
use crate::commands::push::deleted::Deleted;
use crate::commands::push::modified::Modified;
use crate::commands::push::moved::Moved;
use crate::commands::push::copied::Copied;
use crate::store::object::blob::Blob;

#[derive(Debug)]
pub enum PushState {
@@ -38,6 +42,7 @@ pub trait PushChange {
}

fn flow(&self, obj: &LocalObj, whitelist: Option<PathBuf>) -> PushFlowState {
// todo moved: from same file, destination doesn't exist but parent do
if self.is_whitelisted(obj, whitelist) {
return PushFlowState::Whitelisted;
}
@@ -54,6 +59,7 @@ pub trait PushChange {
if err.status() == 404 {
Ok(None)
} else {
eprintln!("err: when requesting properties of {} ({})", obj.name, err.status());
Err(())
}
},
@@ -67,7 +73,16 @@ pub trait PushChange {
};

// check if remote is newest
let last_sync_ts = object::get_timestamp(obj.path.to_str().unwrap().to_string()).unwrap();
let last_sync_ts = {
if obj.otype == String::from("blob") {
Blob::from_path(obj.path.clone())
.saved_remote_ts()
.parse::<i64>().unwrap()
} else {
// todo timestamp on tree
99999999999999
}
};
let remote_ts = obj_data.lastmodified.unwrap().timestamp_millis();

if last_sync_ts < remote_ts {
@@ -84,9 +99,10 @@ impl PushFactory {
pub fn new(&self, obj: LocalObj) -> Box<dyn PushChange> {
match obj.state {
State::New => Box::new(New { obj }),
State::Renamed => todo!(),
State::Modified => todo!(),
State::Modified => Box::new(Modified { obj }),
State::Deleted => Box::new(Deleted { obj }),
State::Moved => Box::new(Moved { obj }),
State::Copied => Box::new(Copied { obj }),
State::Default => todo!(),
}
}
@@ -94,10 +110,10 @@ impl PushFactory {
pub fn new_dir(&self, obj: LocalObj) -> Box<dyn PushChange> {
match obj.state {
State::New => Box::new(NewDir { obj }),
State::Renamed => todo!(),
State::Modified => todo!(),
State::Deleted => Box::new(RmDir { obj }),
State::Default => todo!(),
_ => todo!(),
}
}
}

@@ -1,11 +1,13 @@
use std::path::PathBuf;
use std::io;
use crate::services::api::ApiError;
use crate::services::api_call::ApiCall;
use crate::services::delete_path::DeletePath;
use crate::store::index;
use crate::store::object::tree;
use crate::store::object::tree::Tree;
use crate::commands::status::LocalObj;
use crate::commands::push::push_factory::{PushState, PushChange, PushFlowState};
use crate::store::object::object::ObjMethods;

pub struct RmDir {
pub obj: LocalObj
@@ -32,7 +34,7 @@ impl PushChange for RmDir {
let obj = &self.obj;
let res = DeletePath::new()
.set_url(obj.path.to_str().unwrap())
.send_with_err();
.send();

match res {
Err(ApiError::IncorrectRequest(err)) => {
@@ -48,7 +50,7 @@ impl PushChange for RmDir {

// update tree
// todo update date
tree::rm(obj.path.clone())?;
Tree::from_path(obj.path.clone()).rm()?;

// remove index
index::rm_line(obj.path.to_str().unwrap())?;

27 src/commands/remote.rs (Normal file)

@@ -0,0 +1,27 @@
use crate::commands::config;

use super::config::get_all_remote;

pub struct RemoteArgs {
pub name: String,
pub url: String,
}

pub fn remote_add(args: RemoteArgs) {
let _ = config::add_remote(&args.name, &args.url);
}

pub fn remote_list(verbose: bool) {
let remotes = get_all_remote();
for remote in remotes {
if verbose
{
println!("{} {}", remote.0, remote.1);
}
else
{
println!("{}", remote.0);
}
}

}

@@ -1,52 +1,18 @@
use crate::services::api::ApiError;
use crate::services::api_call::ApiCall;
use crate::services::req_props::{ReqProps, ObjProps};
use crate::store::object::{Object, self};
use crate::store::object::Object;
use crate::utils::api::{ApiProps, get_api_props};
use crate::utils::path;
use crate::utils::remote::{enumerate_remote, EnumerateOptions};
use std::fs::canonicalize;
use std::path::PathBuf;

pub struct RemoteDiffArgs {
pub path: Option<String>,
}

pub fn remote_diff(args: RemoteDiffArgs) {
let path = {
if let Some(path) = args.path {
let mut cur = path::current().unwrap();
cur.push(path);
let canonic = canonicalize(cur).ok().unwrap();
dbg!(&canonic);
dbg!(path::repo_root());
let ok = canonic.strip_prefix(path::repo_root());
dbg!(&ok);

// todo
PathBuf::from("/")
} else {
PathBuf::from("/")
}
};

let mut folders: Vec<ObjProps> = vec![ObjProps {
contentlength: None,
href: None,
lastmodified: None,
relative_s: Some(path.to_str().unwrap().to_owned()),
}];
let mut files: Vec<ObjProps> = vec![];

let depth = "2"; // todo
// todo origin
let api_props = get_api_props();
let (folders, files) = enumerate_remote(
|a| req(&api_props, depth, a),
&should_skip,
EnumerateOptions {
depth: Some(depth.to_owned()),
relative_s: Some(path.to_str().unwrap().to_owned())
});
// todo deletion
pub fn remote_diff() {
let relative_p = path::current()
.unwrap()
.strip_prefix(path::repo_root()).unwrap().to_path_buf();
let (folders, files) = get_diff(relative_p);

for folder in folders {
println!("should pull {}", folder.clone().relative_s.unwrap());
@@ -54,7 +20,20 @@ pub fn remote_diff(args: RemoteDiffArgs) {
for file in files {
println!("should pull {}", file.clone().relative_s.unwrap());
}
}

pub fn get_diff(path: PathBuf) -> (Vec<ObjProps>, Vec<ObjProps>) {

let depth = "6"; // todo opti
let api_props = get_api_props();

enumerate_remote(
|a| req(&api_props, depth, a),
Some(&should_skip),
EnumerateOptions {
depth: Some(depth.to_owned()),
relative_s: Some(path.to_str().unwrap().to_owned())
})
}

fn should_skip(obj: ObjProps) -> bool {
@@ -76,6 +55,7 @@ fn req(api_props: &ApiProps, depth: &str, relative_s: &str) -> Result<Vec<ObjPro
.set_request(relative_s, &api_props)
.set_depth(depth)
.gethref()
.getcontentlength() // todo opti
.getlastmodified()
.send_req_multiple()
}

@@ -1,14 +1,19 @@
use std::fs::File;
use std::path::PathBuf;
use std::io::{self, Lines, BufReader};
use std::collections::HashMap;
use crypto::digest::Digest;
use crypto::sha1::Sha1;
use colored::Colorize;
use crate::utils::path;
use crate::utils::read::{read_folder, read_lines};
use crate::store::object::tree;
use crate::utils::path::{self, path_buf_to_string};
use crate::store::object::blob::Blob;
use crate::store::object::object::Obj;
use crate::store::object::tree::Tree;
use crate::utils::read::read_folder;
use crate::store::index;
use crate::store::object::object::ObjMethods;

pub struct StatusArgs {
pub nostyle: bool,
}

#[derive(PartialEq)]
enum RemoveSide {
@@ -21,27 +26,128 @@ enum RemoveSide {
pub enum State {
Default,
New,
Renamed,
Moved,
Copied,
Modified,
Deleted,
}

// todo: relative path, filename, get modified
// todo: relative path, filename
// todo: not catch added empty folder
pub fn status() {
let (mut new_objs_hashes, mut del_objs_hashes) = get_diff();
// get copy, modified
let staged_objs = get_staged(&mut new_objs_hashes, &mut del_objs_hashes);
pub fn status(args: StatusArgs) {
let mut all_hashes = get_all_objs_hashes();
let staged_objs = get_staged(&mut all_hashes);

let mut objs: Vec<LocalObj> = del_objs_hashes.iter().map(|x| {
let objs: Vec<LocalObj> = all_hashes.iter().map(|x| {
x.1.clone()
}).collect();

for (_, elt) in new_objs_hashes {
objs.push(elt.clone());
if args.nostyle
{
print_status_nostyle(staged_objs, objs);
}
else
{
print_status(staged_objs, objs);
}
}

pub fn get_all_objs() -> Vec<LocalObj> {
let all_hashes = get_all_objs_hashes();
all_hashes.iter().map(|x| {
x.1.clone()
}).collect()
}

fn get_all_objs_hashes() -> HashMap<String, LocalObj> {
let (mut new_objs_hashes, mut del_objs_hashes, objs_modified) = get_diff();
let move_copy_hashes = get_move_copy_objs(&mut new_objs_hashes, &mut del_objs_hashes);

let mut hasher = Sha1::new();
let mut modified_objs_hashes = HashMap::new();
for obj in objs_modified {
hasher.input_str(&obj);
let hash = hasher.result_str();
hasher.reset();

modified_objs_hashes.insert(hash, LocalObj {
// todo otype
otype: get_otype(PathBuf::from(obj.clone())),
name: obj.clone().to_string(),
path: PathBuf::from(obj),
path_from: None,
state: State::Modified
});
}

print_status(staged_objs, objs);
let mut all_hashes = HashMap::new();
all_hashes.extend(move_copy_hashes);
all_hashes.extend(del_objs_hashes);
all_hashes.extend(new_objs_hashes);
all_hashes.extend(modified_objs_hashes);

all_hashes
}

fn should_retain(hasher: &mut Sha1, key: String, obj: LocalObj, move_copy_hashes: &mut HashMap<String, LocalObj>, del_objs_h: &mut HashMap<String, LocalObj>) -> bool {
// todo prevent copied or moved if file empty
// todo deal with directories
if obj.path.is_dir()
{
return true;
}
let mut blob = Blob::from_path(obj.path.clone());
let mut flag = true;
let identical_blobs = blob.get_all_identical_blobs();

// try to find an identical blob among the deleted files (=moved)
for obj_s in identical_blobs.clone() {
if !flag { break; }

hasher.input_str(&obj_s);
let hash = hasher.result_str();
hasher.reset();

if del_objs_h.contains_key(&hash) {
let mut new_move = obj.clone();

let deleted = del_objs_h.get(&hash).unwrap().clone();
del_objs_h.remove(&hash);

new_move.path_from = Some(deleted.path);
new_move.state = State::Moved;
move_copy_hashes.insert(key.clone(), new_move.clone());
flag = false;
}
}

// if did not find anything before try to find a file with the same content (=copy)
if flag {
if let Some(rel_s) = identical_blobs.first() {
let root = path::repo_root();
let rel_p = PathBuf::from(rel_s.clone());
let abs_p = root.join(rel_p.clone());

if abs_p.exists() {
let mut new_copy = obj.clone();
new_copy.path_from = Some(rel_p);
new_copy.state = State::Copied;
move_copy_hashes.insert(key, new_copy.clone());
flag = false;
}
}
}
flag
}

fn get_move_copy_objs(new_objs_h: &mut HashMap<String, LocalObj>, del_objs_h: &mut HashMap<String, LocalObj>) -> HashMap<String, LocalObj> {
let mut hasher = Sha1::new();
let mut move_copy_hashes = HashMap::new();

new_objs_h.retain(|key, obj| {
should_retain(&mut hasher, key.to_owned(), obj.clone(), &mut move_copy_hashes, del_objs_h)
});
move_copy_hashes
}

#[derive(Debug, Clone)]
@@ -49,19 +155,16 @@ pub struct LocalObj {
pub otype: String,
pub name: String,
pub path: PathBuf,
pub path_from: Option<PathBuf>, // origin path when state is move or copy
pub state: State,
}

pub fn get_all_staged() -> Vec<LocalObj> {
let (mut new_objs_hashes, mut del_objs_hashes) = get_diff();
// get copy, modified
let staged_objs = get_staged(&mut new_objs_hashes, &mut del_objs_hashes);

staged_objs.clone()
// todo opti getting staged and then finding differences ?
let mut all_hashes = get_all_objs_hashes();
get_staged(&mut all_hashes)
}

fn get_staged(new_objs_h: &mut HashMap<String, LocalObj>, del_objs_h: &mut HashMap<String, LocalObj>) -> Vec<LocalObj> {
fn get_staged(hashes: &mut HashMap<String, LocalObj>) -> Vec<LocalObj> {
let mut lines: Vec<String> = vec![];

if let Ok(entries) = index::read_line() {
@@ -70,7 +173,6 @@ fn get_staged(new_objs_h: &mut HashMap<String, LocalObj>, del_objs_h: &mut HashM
}
}

let mut hasher = Sha1::new();
let mut staged_objs: Vec<LocalObj> = vec![];

@@ -82,12 +184,9 @@ fn get_staged(new_objs_h: &mut HashMap<String, LocalObj>, del_objs_h: &mut HashM
hasher.reset();

// find it on the list of hashes
if new_objs_h.contains_key(&hash) {
staged_objs.push(new_objs_h.get(&hash).unwrap().clone());
new_objs_h.remove(&hash);
} else if del_objs_h.contains_key(&hash) {
staged_objs.push(del_objs_h.get(&hash).unwrap().clone());
del_objs_h.remove(&hash);
if hashes.contains_key(&hash) {
staged_objs.push(hashes.get(&hash).unwrap().clone());
hashes.remove(&hash);
}else {
let mut t_path = ref_p.clone();
let relative_p = PathBuf::from(obj.clone());
@@ -96,6 +195,7 @@ fn get_staged(new_objs_h: &mut HashMap<String, LocalObj>, del_objs_h: &mut HashM
otype: get_otype(t_path.clone()),
name: obj.to_string(),
path: relative_p.clone(),
path_from: None,
state: {
if t_path.exists() {
State::New
@@ -110,19 +210,27 @@ fn get_staged(new_objs_h: &mut HashMap<String, LocalObj>, del_objs_h: &mut HashM
staged_objs
}

fn get_diff() -> (HashMap<String, LocalObj>, HashMap<String, LocalObj>) {
fn read_tree_to_hashmap(tree: &mut Tree, hashes: &mut HashMap<String, LocalObj>, path: PathBuf) {
while let Some(child) = tree.next() {
hashes.insert(String::from(child.get_hash_path()), child.get_local_obj());
};
}

fn get_diff() -> (HashMap<String, LocalObj>, HashMap<String, LocalObj>, Vec<String>) {
let mut hashes = HashMap::new();
let mut objs: Vec<String> = vec![];
let mut objs_modified: Vec<String> = vec![];

let root = path::repo_root();

let nextsync_path = path::nextsync();
let current_p = path::current().unwrap();
// todo use repo_root instead of current
let dist_path = current_p.strip_prefix(root.clone()).unwrap().to_path_buf();

if let Ok(lines) = read_head(nextsync_path.clone()) {
add_to_hashmap(lines, &mut hashes, dist_path.clone());
}
read_tree_to_hashmap(&mut Tree::from_head(), &mut hashes, dist_path.clone());
//if let Ok(lines) = read_lines(head::path()) {
//    add_to_hashmap(lines, &mut hashes, dist_path.clone());
//}

if let Ok(entries) = read_folder(root.clone()) {
add_to_vec(entries, &mut objs, root.clone());
|
||||
@@ -137,18 +245,25 @@ fn get_diff() -> (HashMap<String, LocalObj>, HashMap<String, LocalObj>) {
|
||||
let obj_path = root.clone().join(cur_path.clone());
|
||||
|
||||
if obj_path.is_dir() {
|
||||
if let Some((_, lines)) = tree::read(cur_obj.clone()) {
|
||||
add_to_hashmap(lines, &mut hashes, cur_path.clone());
|
||||
}
|
||||
// read virtual tree
|
||||
read_tree_to_hashmap(&mut Tree::from_path(cur_obj.clone()), &mut hashes, dist_path.clone());
|
||||
//let mut tree = Tree::from_path(cur_obj.clone());
|
||||
//if let Some(lines) = tree.get_children() {
|
||||
//add_to_hashmap(lines, &mut hashes, cur_path.clone());
|
||||
//}
|
||||
|
||||
// read physical tree
|
||||
if let Ok(entries) = read_folder(obj_path.clone()) {
|
||||
add_to_vec(entries, &mut objs, root.clone());
|
||||
}
|
||||
|
||||
// remove duplicate
|
||||
let diff = remove_duplicate(&mut hashes, &mut objs, RemoveSide::Both);
|
||||
obj_to_analyse.append(&mut diff.clone());
|
||||
} else {
|
||||
// todo look for change
|
||||
if Blob::from_path(cur_path).has_changes() {
|
||||
objs_modified.push(cur_obj);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
@@ -164,17 +279,20 @@ fn get_diff() -> (HashMap<String, LocalObj>, HashMap<String, LocalObj>) {
|
||||
hasher.input_str(&obj);
|
||||
let hash = hasher.result_str();
|
||||
hasher.reset();
|
||||
|
||||
let p = PathBuf::from(obj.to_string());
|
||||
let abs_p = path::repo_root().join(p.clone());
|
||||
// todo name
|
||||
new_objs_hashes.insert(String::from(hash), LocalObj {
|
||||
otype: get_otype(p.clone()),
|
||||
otype: get_otype(abs_p),
|
||||
name: obj.to_string(),
|
||||
path: p,
|
||||
path_from: None,
|
||||
state: State::New
|
||||
});
|
||||
}
|
||||
|
||||
(new_objs_hashes, hashes)
|
||||
(new_objs_hashes, hashes, objs_modified)
|
||||
}
|
||||
|
||||
fn get_otype(p: PathBuf) -> String {
|
||||
@@ -185,27 +303,28 @@ fn get_otype(p: PathBuf) -> String {
|
||||
}
|
||||
}
|
||||
|
||||
fn add_to_hashmap(lines: Lines<BufReader<File>>, hashes: &mut HashMap<String, LocalObj>, path: PathBuf) {
|
||||
for line in lines {
|
||||
if let Ok(ip) = line {
|
||||
if ip.clone().len() > 5 {
|
||||
let (ftype, hash, name) = tree::parse_line(ip);
|
||||
let mut p = path.clone();
|
||||
p.push(name.clone());
|
||||
hashes.insert(String::from(hash), LocalObj{
|
||||
otype: String::from(ftype),
|
||||
name: String::from(name),
|
||||
path: p,
|
||||
state: State::Default,
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
//fn add_to_hashmap(lines: Lines<BufReader<File>>, hashes: &mut HashMap<String, LocalObj>, path: PathBuf) {
|
||||
// for line in lines {
|
||||
// if let Ok(ip) = line {
|
||||
// if ip.clone().len() > 5 {
|
||||
// let (ftype, hash, name) = tree::parse_line(ip);
|
||||
// let mut p = path.clone();
|
||||
// p.push(name.clone());
|
||||
// hashes.insert(String::from(hash), LocalObj{
|
||||
// otype: String::from(ftype),
|
||||
// name: String::from(name),
|
||||
// path: p,
|
||||
// path_from: None,
|
||||
// state: State::Default,
|
||||
// });
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
//}
|
||||
|
||||
fn add_to_vec(entries: Vec<PathBuf>, objects: &mut Vec<String>, root: PathBuf) {
|
||||
for entry in entries {
|
||||
if !is_nextsync_config(entry.clone()) {
|
||||
if !path::is_nextsync_config(entry.clone()) {
|
||||
let object_path = entry.strip_prefix(root.clone()).unwrap();
|
||||
objects.push(String::from(object_path.to_str().unwrap()));
|
||||
}
|
||||
@@ -231,7 +350,7 @@ fn print_status(staged_objs: Vec<LocalObj>, objs: Vec<LocalObj>) {
|
||||
// not staged files
|
||||
if objs.len() != 0 {
|
||||
println!("Changes not staged for push:");
|
||||
println!(" (Use\"nextsync add <file>...\" to update what will be pushed)");
|
||||
println!(" (Use \"nextsync add <file>...\" to update what will be pushed)");
|
||||
|
||||
for object in objs {
|
||||
print_object(object);
|
||||
@@ -239,27 +358,52 @@ fn print_status(staged_objs: Vec<LocalObj>, objs: Vec<LocalObj>) {
|
||||
}
|
||||
}
|
||||
|
||||
fn print_status_nostyle(staged_objs: Vec<LocalObj>, objs: Vec<LocalObj>) {
|
||||
// todo sort
|
||||
if staged_objs.len() == 0 && objs.len() == 0 {
|
||||
return;
|
||||
}
|
||||
for obj in staged_objs {
|
||||
if obj.state == State::Deleted {
|
||||
println!("deleted: {}", obj.name);
|
||||
} else if obj.state == State::New {
|
||||
println!("new: {}", obj.name);
|
||||
} else if obj.state == State::Modified {
|
||||
println!("modified: {}", obj.name);
|
||||
} else if obj.state == State::Moved {
|
||||
println!("moved: {} => {}", path_buf_to_string(obj.path_from.unwrap()), path_buf_to_string(obj.path));
|
||||
} else if obj.state == State::Copied {
|
||||
println!("copied: {} => {}", path_buf_to_string(obj.path_from.unwrap()), path_buf_to_string(obj.path));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn print_object(obj: LocalObj) {
|
||||
if obj.state == State::Deleted {
|
||||
println!(" {} {}", String::from("deleted:").red(), obj.name.red());
|
||||
} else if obj.state == State::Renamed {
|
||||
println!(" {} {}", String::from("renamed:").red(), obj.name.red());
|
||||
} else if obj.state == State::New {
|
||||
println!(" {} {}", String::from("new:").red(), obj.name.red());
|
||||
} else if obj.state == State::Modified {
|
||||
println!(" {} {}", String::from("modified:").red(), obj.name.red());
|
||||
} else if obj.state == State::Moved {
|
||||
println!(" {} {} => {}", String::from("moved:").red(), path_buf_to_string(obj.path_from.unwrap()).red(), path_buf_to_string(obj.path).red());
|
||||
} else if obj.state == State::Copied {
|
||||
println!(" {} {} => {}", String::from("copied:").red(), path_buf_to_string(obj.path_from.unwrap()), path_buf_to_string(obj.path).red());
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
fn print_staged_object(obj: LocalObj) {
|
||||
if obj.state == State::Deleted {
|
||||
println!(" {} {}", String::from("deleted:").green(), obj.name.green());
|
||||
} else if obj.state == State::Renamed {
|
||||
println!(" {} {}", String::from("renamed:").green(), obj.name.green());
|
||||
} else if obj.state == State::New {
|
||||
println!(" {} {}", String::from("new:").green(), obj.name.green());
|
||||
} else if obj.state == State::Modified {
|
||||
println!(" {} {}", String::from("modified:").green(), obj.name.green());
|
||||
} else if obj.state == State::Moved {
|
||||
println!(" {} {} => {}", String::from("moved:").green(), path_buf_to_string(obj.path_from.unwrap()).green(), path_buf_to_string(obj.path).green());
|
||||
} else if obj.state == State::Copied {
|
||||
println!(" {} {} => {}", String::from("copied:"), path_buf_to_string(obj.path_from.unwrap()).green(), path_buf_to_string(obj.path).green());
|
||||
}
|
||||
}
|
||||
|
||||
@@ -292,15 +436,6 @@ fn remove_duplicate(hashes: &mut HashMap<String, LocalObj>, objects: &mut Vec<St
|
||||
duplicate
|
||||
}
|
||||
|
||||
fn is_nextsync_config(path: PathBuf) -> bool {
|
||||
path.ends_with(".nextsync")
|
||||
}
|
||||
|
||||
fn read_head(mut path: PathBuf) -> io::Result<io::Lines<io::BufReader<File>>> {
|
||||
path.push("HEAD");
|
||||
read_lines(path)
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
@@ -325,6 +460,7 @@ mod tests {
|
||||
otype: String::from("tree"),
|
||||
name: String::from("test"),
|
||||
path: PathBuf::from(""),
|
||||
path_from: None,
|
||||
state: State::Default,
|
||||
};
|
||||
hashes.insert(hash1.clone(), default_obj.clone());
|
||||
|
||||
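To summarize the detection logic added above (my reading of the diff, not text from the repository): a new path whose content hash matches a deleted entry is reclassified as Moved, a match against a file that still exists on disk becomes Copied, and everything else stays New. A self-contained sketch of that rule, using plain HashMaps as hypothetical stand-ins for Blob and LocalObj:

use std::collections::HashMap;

#[derive(Debug)]
enum Change {
    New(String),
    Moved { from: String, to: String },
    Copied { from: String, to: String },
}

// `new` and `deleted` map a content hash to a path; `on_disk` maps a content
// hash to a path that still exists in the working tree.
fn classify(
    new: HashMap<String, String>,
    mut deleted: HashMap<String, String>,
    on_disk: &HashMap<String, String>,
) -> Vec<Change> {
    let mut out = vec![];
    for (hash, path) in new {
        if let Some(from) = deleted.remove(&hash) {
            // same content as a deleted file: treat as a move
            out.push(Change::Moved { from, to: path });
        } else if let Some(from) = on_disk.get(&hash) {
            // same content as a file still present: treat as a copy
            out.push(Change::Copied { from: from.clone(), to: path });
        } else {
            out.push(Change::New(path));
        }
    }
    out
}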
204 src/main.rs
@@ -1,9 +1,6 @@
use clap::{App, Arg, SubCommand};
use textwrap::{fill, Options};
use clap::Command;

use crate::commands::add::AddArgs;
use crate::commands::remote_diff::RemoteDiffArgs;
use crate::commands::clone::{self, CloneArgs};
mod subcommands;

mod commands;
mod utils;
@@ -12,176 +9,41 @@ mod global;
mod store;

fn main() {
let matches = App::new("Nextsync")
let app = Command::new("Nextsync")
.version("1.0")
.author("grimhilt")
.about("A git-line command line tool to interact with nextcloud")
.setting(clap::AppSettings::SubcommandRequiredElseHelp)
.subcommand(
SubCommand::with_name("clone")
.arg(
Arg::with_name("remote")
.required(true)
.takes_value(true)
.value_name("REMOTE")
.help(&fill(
"The repository to clone from. See the NEXTSYNC URLS section below for more information on specifying repositories.",
Options::new(70).width,
))
)
.arg(
Arg::with_name("depth")
.short("d")
.long("depth")
.required(false)
.takes_value(true)
.help(&fill(
&format!("Depth of the recursive fetch of object properties. This value should be lower when there are a lot of files per directory and higher when there are a lot of subdirectories with fewer files. (Default: {})", clone::DEPTH),
Options::new(70).width,
))
)
.arg(
Arg::with_name("directory")
.required(false)
.takes_value(true)
.value_name("DIRECTORY")
)
.about("Clone a repository into a new directory")
.after_help("NEXTSYNC URLS\nThe following syntaxes may be used:\n\t- user@host.xz/path/to/repo\n\t- http[s]://host.xz/apps/files/?dir=/path/to/repo&fileid=111111\n\t- [http[s]://]host.xz/remote.php/dav/files/user/path/to/repo\n")
)
.subcommand(
SubCommand::with_name("init")
.arg(
Arg::with_name("directory")
.required(false)
.takes_value(true)
.value_name("DIRECTORY")
)
.about("Create an empty Nextsync repository") // Create an empty Git repository or reinitialize an existing one
)
.subcommand(
SubCommand::with_name("status")
.arg(
Arg::with_name("directory")
.required(false)
.takes_value(true)
.value_name("DIRECTORY")
)
.about("Show the working tree status")
)
.subcommand(
SubCommand::with_name("reset")
.about("Clear the index")
)
.subcommand(
SubCommand::with_name("push")
.about("Push changes on nextcloud")
)
.subcommand(
SubCommand::with_name("add")
.arg(
Arg::with_name("files")
.required(true)
.multiple(true)
.takes_value(true)
.value_name("FILE")
.help("Files to add"),
)
.arg(
Arg::with_name("force")
.short("f")
.long("force")
.help("Allow adding otherwise ignored files."),
)
.about("Add changes to the index")
)
.subcommand(
SubCommand::with_name("config")
.arg(
Arg::with_name("variable")
.required(true)
.takes_value(true)
.value_name("VARIABLE")
)
.arg(
Arg::with_name("value")
.required(true)
.takes_value(true)
.value_name("VALUE")
)
)
.subcommand(
SubCommand::with_name("remote-diff")
.arg(
Arg::with_name("path")
.required(false)
.takes_value(true)
.value_name("PATH")
.help("The path to pull."),
)
.about("Fetch new and modifed files from the nextcloud server.")
)
.subcommand(
SubCommand::with_name("test")
)
.get_matches();
.subcommands([
subcommands::clone::create(),
subcommands::init::create(),
subcommands::status::create(),
subcommands::add::create(),
subcommands::push::create(),
subcommands::reset::create(),
subcommands::remote::create(),
subcommands::config::create(),
subcommands::remote_diff::create(),
subcommands::pull::create(),
subcommands::credential::create(),
]);
// .setting(clap::AppSettings::SubcommandRequiredElseHelp);

if let Some(matches) = matches.subcommand_matches("init") {
if let Some(val) = matches.values_of("directory") {
global::global::set_dir_path(String::from(val.clone().next().unwrap()));
}
commands::init::init();
} else if let Some(matches) = matches.subcommand_matches("status") {
if let Some(val) = matches.values_of("directory") {
global::global::set_dir_path(String::from(val.clone().next().unwrap()));
}
commands::status::status();
} else if let Some(matches) = matches.subcommand_matches("add") {
if let Some(files) = matches.values_of("files") {
commands::add::add(AddArgs {
files,
force: matches.is_present("force"),
});
}
} else if let Some(_) = matches.subcommand_matches("reset") {
commands::reset::reset();
} else if let Some(matches) = matches.subcommand_matches("clone") {
if let Some(val) = matches.values_of("directory") {
global::global::set_dir_path(String::from(val.clone().next().unwrap()));
}
if let Some(remote) = matches.values_of("remote") {
commands::clone::clone(CloneArgs {
remote,
depth: matches.values_of("depth").map(
|mut val| val.next().unwrap().to_owned()
),
});
}
} else if let Some(_matches) = matches.subcommand_matches("push") {
commands::push::push();
} else if let Some(matches) = matches.subcommand_matches("config") {
if let Some(mut var) = matches.values_of("variable") {
if let Some(mut val) = matches.values_of("value") {
if commands::config::set(var.next().unwrap(), val.next().unwrap()).is_err() {
eprintln!("fatal: cannot save the value");
}
}
}
} else if let Some(matches) = matches.subcommand_matches("remote-diff") {
commands::remote_diff::remote_diff(RemoteDiffArgs {
path: {
if let Some(mut path) = matches.values_of("path") {
match path.next() {
Some(p) => Some(String::from(p)),
None => None,
}
} else {
None
}
},
});
} else if let Some(_) = matches.subcommand_matches("test") {
let matches = app.get_matches();

}
match matches.subcommand() {
Some(("init", args)) => subcommands::init::handler(args),
Some(("status", args)) => subcommands::status::handler(args),
Some(("add", args)) => subcommands::add::handler(args),
Some(("reset", _)) => commands::reset::reset(),
Some(("clone", args)) => subcommands::clone::handler(args),
Some(("push", _)) => commands::push::push(),
Some(("config", args)) => subcommands::config::handler(args),
Some(("remote-diff", args)) => subcommands::remote_diff::handler(args),
Some(("pull", args)) => subcommands::pull::handler(args),
Some(("remote", args)) => subcommands::remote::handler(args),
Some(("credential", args)) => subcommands::credential::handler(args),
Some((_, _)) => {},
None => {},
};
}
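The main.rs rewrite above moves from clap 2's App/SubCommand builder to clap 4's Command, and delegates each subcommand to a create()/handler() pair under the subcommands module. Those modules are not part of this excerpt; the sketch below is only my guess at their shape, inferred from how main.rs calls them, with placeholder behavior in the handler:

// Hypothetical shape of a subcommands::init module (not taken from the diff).
use clap::{Arg, ArgMatches, Command};

pub fn create() -> Command {
    Command::new("init")
        .about("Create an empty Nextsync repository")
        .arg(Arg::new("directory").required(false).value_name("DIRECTORY"))
}

pub fn handler(args: &ArgMatches) {
    // forward to the existing command implementation; printing is a placeholder
    if let Some(dir) = args.get_one::<String>("directory") {
        println!("init in {}", dir);
    }
}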
@@ -5,3 +5,10 @@ pub mod req_props;
pub mod upload_file;
pub mod delete_path;
pub mod downloader;
pub mod r#move;
pub mod r#copy;
pub mod login;
pub mod request_manager;
pub mod api_call;
//pub mod auth;
//pub mod bulk_upload;
@@ -1,13 +1,19 @@
use std::env;
use dotenv::dotenv;
use std::error::Error;
use lazy_static::lazy_static;
use std::sync::Mutex;
use reqwest::Client;
use reqwest::RequestBuilder;
use reqwest::multipart::Form;
use reqwest::{Response, Error, Method};
use reqwest::{Response, Method};
use reqwest::header::{HeaderValue, CONTENT_TYPE, HeaderMap, IntoHeaderName};
use crate::utils::api::ApiProps;
use crate::commands::config;
use crate::commands::clone::get_url_props;
use crate::services::request_manager::get_request_manager;

lazy_static! {
static ref HTTP_TOKEN: Mutex<String> = Mutex::new(String::new());
}

#[derive(Debug)]
pub enum ApiError {
@@ -20,7 +26,9 @@ pub enum ApiError {
pub struct ApiBuilder {
client: Client,
request: Option<RequestBuilder>,
headers: Option<HeaderMap>
headers: Option<HeaderMap>,
auth_set: bool,
host: Option<String>,
}

impl ApiBuilder {
@@ -29,28 +37,25 @@ impl ApiBuilder {
client: Client::new(),
request: None,
headers: None,
auth_set: false,
host: None,
}
}

pub fn set_url(&mut self, method: Method, url: &str) -> &mut ApiBuilder {
let remote = match config::get("remote") {
Some(r) => r,
None => {
eprintln!("fatal: unable to find a remote");
std::process::exit(1);
let mut new_url = url.to_owned();
if let Some(active) = config::get_core("force_insecure") {
if active == "true" {
new_url = url.replace("https", "http");
}
}
};

let (host, _, _) = get_url_props(&remote);
let mut u = String::from(host);
u.push_str(url);
self.request = Some(self.client.request(method, u));
self.request = Some(self.client.request(method, new_url));
self

}

pub fn build_request(&mut self, method: Method, path: &str) -> &mut ApiBuilder {
let remote = match config::get("remote") {
let remote = match config::get_remote("origin") {
Some(r) => r,
None => {
eprintln!("fatal: unable to find a remote");
@@ -58,6 +63,7 @@ impl ApiBuilder {
}
};
let (host, username, root) = get_url_props(&remote);
self.host = Some(host.clone());
let mut url = String::from(host);
url.push_str("/remote.php/dav/files/");
url.push_str(username.unwrap());
@@ -66,11 +72,12 @@ impl ApiBuilder {
if path != "/" {
url.push_str(path);
}
self.request = Some(self.client.request(method, url));
self

self.set_url(method, &url)
}

pub fn set_req(&mut self, meth: Method, p: &str, api_props: &ApiProps) -> &mut ApiBuilder {
self.host = Some(api_props.clone().host.clone());
let mut url = String::from(&api_props.host);
url.push_str("/remote.php/dav/files/");
url.push_str("/");
@@ -80,27 +87,37 @@ impl ApiBuilder {
if p != "/" {
url.push_str(p);
}
self.request = Some(self.client.request(meth, url));
self

self.set_url(meth, &url)
}

fn set_auth(&mut self) -> &mut ApiBuilder {
// todo if not exist
dotenv().ok();
let password = env::var("PASSWORD").unwrap();
let username = env::var("USERNAME").unwrap();
pub fn set_basic_auth(&mut self, login: String, pwd: String) -> &mut ApiBuilder {
match self.request.take() {
None => {
eprintln!("fatal: incorrect request");
std::process::exit(1);
},
Some(req) => {
self.request = Some(req.basic_auth(username, Some(password)));
self.request = Some(req.basic_auth(login, Some(pwd)));
}
}
self.auth_set = true;
self
}

pub fn set_token(&mut self, token: String) {
match self.request.take() {
None => {
eprintln!("fatal: incorrect request");
std::process::exit(1);
},
Some(req) => {
self.request = Some(req.bearer_auth(token));
}
}
self.auth_set = true;
}

pub fn set_xml(&mut self, xml_payload: String) -> &mut ApiBuilder {
match self.request.take() {
None => {
@@ -148,8 +165,27 @@ impl ApiBuilder {
self
}

pub async fn send(&mut self) -> Result<Response, Error> {
self.set_auth();
fn set_request_manager(&mut self) {
let mut request_manager = get_request_manager().lock().unwrap();
let request_manager = request_manager.as_mut().unwrap();

if !self.host.is_none()
{
request_manager.set_host(self.host.clone().unwrap().replace("https://", ""));
}

if !self.auth_set {
self.set_token(request_manager.get_token());
//self.set_auth();
}
}

pub fn send(&mut self, need_text: bool) -> Result<Option<String>, ApiError> {
if !self.host.is_none() || !self.auth_set {
self.set_request_manager();
}

let res_req = tokio::runtime::Runtime::new().unwrap().block_on(async {
match self.request.take() {
None => {
eprintln!("fatal: incorrect request");
@@ -158,9 +194,62 @@ impl ApiBuilder {
Some(req) => {
if let Some(headers) = &self.headers {
req.headers(headers.clone())
.send().await.map_err(Error::from)
.send().await
} else {
req.send().await.map_err(Error::from)
req.send().await
}
},
}
});

// handle request error
let res = match res_req {
Err(err) => {
eprintln!("fatal: {}", err.source().unwrap());
std::process::exit(1);
},
Ok(res) => res,
};

if res.status().is_success() {
if need_text {
let body = tokio::runtime::Runtime::new().unwrap().block_on(async {
res.text().await
}).map_err(|err| ApiError::EmptyError(err))?;
Ok(Some(body))
} else {
Ok(None)
}
} else {
Err(ApiError::IncorrectRequest(res))
}
}

pub async fn old_send(&mut self) -> Result<Response, reqwest::Error> {
let mut request_manager = get_request_manager().lock().unwrap();
let request_manager = request_manager.as_mut().unwrap();
if !self.host.is_none()
{
request_manager.set_host(self.host.clone().unwrap());
}

if !self.auth_set {
//self.set_auth();
self.set_token(request_manager.get_token());
}

match self.request.take() {
None => {
eprintln!("fatal: incorrect request");
std::process::exit(1);
},
Some(req) => {
if let Some(headers) = &self.headers {
req.headers(headers.clone())
.send().await.map_err(reqwest::Error::from)
} else {
req.send().await.map_err(reqwest::Error::from)
}
},
}
13 src/services/api_call.rs Normal file
@@ -0,0 +1,13 @@
use crate::services::api::ApiError;

pub trait ApiCall {
fn new() -> Self where Self: Sized {
unimplemented!()
}
fn set_url(&mut self, _url: &str) -> &mut Self {
self
}
fn send(&mut self) -> Result<Option<String>, ApiError> {
unimplemented!()
}
}
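The ApiCall trait above gives every WebDAV service a common new/set_url/send surface, replacing the per-service async send helpers removed later in this diff. As a rough illustration of the intended call style (the module path and function below are assumptions, not taken from the diff; the real call sites live in commands not shown here):

// Hypothetical usage sketch built on the trait and the CreateFolder service in this diff.
use crate::services::api_call::ApiCall;
use crate::services::create_folder::CreateFolder;

fn mkcol(dir: &str) {
    // set_url builds a MKCOL request; send() resolves the remote and token internally
    match CreateFolder::new().set_url(dir).send() {
        Ok(_) => println!("created {}", dir),
        Err(err) => eprintln!("err: creating {}: {:?}", dir, err),
    }
}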
@@ -1,2 +0,0 @@
53 src/services/copy.rs Normal file
@@ -0,0 +1,53 @@
use reqwest::{Method, header::HeaderValue};
use crate::services::api::{ApiBuilder, ApiError};
use crate::commands::clone::get_url_props;
use crate::commands::config;
use crate::services::api_call::ApiCall;

pub struct Copy {
api_builder: ApiBuilder,
}

impl ApiCall for Copy {
fn new() -> Self {
Copy {
api_builder: ApiBuilder::new(),
}
}

fn send(&mut self) -> Result<Option<String>, ApiError> {
self.api_builder.send(true)
}
}

impl Copy {
pub fn set_url_copy(&mut self, url: &str, destination: &str) -> &mut Copy {
self.api_builder.build_request(Method::from_bytes(b"COPY").unwrap(), url);

let remote = match config::get_remote("origin") {
Some(r) => r,
None => {
eprintln!("fatal: unable to find a remote");
std::process::exit(1);
}
};
let (host, username, root) = get_url_props(&remote);
let mut url = String::from(host);
url.push_str("/remote.php/dav/files/");
url.push_str(username.unwrap());
url.push_str(&root);
url.push_str("/");
if destination != "/" {
url.push_str(destination);
}
self.api_builder.set_header("Destination", HeaderValue::from_str(&url).unwrap());
self
}

pub fn _overwrite(&mut self, overwrite: bool) -> &mut Copy {
self.api_builder.set_header("Overwrite", HeaderValue::from_str({
if overwrite { "T" } else { "F" }
}).unwrap());
self
}
}
@@ -1,34 +1,24 @@
use reqwest::{Method, Response, Error};
use reqwest::Method;
use crate::services::api::{ApiBuilder, ApiError};
use crate::services::api_call::ApiCall;

pub struct CreateFolder {
api_builder: ApiBuilder,
}

impl CreateFolder {
pub fn new() -> Self {
impl ApiCall for CreateFolder {
fn new() -> Self {
CreateFolder {
api_builder: ApiBuilder::new(),
}
}

pub fn set_url(&mut self, url: &str) -> &mut CreateFolder {
fn set_url(&mut self, url: &str) -> &mut CreateFolder {
self.api_builder.build_request(Method::from_bytes(b"MKCOL").unwrap(), url);
self
}

pub async fn send(&mut self) -> Result<Response, Error> {
self.api_builder.send().await
}

pub fn send_with_err(&mut self) -> Result<(), ApiError> {
let res = tokio::runtime::Runtime::new().unwrap().block_on(async {
self.send().await
}).map_err(ApiError::RequestError)?;
if res.status().is_success() {
Ok(())
} else {
Err(ApiError::IncorrectRequest(res))
}
fn send(&mut self) -> Result<Option<String>, ApiError> {
self.api_builder.send(false)
}
}
@@ -1,38 +1,24 @@
use reqwest::{Method, Response, Error};
use reqwest::Method;
use crate::services::api::{ApiBuilder, ApiError};
use crate::services::api_call::ApiCall;

pub struct DeletePath {
api_builder: ApiBuilder,
}

impl DeletePath {
pub fn new() -> Self {
impl ApiCall for DeletePath {
fn new() -> Self {
DeletePath {
api_builder: ApiBuilder::new(),
}
}

pub fn set_url(&mut self, url: &str) -> &mut DeletePath {
fn set_url(&mut self, url: &str) -> &mut DeletePath {
self.api_builder.build_request(Method::DELETE, url);
self
}

pub async fn send(&mut self) -> Result<Response, Error> {
self.api_builder.send().await
}

pub fn send_with_err(&mut self) -> Result<String, ApiError> {
let res = tokio::runtime::Runtime::new().unwrap().block_on(async {
self.send().await
}).map_err(ApiError::RequestError)?;

if res.status().is_success() {
let body = tokio::runtime::Runtime::new().unwrap().block_on(async {
res.text().await
}).map_err(ApiError::EmptyError)?;
Ok(body)
} else {
Err(ApiError::IncorrectRequest(res))
}
fn send(&mut self) -> Result<Option<String>, ApiError> {
self.api_builder.send(true)
}
}
@@ -6,38 +6,32 @@ use std::io::{self, Write};
use reqwest::{Method, Response, Error};
use crate::utils::api::ApiProps;
use crate::services::api::{ApiBuilder, ApiError};
use crate::services::api_call::ApiCall;

pub struct DownloadFiles {
api_builder: ApiBuilder,
relative_ps: String,
}

impl DownloadFiles {
pub fn new() -> Self {
impl ApiCall for DownloadFiles {
fn new() -> Self {
DownloadFiles {
api_builder: ApiBuilder::new(),
relative_ps: String::from(""),
relative_ps: String::new(),
}
}
}

pub fn set_url(&mut self, relative_ps: &str, api_props: &ApiProps) -> &mut DownloadFiles {
impl DownloadFiles {
// todo make it beautiful
pub fn set_url_download(&mut self, relative_ps: &str, api_props: &ApiProps) -> &mut DownloadFiles {
self.relative_ps = relative_ps.to_string();
self.api_builder.set_req(Method::GET, relative_ps, api_props);
self
}

pub async fn send(&mut self) -> Result<Response, Error> {
self.api_builder.send().await
}

pub async fn _send_with_err(mut self) -> Result<Vec<u8>, ApiError> {
let res = self.send().await.map_err(ApiError::RequestError)?;
if res.status().is_success() {
let body = res.bytes().await.map_err(ApiError::EmptyError)?;
Ok(body.to_vec())
} else {
Err(ApiError::IncorrectRequest(res))
}
pub async fn send_download(&mut self) -> Result<Response, Error> {
self.api_builder.old_send().await
}

pub fn save_stream(&mut self, ref_p: PathBuf, callback: Option<impl Fn(u64)>) -> Result<(), ApiError> {
@@ -45,7 +39,7 @@ impl DownloadFiles {
let mut file = File::create(abs_p).unwrap();

tokio::runtime::Runtime::new().unwrap().block_on(async {
let res = self.send().await.map_err(ApiError::RequestError)?;
let res = self.send_download().await.map_err(ApiError::RequestError)?;
if res.status().is_success() {
let mut stream = res.bytes_stream();

@@ -70,11 +64,11 @@ impl DownloadFiles {
pub fn save(&mut self, ref_p: PathBuf) -> Result<(), ApiError> {
tokio::runtime::Runtime::new().unwrap().block_on(async {
let p = ref_p.join(PathBuf::from(self.relative_ps.clone()));
let res = self.send().await.map_err(ApiError::RequestError)?;
let res = self.send_download().await.map_err(ApiError::RequestError)?;
if res.status().is_success() {
let body = res.bytes().await.map_err(ApiError::EmptyError)?;
match Self::write_file(p, &body.to_vec()) {
Err(_) => Err(ApiError::Unexpected(String::from(""))),
Err(_) => Err(ApiError::Unexpected(String::new())),
Ok(_) => Ok(()),
}
} else {
@@ -2,6 +2,7 @@ use std::path::PathBuf;
use indicatif::{ProgressBar, MultiProgress, ProgressStyle, HumanBytes};

use crate::utils::api::ApiProps;
use crate::services::api_call::ApiCall;
use crate::services::api::ApiError;
use crate::services::download_files::DownloadFiles;
use crate::services::req_props::ObjProps;
@@ -42,7 +43,7 @@ impl Downloader {
self
}

pub fn add_file(&mut self, file: ObjProps) -> &mut Downloader {
pub fn _add_file(&mut self, file: ObjProps) -> &mut Downloader {
self.files.push(file);
self
}
@@ -92,6 +93,7 @@ impl Downloader {
let mut total_size = 0;
let nb_objs = self.files.len();

// set the full size of the download
self.files
.iter()
.for_each(|f|
@@ -106,15 +108,11 @@ impl Downloader {
for file in self.files.clone() {
let relative_s = &file.clone().relative_s.unwrap();
let mut download = DownloadFiles::new();
download.set_url(&relative_s, &self.api_props.clone().unwrap());
download.set_url_download(&relative_s, &self.api_props.clone().unwrap());

let should_use_stream = {
if let Some(size) = file.contentlength {
if size > SIZE_TO_STREAM {
true
} else {
false
}
size > SIZE_TO_STREAM
} else {
false
}
@@ -123,7 +121,7 @@ impl Downloader {
// download
let res = {
if should_use_stream {
download.save_stream(ref_p.clone(), Some(|a| self.update_bytes_bar(a)))
download.save_stream(ref_p.clone(), if self.should_log { Some(|a| self.update_bytes_bar(a)) } else { None })
} else {
download.save(ref_p.clone())
}
112 src/services/login.rs Normal file
@@ -0,0 +1,112 @@
use std::io;
use std::io::Cursor;
use std::io::prelude::*;
use xml::reader::{EventReader, XmlEvent};
use reqwest::{header::HeaderValue, Method};
use rpassword;
use crate::services::api_call::ApiCall;
use crate::services::api::{ApiBuilder, ApiError};

pub struct Login {
api_builder: ApiBuilder,
login: String,
password: String,
host: Option<String>,
}

impl ApiCall for Login {
fn new() -> Self {
Login {
api_builder: ApiBuilder::new(),
login: String::new(),
password: String::new(),
host: None,
}
}

fn send(&mut self) -> Result<Option<String>, ApiError> {

let url = match self.host.clone() {
Some(h) => {
let mut u = if &h[0..8] == "https://" || &h[0..7] == "http://" {
String::new()
} else {
String::from("https://")
};
u.push_str(&h);
u.push_str("/ocs/v2.php/core/getapppassword");
u
},
None => "/ocs/v2.php/core/getapppassword".to_owned(),
};
self.api_builder.set_url(Method::GET, &url);
self.api_builder.set_header("OCS-APIRequest", HeaderValue::from_str("true").unwrap());
self.api_builder.set_header("USER-AGENT", HeaderValue::from_str("nextsync").unwrap());
self.api_builder.set_basic_auth(self.login.clone(), self.password.clone());
self.api_builder.send(true)
}
}

impl Login {
pub fn ask_auth(&mut self) -> &mut Login {
println!("Please enter your username/email: ");
let stdin = io::stdin();
self.login = stdin.lock().lines().next().unwrap().unwrap();
println!("Please enter your password: ");
self.password = rpassword::read_password().unwrap();
self
}

pub fn set_auth(&mut self, username: &str, password: &str) -> &mut Login {
self.login = username.to_owned();
self.password = password.to_owned();
self
}

pub fn set_host(&mut self, host: Option<String>) -> &mut Login {
self.host = host;
self
}

pub fn send_login(&mut self) -> Result<String, ApiError> {
match self.send() {
Ok(Some(body)) => Ok(self.parse(body)),
Ok(None) => Err(ApiError::Unexpected(String::from("Empty after tested"))),
Err(err) => Err(err),
}
}

fn parse(&self, xml: String) -> String {
let cursor = Cursor::new(xml);
let parser = EventReader::new(cursor);

let mut should_get = false;

for event in parser {
match event {
Ok(XmlEvent::StartElement { name, .. }) => {
should_get = {
if &name.local_name == "apppassword" {
true
} else {
false
}
};
}
Ok(XmlEvent::Characters(text)) => {
if !text.trim().is_empty() && should_get {
return text.clone();
}
}
//Ok(XmlEvent::EndElement { name, .. }) => {
//}
Err(e) => {
eprintln!("err: parsing xml: {}", e);
break;
}
_ => {}
}
}
String::new()
}
}
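The Login service above trades basic credentials for a Nextcloud app password via the OCS getapppassword endpoint and extracts the apppassword element from the XML response. A minimal non-interactive sketch of how it can be driven (the function name and the placeholder credentials are mine, not from the diff; the chained calls mirror the ones used in request_manager.rs below):

// Hypothetical helper assuming the Login and ApiCall definitions shown above.
use crate::services::api_call::ApiCall;
use crate::services::login::Login;

fn app_password(host: &str, user: &str, pass: &str) -> Option<String> {
    Login::new()
        .set_auth(user, pass)
        .set_host(Some(host.to_owned()))
        // GET /ocs/v2.php/core/getapppassword, then parse the <apppassword> element
        .send_login()
        .ok()
}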
54 src/services/move.rs Normal file
@@ -0,0 +1,54 @@
use reqwest::{Method, header::HeaderValue};
use crate::services::api::{ApiBuilder, ApiError};
use crate::commands::clone::get_url_props;
use crate::commands::config;
use crate::services::api_call::ApiCall;

pub struct Move {
api_builder: ApiBuilder,
}

impl ApiCall for Move {
fn new() -> Self {
Move {
api_builder: ApiBuilder::new(),
}
}

fn send(&mut self) -> Result<Option<String>, ApiError> {
self.api_builder.send(false)
}
}

impl Move {
pub fn set_url_move(&mut self, url: &str, destination: &str) -> &mut Move {
self.api_builder.build_request(Method::from_bytes(b"MOVE").unwrap(), url);

let remote = match config::get_remote("origin") {
Some(r) => r,
None => {
eprintln!("fatal: unable to find a remote");
std::process::exit(1);
}
};
let (host, username, root) = get_url_props(&remote);
let mut url = String::from(host);
url.push_str("/remote.php/dav/files/");
url.push_str(username.unwrap());
url.push_str(&root);
url.push_str("/");
if destination != "/" {
url.push_str(destination);
}
self.api_builder.set_header("Destination", HeaderValue::from_str(&url).unwrap());
self
}

pub fn _overwrite(&mut self, overwrite: bool) -> &mut Move {
self.api_builder.set_header("Overwrite", HeaderValue::from_str({
if overwrite { "T" } else { "F" }
}).unwrap());
self
}
}
@@ -1,6 +1,6 @@
use std::io::Cursor;
use chrono::{Utc, DateTime};
use reqwest::{Method, Response, Error};
use reqwest::Method;
use xml::reader::{EventReader, XmlEvent};
use reqwest::header::HeaderValue;
use crate::commands::clone::get_url_props;
@@ -8,6 +8,7 @@ use crate::commands::config;
use crate::utils::time::parse_timestamp;
use crate::utils::api::{get_relative_s, ApiProps};
use crate::services::api::{ApiBuilder, ApiError};
use crate::services::api_call::ApiCall;

#[derive(Debug)]
pub struct ObjProps {
@@ -55,8 +56,8 @@ pub struct ReqProps {
api_props: Option<ApiProps>
}

impl ReqProps {
pub fn new() -> Self {
impl ApiCall for ReqProps {
fn new() -> Self {
ReqProps {
api_builder: ApiBuilder::new(),
xml_balises: vec![],
@@ -65,8 +66,8 @@ impl ReqProps {
}
}

pub fn set_url(&mut self, url: &str) -> &mut ReqProps {
let remote = match config::get("remote") {
fn set_url(&mut self, url: &str) -> &mut ReqProps {
let remote = match config::get_remote("origin") {
Some(r) => r,
None => {
eprintln!("fatal: unable to find a remote");
@@ -83,6 +84,13 @@ impl ReqProps {
self
}

fn send(&mut self) -> Result<Option<String>, ApiError> {
self.validate_xml();
self.api_builder.send(true)
}
}

impl ReqProps {
pub fn set_request(&mut self, p: &str, api_props: &ApiProps) -> &mut ReqProps {
self.api_props = Some(api_props.clone());
self.api_builder.set_req(Method::from_bytes(b"PROPFIND").unwrap(), p, api_props);
@@ -145,32 +153,10 @@ impl ReqProps {
self
}

pub async fn send(&mut self) -> Result<Response, Error> {
self.validate_xml();
self.api_builder.send().await
}

pub fn send_with_err(&mut self) -> Result<String, ApiError> {
tokio::runtime::Runtime::new().unwrap().block_on(async {
match self.send().await {
Err(res) => Err(ApiError::RequestError(res)),
Ok(res) if res.status().is_success() => {
let body = res
.text()
.await
.map_err(|err| ApiError::EmptyError(err))?;
Ok(body)
},
Ok(res) => {
Err(ApiError::IncorrectRequest(res))
}
}
})
}

pub fn send_req_multiple(&mut self) -> Result<Vec<ObjProps>, ApiError> {
match self.send_with_err() {
Ok(body) => Ok(self.parse(body, true)),
match self.send() {
Ok(Some(body)) => Ok(self.parse(body, true)),
Ok(None) => Err(ApiError::Unexpected(String::from("Empty after tested"))),
Err(err) => Err(err),
}
}
@@ -178,12 +164,13 @@ impl ReqProps {
pub fn send_req_single(&mut self) -> Result<ObjProps, ApiError> {
// set depth to 0 as we only need one element
self.set_depth("0");
match self.send_with_err() {
Ok(body) => {
match self.send() {
Ok(Some(body)) => {
let objs = self.parse(body, false);
let obj = objs[0].clone();
Ok(obj)
},
Ok(None) => Err(ApiError::Unexpected(String::from("Empty after tested"))),
Err(err) => Err(err),
}
}
@@ -195,7 +182,7 @@ impl ReqProps {
let mut values: Vec<ObjProps> = vec![];

let mut should_get = false;
let mut val: String = String::from("");
let mut val: String = String::new();
let mut content = ObjProps::new();

for event in parser {
91 src/services/request_manager.rs Normal file
@@ -0,0 +1,91 @@
use lazy_static::lazy_static;
use std::sync::Mutex;

use crate::services::login::Login;
use crate::commands::config;
use crate::store::gconfig;
use crate::commands::clone::get_url_props;
use crate::services::api_call::ApiCall;

lazy_static! {
static ref REQUEST_MANAGER: Mutex<Option<RequestManager>> = Mutex::new(None);
}

pub fn get_request_manager() -> &'static Mutex<Option<RequestManager>> {
if REQUEST_MANAGER.lock().unwrap().is_none() {
*REQUEST_MANAGER.lock().unwrap() = Some(RequestManager::new());
}
&REQUEST_MANAGER
}

pub struct RequestManager {
token: Option<String>,
host: Option<String>,
}

impl RequestManager {
pub fn new() -> Self {
RequestManager {
token: None,
host: None,
}
}

pub fn set_host(&mut self, host: String) {
self.host = Some(host);
}

pub fn get_host(&mut self) -> String
{
if self.host.is_none()
{
let remote = match config::get_remote("origin") {
Some(r) => r,
None => {
// todo ask user instead
eprintln!("fatal: unable to find a remote");
std::process::exit(1);
}
};
let (host, _, _) = get_url_props(&remote);
self.host = Some(host.clone());
// todo ask user
}
self.host.clone().unwrap()
}

pub fn get_token(&mut self) -> String {
if self.token.is_none() {
// look in global config
if let Some(token) = gconfig::read_token() {
if !token.is_empty() {
self.token = Some(token);
return self.token.clone().unwrap();
}
}

// look in local config
if let Some(token) = config::find_option_in_cat("core", "token")
{
if !token.is_empty() {
self.token = Some(token);
return self.token.clone().unwrap();
}
}

// ask for a token
let get_token = Login::new()
.ask_auth()
.set_host(Some(self.get_host()))
.send_login();

// todo deal with error cases
self.token = Some(get_token.unwrap());
if let Err(err) = gconfig::write_token(&self.token.clone().unwrap()) {
eprintln!("err: failed to write token ({})", err);
}
}

self.token.clone().unwrap()
}
}
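In short, get_token resolves credentials in three steps: the global token file read by gconfig, then the repository's core token option, and finally an interactive login whose app password is written back to the global file. A minimal sketch of how a caller reaches the shared manager, mirroring set_request_manager in api.rs above (the wrapper function name is mine):

// Sketch only: names match the request_manager code shown above.
use crate::services::request_manager::get_request_manager;

fn current_token() -> String {
    let mut guard = get_request_manager().lock().unwrap();
    let manager = guard.as_mut().unwrap();
    // triggers the config lookup or an interactive login on first use
    manager.get_token()
}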
@@ -1,25 +1,32 @@
use std::fs::File;
use std::io::Read;
use std::path::PathBuf;
use reqwest::{Method, Response, Error};
use reqwest::Method;
use crate::services::api::{ApiBuilder, ApiError};
use crate::services::api_call::ApiCall;

pub struct UploadFile {
api_builder: ApiBuilder,
}

impl UploadFile {
pub fn new() -> Self {
impl ApiCall for UploadFile {
fn new() -> Self {
UploadFile {
api_builder: ApiBuilder::new(),
}
}

pub fn set_url(&mut self, url: &str) -> &mut UploadFile {
fn set_url(&mut self, url: &str) -> &mut UploadFile {
self.api_builder.build_request(Method::PUT, url);
self
}

fn send(&mut self) -> Result<Option<String>, ApiError> {
self.api_builder.send(true)
}
}

impl UploadFile {
pub fn set_file(&mut self, path: PathBuf) -> &mut UploadFile {
// todo large file
// todo small files
@@ -29,23 +36,4 @@ impl UploadFile {
self.api_builder.set_body(buffer);
self
}

pub async fn send(&mut self) -> Result<Response, Error> {
self.api_builder.send().await
}

pub fn send_with_err(&mut self) -> Result<String, ApiError> {
let res = tokio::runtime::Runtime::new().unwrap().block_on(async {
self.send().await
}).map_err(ApiError::RequestError)?;

if res.status().is_success() {
let body = tokio::runtime::Runtime::new().unwrap().block_on(async {
res.text().await
}).map_err(ApiError::EmptyError)?;
Ok(body)
} else {
Err(ApiError::IncorrectRequest(res))
}
}
}
@@ -1,3 +1,4 @@
pub mod index;
pub mod head;
pub mod object;
pub mod gconfig;
54 src/store/gconfig.rs Normal file
@@ -0,0 +1,54 @@
use std::env;
use std::path::PathBuf;
use std::fs::{self, OpenOptions};
use std::io::{self, Write};
use crate::utils::read;

fn global_path() -> Option<PathBuf> {
if let Some(home_dir) = env::var_os("HOME") {
let mut path = PathBuf::new();
path.push(home_dir);
path.push(".nextsync");
Some(path)
}
else
{
None
}
}

pub fn write_token(token: &str) -> io::Result<()> {
if let Some(mut path_token) = global_path() {
if !path_token.exists() {
fs::create_dir_all(path_token.clone())?;
}
path_token.push("token");
let mut file = OpenOptions::new()
.read(true)
.write(true)
.create(true)
.open(path_token)?;

writeln!(file, "{}", token)?;
}
Ok(())

}

pub fn read_token() -> Option<String> {
if let Some(mut path_token) = global_path() {
if !path_token.exists() {
return None;
}
path_token.push("token");
if let Ok(lines) = read::read_lines(path_token) {
for line in lines {
if let Ok(l) = line {
return Some(l);
}
}
}
}

None
}
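Note that the app password is cached in plain text under the user's home directory (the .nextsync/token file created above), and read_token returns the first line of that file. A small round-trip sketch with these helpers (the wrapper function is illustrative only):

// Illustrative round trip with the gconfig helpers shown above.
use crate::store::gconfig::{read_token, write_token};

fn remember(token: &str) {
    if let Err(err) = write_token(token) {
        eprintln!("err: failed to write token ({})", err);
    }
    // returns the first line of ~/.nextsync/token, if the file exists
    let _cached = read_token();
}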
@@ -1,12 +1,17 @@
use std::io;
use std::path::PathBuf;
use std::fs::File;
use std::fs::OpenOptions;
use crate::utils::{read, path};

pub fn open() -> File {
pub fn path() -> PathBuf {
let mut path = path::nextsync();

path.push("index");
path
}

pub fn open() -> File {
let path = path();
OpenOptions::new()
.read(true)
.write(true)
@@ -27,3 +32,16 @@ pub fn rm_line(line: &str) -> io::Result<()> {
read::rm_line(root, line)?;
Ok(())
}

pub fn alread_added(file: String) -> bool {
if let Ok(lines) = read_line() {
for line in lines {
if let Ok(l) = line {
if l == file {
return true;
}
}
}
}
return false;
}
@@ -4,12 +4,12 @@ use std::fs::{self, OpenOptions};
use crypto::sha1::Sha1;
use crypto::digest::Digest;
use std::io::{Seek, SeekFrom, Read};
use crate::utils::time::parse_timestamp;
use crate::store::head;
use crate::utils::{read, path};

pub mod tree;
pub mod blob;
pub mod object;

pub struct Object {
path: PathBuf,
@@ -31,7 +31,7 @@ impl Object {
if path == "" {
return Object {
path: PathBuf::from("/"),
hash: String::from(""),
hash: String::new(),
obj_p: head::path(),
ts: None,
}
@@ -59,9 +59,10 @@ impl Object {
match read::read_lines(&self.obj_p) {
Ok(mut reader) => {
if let Some(Ok(line)) = reader.next() {
let mut data = line.rsplit(' ');
if data.clone().count() >= 2 {
self.ts = Some(data.next().unwrap().parse::<i64>().unwrap())
let mut data = line.rsplit(' ').collect::<Vec<_>>();
data.reverse();
if data.clone().len() >= 2 {
self.ts = Some(data[1].parse::<i64>().unwrap())
}
}
},
@@ -131,7 +132,7 @@ fn rm(hash: &str) -> io::Result<()> {

fn rm_node(path: &Path, node: &str) -> io::Result<()> {
let mut root = path::objects();
let (dir, rest) = hash_obj(path.clone().to_str().unwrap());
let (dir, rest) = hash_obj(path.to_str().unwrap());

root.push(dir);
root.push(rest);
@@ -143,11 +144,11 @@ fn rm_node(path: &Path, node: &str) -> io::Result<()> {
fn add_node(path: &Path, node: &str) -> io::Result<()> {
let mut root = path::objects();

let (dir, rest) = hash_obj(path.clone().to_str().unwrap());
let (dir, rest) = hash_obj(path.to_str().unwrap());

root.push(dir);
if !root.exists() {
todo!();
//todo!();
}
root.push(rest);

@@ -168,7 +169,7 @@ fn update_dates(mut path: PathBuf, date: &str) -> io::Result<()> {
let (dir, res) = hash_obj(path.to_str().unwrap());
obj_p.push(dir);
obj_p.push(res);
update_date(obj_p.clone(), date.clone())?;
update_date(obj_p.clone(), date)?;
obj_p.pop();
obj_p.pop();
}
@@ -223,31 +224,3 @@ fn create_obj(name: String, content: &str) -> io::Result<()> {
Ok(())
}

pub fn get_timestamp(path_s: String) -> Option<i64> {
let mut obj_p = path::objects();

let (dir, res) = hash_obj(&path_s);
obj_p.push(dir);
obj_p.push(res);

match read::read_lines(obj_p) {
Ok(mut reader) => {
match reader.next() {
Some(Ok(line)) => {
let mut data = line.rsplit(' ');
if data.clone().count() >= 2 {
Some(data.next().unwrap().parse::<i64>().unwrap())
} else {
None
}
},
_ => None,
}
},
Err(err) => {
eprintln!("error reading object: {}", err);
None
},
}

}
@@ -1,52 +1,330 @@
|
||||
use std::io;
|
||||
use std::io::{self, Read};
|
||||
use std::fs::{self, File};
|
||||
use std::io::Write;
|
||||
use std::fs::OpenOptions;
|
||||
use std::path::PathBuf;
|
||||
use std::fs;
|
||||
use crate::utils::path;
|
||||
use crate::store::head;
|
||||
use crate::store::object::{update_dates, parse_path, add_node, create_obj, rm_node};
|
||||
use std::time::SystemTime;
|
||||
use crate::commands::status::State;
|
||||
use crate::utils::into::IntoPathBuf;
|
||||
use crate::utils::{path, read};
|
||||
use crate::store::object::update_dates;
|
||||
|
||||
pub fn add(path: PathBuf, date: &str, up_parent: bool) -> io::Result<()> {
|
||||
let (line, hash, name) = parse_path(path.clone(), true);
|
||||
// add blob reference to parent
|
||||
if path.iter().count() == 1 {
|
||||
head::add_line(line)?;
|
||||
} else {
|
||||
add_node(path.parent().unwrap(), &line)?;
|
||||
use crate::store::object::object::ObjMethods;
|
||||
use crate::store::object::object::Obj;
|
||||
|
||||
const HASH_EMPTY: &str = "d41d8cd98f00b204e9800998ecf8427e";
|
||||
|
||||
pub struct Blob {
|
||||
pub obj: Obj,
|
||||
data: Vec<String>, // content of the ref file
|
||||
file_hash: Option<String>, // hash of the file's content
|
||||
}
|
||||
|
||||
//pub struct Blob {
|
||||
// r_path: PathBuf, // relative path
|
||||
// a_path: PathBuf, // absolute path
|
||||
// hash: String, // hash of relative path
|
||||
// file_hash: Option<String>,
|
||||
// obj_p: PathBuf, // path of the object file
|
||||
// data: Vec<String>, // content of the blob
|
||||
//}
|
||||
|
||||
|
||||
|
||||
impl Blob {
|
||||
pub fn new(obj: Obj) -> Self {
|
||||
Self {
|
||||
obj,
|
||||
data: vec![],
|
||||
file_hash: None,
|
||||
}
|
||||
}
|
||||
pub fn from_path<S>(r_path: S) -> Blob where S: IntoPathBuf {
|
||||
let r_path = r_path.into();
|
||||
Self {
|
||||
obj: Obj::from_path(r_path),
|
||||
data: vec![],
|
||||
file_hash: None,
|
||||
}
|
||||
}
|
||||
|
||||
let mut content = name.clone().to_owned();
|
||||
content.push_str(" ");
|
||||
content.push_str(date);
|
||||
fn get_file_hash(&mut self) -> String {
|
||||
if self.file_hash.is_none() {
|
||||
let bytes = std::fs::read(self.get_file_path()).unwrap();
|
||||
let hash = md5::compute(&bytes);
|
||||
self.file_hash = Some(format!("{:x}", hash))
|
||||
}
|
||||
self.file_hash.clone().unwrap()
|
||||
}
|
||||
|
||||
// create blob object
|
||||
create_obj(hash, &content)?;
|
||||
/// read the blob's ref line to get all of its information and store it in self.data
|
||||
pub fn read_data(&mut self) {
|
||||
if self.data.len() == 0 {
|
||||
if let Ok(mut file) = File::open(self.get_obj_path()) {
|
||||
let mut buffer = String::new();
|
||||
let _ = file.read_to_string(&mut buffer);
|
||||
let data = buffer.rsplit(' ').collect::<Vec<_>>();
|
||||
for e in data {
|
||||
self.data.push(String::from(e));
|
||||
}
|
||||
self.data.reverse();
|
||||
|
||||
// remove the trailing \n from the last element
|
||||
if let Some(last) = self.data.last_mut() {
|
||||
if last.ends_with("\n") {
|
||||
last.pop();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn get_data_index(&mut self, index: usize) -> String {
|
||||
self.read_data();
|
||||
if self.data.len() >= index + 1 {
|
||||
self.data[index].clone()
|
||||
} else {
|
||||
String::new()
|
||||
}
|
||||
}
|
||||
|
||||
fn saved_filename(&mut self) -> String {
|
||||
self.get_data_index(0)
|
||||
}
|
||||
|
||||
pub fn saved_remote_ts(&mut self) -> String {
|
||||
self.get_data_index(1)
|
||||
}
|
||||
|
||||
fn saved_local_size(&mut self) -> String {
|
||||
self.get_data_index(2)
|
||||
}
|
||||
|
||||
fn saved_local_ts(&mut self) -> u64 {
|
||||
match self.get_data_index(3).as_str() {
|
||||
"" => 0,
|
||||
str => str.parse::<u64>().unwrap()
|
||||
}
|
||||
}
|
||||
|
||||
fn saved_hash(&mut self) -> String {
|
||||
self.get_data_index(4)
|
||||
}
|
||||
|
||||
fn has_same_size(&mut self) -> bool {
|
||||
let metadata = match fs::metadata(self.get_file_path()) {
|
||||
Ok(m) => m,
|
||||
Err(_) => return true,
|
||||
};
|
||||
|
||||
if self.saved_local_size() == String::new() { return true; }
|
||||
metadata.len().to_string() == self.saved_local_size()
|
||||
}
|
||||
|
||||
fn is_newer(&mut self) -> bool {
|
||||
let metadata = match fs::metadata(self.get_file_path()) {
|
||||
Ok(m) => m,
|
||||
Err(_) => return true,
|
||||
};
|
||||
|
||||
let secs = metadata
|
||||
.modified()
|
||||
.unwrap()
|
||||
.duration_since(SystemTime::UNIX_EPOCH)
|
||||
.unwrap()
|
||||
.as_secs();
|
||||
if self.saved_local_ts() == 0 { return true; }
|
||||
secs > self.saved_local_ts()
|
||||
}
|
||||
|
||||
fn has_same_hash(&mut self) -> bool {
|
||||
if self.saved_hash() == String::new() { return false; }
|
||||
let file_hash = self.get_file_hash().clone();
|
||||
self.saved_hash() == file_hash
|
||||
}
|
||||
|
||||
pub fn has_changes(&mut self) -> bool {
|
||||
!self.has_same_size() || (self.is_newer() && !self.has_same_hash())
|
||||
}
|
||||
|
||||
pub fn get_all_identical_blobs(&mut self) -> Vec<String> {
|
||||
// an empty file is a new file, not a copy of another empty file
|
||||
if self.get_file_hash() == HASH_EMPTY {
|
||||
return vec![];
|
||||
}
|
||||
|
||||
let refs_p = self.get_obj_path();
|
||||
let mut blobs: Vec<String> = vec![];
|
||||
if let Ok(lines) = read::read_lines(refs_p) {
|
||||
for line in lines {
|
||||
if let Ok(l) = line {
|
||||
blobs.push(l.clone());
|
||||
}
|
||||
}
|
||||
}
|
||||
blobs
|
||||
}
|
||||
|
||||
pub fn status(&mut self, path_from: &mut Option<PathBuf>) -> State {
|
||||
let has_obj_ref = self.get_obj_path().exists();
|
||||
let blob_exists = self.get_file_path().exists();
|
||||
|
||||
if has_obj_ref && !blob_exists {
|
||||
State::Deleted
|
||||
} else if !has_obj_ref && blob_exists {
|
||||
let identical_blobs = self.get_all_identical_blobs();
|
||||
if identical_blobs.len() != 0 {
|
||||
let identical_blob = Blob::from_path(identical_blobs[0].clone()).get_local_obj();
|
||||
|
||||
if identical_blob.state == State::Deleted {
|
||||
*path_from = Some(identical_blob.path);
|
||||
State::Moved
|
||||
} else if identical_blob.state == State::Default {
|
||||
*path_from = Some(identical_blob.path);
|
||||
State::Copied
|
||||
} else {
|
||||
State::New
|
||||
}
|
||||
} else {
|
||||
State::New
|
||||
}
|
||||
} else if !has_obj_ref && !blob_exists {
|
||||
State::Default
|
||||
} else if self.has_changes() {
|
||||
State::Modified
|
||||
} else {
|
||||
State::Default
|
||||
}
|
||||
}
|
||||
|
||||
fn create_blob_ref(&mut self, ts_remote: &str) -> io::Result<()> {
|
||||
let metadata = fs::metadata(self.get_file_path())?;
|
||||
let secs = metadata
|
||||
.modified()
|
||||
.unwrap()
|
||||
.duration_since(SystemTime::UNIX_EPOCH)
|
||||
.unwrap()
|
||||
.as_secs();
|
||||
|
||||
// build line with all needed properties
|
||||
let content = format!("{} {} {} {} {}",
|
||||
self.get_name(),
|
||||
ts_remote,
|
||||
metadata.len().to_string(),
|
||||
secs.to_string(),
|
||||
self.get_file_hash());
|
||||
|
||||
// create parent dir if needed
|
||||
let mut obj_path = self.get_obj_path();
|
||||
obj_path.pop();
|
||||
if !obj_path.exists() {
|
||||
fs::create_dir_all(obj_path)?;
|
||||
}
|
||||
|
||||
// open ref file
|
||||
let mut file = OpenOptions::new()
|
||||
.create_new(true)
|
||||
.write(true)
|
||||
.open(self.get_obj_path())?;
|
||||
|
||||
writeln!(file, "{}", content)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn get_file_ref(&mut self) -> PathBuf {
|
||||
let mut refs_p = path::refs();
|
||||
let file_hash = self.get_file_hash().clone();
|
||||
let (dir, res) = file_hash.split_at(2);
|
||||
|
||||
refs_p.push(dir);
|
||||
if !refs_p.exists() {
|
||||
let _ = fs::create_dir_all(refs_p.clone());
|
||||
}
|
||||
refs_p.push(res);
|
||||
refs_p
|
||||
}
|
||||
|
||||
// create a file in .nextsync/refs, named after this blob's content hash,
// that points back to the blob's relative path
|
||||
fn create_hash_ref(&mut self) -> io::Result<()> {
|
||||
// todo: check whether the file has been modified when handling moves and copies
|
||||
let refs_p = self.get_file_ref();
|
||||
|
||||
let mut file = OpenOptions::new()
|
||||
.create(true)
|
||||
.write(true)
|
||||
.open(refs_p)?;
|
||||
|
||||
// todo deal with duplicate content
|
||||
writeln!(file, "{}", self.get_relative_file_path().to_str().unwrap())?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn create(&mut self, ts_remote: &str, up_parent: bool) -> io::Result<()> {
|
||||
|
||||
// add blob reference to parent
|
||||
let _ = self.add_ref_to_parent();
|
||||
|
||||
if let Err(err) = self.create_blob_ref(ts_remote.clone()) {
|
||||
eprintln!("err: saving blob ref of {}: {}", self.get_relative_file_path().display(), err);
|
||||
}
|
||||
|
||||
if let Err(err) = self.create_hash_ref() {
|
||||
eprintln!("err: saving hash ref of {}: {}", self.get_relative_file_path().display(), err);
|
||||
}
|
||||
|
||||
// update date for all parent
|
||||
if up_parent {
|
||||
update_dates(path, date)?;
|
||||
if let Err(err) = update_dates(self.get_relative_file_path(), ts_remote) {
|
||||
eprintln!("err: updating parent date of {}: {}", self.get_relative_file_path().display(), err);
|
||||
}
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn rm(path: PathBuf) -> io::Result<()> {
|
||||
let (line, hash, _) = parse_path(path.clone(), true);
|
||||
|
||||
// remove blob reference to parent
|
||||
if path.iter().count() == 1 {
|
||||
head::rm_line(&line)?;
|
||||
} else {
|
||||
rm_node(path.parent().unwrap(), &line)?;
|
||||
}
|
||||
|
||||
// remove blob object
|
||||
let mut root = path::objects();
|
||||
|
||||
let c = hash.clone();
|
||||
let (dir, rest) = c.split_at(2);
|
||||
root.push(dir);
|
||||
root.push(rest);
|
||||
fs::remove_file(root)?;
|
||||
pub fn update(&mut self, ts_remote: &str) -> io::Result<()> {
|
||||
|
||||
// // remove old hash ref
|
||||
// let mut refs_p = path::refs();
|
||||
// let binding = self.saved_hash();
|
||||
// let (dir, res) = binding.split_at(2);
|
||||
// refs_p.push(dir);
|
||||
// refs_p.push(res);
|
||||
// if let Err(err) = fs::remove_file(refs_p) {
|
||||
// eprintln!("err: removing hash ref of {}: {}", self.r_path.clone().display(), err);
|
||||
// }
|
||||
//
|
||||
// // creating new hash ref
|
||||
// if let Err(err) = self.create_hash_ref() {
|
||||
// eprintln!("err: saving hash ref of {}: {}", self.r_path.clone().display(), err);
|
||||
// }
|
||||
//
|
||||
// // updating content of blob's ref
|
||||
// let metadata = fs::metadata(self.a_path.clone())?;
|
||||
// let secs = metadata
|
||||
// .modified()
|
||||
// .unwrap()
|
||||
// .duration_since(SystemTime::UNIX_EPOCH)
|
||||
// .unwrap()
|
||||
// .as_secs();
|
||||
//
|
||||
// let mut content = self.saved_filename();
|
||||
// content.push_str(" ");
|
||||
// content.push_str(ts_remote);
|
||||
// content.push_str(" ");
|
||||
// content.push_str(&metadata.len().to_string());
|
||||
// content.push_str(" ");
|
||||
// content.push_str(&secs.to_string());
|
||||
// content.push_str(" ");
|
||||
// content.push_str(&self.get_file_hash());
|
||||
//
|
||||
// let mut file = OpenOptions::new()
|
||||
// .write(true)
|
||||
// .open(self.obj_p.clone())?;
|
||||
//
|
||||
// writeln!(file, "{}", &content)?;
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
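To make the blob ref layout above easier to follow: `create_blob_ref` writes one line, `<name> <remote_ts> <local_size> <local_mtime> <md5>`, and `has_changes` treats a size change as a definite change while only recomputing the hash when the mtime moved forward. A standalone sketch of that line and heuristic (not the crate's real types; sample values are invented):

```rust
// Standalone sketch of the blob ref line written by create_blob_ref above and
// the check used by has_changes. Sample data is made up for illustration.
#[allow(dead_code)]
struct BlobRef {
    name: String,
    remote_ts: String,
    local_size: u64,
    local_mtime: u64,
    md5: String,
}

fn parse_blob_ref(line: &str) -> Option<BlobRef> {
    let mut it = line.split_whitespace();
    Some(BlobRef {
        name: it.next()?.to_string(),
        remote_ts: it.next()?.to_string(),
        local_size: it.next()?.parse().ok()?,
        local_mtime: it.next()?.parse().ok()?,
        md5: it.next()?.to_string(),
    })
}

// size change => changed; otherwise only a newer mtime with a different hash counts
fn has_changes(saved: &BlobRef, size: u64, mtime: u64, md5: &str) -> bool {
    size != saved.local_size || (mtime > saved.local_mtime && md5 != saved.md5)
}

fn main() {
    let saved = parse_blob_ref("file1 1718000000 3 1718000000 acbd18db4cc2f85cedef654fccc4a4d8").unwrap();
    assert!(!has_changes(&saved, 3, 1_718_000_000, "acbd18db4cc2f85cedef654fccc4a4d8"));
    assert!(has_changes(&saved, 4, 1_718_000_001, "37b51d194a7513e45b56f6524f2d51f2"));
}
```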
415 src/store/object/object.rs (new file)
@@ -0,0 +1,415 @@
|
||||
use std::io;
|
||||
use std::fs;
|
||||
use std::path::PathBuf;
|
||||
use crate::utils::path;
|
||||
use crate::store::head;
|
||||
use crate::store::object::{add_node, rm_node};
|
||||
use crypto::sha1::Sha1;
|
||||
use crypto::digest::Digest;
|
||||
use crate::utils::into::IntoPathBuf;
|
||||
use crate::store::object::{blob::Blob, tree::Tree};
|
||||
use crate::commands::status::{State, LocalObj};
|
||||
|
||||
#[derive(Clone, Copy)]
|
||||
pub enum ObjType {
|
||||
TREE,
|
||||
BLOB,
|
||||
DEFAULT
|
||||
}
|
||||
|
||||
pub trait ObjMethods {
|
||||
fn get_type(&self) -> ObjType;
|
||||
fn get_obj_path(&self) -> PathBuf;
|
||||
fn get_file_path(&self) -> PathBuf;
|
||||
fn get_relative_file_path(&self) -> PathBuf;
|
||||
fn get_repo_file_path(&self) -> PathBuf;
|
||||
fn get_name(&self) -> String;
|
||||
fn get_hash_path(&self) -> String;
|
||||
fn get_local_obj(&self) -> LocalObj;
|
||||
fn get_line(&self, obj_type: ObjType) -> String;
|
||||
fn add_ref_to_parent(&self) -> io::Result<()>;
|
||||
fn rm(&mut self) -> io::Result<()>;
|
||||
fn rm_node(&mut self) -> io::Result<()>;
|
||||
fn rm_node_down(&mut self) -> io::Result<()>;
|
||||
fn exists_on_remote(&mut self) -> bool;
|
||||
fn has_changes(&mut self) -> bool;
|
||||
}
|
||||
|
||||
pub struct Obj {
|
||||
name: String,
|
||||
obj_path: PathBuf,
|
||||
obj_type: ObjType,
|
||||
file_path: PathBuf, // file here is used as both file and directory
|
||||
relative_file_path: PathBuf,
|
||||
repo_file_path: PathBuf,
|
||||
hash_path: String, // hash of the relative path of the file
|
||||
}
|
||||
|
||||
|
||||
impl ObjMethods for Obj {
|
||||
fn get_type(&self) -> ObjType {
|
||||
self.obj_type
|
||||
}
|
||||
|
||||
fn get_obj_path(&self) -> PathBuf {
|
||||
self.obj_path.clone()
|
||||
}
|
||||
|
||||
fn get_file_path(&self) -> PathBuf {
|
||||
self.file_path.clone()
|
||||
}
|
||||
|
||||
fn get_relative_file_path(&self) -> PathBuf {
|
||||
self.relative_file_path.clone()
|
||||
}
|
||||
|
||||
fn get_repo_file_path(&self) -> PathBuf {
|
||||
self.repo_file_path.clone()
|
||||
}
|
||||
|
||||
fn get_local_obj(&self) -> LocalObj {
|
||||
LocalObj {
|
||||
otype: match self.obj_type {
|
||||
ObjType::BLOB => String::from("blob"),
|
||||
ObjType::TREE => String::from("tree"),
|
||||
ObjType::DEFAULT => String::from("default"),
|
||||
},
|
||||
name: self.get_name(),
|
||||
path: self.get_repo_file_path(),
|
||||
path_from: None,
|
||||
state: State::New
|
||||
}
|
||||
}
|
||||
|
||||
fn get_name(&self) -> String {
|
||||
self.name.clone()
|
||||
}
|
||||
|
||||
fn get_hash_path(&self) -> String {
|
||||
self.hash_path.clone()
|
||||
}
|
||||
|
||||
// build line for parent reference
|
||||
fn get_line(&self, obj_type: ObjType) -> String {
|
||||
let type_str = match obj_type {
|
||||
ObjType::BLOB => "blob",
|
||||
ObjType::TREE => "tree",
|
||||
ObjType::DEFAULT => "default",
|
||||
};
|
||||
format!("{} {} {}", type_str, self.get_hash_path(), self.get_name())
|
||||
}
|
||||
|
||||
fn add_ref_to_parent(&self) -> io::Result<()> {
|
||||
let line = self.get_line(self.obj_type);
|
||||
if self.get_relative_file_path().iter().count() == 1 {
|
||||
head::add_line(line)?;
|
||||
} else {
|
||||
add_node(self.get_relative_file_path().parent().unwrap(), &line)?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn rm_node(&mut self) -> io::Result<()> {
|
||||
// remove parent reference to self
|
||||
let line = self.get_line(self.obj_type);
|
||||
if self.get_relative_file_path().iter().count() == 1 {
|
||||
head::rm_line(&line)?;
|
||||
} else {
|
||||
rm_node(self.get_relative_file_path().parent().unwrap(), &line)?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn rm_node_down(&mut self) -> io::Result<()> {
|
||||
eprintln!("rm_node_down: tried to do this on Obj");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn rm(&mut self) -> io::Result<()> {
|
||||
eprintln!("rm: tried to do this on Obj");
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn exists_on_remote(&mut self) -> bool {
|
||||
self.obj_path.exists()
|
||||
}
|
||||
|
||||
fn has_changes(&mut self) -> bool {
|
||||
if !self.obj_path.exists() {
|
||||
return true;
|
||||
}
|
||||
|
||||
match self.obj_type {
|
||||
ObjType::BLOB => Blob::from_path(self.relative_file_path.clone()).has_changes(),
|
||||
ObjType::TREE => Tree::from_path(self.relative_file_path.clone()).has_changes(),
|
||||
ObjType::DEFAULT => {
|
||||
unreachable!();
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl ObjMethods for Blob {
|
||||
fn get_type(&self) -> ObjType {
|
||||
self.obj.get_type()
|
||||
}
|
||||
|
||||
fn get_obj_path(&self) -> PathBuf {
|
||||
self.obj.get_obj_path()
|
||||
}
|
||||
|
||||
fn get_file_path(&self) -> PathBuf {
|
||||
self.obj.get_file_path()
|
||||
}
|
||||
|
||||
fn get_relative_file_path(&self) -> PathBuf {
|
||||
self.obj.get_relative_file_path()
|
||||
}
|
||||
|
||||
fn get_repo_file_path(&self) -> PathBuf {
|
||||
self.obj.get_repo_file_path()
|
||||
}
|
||||
|
||||
fn get_local_obj(&self) -> LocalObj {
|
||||
self.obj.get_local_obj()
|
||||
}
|
||||
|
||||
fn get_name(&self) -> String {
|
||||
self.obj.get_name()
|
||||
}
|
||||
|
||||
fn get_hash_path(&self) -> String {
|
||||
self.obj.get_hash_path()
|
||||
}
|
||||
|
||||
fn get_line(&self, _: ObjType) -> String {
|
||||
self.obj.get_line(ObjType::BLOB)
|
||||
}
|
||||
|
||||
fn add_ref_to_parent(&self) -> io::Result<()> {
|
||||
self.obj.add_ref_to_parent()
|
||||
}
|
||||
|
||||
fn rm_node(&mut self) -> io::Result<()> {
|
||||
// remove self object and children object
|
||||
let _ = self.rm_node_down();
|
||||
self.obj.rm_node()
|
||||
}
|
||||
|
||||
fn rm_node_down(&mut self) -> io::Result<()> {
|
||||
// remove reference to self
|
||||
fs::remove_file(self.get_obj_path())?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn rm(&mut self) -> io::Result<()> {
|
||||
// remove all references, including children's one
|
||||
self.rm_node()?;
|
||||
|
||||
// remove file
|
||||
fs::remove_file(self.get_file_path())?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn exists_on_remote(&mut self) -> bool {
|
||||
self.obj.exists_on_remote()
|
||||
}
|
||||
|
||||
fn has_changes(&mut self) -> bool {
|
||||
self.obj.has_changes()
|
||||
}
|
||||
}
|
||||
|
||||
impl ObjMethods for Tree {
|
||||
fn get_type(&self) -> ObjType {
|
||||
self.obj.get_type()
|
||||
}
|
||||
|
||||
fn get_obj_path(&self) -> PathBuf {
|
||||
self.obj.get_obj_path()
|
||||
}
|
||||
|
||||
fn get_file_path(&self) -> PathBuf {
|
||||
self.obj.get_file_path()
|
||||
}
|
||||
|
||||
fn get_relative_file_path(&self) -> PathBuf {
|
||||
self.obj.get_relative_file_path()
|
||||
}
|
||||
|
||||
fn get_repo_file_path(&self) -> PathBuf {
|
||||
self.obj.get_repo_file_path()
|
||||
}
|
||||
|
||||
fn get_local_obj(&self) -> LocalObj {
|
||||
self.obj.get_local_obj()
|
||||
}
|
||||
|
||||
fn get_name(&self) -> String {
|
||||
self.obj.get_name()
|
||||
}
|
||||
|
||||
fn get_hash_path(&self) -> String {
|
||||
self.obj.get_hash_path()
|
||||
}
|
||||
|
||||
fn get_line(&self, _: ObjType) -> String {
|
||||
self.obj.get_line(ObjType::TREE)
|
||||
}
|
||||
|
||||
fn add_ref_to_parent(&self) -> io::Result<()> {
|
||||
self.obj.add_ref_to_parent()
|
||||
}
|
||||
|
||||
fn rm_node(&mut self) -> io::Result<()> {
|
||||
// remove self object and children object
|
||||
let _ = self.rm_node_down();
|
||||
self.obj.rm_node()
|
||||
}
|
||||
|
||||
/// remove objects and children but not parent reference to self
|
||||
fn rm_node_down(&mut self) -> io::Result<()> {
|
||||
// remove children
|
||||
while let Some(mut child) = self.next() {
|
||||
match child.get_type() {
|
||||
ObjType::TREE => child.rm_node_down(),
|
||||
ObjType::BLOB => child.rm_node_down(),
|
||||
_ => Ok(())
|
||||
}?;
|
||||
};
|
||||
|
||||
// remove reference to self
|
||||
fs::remove_file(self.get_obj_path())?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn rm(&mut self) -> io::Result<()> {
|
||||
// remove all references, including children's one
|
||||
self.rm_node()?;
|
||||
|
||||
// remove directory and all subfiles
|
||||
fs::remove_dir_all(self.get_file_path())?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn exists_on_remote(&mut self) -> bool {
|
||||
self.obj.exists_on_remote()
|
||||
}
|
||||
|
||||
fn has_changes(&mut self) -> bool {
|
||||
self.obj.has_changes()
|
||||
}
|
||||
}
|
||||
|
||||
impl Obj {
|
||||
fn new() -> Self {
|
||||
Obj {
|
||||
name: String::new(),
|
||||
obj_path: PathBuf::new(),
|
||||
file_path: PathBuf::new(),
|
||||
obj_type: ObjType::DEFAULT,
|
||||
hash_path: String::new(),
|
||||
relative_file_path: PathBuf::new(),
|
||||
repo_file_path: PathBuf::new()
|
||||
}
|
||||
}
|
||||
|
||||
pub fn from_path<S>(path: S) -> Obj where S: IntoPathBuf {
|
||||
|
||||
let path = path.into();
|
||||
let mut hasher = Sha1::new();
|
||||
hasher.input_str(path.to_str().unwrap());
|
||||
let hash = hasher.result_str();
|
||||
|
||||
let (dir, res) = hash.split_at(2);
|
||||
let mut obj_path = path::objects();
|
||||
obj_path.push(dir);
|
||||
obj_path.push(res);
|
||||
|
||||
// set to absolute path if not already
|
||||
let root = path::repo_root();
|
||||
let abs_path = match path.clone().starts_with(root.clone()) {
|
||||
true => path.clone(),
|
||||
false => root.join(path.clone())
|
||||
};
|
||||
|
||||
Obj {
|
||||
name: match abs_path.file_name() {
|
||||
None => String::new(),
|
||||
Some(name) => name.to_str().unwrap().to_owned()
|
||||
},
|
||||
obj_path,
|
||||
obj_type: match path.exists() {
|
||||
true => match path.is_dir() {
|
||||
true => ObjType::TREE,
|
||||
false => ObjType::BLOB
|
||||
},
|
||||
false => ObjType::DEFAULT
|
||||
},
|
||||
file_path: abs_path,
|
||||
relative_file_path: path.clone(),
|
||||
repo_file_path: path,
|
||||
hash_path: hash,
|
||||
}
|
||||
}
|
||||
|
||||
/// load from the information line stored in the object
|
||||
pub fn from_line(line: String, base_dir: Option<PathBuf>) -> Box<dyn ObjMethods> {
|
||||
let mut split = line.trim().rsplit(' ');
|
||||
if split.clone().count() != 3 {
|
||||
eprintln!("fatal: invalid object(s) ({})", line.trim());
|
||||
std::process::exit(1);
|
||||
}
|
||||
|
||||
let name = split.next().unwrap();
|
||||
let hash_path = split.next().unwrap();
|
||||
let obj_type = split.next().unwrap();
|
||||
|
||||
let (dir, res) = hash_path.split_at(2);
|
||||
let mut obj_path = path::objects();
|
||||
obj_path.push(dir);
|
||||
obj_path.push(res);
|
||||
|
||||
let path = match base_dir {
|
||||
Some(dir) => dir.join(name),
|
||||
None => PathBuf::from(name),
|
||||
};
|
||||
|
||||
let root = path::repo_root();
|
||||
let abs_path = root.join(path.clone());
|
||||
|
||||
let obj = Obj {
|
||||
name: String::from(name),
|
||||
obj_path,
|
||||
obj_type: match obj_type {
|
||||
"tree" => ObjType::TREE,
|
||||
"blob" => ObjType::BLOB,
|
||||
_ => ObjType::DEFAULT
|
||||
},
|
||||
file_path: abs_path,
|
||||
relative_file_path: path.clone(),
|
||||
repo_file_path: path,
|
||||
hash_path: String::from(hash_path),
|
||||
};
|
||||
|
||||
match obj.obj_type {
|
||||
ObjType::TREE => Box::new(Tree::new(obj)),
|
||||
ObjType::BLOB => Box::new(Blob::new(obj)),
|
||||
ObjType::DEFAULT => Box::new(Tree::new(obj))
|
||||
}
|
||||
}
|
||||
|
||||
pub fn from_head() -> Self {
|
||||
Obj {
|
||||
name: String::new(),
|
||||
obj_path: head::path(),
|
||||
obj_type: ObjType::TREE,
|
||||
file_path: PathBuf::new(),
|
||||
relative_file_path: PathBuf::new(),
|
||||
repo_file_path: PathBuf::new(),
|
||||
hash_path: String::new(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
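Before the tree.rs changes below, a standalone restatement of what `Obj::from_line` does with a stored entry: the line `<type> <hash-of-path> <name>` is split from the right and mapped to a typed object (Tree or Blob). The enum and function names here are illustrative, not the crate's:

```rust
// Illustrative re-statement of the "<type> <hash> <name>" split in Obj::from_line.
#[derive(Debug, PartialEq)]
enum Kind { Tree, Blob, Default }

fn parse_entry(line: &str) -> Option<(Kind, String, String)> {
    // split from the right: name first, then the hash, then the type token
    let mut it = line.trim().rsplitn(3, ' ');
    let name = it.next()?.to_string();
    let hash = it.next()?.to_string();
    let kind = match it.next()? {
        "tree" => Kind::Tree,
        "blob" => Kind::Blob,
        _ => Kind::Default,
    };
    Some((kind, hash, name))
}

fn main() {
    let (kind, hash, name) = parse_entry("blob 2fd4e1c67a2d file1").unwrap();
    assert_eq!(kind, Kind::Blob);
    assert_eq!(hash, "2fd4e1c67a2d");
    assert_eq!(name, "file1");
}
```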
@@ -1,103 +1,119 @@
|
||||
use std::fs::File;
|
||||
use std::io;
|
||||
use std::path::PathBuf;
|
||||
use crate::utils::{read, path};
|
||||
use crate::store::head;
|
||||
use crate::store::object::{self, update_dates, parse_path, hash_obj, add_node, create_obj};
|
||||
use crate::utils::into::IntoPathBuf;
|
||||
use crate::store::object::object::Obj;
|
||||
use crate::store::object::update_dates;
|
||||
use crate::store::object::object::ObjMethods;
|
||||
use std::fs::{self, File, OpenOptions};
|
||||
use std::io::{self, BufRead, BufReader, Write};
|
||||
|
||||
pub fn add(path: PathBuf, date: &str, up_parent: bool) -> io::Result<()> {
|
||||
let (line, hash, name) = parse_path(path.clone(), false);
|
||||
pub struct Tree {
|
||||
pub obj: Obj,
|
||||
pub buf_reader: Option<BufReader<File>>,
|
||||
is_head: bool,
|
||||
}
|
||||
|
||||
// add tree reference to parent
|
||||
if path.iter().count() == 1 {
|
||||
head::add_line(line)?;
|
||||
} else {
|
||||
add_node(path.parent().unwrap(), &line)?;
|
||||
|
||||
impl Tree {
|
||||
pub fn new(obj: Obj) -> Self {
|
||||
Tree {
|
||||
obj,
|
||||
buf_reader: None,
|
||||
is_head: false,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn from_head() -> Self {
|
||||
Tree {
|
||||
obj: Obj::from_head(),
|
||||
buf_reader: None,
|
||||
is_head: true,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn from_path<S>(r_path: S) -> Tree where S: IntoPathBuf {
|
||||
Tree {
|
||||
obj: Obj::from_path(r_path.into()),
|
||||
buf_reader: None,
|
||||
is_head: false,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn read(&mut self) {
|
||||
if self.buf_reader.is_none() {
|
||||
if let Ok(file) = File::open(self.get_obj_path()) {
|
||||
self.buf_reader = Some(BufReader::new(file));
|
||||
|
||||
// skip the first line (the declaration) unless this tree is the head
|
||||
if !self.is_head {
|
||||
let mut line = String::new();
|
||||
let _ = self.buf_reader.as_mut().unwrap().read_line(&mut line);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn has_changes(&mut self) -> bool {
|
||||
todo!();
|
||||
return true;
|
||||
}
|
||||
|
||||
pub fn next(&mut self) -> Option<Box<dyn ObjMethods>> {
|
||||
self.read();
|
||||
//if let Some(ref mut file) = self.buf_reader {
|
||||
// let mut line = String::new();
|
||||
// match file.read_line(&mut line) {
|
||||
// Ok(0) => Ok(None), // End of file
|
||||
// Ok(_) => Ok(Some(line.trim_end().len())), // Return length of line
|
||||
// Err(e) => Err(e),
|
||||
// }
|
||||
//} else {
|
||||
// Ok(None) // If file is None, return None
|
||||
//}
|
||||
match self.buf_reader {
|
||||
Some(ref mut file) => {
|
||||
let mut line = String::new();
|
||||
match file.read_line(&mut line) {
|
||||
Ok(0) => None,
|
||||
Ok(_) => Some(Obj::from_line(line, Some(self.get_relative_file_path()))),
|
||||
Err(e) => {
|
||||
eprintln!("tree::next: failed to read next line: {}", e);
|
||||
None
|
||||
}
|
||||
}
|
||||
},
|
||||
None => None
|
||||
}
|
||||
}
|
||||
|
||||
pub fn create(&self, date: &str, up_parent: bool) -> io::Result<()> {
|
||||
// add tree reference to parent
|
||||
let _ = self.add_ref_to_parent();
|
||||
|
||||
// create tree object
|
||||
let mut content = name;
|
||||
content.push_str(" ");
|
||||
content.push_str(date);
|
||||
create_obj(hash, &content)?;
|
||||
let content = format!("{} {}", self.get_name(), date);
|
||||
|
||||
// create parent dir if needed
|
||||
let mut obj_path = self.get_obj_path();
|
||||
obj_path.pop();
|
||||
if !obj_path.exists() {
|
||||
fs::create_dir_all(obj_path)?;
|
||||
}
|
||||
|
||||
// open ref file
|
||||
let mut file = OpenOptions::new()
|
||||
.create_new(true)
|
||||
.write(true)
|
||||
.open(self.get_obj_path())?;
|
||||
|
||||
// update date for all parent
|
||||
if up_parent {
|
||||
update_dates(path, date)?;
|
||||
}
|
||||
// if up_parent {
|
||||
// if let Err(err) = update_dates(self.get_relative_file_path(), date) {
|
||||
// eprintln!("err: updating parent date of {}: {}", self.get_relative_file_path().display(), err);
|
||||
// }
|
||||
// }
|
||||
|
||||
writeln!(file, "{}", content)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn rm(path: PathBuf) -> io::Result<()> {
|
||||
let (_, lines) = read(path.to_path_buf().to_str().unwrap().to_string()).unwrap();
|
||||
for line in lines {
|
||||
let (ftype, hash, _) = parse_line(line.unwrap());
|
||||
if ftype == String::from("blob") {
|
||||
object::rm(&hash)?;
|
||||
} else {
|
||||
rm_hash(hash)?;
|
||||
}
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn rm_hash(hash: String) -> io::Result<()> {
|
||||
let mut obj_p = path::objects();
|
||||
let (dir, res) = hash.split_at(2);
|
||||
obj_p.push(dir);
|
||||
obj_p.push(res);
|
||||
|
||||
match read::read_lines(obj_p) {
|
||||
Ok(mut reader) => {
|
||||
reader.next();
|
||||
for line in reader {
|
||||
let (ftype, hash, _) = parse_line(line.unwrap());
|
||||
if ftype == String::from("blob") {
|
||||
object::rm(&hash)?;
|
||||
} else {
|
||||
rm_hash(hash)?;
|
||||
}
|
||||
}
|
||||
},
|
||||
Err(err) => {
|
||||
eprintln!("error reading tree: {}", err);
|
||||
},
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn read(tree: String) -> Option<(String, io::Lines<io::BufReader<File>>)> {
|
||||
let mut obj_p = path::objects();
|
||||
|
||||
let (dir, res) = hash_obj(&tree);
|
||||
obj_p.push(dir);
|
||||
obj_p.push(res);
|
||||
|
||||
match read::read_lines(obj_p) {
|
||||
Ok(mut reader) => {
|
||||
let name = match reader.next() {
|
||||
Some(Ok(line)) => line,
|
||||
_ => String::from(""),
|
||||
};
|
||||
Some((name, reader))
|
||||
},
|
||||
Err(err) => {
|
||||
eprintln!("error reading tree: {}", err);
|
||||
None
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
pub fn parse_line(line: String) -> (String, String, String) {
|
||||
let mut split = line.rsplit(' ');
|
||||
if split.clone().count() != 3 {
|
||||
eprintln!("fatal: invalid object(s)");
|
||||
std::process::exit(1);
|
||||
}
|
||||
|
||||
let name = split.next().unwrap();
|
||||
let hash = split.next().unwrap();
|
||||
let ftype = split.next().unwrap();
|
||||
(String::from(ftype), String::from(hash), String::from(name))
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
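The reading pattern behind `Tree::read`/`next` above, in one runnable snippet: skip the first line of the object (the tree's own declaration), then treat each following line as a child entry. A `Cursor` stands in for the real .nextsync object file so the sketch runs on its own; the content is invented:

```rust
// Sketch of the Tree iteration pattern: declaration line first, then one child per line.
use std::io::{BufRead, BufReader, Cursor};

fn main() {
    // invented object content: "<name> <date>" header, then "<type> <hash> <name>" children
    let fake_tree_object = "dir 1718000000\nblob aaaa file1\ntree bbbb sub\n";
    let mut reader = BufReader::new(Cursor::new(fake_tree_object));

    // the first line is the tree's declaration, not a child (skipped unless reading HEAD)
    let mut declaration = String::new();
    reader.read_line(&mut declaration).unwrap();

    for child in reader.lines() {
        println!("child entry: {}", child.unwrap());
    }
}
```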
11 src/subcommands.rs (new file)
@@ -0,0 +1,11 @@
|
||||
pub mod init;
|
||||
pub mod status;
|
||||
pub mod add;
|
||||
pub mod reset;
|
||||
pub mod clone;
|
||||
pub mod push;
|
||||
pub mod config;
|
||||
pub mod remote_diff;
|
||||
pub mod pull;
|
||||
pub mod remote;
|
||||
pub mod credential;
|
||||
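Each of these modules exposes a `create()` returning a `clap::Command` and, for most of them, a `handler(&ArgMatches)`. The wiring in src/main.rs is not part of this diff, so the following is only a hedged sketch of how such a layout is typically glued together (the module path and subcommand list are assumptions):

```rust
// Hypothetical wiring; the real main.rs is not shown in this diff.
fn main() {
    let matches = clap::Command::new("nextsync")
        .subcommand(subcommands::add::create())
        .subcommand(subcommands::status::create())
        .subcommand(subcommands::push::create())
        .get_matches();

    match matches.subcommand() {
        Some(("add", args)) => subcommands::add::handler(args),
        Some(("status", args)) => subcommands::status::handler(args),
        // "push" would dispatch to its command here; no handler is shown for it above
        _ => {}
    }
}
```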
42 src/subcommands/add.rs (new file)
@@ -0,0 +1,42 @@
|
||||
use clap::{Arg, ArgMatches, Command, ArgAction};
|
||||
|
||||
use crate::commands;
|
||||
use crate::commands::add::AddArgs;
|
||||
|
||||
pub fn create() -> Command {
|
||||
Command::new("add")
|
||||
.arg(
|
||||
Arg::new("files")
|
||||
.required_unless_present("all")
|
||||
.conflicts_with("all")
|
||||
.num_args(1..)
|
||||
.value_name("FILE")
|
||||
.help("Files to add"),
|
||||
)
|
||||
.arg(
|
||||
Arg::new("force")
|
||||
.short('f')
|
||||
.long("force")
|
||||
.action(ArgAction::SetTrue)
|
||||
.help("Allow adding otherwise ignored files."),
|
||||
)
|
||||
.arg(
|
||||
Arg::new("all")
|
||||
.short('A')
|
||||
.long("all")
|
||||
.action(ArgAction::SetTrue)
|
||||
.help("This adds, modifies, and removes index entries to match the working tree"),
|
||||
)
|
||||
.about("Add changes to the index")
|
||||
}
|
||||
|
||||
pub fn handler(args: &ArgMatches) {
|
||||
commands::add::add(AddArgs {
|
||||
files: match args.get_many::<String>("files") {
|
||||
None => vec![],
|
||||
Some(vals) => vals.map(|s| s.to_string()).collect(),
|
||||
},
|
||||
force: *args.get_one::<bool>("force").unwrap(),
|
||||
all: *args.get_one::<bool>("all").unwrap(),
|
||||
});
|
||||
}
|
||||
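A quick test-style sketch of the parser defined above (argument parsing only, no filesystem access): passing `-A` sets the `all` flag and makes the FILE argument optional.

```rust
// Sketch: exercises only argument parsing, not commands::add itself.
#[cfg(test)]
mod parse_sketch {
    use super::create;

    #[test]
    fn add_all_flag_parses() {
        let matches = create().try_get_matches_from(["add", "-A"]).unwrap();
        assert!(*matches.get_one::<bool>("all").unwrap());
        assert!(matches.get_many::<String>("files").is_none());
    }
}
```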
52 src/subcommands/clone.rs (new file)
@@ -0,0 +1,52 @@
|
||||
use clap::{Arg, Command, ArgMatches};
|
||||
// use textwrap::{fill, Options};
|
||||
|
||||
use crate::commands::clone::CloneArgs;
|
||||
use crate::global;
|
||||
use crate::commands;
|
||||
|
||||
// fn sized_str<'a>(content: &'a str) -> &'a str {
|
||||
// fill(content, Options::new(70).width).as_str();
|
||||
// "ok"
|
||||
// }
|
||||
|
||||
pub fn create() -> Command {
|
||||
// let remote_desc = sized_str(&format!("The repository to clone from. See the NEXTSYNC URLS section below for more information on specifying repositories."));
|
||||
// let depth_desc = sized_str(&format!("Depth of the recursive fetch of object properties. This value should be lower when there are a lot of files per directory and higher when there are a lot of subdirectories with fewer files. (Default: {})", clone::DEPTH));
|
||||
Command::new("clone")
|
||||
.arg(
|
||||
Arg::new("remote")
|
||||
.required(true)
|
||||
.num_args(1)
|
||||
.value_name("REMOTE")
|
||||
//.help(_desc)
|
||||
)
|
||||
.arg(
|
||||
Arg::new("depth")
|
||||
.short('d')
|
||||
.long("depth")
|
||||
.required(false)
|
||||
.num_args(1)
|
||||
//.help(&depth_desc)
|
||||
)
|
||||
.arg(
|
||||
Arg::new("directory")
|
||||
.required(false)
|
||||
.num_args(1)
|
||||
.value_name("DIRECTORY")
|
||||
)
|
||||
.about("Clone a repository into a new directory")
|
||||
.after_help("NEXTSYNC URLS\nThe following syntaxes may be used:\n\t- user@host.xz/path/to/repo\n\t- http[s]://host.xz/apps/files/?dir=/path/to/repo&fileid=111111\n\t- [http[s]://]host.xz/remote.php/dav/files/user/path/to/repo\n")
|
||||
}
|
||||
|
||||
pub fn handler(args: &ArgMatches) {
|
||||
if let Some(val) = args.get_one::<String>("directory") {
|
||||
global::global::set_dir_path(String::from(val.to_string()));
|
||||
}
|
||||
if let Some(remote) = args.get_one::<String>("remote") {
|
||||
commands::clone::clone(CloneArgs {
|
||||
remote: remote.to_string(),
|
||||
depth: args.get_one::<String>("depth").cloned(),
|
||||
});
|
||||
}
|
||||
}
|
||||
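The after_help text above lists three accepted URL shapes. As a rough illustration of how the WebDAV form can be broken into host, user and root path (this is not the crate's `get_url_props`, just plain string handling with invented sample values):

```rust
// Illustration only; not the project's URL parser.
fn split_dav_url(url: &str) -> Option<(String, String, String)> {
    let no_scheme = url
        .strip_prefix("https://")
        .or_else(|| url.strip_prefix("http://"))
        .unwrap_or(url);
    // "[http[s]://]host.xz/remote.php/dav/files/user/path/to/repo"
    let (host, rest) = no_scheme.split_once("/remote.php/dav/files/")?;
    let (user, root) = rest.split_once('/').unwrap_or((rest, ""));
    Some((host.to_string(), user.to_string(), format!("/{}", root)))
}

fn main() {
    let (host, user, root) =
        split_dav_url("https://cloud.example.com/remote.php/dav/files/alice/docs/notes").unwrap();
    assert_eq!(host, "cloud.example.com");
    assert_eq!(user, "alice");
    assert_eq!(root, "/docs/notes");
}
```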
48 src/subcommands/config.rs (new file)
@@ -0,0 +1,48 @@
|
||||
use clap::{Arg, Command, ArgMatches};
|
||||
use crate::commands::config::ConfigSetArgs;
|
||||
|
||||
use crate::commands;
|
||||
|
||||
pub fn create() -> Command {
|
||||
Command::new("config")
|
||||
.about("Get and set repository or global options")
|
||||
.subcommand(
|
||||
Command::new("get")
|
||||
.about("Get the value of a configuration variable")
|
||||
.arg(
|
||||
Arg::new("name")
|
||||
.help("The name of the configuration variable")
|
||||
.required(true)
|
||||
.index(1)
|
||||
)
|
||||
)
|
||||
.subcommand(
|
||||
Command::new("set")
|
||||
.about("Set a configuration variable")
|
||||
.arg(
|
||||
Arg::new("name")
|
||||
.help("The name of the configuration variable")
|
||||
.required(true)
|
||||
.index(1)
|
||||
)
|
||||
.arg(
|
||||
Arg::new("value")
|
||||
.help("The value to set")
|
||||
.required(true)
|
||||
.index(2)
|
||||
)
|
||||
)
|
||||
}
|
||||
|
||||
pub fn handler(args: &ArgMatches) {
|
||||
|
||||
match args.subcommand() {
|
||||
Some(("set", set_matches)) => {
|
||||
commands::config::config_set(ConfigSetArgs {
|
||||
name: set_matches.get_one::<String>("name").unwrap().to_string(),
|
||||
value: set_matches.get_one::<String>("value").unwrap().to_string(),
|
||||
});
|
||||
}
|
||||
_ => println!("Invalid or missing subcommand for 'config'"),
|
||||
}
|
||||
}
|
||||
39 src/subcommands/credential.rs (new file)
@@ -0,0 +1,39 @@
|
||||
use clap::{Arg, Command, ArgMatches};
|
||||
|
||||
use crate::commands;
|
||||
use crate::commands::credential::CredentialArgs;
|
||||
|
||||
pub fn create() -> Command {
|
||||
Command::new("credential")
|
||||
.about("Manage set of credentials")
|
||||
.subcommand(
|
||||
Command::new("add")
|
||||
.arg(
|
||||
Arg::new("username")
|
||||
.required(true)
|
||||
.num_args(1)
|
||||
.value_name("NAME")
|
||||
.help("The username used to connect to nextcloud"),
|
||||
)
|
||||
.arg(
|
||||
Arg::new("password")
|
||||
.required(false)
|
||||
.num_args(1)
|
||||
.value_name("PASSWORD")
|
||||
.help("The passowd used to connect to nextcloud (optional)"),
|
||||
)
|
||||
.about("Add a new set of credential")
|
||||
)
|
||||
}
|
||||
|
||||
pub fn handler(args: &ArgMatches) {
|
||||
match args.subcommand() {
|
||||
Some(("add", add_matches)) => {
|
||||
commands::credential::credential_add(CredentialArgs {
|
||||
username: add_matches.get_one::<String>("username").unwrap().to_string(),
|
||||
password: add_matches.get_one::<String>("password").cloned(),
|
||||
});
|
||||
}
|
||||
_ => println!("Invalid or missing subcommand for 'credential'"),
|
||||
}
|
||||
}
|
||||
23 src/subcommands/init.rs (new file)
@@ -0,0 +1,23 @@
|
||||
use clap::{Arg, Command, ArgMatches};
|
||||
|
||||
use crate::global;
|
||||
use crate::commands;
|
||||
|
||||
pub fn create() -> Command {
|
||||
Command::new("init")
|
||||
.arg(
|
||||
Arg::new("directory")
|
||||
.required(false)
|
||||
.num_args(1)
|
||||
.value_name("DIRECTORY")
|
||||
)
|
||||
.about("Create an empty Nextsync repository")
|
||||
// Create an empty nextsync repository or reinitialize an existing one
|
||||
}
|
||||
|
||||
pub fn handler(args: &ArgMatches) {
|
||||
if let Some(val) = args.get_one::<String>("directory") {
|
||||
global::global::set_dir_path(val.to_string());
|
||||
}
|
||||
commands::init::init();
|
||||
}
|
||||
23 src/subcommands/pull.rs (new file)
@@ -0,0 +1,23 @@
|
||||
use clap::{Arg, Command, ArgMatches};
|
||||
|
||||
use crate::global;
|
||||
use crate::commands;
|
||||
|
||||
pub fn create() -> Command {
|
||||
Command::new("pull")
|
||||
.arg(
|
||||
Arg::new("path")
|
||||
.required(false)
|
||||
.num_args(1)
|
||||
.value_name("PATH")
|
||||
.help("The path to pull."),
|
||||
)
|
||||
.about("Fetch and integrate changes from the nextcloud server.")
|
||||
}
|
||||
|
||||
pub fn handler(args: &ArgMatches) {
|
||||
if let Some(val) = args.get_one::<String>("path") {
|
||||
global::global::set_dir_path(val.to_string());
|
||||
}
|
||||
commands::pull::pull();
|
||||
}
|
||||
6 src/subcommands/push.rs (new file)
@@ -0,0 +1,6 @@
|
||||
use clap::Command;
|
||||
|
||||
pub fn create() -> Command {
|
||||
Command::new("push")
|
||||
.about("Push changes on nextcloud")
|
||||
}
|
||||
46 src/subcommands/remote.rs (new file)
@@ -0,0 +1,46 @@
|
||||
use clap::{Arg, Command, ArgMatches, ArgAction};
|
||||
|
||||
use crate::commands;
|
||||
use crate::commands::remote::RemoteArgs;
|
||||
|
||||
pub fn create() -> Command {
|
||||
Command::new("remote")
|
||||
.about("Manage set of tracked repositories")
|
||||
.subcommand(
|
||||
Command::new("add")
|
||||
.arg(
|
||||
Arg::new("name")
|
||||
.required(true)
|
||||
.index(1)
|
||||
.help("The name of the remote"),
|
||||
)
|
||||
.arg(
|
||||
Arg::new("url")
|
||||
.required(true)
|
||||
.index(2)
|
||||
.help("The url of the remote"),
|
||||
)
|
||||
.about("Add a new remote to this repository")
|
||||
)
|
||||
.arg(
|
||||
Arg::new("verbose")
|
||||
.short('v')
|
||||
.long("verbose")
|
||||
.action(ArgAction::SetTrue)
|
||||
.help("Be a little more verbose and show remote url after name.")
|
||||
)
|
||||
}
|
||||
|
||||
pub fn handler(args: &ArgMatches) {
|
||||
match args.subcommand() {
|
||||
Some(("add", add_matches)) => {
|
||||
commands::remote::remote_add(RemoteArgs {
|
||||
name: add_matches.get_one::<String>("name").unwrap().to_string(),
|
||||
url: add_matches.get_one::<String>("url").unwrap().to_string(),
|
||||
});
|
||||
}
|
||||
_ => {
|
||||
commands::remote::remote_list(*args.get_one::<bool>("verbose").unwrap());
|
||||
}
|
||||
}
|
||||
}
|
||||
24 src/subcommands/remote_diff.rs (new file)
@@ -0,0 +1,24 @@
|
||||
use clap::{Arg, Command, ArgMatches};
|
||||
|
||||
use crate::global;
|
||||
use crate::commands;
|
||||
|
||||
pub fn create() -> Command {
|
||||
Command::new("remote-diff")
|
||||
.arg(
|
||||
Arg::new("path")
|
||||
.required(false)
|
||||
.num_args(1)
|
||||
.value_name("PATH")
|
||||
.help("The path to pull."),
|
||||
)
|
||||
.about("Fetch changes from the nextcloud server.")
|
||||
}
|
||||
|
||||
|
||||
pub fn handler(args: &ArgMatches) {
|
||||
if let Some(val) = args.get_one::<String>("path") {
|
||||
global::global::set_dir_path(val.to_string());
|
||||
}
|
||||
commands::remote_diff::remote_diff();
|
||||
}
|
||||
6 src/subcommands/reset.rs (new file)
@@ -0,0 +1,6 @@
|
||||
use clap::Command;
|
||||
|
||||
pub fn create() -> Command {
|
||||
Command::new("reset")
|
||||
.about("Clear the index")
|
||||
}
|
||||
30 src/subcommands/status.rs (new file)
@@ -0,0 +1,30 @@
|
||||
use clap::{Arg, Command, ArgMatches};
|
||||
|
||||
use crate::global;
|
||||
use crate::commands;
|
||||
use crate::commands::status::StatusArgs;
|
||||
|
||||
pub fn create() -> Command {
|
||||
Command::new("status")
|
||||
.arg(
|
||||
Arg::new("directory")
|
||||
.num_args(1)
|
||||
.value_name("DIRECTORY")
|
||||
)
|
||||
.arg(
|
||||
Arg::new("nostyle")
|
||||
.long("nostyle")
|
||||
.help("Status with minium information and style"),
|
||||
)
|
||||
.about("Show the working tree status")
|
||||
}
|
||||
|
||||
pub fn handler(args: &ArgMatches) {
|
||||
if let Some(val) = args.get_one::<String>("directory") {
|
||||
global::global::set_dir_path(val.to_string());
|
||||
}
|
||||
|
||||
commands::status::status(StatusArgs {
|
||||
nostyle: args.contains_id("nostyle"),
|
||||
});
|
||||
}
|
||||
@@ -4,3 +4,4 @@ pub mod nextsyncignore;
|
||||
pub mod api;
|
||||
pub mod time;
|
||||
pub mod remote;
|
||||
pub mod into;
|
||||
|
||||
@@ -1,3 +1,5 @@
|
||||
use crate::commands::{clone::get_url_props, config};
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct ApiProps {
|
||||
pub host: String, // nextcloud.example.com
|
||||
@@ -15,11 +17,30 @@ impl Clone for ApiProps {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_api_props() -> ApiProps {
|
||||
let remote = match config::get_remote("origin") {
|
||||
Some(r) => r,
|
||||
None => {
|
||||
eprintln!("fatal: unable to find a remote");
|
||||
std::process::exit(1);
|
||||
}
|
||||
};
|
||||
|
||||
let (host, username, root) = get_url_props(&remote);
|
||||
ApiProps {
|
||||
host,
|
||||
username: username.unwrap().to_owned(),
|
||||
root: root.to_owned(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_relative_s(p: String, api_props: &ApiProps) -> String {
|
||||
let mut final_p = p.clone();
|
||||
final_p = final_p.strip_prefix("/remote.php/dav/files/").unwrap().to_string();
|
||||
final_p = final_p.strip_prefix(&api_props.username).unwrap().to_string();
|
||||
final_p = final_p.strip_prefix(&api_props.root).unwrap().to_string();
|
||||
if final_p.starts_with("/") {
|
||||
final_p = final_p.strip_prefix("/").unwrap().to_string();
|
||||
}
|
||||
final_p
|
||||
}
|
||||
|
||||
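A test-style sketch of `get_relative_s` above, using invented values: the DAV prefix, the username and the configured root are stripped in turn, leaving a path relative to the synced directory. Whether `ApiProps` can be built literally like this depends on field visibility not shown in this hunk, so treat the construction as an assumption:

```rust
#[cfg(test)]
mod relative_path_sketch {
    use super::*;

    #[test]
    fn strips_dav_prefix_username_and_root() {
        // invented values for illustration
        let props = ApiProps {
            host: String::from("cloud.example.com"),
            username: String::from("alice"),
            root: String::from("/myrepo"),
        };
        let p = String::from("/remote.php/dav/files/alice/myrepo/dir/file1");
        assert_eq!(get_relative_s(p, &props), "dir/file1");
    }
}
```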
30 src/utils/into.rs (new file)
@@ -0,0 +1,30 @@
|
||||
use std::path::{PathBuf, Path};
|
||||
|
||||
pub trait IntoPathBuf {
|
||||
fn into(self) -> PathBuf;
|
||||
}
|
||||
|
||||
impl IntoPathBuf for PathBuf {
|
||||
fn into(self) -> PathBuf {
|
||||
self
|
||||
}
|
||||
}
|
||||
|
||||
impl IntoPathBuf for &Path {
|
||||
fn into(self) -> PathBuf {
|
||||
PathBuf::from(self)
|
||||
}
|
||||
}
|
||||
|
||||
impl IntoPathBuf for String {
|
||||
fn into(self) -> PathBuf {
|
||||
PathBuf::from(self)
|
||||
}
|
||||
}
|
||||
|
||||
impl IntoPathBuf for &str {
|
||||
fn into(self) -> PathBuf {
|
||||
PathBuf::from(self)
|
||||
}
|
||||
}
|
||||
|
||||
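The point of `IntoPathBuf` is that callers such as `Blob::from_path` and `Tree::from_path` accept a `&str`, `String`, `&Path` or `PathBuf` through one generic parameter. A small usage sketch (the helper name is made up):

```rust
#[cfg(test)]
mod into_sketch {
    use super::IntoPathBuf;
    use std::path::PathBuf;

    // hypothetical helper showing the generic bound in use
    fn takes_any_path<S: IntoPathBuf>(p: S) -> PathBuf {
        p.into()
    }

    #[test]
    fn str_and_pathbuf_both_work() {
        assert_eq!(takes_any_path("dir/file1"), PathBuf::from("dir/file1"));
        assert_eq!(takes_any_path(PathBuf::from("dir/file1")), PathBuf::from("dir/file1"));
    }
}
```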
@@ -23,6 +23,13 @@ pub fn read_lines() -> Result<Vec<String>, ()> {
|
||||
Ok(vec![])
|
||||
}
|
||||
|
||||
pub fn get_rules() -> Vec<String> {
|
||||
match read_lines() {
|
||||
Ok(r) => r,
|
||||
Err(_) => vec![],
|
||||
}
|
||||
}
|
||||
|
||||
pub fn _ignore_files(files: &mut Vec<String>) -> (bool, Vec<String>) {
|
||||
let mut ignored_f = vec![];
|
||||
if let Ok(lines) = read_lines() {
|
||||
@@ -80,6 +87,7 @@ pub fn ignore_file(path: &String, lines: Vec<String>, ignored_f: &mut Vec<String
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use std::io::Cursor;
|
||||
|
||||
#[test]
|
||||
fn test_ignore_files() {
|
||||
|
||||
@@ -1,8 +1,61 @@
|
||||
use std::env;
|
||||
use std::fs::canonicalize;
|
||||
use std::path::{PathBuf, Path};
|
||||
use std::path::{PathBuf, Path, Component};
|
||||
|
||||
use crate::global::global::DIR_PATH;
|
||||
|
||||
/// Normalize the path, trying to remove and resolve `..` components.
|
||||
/// Taken from https://stackoverflow.com/questions/68231306/stdfscanonicalize-for-files-that-dont-exist
|
||||
///
|
||||
/// This assumes that `a/b/../c` is `a/c` which might be different from
|
||||
/// what the OS would have chosen when b is a link. This is OK
|
||||
/// for broot verb arguments but can't be generally used elsewhere
|
||||
///
|
||||
/// This function ensures a given path ending with '/' still
|
||||
/// ends with '/' after normalization.
|
||||
pub fn normalize_path<P: AsRef<Path>>(path: P) -> PathBuf {
|
||||
let ends_with_slash = path.as_ref()
|
||||
.to_str()
|
||||
.map_or(false, |s| s.ends_with('/'));
|
||||
let mut normalized = PathBuf::new();
|
||||
for component in path.as_ref().components() {
|
||||
match &component {
|
||||
Component::ParentDir => {
|
||||
if !normalized.pop() {
|
||||
normalized.push(component);
|
||||
}
|
||||
}
|
||||
_ => {
|
||||
normalized.push(component);
|
||||
}
|
||||
}
|
||||
}
|
||||
if ends_with_slash {
|
||||
normalized.push("");
|
||||
}
|
||||
normalized
|
||||
}
|
||||
|
||||
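A worked example of `normalize_path`, runnable as a test against the function above: `..` pops the previous component, and a leading `..` that cannot be popped is kept.

```rust
#[cfg(test)]
mod normalize_sketch {
    use super::normalize_path;
    use std::path::PathBuf;

    #[test]
    fn pops_parent_components() {
        // "a/b/../c" collapses to "a/c"
        assert_eq!(normalize_path("a/b/../c"), PathBuf::from("a/c"));
        // a ".." with nothing to pop is preserved
        assert_eq!(normalize_path("../a"), PathBuf::from("../a"));
    }
}
```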
pub fn normalize_relative(file: &str) -> Result<String, String> {
|
||||
let current = match current() {
|
||||
Some(p) => p,
|
||||
None => {
|
||||
return Err("cannot find current location".to_owned());
|
||||
}
|
||||
};
|
||||
|
||||
let p = {
|
||||
let tmp_p = current.join(PathBuf::from(file));
|
||||
normalize_path(tmp_p)
|
||||
};
|
||||
|
||||
let relative_p = match p.strip_prefix(repo_root()) {
|
||||
Ok(p) => p,
|
||||
Err(_) => return Err("is not in a nextsync repo or doesn't exist".to_owned()),
|
||||
};
|
||||
Ok(relative_p.to_str().unwrap().to_owned())
|
||||
}
|
||||
|
||||
pub fn current() -> Option<PathBuf> {
|
||||
let d = DIR_PATH.lock().unwrap();
|
||||
|
||||
@@ -51,12 +104,23 @@ pub fn repo_root() -> PathBuf {
|
||||
}
|
||||
}
|
||||
|
||||
pub fn is_nextsync_config(path: PathBuf) -> bool {
|
||||
path.ends_with(".nextsync") || path.starts_with(".nextsync")
|
||||
}
|
||||
|
||||
pub fn nextsync() -> PathBuf {
|
||||
let mut path = repo_root();
|
||||
path.push(".nextsync");
|
||||
path
|
||||
}
|
||||
|
||||
pub fn config() -> PathBuf {
|
||||
let mut path = repo_root();
|
||||
path.push(".nextsync");
|
||||
path.push("config");
|
||||
path
|
||||
}
|
||||
|
||||
pub fn objects() -> PathBuf {
|
||||
let mut path = repo_root();
|
||||
path.push(".nextsync");
|
||||
@@ -64,6 +128,13 @@ pub fn objects() -> PathBuf {
|
||||
path
|
||||
}
|
||||
|
||||
pub fn refs() -> PathBuf {
|
||||
let mut path = repo_root();
|
||||
path.push(".nextsync");
|
||||
path.push("refs");
|
||||
path
|
||||
}
|
||||
|
||||
pub fn nextsyncignore() -> Option<PathBuf> {
|
||||
let mut path = repo_root();
|
||||
path.push(".nextsyncignore");
|
||||
@@ -73,3 +144,11 @@ pub fn nextsyncignore() -> Option<PathBuf> {
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
pub fn path_buf_to_string(p: PathBuf) -> String {
|
||||
if let Some(str) = p.to_str() {
|
||||
str.to_string()
|
||||
} else {
|
||||
String::new()
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,4 +1,8 @@
|
||||
use crate::services::{req_props::ObjProps, api::ApiError};
|
||||
use std::path::PathBuf;
|
||||
use crate::{services::{req_props::ObjProps, api::ApiError}, store::object::{blob::Blob, Object}, commands::status::State};
|
||||
use std::collections::HashMap;
|
||||
|
||||
use super::{path::{path_buf_to_string, self}, read};
|
||||
|
||||
pub struct EnumerateOptions {
|
||||
pub depth: Option<String>,
|
||||
@@ -7,20 +11,26 @@ pub struct EnumerateOptions {
|
||||
|
||||
pub fn enumerate_remote(
|
||||
req: impl Fn(&str) -> Result<Vec<ObjProps>, ApiError>,
|
||||
should_skip: &dyn Fn(ObjProps) -> bool,
|
||||
should_skip: Option<&dyn Fn(ObjProps) -> bool>,
|
||||
options: EnumerateOptions
|
||||
) -> (Vec<ObjProps>, Vec<ObjProps>) {
|
||||
|
||||
let mut folders: Vec<ObjProps> = vec![ObjProps::new()];
|
||||
let mut all_folders: Vec<ObjProps> = vec![];
|
||||
let mut deleted: Vec<PathBuf> = vec![];
|
||||
let mut files: Vec<ObjProps> = vec![];
|
||||
let mut objs_hashmap: HashMap<String, Vec<String>> = HashMap::new();
|
||||
|
||||
objs_hashmap.insert(
|
||||
options.relative_s.clone().unwrap_or(String::new()),
|
||||
Vec::new());
|
||||
|
||||
while folders.len() > 0 {
|
||||
let folder = folders.pop().unwrap();
|
||||
|
||||
let relative_s = match folder.relative_s {
|
||||
Some(relative_s) => relative_s,
|
||||
None => options.relative_s.clone().unwrap_or(String::from("")),
|
||||
None => options.relative_s.clone().unwrap_or(String::new())
|
||||
};
|
||||
|
||||
// request folder content
|
||||
@@ -44,14 +54,82 @@ pub fn enumerate_remote(
|
||||
};
|
||||
|
||||
// separate folders and files in response
|
||||
let mut iter = objs.iter();
|
||||
let d = options.depth.clone()
|
||||
.unwrap_or("0".to_owned())
|
||||
.parse::<u16>()
|
||||
.unwrap();
|
||||
|
||||
// the first element is skipped because it is the fetched folder itself
|
||||
if let Some(should_skip_fct) = should_skip.clone() {
|
||||
iter_with_skip_fct(
|
||||
objs,
|
||||
d,
|
||||
&mut files,
|
||||
&mut folders,
|
||||
should_skip_fct,
|
||||
&mut objs_hashmap,
|
||||
&mut all_folders);
|
||||
|
||||
// check for deletion only when folder are not empty
|
||||
// as the folder's content may not have been fetched yet
|
||||
for (key, children) in objs_hashmap.clone() {
|
||||
if children.len() != 0 {
|
||||
get_deleted(key.clone(), children, &mut deleted);
|
||||
objs_hashmap.remove(&key);
|
||||
}
|
||||
}
|
||||
} else {
|
||||
iter_without_skip_fct(
|
||||
objs,
|
||||
d,
|
||||
&mut files,
|
||||
&mut folders,
|
||||
&mut all_folders);
|
||||
}
|
||||
}
|
||||
|
||||
// go through all folders not checked for deletion before
|
||||
// as they were empty
|
||||
if let Some(_) = should_skip.clone() {
|
||||
for (key, children) in objs_hashmap.clone() {
|
||||
get_deleted(key.clone(), children, &mut deleted);
|
||||
objs_hashmap.remove(&key);
|
||||
}
|
||||
}
|
||||
|
||||
(all_folders, files)
|
||||
}
|
||||
|
||||
fn calc_depth(obj: &ObjProps) -> u16 {
|
||||
let path = obj.relative_s.clone().unwrap_or(String::new());
|
||||
path.split("/").count() as u16
|
||||
}
|
||||
|
||||
fn iter_with_skip_fct(
|
||||
objs: Vec<ObjProps>,
|
||||
d: u16,
|
||||
files: &mut Vec<ObjProps>,
|
||||
folders: &mut Vec<ObjProps>,
|
||||
should_skip: &dyn Fn(ObjProps) -> bool,
|
||||
objs_hashmap: &mut HashMap<String, Vec<String>>,
|
||||
all_folders: &mut Vec<ObjProps>) {
|
||||
|
||||
let mut iter = objs.iter();
|
||||
let default_depth = calc_depth(iter.next().unwrap());
|
||||
let d = options.depth.clone().unwrap_or("0".to_owned()).parse::<u16>().unwrap();
|
||||
let mut skip_depth = 0;
|
||||
|
||||
for object in iter {
|
||||
if object.is_dir() {
|
||||
let current_depth = calc_depth(object);
|
||||
|
||||
if object.is_dir() {
|
||||
// add this folder to its parent's entry only if that entry exists
|
||||
let mut r_path = PathBuf::from(object.relative_s.clone().unwrap());
|
||||
r_path.pop();
|
||||
let r_ps = path_buf_to_string(r_path);
|
||||
if let Some(values) = objs_hashmap.get_mut(&r_ps.clone()) {
|
||||
values.push(object.relative_s.clone().unwrap());
|
||||
}
|
||||
|
||||
// skip children of a skipped folder
|
||||
if skip_depth != 0 && skip_depth < current_depth {
|
||||
continue;
|
||||
@@ -61,6 +139,12 @@ pub fn enumerate_remote(
|
||||
if should_skip {
|
||||
skip_depth = current_depth;
|
||||
} else {
|
||||
// if this folder is not skipped then initialise its vector of children
|
||||
let r_ps_dir = object.relative_s.clone().unwrap();
|
||||
let mut r_ps_key = r_ps_dir.chars();
|
||||
r_ps_key.next_back();
|
||||
objs_hashmap.insert(r_ps_key.as_str().to_owned(), Vec::new());
|
||||
|
||||
skip_depth = 0;
|
||||
all_folders.push(object.clone());
|
||||
}
|
||||
@@ -70,7 +154,14 @@ pub fn enumerate_remote(
|
||||
folders.push(object.clone());
|
||||
}
|
||||
} else {
|
||||
let current_depth = calc_depth(object);
|
||||
// add this file to its parent's entry only if that entry exists
|
||||
let mut r_path = PathBuf::from(object.relative_s.clone().unwrap());
|
||||
r_path.pop();
|
||||
let r_ps = path_buf_to_string(r_path);
|
||||
if let Some(values) = objs_hashmap.get_mut(&r_ps.clone()) {
|
||||
values.push(object.relative_s.clone().unwrap());
|
||||
}
|
||||
|
||||
// skip children of a skipped folder
|
||||
if skip_depth != 0 && skip_depth < current_depth {
|
||||
continue;
|
||||
@@ -82,12 +173,86 @@ pub fn enumerate_remote(
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn iter_without_skip_fct(
|
||||
objs: Vec<ObjProps>,
|
||||
d: u16,
|
||||
files: &mut Vec<ObjProps>,
|
||||
folders: &mut Vec<ObjProps>,
|
||||
all_folders: &mut Vec<ObjProps>) {
|
||||
|
||||
let mut iter = objs.iter();
|
||||
let default_depth = calc_depth(iter.next().unwrap());
|
||||
|
||||
for object in iter {
|
||||
if object.is_dir() {
|
||||
// fetch the content of this folder if it is not already in this response
|
||||
let current_depth = calc_depth(object);
|
||||
if current_depth - default_depth == d {
|
||||
folders.push(object.clone());
|
||||
}
|
||||
all_folders.push(object.clone());
|
||||
} else {
|
||||
files.push(object.clone());
|
||||
}
|
||||
}
|
||||
|
||||
(all_folders, files)
|
||||
}
|
||||
|
||||
fn calc_depth(obj: &ObjProps) -> u16 {
|
||||
obj.relative_s.clone().unwrap_or(String::from("")).split("/").count() as u16
|
||||
fn get_non_new_local_element(iter: &mut dyn Iterator<Item = &PathBuf>) -> Option<PathBuf> {
|
||||
let mut el = iter.next();
|
||||
while !el.is_none() && {
|
||||
if el.unwrap().is_dir() {
|
||||
// ignore a newly created directory (not synced yet)
|
||||
!Object::new(el.unwrap().clone().to_str().unwrap()).exists()
|
||||
} else {
|
||||
// ignore a newly created file (not synced yet)
|
||||
Blob::from_path(el.unwrap().clone()).status(&mut None) == State::New
|
||||
}
|
||||
} {
|
||||
el = iter.next();
|
||||
}
|
||||
match el {
|
||||
Some(e) => Some(e.to_owned()),
|
||||
None => None
|
||||
}
|
||||
}
|
||||
|
||||
fn get_deleted(source: String, children: Vec<String>, deleted: &mut Vec<PathBuf>) {
|
||||
let root = path::repo_root();
|
||||
let abs_p = root.join(PathBuf::from(source.clone()));
|
||||
|
||||
let folder_read = read::read_folder(abs_p.clone());
|
||||
if let Ok(mut local_objs) = folder_read {
|
||||
// make paths relative to the repo root instead of absolute
|
||||
local_objs.iter_mut().for_each(|e| {
|
||||
*e = e.strip_prefix(path_buf_to_string(root.clone())).unwrap().to_path_buf();
|
||||
});
|
||||
|
||||
let mut iter = local_objs.iter();
|
||||
let mut local_element = get_non_new_local_element(&mut iter);
|
||||
|
||||
while let Some(local) = local_element {
|
||||
if let None = children.iter().position(|child| {
|
||||
let child_compared = {
|
||||
// remove the trailing '/' of a directory
|
||||
if child.ends_with("/") {
|
||||
let t = child.clone();
|
||||
let mut ts = t.chars();
|
||||
ts.next_back();
|
||||
ts.as_str().to_owned()
|
||||
} else {
|
||||
child.clone()
|
||||
}
|
||||
};
|
||||
|
||||
child_compared == path_buf_to_string(local.clone())
|
||||
}) {
|
||||
deleted.push(local.clone());
|
||||
}
|
||||
local_element = get_non_new_local_element(&mut iter);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
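To summarise the deletion check that `get_deleted` performs above: every local entry that was synced before but is missing from the server's child list for its folder is flagged as deleted remotely, with the trailing '/' on directory entries ignored. A standalone sketch with invented sample data:

```rust
// Standalone restatement of the comparison in get_deleted; sample data is invented.
use std::collections::HashSet;

fn deleted_remotely(server_children: &[&str], local_synced: &[&str]) -> Vec<String> {
    let on_server: HashSet<&str> = server_children
        .iter()
        .map(|c| c.trim_end_matches('/')) // directories are listed with a trailing '/'
        .collect();
    local_synced
        .iter()
        .filter(|l| !on_server.contains(**l))
        .map(|l| l.to_string())
        .collect()
}

fn main() {
    let server = ["dir/file1", "dir/sub/"];
    let local = ["dir/file1", "dir/file2", "dir/sub"];
    assert_eq!(deleted_remotely(&server, &local), vec!["dir/file2".to_string()]);
}
```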
116 tests/add.rs (new file)
@@ -0,0 +1,116 @@
|
||||
use std::str;
|
||||
|
||||
mod utils;
|
||||
use utils::{utils::*, client::ClientTest};
|
||||
|
||||
fn line_should_contains(lines: &Vec<String>, nb: usize, str: &str) {
|
||||
|
||||
if lines[nb].find(str).is_none()
|
||||
{
|
||||
eprintln!("'{}' not found in '{}'", str, lines[nb]);
|
||||
dbg!(lines);
|
||||
}
|
||||
|
||||
assert!(lines[nb].find(str).is_some());
|
||||
}
|
||||
|
||||
fn lines_should_not_contains(lines: Vec<String>, str: &str) {
|
||||
|
||||
for line in lines {
|
||||
if line.find("Changes not staged for push").is_some() {
|
||||
return;
|
||||
}
|
||||
|
||||
if line.find(str).is_some() {
|
||||
eprintln!("'{}' found in '{}'", str, line);
|
||||
}
|
||||
assert!(line.find(str).is_none());
|
||||
}
|
||||
}
|
||||
|
||||
fn collect_status_lines(client: &mut ClientTest) -> Vec<String> {
|
||||
let out = client.run_cmd("status");
|
||||
|
||||
str::from_utf8(&out.stdout)
|
||||
.unwrap()
|
||||
.split("\n")
|
||||
.map(|s| s.to_owned())
|
||||
.collect()
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod add_tests {
|
||||
use crate::utils::{server::ServerTest, status_utils::status_should_be_empty};
|
||||
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn simple_add() {
|
||||
let id = get_random_test_id();
|
||||
let mut client = ClientTest::new(id).init();
|
||||
|
||||
let _ = client.add_file("file1", "foo");
|
||||
client.run_cmd_ok("add file1");
|
||||
|
||||
let lines = collect_status_lines(&mut client);
|
||||
|
||||
// test
|
||||
line_should_contains(&lines, 2, "file1");
|
||||
|
||||
client.clean();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn add_config_file() {
|
||||
let id = get_random_test_id();
|
||||
let mut client = ClientTest::new(id).init();
|
||||
|
||||
let _ = client.add_file("file1", "foo");
|
||||
client.run_cmd_ok("add .nextsync -f");
|
||||
|
||||
let lines = collect_status_lines(&mut client);
|
||||
|
||||
// test
|
||||
lines_should_not_contains(lines, ".nextsync");
|
||||
|
||||
client.clean();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn add_dir_implicit() {
|
||||
let id = get_random_test_id();
|
||||
let mut client = ClientTest::new(id).init();
|
||||
|
||||
let _ = client.add_dir("dir");
|
||||
let _ = client.add_file("dir/file1", "foo");
|
||||
|
||||
// adding the file should add the dir
|
||||
client.run_cmd_ok("add dir/file1");
|
||||
|
||||
let lines = collect_status_lines(&mut client);
|
||||
|
||||
// tests
|
||||
line_should_contains(&lines, 2, "dir");
|
||||
line_should_contains(&lines, 3, "dir/file1");
|
||||
|
||||
client.clean();
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn add_file_no_changes() {
|
||||
// add a file, push it, then add it again
|
||||
let (mut client, mut server) = init_test();
|
||||
|
||||
let _ = client.add_file("file1", "foo");
|
||||
client.run_cmd_ok("add file1");
|
||||
client.run_cmd_ok("push");
|
||||
|
||||
status_should_be_empty(&mut client);
|
||||
|
||||
client.run_cmd_ok("add file1");
|
||||
status_should_be_empty(&mut client);
|
||||
|
||||
clean_test(client, &mut server)
|
||||
}
|
||||
|
||||
}
|
||||
36 tests/pull.rs (new file)
@@ -0,0 +1,36 @@
|
||||
mod utils;
|
||||
use utils::{utils::*};
|
||||
|
||||
|
||||
#[cfg(test)]
|
||||
mod pull_tests {
|
||||
use super::*;
|
||||
|
||||
#[test]
|
||||
fn simple_pull() {
|
||||
let (mut client, mut server) = init_test();
|
||||
|
||||
let _ = server.add_file("file1", "foo");
|
||||
client.run_cmd_ok("pull");
|
||||
|
||||
// tests
|
||||
assert!(client.has_file("file1", "foo"));
|
||||
|
||||
clean_test(client, &mut server);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn simple_pull_directory() {
|
||||
let (mut client, mut server) = init_test();
|
||||
|
||||
let _ = server.add_dir("dir");
|
||||
let _ = server.add_file("dir/file1", "foo");
|
||||
|
||||
client.run_cmd_ok("pull");
|
||||
|
||||
// tests
|
||||
assert!(client.has_file("dir/file1", "foo"));
|
||||
|
||||
clean_test(client, &mut server);
|
||||
}
|
||||
}
|
||||
168 tests/push.rs (new file)
@@ -0,0 +1,168 @@
mod utils;
use utils::{utils::*, status_utils::*};

#[cfg(test)]
mod push_tests {
    use super::*;

    #[test]
    fn simple_push() {
        let (mut client, mut server) = init_test();

        let _ = client.add_file("file1", "foo");
        client.run_cmd_ok("add file1");
        client.run_cmd_ok("push");

        // tests
        assert!(server.has_file("file1", "foo"));
        let (staged, not_staged) = client.get_status();
        lines_should_not_contains(staged, "file1");
        lines_should_not_contains(not_staged, "file1");

        clean_test(client, &mut server);
    }

    #[test]
    fn push_update() {
        let (mut client, mut server) = init_test();

        // init content of file1
        let _ = client.add_file("file1", "foo");
        client.run_cmd_ok("add file1");
        client.run_cmd_ok("push");

        // tests
        assert!(server.has_file("file1", "foo"));

        let (staged, not_staged) = client.get_status();
        lines_should_not_contains(staged, "file1");
        lines_should_not_contains(not_staged, "file1");

        // change content of file1
        let _ = client.add_file("file1", "bar");
        client.run_cmd_ok("add file1");
        client.run_cmd_ok("push");

        // tests
        assert!(server.has_file("file1", "bar"));
        let (staged, not_staged) = client.get_status();
        lines_should_not_contains(staged, "file1");
        lines_should_not_contains(not_staged, "file1");

        clean_test(client, &mut server);
    }

    #[test]
    fn push_dir_explicit() {
        let (mut client, mut server) = init_test();

        let _ = client.add_dir("dir");
        let _ = client.add_file("dir/file2", "bar");

        // push dir and file2
        client.run_cmd_ok("add dir");
        client.run_cmd_ok("push");

        // tests
        assert!(server.has_file("dir/file2", "bar"));
        let (staged, not_staged) = client.get_status();
        lines_should_not_contains(staged.clone(), "file2");
        lines_should_not_contains(staged, "foo");
        lines_should_not_contains(not_staged.clone(), "file2");
        lines_should_not_contains(not_staged, "foo");

        clean_test(client, &mut server);
    }

    #[test]
    fn push_dir_implicit() {
        let (mut client, mut server) = init_test();

        let _ = client.add_dir("dir");
        let _ = client.add_file("dir/file2", "bar");

        // push dir and file2
        client.run_cmd_ok("add dir/file2");
        client.run_cmd_ok("push");

        // tests
        assert!(server.has_file("dir/file2", "bar"));
        let (staged, not_staged) = client.get_status();
        lines_should_not_contains(staged.clone(), "file2");
        lines_should_not_contains(staged, "foo");
        lines_should_not_contains(not_staged.clone(), "file2");
        lines_should_not_contains(not_staged, "foo");

        clean_test(client, &mut server);
    }

    #[test]
    fn push_all() {
        let (mut client, mut server) = init_test();

        let _ = client.add_file("file1", "foo");
        let _ = client.add_dir("dir");
        let _ = client.add_file("dir/file2", "bar");

        // push everything (file1, dir and dir/file2)
        client.run_cmd_ok("add *");
        client.run_cmd_ok("push");

        // tests
        assert!(server.has_file("file1", "foo"));
        assert!(server.has_file("dir/file2", "bar"));
        let (staged, not_staged) = client.get_status();
        assert!(staged.len() == 0);
        assert!(not_staged.len() == 0);

        clean_test(client, &mut server);
    }

    #[test]
    fn push_file_deletion() {
        let (mut client, mut server) = init_test();

        let _ = client.add_file("file1", "foo");

        // push file1
        client.run_cmd_ok("add file1");
        client.run_cmd_ok("push");

        // tests
        assert!(server.has_file("file1", "foo"));
        status_should_be_empty(&mut client);

        // remove it
        let _ = client.remove_file("file1");
        client.run_cmd_ok("add file1");
        client.run_cmd_ok("push");

        // tests
        assert!(server.has_not_file("file1"));
        status_should_be_empty(&mut client);

        clean_test(client, &mut server);
    }

    #[test]
    fn push_dir_deletion() {
        let (mut client, mut server) = init_test();

        // push dir and file2
        let _ = client.add_dir("dir");
        let _ = client.add_file("dir/file2", "bar");
        client.run_cmd_ok("add dir");
        client.run_cmd_ok("push");

        // tests
        assert!(server.has_file("dir/file2", "bar"));

        // push deletion
        let _ = client.remove_dir("dir");
        client.run_cmd_ok("add dir");
        client.run_cmd_ok("push");
        assert!(server.has_not_dir("dir"));

        clean_test(client, &mut server);
    }
}
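A further case that these helpers would support is a nested tree pushed in one go. The test below is only a sketch built from the helpers already defined in this change (the nested paths and the test name are illustrative, and it is not part of the diff); it would sit inside mod push_tests like the tests above.

#[test]
fn push_nested_dirs() {
    let (mut client, mut server) = init_test();

    // build dir/sub/file3 locally (ClientTest::add_dir uses create_dir_all,
    // so the intermediate directory is created as well) and push the whole tree
    let _ = client.add_dir("dir/sub");
    let _ = client.add_file("dir/sub/file3", "baz");
    client.run_cmd_ok("add dir");
    client.run_cmd_ok("push");

    assert!(server.has_file("dir/sub/file3", "baz"));

    clean_test(client, &mut server);
}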
14
tests/utils.rs
Normal file
@@ -0,0 +1,14 @@
#[path = "utils/server.rs"]
pub mod server;

#[path = "utils/client.rs"]
pub mod client;

#[path = "utils/utils.rs"]
pub mod utils;

#[path = "utils/status_utils.rs"]
pub mod status_utils;

#[path = "utils/files_utils.rs"]
pub mod files_utils;
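The integration tests (tests/pull.rs, tests/push.rs and tests/add.rs) bring these helpers in with a plain mod utils; declaration. As a minimal sketch of how the pieces compose, a hypothetical extra test file, say tests/status.rs (file name and test body are illustrative, not part of this change), would look like this:

// Hypothetical tests/status.rs, shown only to illustrate how the helpers are wired together.
mod utils;
use utils::{utils::*, status_utils::*};

#[test]
fn status_empty_after_init() {
    // a fresh repository linked to a fresh server directory should report nothing
    let (mut client, mut server) = init_test();
    status_should_be_empty(&mut client);
    clean_test(client, &mut server);
}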
162
tests/utils/client.rs
Normal file
@@ -0,0 +1,162 @@
use std::str;
use std::process::{Command, Output};
use std::fs::{self, File};
use std::io::Write;
use std::env;
use std::path::PathBuf;

use super::files_utils::has_files;

#[cfg(test)]
pub struct ClientTest {
    user: String,        // the nextcloud user
    volume: String,      // temp dir for the test
    pub test_id: String, // name of the test (e.g. <rand>_nextsync)
    exe_path: PathBuf,   // absolute path of the nextsync executable
}

#[cfg(test)]
impl ClientTest {
    pub fn new(id: String) -> Self {
        // create a directory in /tmp with the given id
        let mut vol = String::from("/tmp/");
        vol.push_str(&id);
        let _ = fs::create_dir(vol.clone());

        // get nextsync path
        let mut exe_path = env::current_dir().unwrap();
        exe_path = exe_path.join("target/debug/nextsync");

        // build the client
        ClientTest {
            user: String::from("admin"),
            volume: vol,
            test_id: id,
            exe_path
        }
    }

    pub fn init(mut self) -> Self {
        self.run_cmd_ok("init");

        // set remote url
        let url = format!("{}@nextcloud.local/{}", self.user, self.test_id);
        self.run_cmd_ok(&format!("remote add origin {}", url));

        // set force_insecure as the debug server has no certificate
        self.run_cmd_ok("config set force_insecure true");

        // set the token used for requests
        self.run_cmd_ok(&format!("credential add {} {}", self.user, self.user));
        self
    }

    pub fn clean(self) -> Self {
        let _ = fs::remove_dir_all(&self.volume);
        self
    }

    pub fn run_cmd_ok(&mut self, args: &str) -> Output {
        let output = self.run_cmd(args);
        if !output.status.success() {
            println!("id: {}", self.test_id.clone());
            println!("Failed to execute: '{}'", args);
            println!("stderr: {}", String::from_utf8_lossy(&output.stderr));
            println!("stdout: {}", String::from_utf8_lossy(&output.stdout));
        }
        assert!(output.status.success());
        output
    }

    pub fn run_cmd(&mut self, args: &str) -> Output {
        Command::new(self.exe_path.to_str().unwrap())
            .current_dir(self.volume.clone())
            .args(args.split(" "))
            .output()
            .expect("Could not execute nextsync command")
    }

    pub fn add_dir(&mut self, name: &str) -> std::io::Result<()> {
        let mut path = self.volume.clone();
        path.push_str("/");
        path.push_str(name);
        fs::create_dir_all(path)?;
        Ok(())
    }

    pub fn add_file(&mut self, name: &str, content: &str) -> std::io::Result<()> {
        let mut path = self.volume.clone();
        path.push_str("/");
        path.push_str(name);

        let mut file = File::create(path)?;
        file.write_all(content.as_bytes())?;
        Ok(())
    }

    pub fn remove_file(&mut self, name: &str) -> std::io::Result<()> {
        let mut path = self.volume.clone();
        path.push_str("/");
        path.push_str(name);
        fs::remove_file(path)?;
        Ok(())
    }

    pub fn remove_dir(&mut self, name: &str) -> std::io::Result<()> {
        let mut path = self.volume.clone();
        path.push_str("/");
        path.push_str(name);
        fs::remove_dir_all(path)?;
        Ok(())
    }

    pub fn has_file(&mut self, file: &str, content: &str) -> bool {
        let full_path = PathBuf::from(self.volume.clone()).join(file);

        has_files(full_path, file, content, self.test_id.clone())
    }

    /// Get the files reported by the status command as two vectors (staged and not staged)
    pub fn get_status(&mut self) -> (Vec<String>, Vec<String>) {
        let out = self.run_cmd("status");

        let lines: Vec<String> = str::from_utf8(&out.stdout)
            .unwrap()
            .split("\n")
            .map(|s| s.to_owned())
            .collect();

        let mut staged = vec![];
        let mut not_staged = vec![];
        let mut in_staged = true;
        let mut counter = 0;
        for line in lines {
            if line.find("not staged").is_some() {
                in_staged = false;
                counter = 1;
                continue;
            }

            // skip the header lines, they are not files
            if counter < 2 {
                counter += 1;
                continue;
            }

            if line.is_empty() {
                continue;
            }

            if in_staged {
                staged.push(line);
            } else {
                not_staged.push(line);
            }
        }

        (staged, not_staged)
    }
}
50
tests/utils/files_utils.rs
Normal file
@@ -0,0 +1,50 @@
use std::io::{BufReader, BufRead};
use std::fs::File;
use std::path::PathBuf;

#[cfg(test)]
pub fn has_files(full_path: PathBuf, file: &str, content: &str, test_id: String) -> bool {
    if !full_path.exists() {
        println!("id: {}", test_id.clone());
        eprintln!("File '{}' doesn't exist", file);
        return false;
    }

    // only the first line is compared, which is enough for the single-line fixtures used in the tests
    let f = File::open(full_path).unwrap();
    for line in BufReader::new(f).lines() {
        if let Ok(line) = line {
            if line != content {
                println!("id: {}", test_id);
                eprintln!("File '{}' is not equal, {} != {}", file, line, content);
                return false;
            }
            return true;
        }
    }
    return true;
}

#[cfg(test)]
pub fn has_not_file(full_path: PathBuf, file: &str, test_id: String) -> bool {
    if full_path.exists() {
        println!("id: {}", test_id.clone());
        eprintln!("File '{}' exists but it shouldn't", file);
        return false;
    }

    return true;
}

#[cfg(test)]
pub fn has_not_dir(full_path: PathBuf, dir: &str, test_id: String) -> bool {
    if full_path.exists() {
        println!("id: {}", test_id.clone());
        eprintln!("Dir '{}' exists but it shouldn't", dir);
        return false;
    }

    return true;
}
123
tests/utils/server.rs
Normal file
@@ -0,0 +1,123 @@
use std::process::Command;
use std::os::unix::fs::PermissionsExt;
use std::fs::{self, File, Permissions};
use std::io::Write;
use std::env;
use std::path::PathBuf;

use super::files_utils::{self, has_files};

#[cfg(test)]
pub struct ServerTest {
    user: String,
    volume: PathBuf,
    pub test_id: String
}

#[cfg(test)]
impl ServerTest {
    pub fn new(id: String) -> Self {
        let mut volume = env::current_dir().unwrap();
        volume = volume.join("tests/data/admin/files");

        ServerTest {
            user: String::from("admin"),
            volume,
            test_id: id
        }
    }

    pub fn init(&mut self) -> &mut ServerTest {
        self.add_dir(&self.test_id.clone());
        self.volume = self.volume.join(self.test_id.clone());
        self.sync_root();
        self
    }

    pub fn clean(&mut self) -> &mut ServerTest {
        self.remove_dir(self.test_id.clone());
        self.sync_root();
        self
    }

    pub fn add_dir(&mut self, path: &str) -> &mut ServerTest {
        let mut full_path = self.volume.clone();
        full_path.push(path);

        match fs::create_dir(&full_path) {
            Ok(_) => {
                // Set permissions to 777 so nextcloud can access it (workaround that avoids
                // setting group and owner to www-data)
                if let Err(e) = fs::set_permissions(&full_path, Permissions::from_mode(0o777)) {
                    eprintln!("Error setting permissions: {}", e);
                }
            },
            Err(e) => eprintln!("Error creating directory: {}", e),
        }

        // do not sync the test directory when creating it
        if !path.ends_with("_nextsync") {
            self.sync_test();
        }
        self
    }

    pub fn add_file(&mut self, name: &str, content: &str) -> std::io::Result<()> {
        let mut full_path = self.volume.clone();
        full_path.push(name);

        let mut file = File::create(full_path)?;
        file.write_all(content.as_bytes())?;
        self.sync_test();
        Ok(())
    }

    pub fn remove_dir(&mut self, path: String) -> &mut ServerTest {
        let mut full_path = self.volume.clone();
        full_path.push(path);

        let _ = fs::remove_dir_all(&full_path);
        self.sync_test();
        self
    }

    fn sync_root(&self) -> &Self {
        self.sync("")
    }

    fn sync_test(&self) -> &Self {
        let test_id = self.test_id.clone();
        self.sync(&test_id)
    }

    fn sync(&self, path: &str) -> &Self {
        // perform the occ files:scan command inside the nextcloud docker container
        let nextcloud_docker = "master-nextcloud-1";
        let args = format!("exec -t --user www-data {} /var/www/html/occ files:scan --path=/{}/files/{}", nextcloud_docker, &self.user, path);

        let _output = Command::new("docker")
            .args(args.split(" "))
            .output()
            .expect("Could not execute docker exec command");
        self
    }

    pub fn has_file(&mut self, file: &str, content: &str) -> bool {
        let full_path = self.volume.clone().join(file);
        has_files(full_path, file, content, self.test_id.clone())
    }

    pub fn has_not_file(&mut self, file: &str) -> bool {
        let full_path = self.volume.clone().join(file);
        files_utils::has_not_file(full_path, file, self.test_id.clone())
    }

    pub fn has_not_dir(&mut self, dir: &str) -> bool {
        let full_path = self.volume.clone().join(dir);
        dbg!(full_path.clone());
        files_utils::has_not_file(full_path, dir, self.test_id.clone())
    }
}
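To make the sync step concrete, the sketch below shows what the args string built by sync() expands to for a sample test id (the id "abc1234_nextsync" is illustrative; the container name and format string are the ones used above). It is only an illustration of the docker exec / occ files:scan invocation, not part of this change.

// Illustration of the args built by sync("abc1234_nextsync") for user "admin",
// assuming the dev container is named "master-nextcloud-1" as in the helper above.
#[test]
fn occ_scan_args_example() {
    let args = format!(
        "exec -t --user www-data {} /var/www/html/occ files:scan --path=/{}/files/{}",
        "master-nextcloud-1", "admin", "abc1234_nextsync"
    );
    assert_eq!(
        args,
        "exec -t --user www-data master-nextcloud-1 /var/www/html/occ files:scan --path=/admin/files/abc1234_nextsync"
    );
}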
27
tests/utils/status_utils.rs
Normal file
@@ -0,0 +1,27 @@
use super::client::ClientTest;

#[cfg(test)]
pub fn lines_should_not_contains(lines: Vec<String>, str: &str) {
    for line in lines {
        if line.find(str).is_some() {
            eprintln!("'{}' found in '{}'", str, line);
        }
        assert!(line.find(str).is_none());
    }
}

#[cfg(test)]
pub fn status_should_be_empty(client: &mut ClientTest) {
    let (staged, not_staged) = client.get_status();
    if staged.len() != 0 {
        eprintln!("id: {}", client.test_id.clone());
        eprintln!("Staged should be empty but has '{}' line(s)", staged.len());
        assert!(staged.len() == 0);
    }

    if not_staged.len() != 0 {
        eprintln!("id: {}", client.test_id.clone());
        eprintln!("Not staged should be empty but has '{}' line(s)", not_staged.len());
        assert!(not_staged.len() == 0);
    }
}
31
tests/utils/utils.rs
Normal file
@@ -0,0 +1,31 @@
use rand::{distributions::Alphanumeric, Rng};
use super::client::ClientTest;
use super::server::ServerTest;

#[cfg(test)]
pub fn get_random_test_id() -> String {
    let mut id: String = rand::thread_rng()
        .sample_iter(&Alphanumeric)
        .take(7)
        .map(char::from)
        .collect();
    id.push_str("_nextsync");
    id
}

#[cfg(test)]
pub fn init_test() -> (ClientTest, ServerTest) {
    let id = get_random_test_id();
    let mut server = ServerTest::new(id.clone());
    server.init();
    let client = ClientTest::new(id).init();
    (client, server)
}

#[cfg(test)]
pub fn clean_test(client: ClientTest, server: &mut ServerTest) {
    client.clean();
    server.clean();
}