Compare commits

...

122 Commits

Author SHA1 Message Date
grimhilt
4533b9a72d refactor(tests): use init_test and clean_test 2024-05-07 18:20:32 +02:00
grimhilt
980d2d9a5d feat(add): prevent adding a file without changes 2024-05-07 18:12:05 +02:00
grimhilt
939b6f2fe3 feat: push deletion 2024-05-02 18:36:09 +02:00
grimhilt
4504b98112 fix(push): push deletion 2024-04-18 15:19:35 +02:00
grimhilt
e8c8ab9dfe fix(add): add deleted file 2024-04-16 17:54:25 +02:00
grimhilt
3420634bea chore: update clap 2024-03-31 22:17:26 +02:00
grimhilt
1aa02a24af test(push): add push remove test 2024-03-31 19:23:32 +02:00
grimhilt
5e43800d6c chore: update libraries 2024-03-31 19:19:10 +02:00
grimhilt
dc7df00ac9 chore: cleaning code 2024-03-17 00:20:58 +01:00
grimhilt
a1b9cde71a fix(tests): fix testsuite allow to pass push 2024-03-16 23:57:01 +01:00
grimhilt
7180647d26 test(pull): add test for pull 2024-03-11 14:47:05 +01:00
grimhilt
d5891a1a93 feat(push): add object when pushing directory 2024-03-11 14:16:41 +01:00
grimhilt
3207391fdb test(push): check that object are locally created when pushed 2024-03-10 23:06:32 +01:00
grimhilt
fa65b6b071 test(add): implicit dir 2024-03-10 17:29:50 +01:00
grimhilt
34dee1ceb6 fix(add): add directory implicitly 2024-03-10 17:29:37 +01:00
grimhilt
fe628ffc9f test(add): first tests 2024-03-10 16:49:21 +01:00
grimhilt
6b7a82bec6 fix: prevent adding nextsync config files 2024-03-10 16:49:06 +01:00
grimhilt
fdcd4633e5 fix: allow to push explicit directory 2024-03-10 16:19:23 +01:00
grimhilt
06bb51476b fix(push): push folder and return error when tcp fail 2024-03-01 17:56:52 +01:00
grimhilt
d8b2116aeb feat(remote): list remote with verbose option 2024-03-01 15:35:38 +01:00
grimhilt
8ed86a05ea style(obj): minor fixes 2024-02-29 09:36:52 +01:00
grimhilt
7951ad0520 refactor(tree): create impl Tree 2024-02-25 17:34:16 +01:00
grimhilt
faf7341525 refactor(blob): use object trait to create blob 2024-02-24 18:52:00 +01:00
grimhilt
642c358737 feat(test): allow multiple tests 2024-02-22 14:00:13 +01:00
grimhilt
e67082b85a refactor(test): use subdir 2024-02-22 13:02:22 +01:00
grimhilt
211e3702a3 refactor(test): remove old tests 2024-02-21 17:03:21 +01:00
grimhilt
a2f746d7f6 test: create first real test 2024-02-21 17:01:16 +01:00
grimhilt
69614b0c9f fix(token): allow to get and store token in local config 2024-02-21 17:01:14 +01:00
grimhilt
a5c5f4a713 fix(config): add option to last category 2024-02-21 17:01:10 +01:00
grimhilt
eaacff0e55 fix: minor warnings 2024-02-21 17:01:08 +01:00
grimhilt
287953c086 feat(config): create a proper config file with proper settings manipulation 2024-02-21 17:01:06 +01:00
grimhilt
6a11bb494b feat(credential): allow to add credential 2024-02-21 17:01:04 +01:00
grimhilt
1c60560c6e refactor(clone): set remote in config 2024-02-21 17:01:02 +01:00
grimhilt
c6534cfd40 feat(remote): add new remote 2024-02-21 17:00:43 +01:00
grimhilt
7719e27fe8 clean main: divide clap config into multiple files, broke clone 70 lines width 2023-10-28 23:46:12 +02:00
grimhilt
fc8e976c9c add add/directory tests 2023-10-28 22:23:48 +02:00
grimhilt
53b103af9e fix add -A 2023-10-28 22:12:27 +02:00
grimhilt
81c24b5e3c fix multiples warnings 2023-10-28 15:49:16 +02:00
grimhilt
22b9351862 add multiple test for the add command 2023-10-28 15:45:35 +02:00
grimhilt
0c925bc4f4 count global number of tests 2023-10-28 15:45:06 +02:00
grimhilt
d34b9bab5e globbing in add and clean the function 2023-10-28 15:44:53 +02:00
grimhilt
56234eaa3d add todos and fix some bugs on add and status 2023-10-28 00:15:47 +02:00
grimhilt
fd477a8139 start some tests on add command 2023-10-28 00:14:14 +02:00
grimhilt
559316e756 add user agent on login request 2023-10-27 23:04:01 +02:00
grimhilt
f4a905c57f store token 2023-10-24 15:32:51 +02:00
grimhilt
c6cf8a9730 update readme to show auth 2023-10-21 22:29:00 +02:00
grimhilt
f6db6992a0 working login system 2023-10-21 22:27:34 +02:00
grimhilt
908ead5b11 change name of functions 2023-10-21 21:48:21 +02:00
grimhilt
9ea1d01c27 add trait ApiCall 2023-10-21 21:47:48 +02:00
grimhilt
07f6405b26 test login 2023-10-21 19:54:11 +02:00
grimhilt
dadf00f4a5 add import necessary for test 2023-09-12 15:48:37 +02:00
grimhilt
a35c7b20d8 cleaning warnings 2023-08-27 22:57:05 +02:00
grimhilt
863e3bd68a find deletion on pull 2023-08-27 22:50:51 +02:00
grimhilt
57647e5df2 implement -all option to add 2023-08-25 18:52:29 +02:00
grimhilt
41c4796555 push copy file 2023-08-25 16:34:16 +02:00
grimhilt
aced8b992a create IntoPathBuf 2023-08-25 16:25:29 +02:00
grimhilt
d323ae3070 push move file 2023-08-25 16:09:28 +02:00
grimhilt
d476622305 prevent copy or move of empty file 2023-08-24 22:19:11 +02:00
grimhilt
498fada9ec push modification 2023-08-24 20:59:41 +02:00
grimhilt
f64d719b31 find modified, copied, moved file in staged 2023-08-23 12:52:45 +02:00
grimhilt
dcf137667b clean code 2023-08-11 22:09:34 +02:00
grimhilt
5b46b1e2f1 not cleaned status with moved and copied 2023-08-11 18:21:29 +02:00
grimhilt
4b12edbe5c add refs to blob 2023-08-11 18:21:00 +02:00
grimhilt
16dbd25168 add modified files in status 2023-08-04 19:17:21 +02:00
grimhilt
91a29480df fix blob creation and get changes 2023-08-04 19:17:04 +02:00
grimhilt
ce047eba12 prevent crash of downloader when not logging 2023-08-04 19:16:44 +02:00
grimhilt
94220be935 add timestamp and hash in blob 2023-08-04 16:01:24 +02:00
grimhilt
d5097727cb create impl for blob 2023-08-04 15:25:51 +02:00
grimhilt
cb43a46456 draft of pull 2023-07-28 13:22:55 +02:00
grimhilt
4c34df7cfe remote-diff only a directory 2023-07-28 13:22:13 +02:00
grimhilt
29def4967c normalize path in add and check if path correspond to deleted object 2023-07-24 00:49:08 +02:00
grimhilt
2775c77c55 improve status and minor fixes 2023-07-24 00:48:22 +02:00
grimhilt
4e20ec94f9 remote diff with new opti remote enumerator 2023-07-21 17:10:57 +02:00
grimhilt
f01983b29d optimize clone by allowing to fetch with a different depth 2023-07-21 16:00:04 +02:00
grimhilt
0832100d83 enumerate_remote function & applied to clone 2023-07-21 14:45:45 +02:00
grimhilt
30004ebd8b remote-diff draft 2023-07-20 00:59:57 +02:00
grimhilt
dfd42389f3 loading bar when downloading 2023-07-14 15:33:44 +02:00
grimhilt
80d497d47c creation of a downloader 2023-07-13 23:36:39 +02:00
grimhilt
70fb733b05 use stream to download file 2023-07-13 23:34:52 +02:00
grimhilt
60e0bf76a0 improve parser in reqprops to take into account not found props 2023-07-13 16:19:33 +02:00
grimhilt
b4cb78c676 fix issue when getting long body in reqprops 2023-07-13 16:02:20 +02:00
grimhilt
0922066baa add contentlength in reqprops 2023-07-13 16:02:04 +02:00
grimhilt
2d4905f506 allow not updating parent's date when cloning 2023-07-13 00:21:37 +02:00
grimhilt
5bc9ef0035 Update README.md 2023-07-02 19:07:24 +02:00
grimhilt
f56dcd30b8 cleaning all warnings 2023-07-02 19:04:45 +02:00
grimhilt
ddf2169950 use PathBuf in obj instead of &Path and get lastmodified when pushing changes 2023-07-02 18:50:33 +02:00
grimhilt
5ccce48381 update date to all parent 2023-07-02 18:49:56 +02:00
grimhilt
4ec389b6cc improve documentation 2023-07-02 15:51:51 +02:00
grimhilt
ed599d1399 add about for the help 2023-06-30 00:58:26 +02:00
grimhilt
8173bbc7d5 Update README.md 2023-06-30 00:57:32 +02:00
grimhilt
95bd5e7366 add license 2023-06-30 00:28:58 +02:00
grimhilt
b08e6d3898 set depth to 0 when looking for single result in req_props 2023-06-30 00:21:14 +02:00
grimhilt
723ceb2655 push deleted dir 2023-06-29 23:52:48 +02:00
grimhilt
2a0fe6d1d1 minor changes 2023-06-29 23:52:40 +02:00
grimhilt
45f8d486d8 remove objects 2023-06-29 23:52:19 +02:00
grimhilt
f812ad411e improve staging to take files in new folders 2023-06-29 23:51:13 +02:00
grimhilt
601f176198 improve getting staged in status 2023-06-29 22:55:19 +02:00
grimhilt
da3d605baa remove env variable for remote 2023-06-25 21:27:31 +02:00
grimhilt
b74c5c176b check if the user is in a nextsync repo and fail otherwise 2023-06-25 18:26:59 +02:00
grimhilt
d3592a5209 check if directory where the init command is clone is empty 2023-06-25 18:03:56 +02:00
grimhilt
39d1032c14 check if directory to clone is empty 2023-06-25 17:56:55 +02:00
grimhilt
20926514d2 allow url format without username 2023-06-25 17:45:51 +02:00
grimhilt
b07e3062b7 push new dir 2023-06-25 17:27:11 +02:00
grimhilt
a1aeb65600 push deleted 2023-06-24 17:00:15 +02:00
grimhilt
675b650200 check timestamp for conflicts with remote 2023-06-24 16:52:02 +02:00
grimhilt
2c593fb254 separate tree and blob in store and create get_timestamp 2023-06-24 16:42:09 +02:00
grimhilt
23908c135c better architecture of PushChanges and applied to new 2023-06-24 16:07:53 +02:00
grimhilt
4842a20024 organize import order 2023-06-19 18:31:03 +02:00
grimhilt
8b3ba64e48 move push factory in several files 2023-06-19 18:25:51 +02:00
grimhilt
f1d552a31c move async in services 2023-06-19 18:04:50 +02:00
grimhilt
4cde39dffd add lastmodified on folder 2023-06-19 17:34:18 +02:00
grimhilt
5d25429546 add lastmodified in store for files 2023-06-19 17:23:52 +02:00
grimhilt
e0d4c5efac add lastmodified in req props 2023-06-17 16:41:00 +02:00
grimhilt
1fd7948122 merge list folders and req props 2023-06-17 16:26:31 +02:00
grimhilt
eabf707844 minor cleaning 2023-06-17 15:54:09 +02:00
grimhilt
7cfd572ad0 cleaning clone 2023-06-17 15:36:18 +02:00
grimhilt
b16058b4d3 cleaning imports and warnings 2023-06-17 01:26:13 +02:00
grimhilt
ea2b0772af move parsing into list_folders 2023-06-17 01:20:00 +02:00
grimhilt
b911ad8606 move creation of downloaded file in service 2023-06-17 00:37:12 +02:00
grimhilt
0bf5fb76e0 allow http if forced 2023-06-16 23:30:44 +02:00
grimhilt
7a34b3c79b optimize add 2023-06-16 23:22:18 +02:00
grimhilt
cbbf185b1a use retain 2023-06-16 18:49:08 +02:00
80 changed files with 6759 additions and 1375 deletions

15
.gitignore vendored
View File

@@ -1,7 +1,10 @@
*
!/**/
!*.rs
!.gitignore
!README.md
!LICENSE
target
*.test
.env
todo
.nextsync
.nextsyncignore
test
tests/nextcloud-docker-dev
tests/data

895
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -6,13 +6,25 @@ edition = "2021"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
reqwest = { version = "0.11", features = ["blocking", "json", "multipart"] }
tokio = { version = "1", features = ["full"] }
rustc-serialize="0.3.25"
reqwest = { version = "0.12", features = ["stream", "json", "multipart"] }
tokio = { version = "1.37", features = ["full"] }
dotenv ="0.15.0"
clap = "2.33"
clap = "4.5.4"
rust-crypto = "0.2.36"
colored = "2.0.0"
xml-rs = "0.8.0"
regex = "1.8.3"
colored = "2.1.0"
xml-rs = "0.8.19"
regex = "1.10.4"
lazy_static = "1.4.0"
glob = "0.3.1"
textwrap = "0.16.1"
chrono = "0.4.37"
indicatif = "0.17.8"
md5 = "0.7.0"
futures-util = "0.3.30"
rpassword = "7.3.1"
rand = "0.8.5"
tempfile = "3.10.1"
[profile.release]
debug = true

674
LICENSE Normal file
View File

@@ -0,0 +1,674 @@
GNU GENERAL PUBLIC LICENSE
Version 3, 29 June 2007
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
Preamble
The GNU General Public License is a free, copyleft license for
software and other kinds of works.
The licenses for most software and other practical works are designed
to take away your freedom to share and change the works. By contrast,
the GNU General Public License is intended to guarantee your freedom to
share and change all versions of a program--to make sure it remains free
software for all its users. We, the Free Software Foundation, use the
GNU General Public License for most of our software; it applies also to
any other work released this way by its authors. You can apply it to
your programs, too.
When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
them if you wish), that you receive source code or can get it if you
want it, that you can change the software or use pieces of it in new
free programs, and that you know you can do these things.
To protect your rights, we need to prevent others from denying you
these rights or asking you to surrender the rights. Therefore, you have
certain responsibilities if you distribute copies of the software, or if
you modify it: responsibilities to respect the freedom of others.
For example, if you distribute copies of such a program, whether
gratis or for a fee, you must pass on to the recipients the same
freedoms that you received. You must make sure that they, too, receive
or can get the source code. And you must show them these terms so they
know their rights.
Developers that use the GNU GPL protect your rights with two steps:
(1) assert copyright on the software, and (2) offer you this License
giving you legal permission to copy, distribute and/or modify it.
For the developers' and authors' protection, the GPL clearly explains
that there is no warranty for this free software. For both users' and
authors' sake, the GPL requires that modified versions be marked as
changed, so that their problems will not be attributed erroneously to
authors of previous versions.
Some devices are designed to deny users access to install or run
modified versions of the software inside them, although the manufacturer
can do so. This is fundamentally incompatible with the aim of
protecting users' freedom to change the software. The systematic
pattern of such abuse occurs in the area of products for individuals to
use, which is precisely where it is most unacceptable. Therefore, we
have designed this version of the GPL to prohibit the practice for those
products. If such problems arise substantially in other domains, we
stand ready to extend this provision to those domains in future versions
of the GPL, as needed to protect the freedom of users.
Finally, every program is threatened constantly by software patents.
States should not allow patents to restrict development and use of
software on general-purpose computers, but in those that do, we wish to
avoid the special danger that patents applied to a free program could
make it effectively proprietary. To prevent this, the GPL assures that
patents cannot be used to render the program non-free.
The precise terms and conditions for copying, distribution and
modification follow.
TERMS AND CONDITIONS
0. Definitions.
"This License" refers to version 3 of the GNU General Public License.
"Copyright" also means copyright-like laws that apply to other kinds of
works, such as semiconductor masks.
"The Program" refers to any copyrightable work licensed under this
License. Each licensee is addressed as "you". "Licensees" and
"recipients" may be individuals or organizations.
To "modify" a work means to copy from or adapt all or part of the work
in a fashion requiring copyright permission, other than the making of an
exact copy. The resulting work is called a "modified version" of the
earlier work or a work "based on" the earlier work.
A "covered work" means either the unmodified Program or a work based
on the Program.
To "propagate" a work means to do anything with it that, without
permission, would make you directly or secondarily liable for
infringement under applicable copyright law, except executing it on a
computer or modifying a private copy. Propagation includes copying,
distribution (with or without modification), making available to the
public, and in some countries other activities as well.
To "convey" a work means any kind of propagation that enables other
parties to make or receive copies. Mere interaction with a user through
a computer network, with no transfer of a copy, is not conveying.
An interactive user interface displays "Appropriate Legal Notices"
to the extent that it includes a convenient and prominently visible
feature that (1) displays an appropriate copyright notice, and (2)
tells the user that there is no warranty for the work (except to the
extent that warranties are provided), that licensees may convey the
work under this License, and how to view a copy of this License. If
the interface presents a list of user commands or options, such as a
menu, a prominent item in the list meets this criterion.
1. Source Code.
The "source code" for a work means the preferred form of the work
for making modifications to it. "Object code" means any non-source
form of a work.
A "Standard Interface" means an interface that either is an official
standard defined by a recognized standards body, or, in the case of
interfaces specified for a particular programming language, one that
is widely used among developers working in that language.
The "System Libraries" of an executable work include anything, other
than the work as a whole, that (a) is included in the normal form of
packaging a Major Component, but which is not part of that Major
Component, and (b) serves only to enable use of the work with that
Major Component, or to implement a Standard Interface for which an
implementation is available to the public in source code form. A
"Major Component", in this context, means a major essential component
(kernel, window system, and so on) of the specific operating system
(if any) on which the executable work runs, or a compiler used to
produce the work, or an object code interpreter used to run it.
The "Corresponding Source" for a work in object code form means all
the source code needed to generate, install, and (for an executable
work) run the object code and to modify the work, including scripts to
control those activities. However, it does not include the work's
System Libraries, or general-purpose tools or generally available free
programs which are used unmodified in performing those activities but
which are not part of the work. For example, Corresponding Source
includes interface definition files associated with source files for
the work, and the source code for shared libraries and dynamically
linked subprograms that the work is specifically designed to require,
such as by intimate data communication or control flow between those
subprograms and other parts of the work.
The Corresponding Source need not include anything that users
can regenerate automatically from other parts of the Corresponding
Source.
The Corresponding Source for a work in source code form is that
same work.
2. Basic Permissions.
All rights granted under this License are granted for the term of
copyright on the Program, and are irrevocable provided the stated
conditions are met. This License explicitly affirms your unlimited
permission to run the unmodified Program. The output from running a
covered work is covered by this License only if the output, given its
content, constitutes a covered work. This License acknowledges your
rights of fair use or other equivalent, as provided by copyright law.
You may make, run and propagate covered works that you do not
convey, without conditions so long as your license otherwise remains
in force. You may convey covered works to others for the sole purpose
of having them make modifications exclusively for you, or provide you
with facilities for running those works, provided that you comply with
the terms of this License in conveying all material for which you do
not control copyright. Those thus making or running the covered works
for you must do so exclusively on your behalf, under your direction
and control, on terms that prohibit them from making any copies of
your copyrighted material outside their relationship with you.
Conveying under any other circumstances is permitted solely under
the conditions stated below. Sublicensing is not allowed; section 10
makes it unnecessary.
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
No covered work shall be deemed part of an effective technological
measure under any applicable law fulfilling obligations under article
11 of the WIPO copyright treaty adopted on 20 December 1996, or
similar laws prohibiting or restricting circumvention of such
measures.
When you convey a covered work, you waive any legal power to forbid
circumvention of technological measures to the extent such circumvention
is effected by exercising rights under this License with respect to
the covered work, and you disclaim any intention to limit operation or
modification of the work as a means of enforcing, against the work's
users, your or third parties' legal rights to forbid circumvention of
technological measures.
4. Conveying Verbatim Copies.
You may convey verbatim copies of the Program's source code as you
receive it, in any medium, provided that you conspicuously and
appropriately publish on each copy an appropriate copyright notice;
keep intact all notices stating that this License and any
non-permissive terms added in accord with section 7 apply to the code;
keep intact all notices of the absence of any warranty; and give all
recipients a copy of this License along with the Program.
You may charge any price or no price for each copy that you convey,
and you may offer support or warranty protection for a fee.
5. Conveying Modified Source Versions.
You may convey a work based on the Program, or the modifications to
produce it from the Program, in the form of source code under the
terms of section 4, provided that you also meet all of these conditions:
a) The work must carry prominent notices stating that you modified
it, and giving a relevant date.
b) The work must carry prominent notices stating that it is
released under this License and any conditions added under section
7. This requirement modifies the requirement in section 4 to
"keep intact all notices".
c) You must license the entire work, as a whole, under this
License to anyone who comes into possession of a copy. This
License will therefore apply, along with any applicable section 7
additional terms, to the whole of the work, and all its parts,
regardless of how they are packaged. This License gives no
permission to license the work in any other way, but it does not
invalidate such permission if you have separately received it.
d) If the work has interactive user interfaces, each must display
Appropriate Legal Notices; however, if the Program has interactive
interfaces that do not display Appropriate Legal Notices, your
work need not make them do so.
A compilation of a covered work with other separate and independent
works, which are not by their nature extensions of the covered work,
and which are not combined with it such as to form a larger program,
in or on a volume of a storage or distribution medium, is called an
"aggregate" if the compilation and its resulting copyright are not
used to limit the access or legal rights of the compilation's users
beyond what the individual works permit. Inclusion of a covered work
in an aggregate does not cause this License to apply to the other
parts of the aggregate.
6. Conveying Non-Source Forms.
You may convey a covered work in object code form under the terms
of sections 4 and 5, provided that you also convey the
machine-readable Corresponding Source under the terms of this License,
in one of these ways:
a) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by the
Corresponding Source fixed on a durable physical medium
customarily used for software interchange.
b) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by a
written offer, valid for at least three years and valid for as
long as you offer spare parts or customer support for that product
model, to give anyone who possesses the object code either (1) a
copy of the Corresponding Source for all the software in the
product that is covered by this License, on a durable physical
medium customarily used for software interchange, for a price no
more than your reasonable cost of physically performing this
conveying of source, or (2) access to copy the
Corresponding Source from a network server at no charge.
c) Convey individual copies of the object code with a copy of the
written offer to provide the Corresponding Source. This
alternative is allowed only occasionally and noncommercially, and
only if you received the object code with such an offer, in accord
with subsection 6b.
d) Convey the object code by offering access from a designated
place (gratis or for a charge), and offer equivalent access to the
Corresponding Source in the same way through the same place at no
further charge. You need not require recipients to copy the
Corresponding Source along with the object code. If the place to
copy the object code is a network server, the Corresponding Source
may be on a different server (operated by you or a third party)
that supports equivalent copying facilities, provided you maintain
clear directions next to the object code saying where to find the
Corresponding Source. Regardless of what server hosts the
Corresponding Source, you remain obligated to ensure that it is
available for as long as needed to satisfy these requirements.
e) Convey the object code using peer-to-peer transmission, provided
you inform other peers where the object code and Corresponding
Source of the work are being offered to the general public at no
charge under subsection 6d.
A separable portion of the object code, whose source code is excluded
from the Corresponding Source as a System Library, need not be
included in conveying the object code work.
A "User Product" is either (1) a "consumer product", which means any
tangible personal property which is normally used for personal, family,
or household purposes, or (2) anything designed or sold for incorporation
into a dwelling. In determining whether a product is a consumer product,
doubtful cases shall be resolved in favor of coverage. For a particular
product received by a particular user, "normally used" refers to a
typical or common use of that class of product, regardless of the status
of the particular user or of the way in which the particular user
actually uses, or expects or is expected to use, the product. A product
is a consumer product regardless of whether the product has substantial
commercial, industrial or non-consumer uses, unless such uses represent
the only significant mode of use of the product.
"Installation Information" for a User Product means any methods,
procedures, authorization keys, or other information required to install
and execute modified versions of a covered work in that User Product from
a modified version of its Corresponding Source. The information must
suffice to ensure that the continued functioning of the modified object
code is in no case prevented or interfered with solely because
modification has been made.
If you convey an object code work under this section in, or with, or
specifically for use in, a User Product, and the conveying occurs as
part of a transaction in which the right of possession and use of the
User Product is transferred to the recipient in perpetuity or for a
fixed term (regardless of how the transaction is characterized), the
Corresponding Source conveyed under this section must be accompanied
by the Installation Information. But this requirement does not apply
if neither you nor any third party retains the ability to install
modified object code on the User Product (for example, the work has
been installed in ROM).
The requirement to provide Installation Information does not include a
requirement to continue to provide support service, warranty, or updates
for a work that has been modified or installed by the recipient, or for
the User Product in which it has been modified or installed. Access to a
network may be denied when the modification itself materially and
adversely affects the operation of the network or violates the rules and
protocols for communication across the network.
Corresponding Source conveyed, and Installation Information provided,
in accord with this section must be in a format that is publicly
documented (and with an implementation available to the public in
source code form), and must require no special password or key for
unpacking, reading or copying.
7. Additional Terms.
"Additional permissions" are terms that supplement the terms of this
License by making exceptions from one or more of its conditions.
Additional permissions that are applicable to the entire Program shall
be treated as though they were included in this License, to the extent
that they are valid under applicable law. If additional permissions
apply only to part of the Program, that part may be used separately
under those permissions, but the entire Program remains governed by
this License without regard to the additional permissions.
When you convey a copy of a covered work, you may at your option
remove any additional permissions from that copy, or from any part of
it. (Additional permissions may be written to require their own
removal in certain cases when you modify the work.) You may place
additional permissions on material, added by you to a covered work,
for which you have or can give appropriate copyright permission.
Notwithstanding any other provision of this License, for material you
add to a covered work, you may (if authorized by the copyright holders of
that material) supplement the terms of this License with terms:
a) Disclaiming warranty or limiting liability differently from the
terms of sections 15 and 16 of this License; or
b) Requiring preservation of specified reasonable legal notices or
author attributions in that material or in the Appropriate Legal
Notices displayed by works containing it; or
c) Prohibiting misrepresentation of the origin of that material, or
requiring that modified versions of such material be marked in
reasonable ways as different from the original version; or
d) Limiting the use for publicity purposes of names of licensors or
authors of the material; or
e) Declining to grant rights under trademark law for use of some
trade names, trademarks, or service marks; or
f) Requiring indemnification of licensors and authors of that
material by anyone who conveys the material (or modified versions of
it) with contractual assumptions of liability to the recipient, for
any liability that these contractual assumptions directly impose on
those licensors and authors.
All other non-permissive additional terms are considered "further
restrictions" within the meaning of section 10. If the Program as you
received it, or any part of it, contains a notice stating that it is
governed by this License along with a term that is a further
restriction, you may remove that term. If a license document contains
a further restriction but permits relicensing or conveying under this
License, you may add to a covered work material governed by the terms
of that license document, provided that the further restriction does
not survive such relicensing or conveying.
If you add terms to a covered work in accord with this section, you
must place, in the relevant source files, a statement of the
additional terms that apply to those files, or a notice indicating
where to find the applicable terms.
Additional terms, permissive or non-permissive, may be stated in the
form of a separately written license, or stated as exceptions;
the above requirements apply either way.
8. Termination.
You may not propagate or modify a covered work except as expressly
provided under this License. Any attempt otherwise to propagate or
modify it is void, and will automatically terminate your rights under
this License (including any patent licenses granted under the third
paragraph of section 11).
However, if you cease all violation of this License, then your
license from a particular copyright holder is reinstated (a)
provisionally, unless and until the copyright holder explicitly and
finally terminates your license, and (b) permanently, if the copyright
holder fails to notify you of the violation by some reasonable means
prior to 60 days after the cessation.
Moreover, your license from a particular copyright holder is
reinstated permanently if the copyright holder notifies you of the
violation by some reasonable means, this is the first time you have
received notice of violation of this License (for any work) from that
copyright holder, and you cure the violation prior to 30 days after
your receipt of the notice.
Termination of your rights under this section does not terminate the
licenses of parties who have received copies or rights from you under
this License. If your rights have been terminated and not permanently
reinstated, you do not qualify to receive new licenses for the same
material under section 10.
9. Acceptance Not Required for Having Copies.
You are not required to accept this License in order to receive or
run a copy of the Program. Ancillary propagation of a covered work
occurring solely as a consequence of using peer-to-peer transmission
to receive a copy likewise does not require acceptance. However,
nothing other than this License grants you permission to propagate or
modify any covered work. These actions infringe copyright if you do
not accept this License. Therefore, by modifying or propagating a
covered work, you indicate your acceptance of this License to do so.
10. Automatic Licensing of Downstream Recipients.
Each time you convey a covered work, the recipient automatically
receives a license from the original licensors, to run, modify and
propagate that work, subject to this License. You are not responsible
for enforcing compliance by third parties with this License.
An "entity transaction" is a transaction transferring control of an
organization, or substantially all assets of one, or subdividing an
organization, or merging organizations. If propagation of a covered
work results from an entity transaction, each party to that
transaction who receives a copy of the work also receives whatever
licenses to the work the party's predecessor in interest had or could
give under the previous paragraph, plus a right to possession of the
Corresponding Source of the work from the predecessor in interest, if
the predecessor has it or can get it with reasonable efforts.
You may not impose any further restrictions on the exercise of the
rights granted or affirmed under this License. For example, you may
not impose a license fee, royalty, or other charge for exercise of
rights granted under this License, and you may not initiate litigation
(including a cross-claim or counterclaim in a lawsuit) alleging that
any patent claim is infringed by making, using, selling, offering for
sale, or importing the Program or any portion of it.
11. Patents.
A "contributor" is a copyright holder who authorizes use under this
License of the Program or a work on which the Program is based. The
work thus licensed is called the contributor's "contributor version".
A contributor's "essential patent claims" are all patent claims
owned or controlled by the contributor, whether already acquired or
hereafter acquired, that would be infringed by some manner, permitted
by this License, of making, using, or selling its contributor version,
but do not include claims that would be infringed only as a
consequence of further modification of the contributor version. For
purposes of this definition, "control" includes the right to grant
patent sublicenses in a manner consistent with the requirements of
this License.
Each contributor grants you a non-exclusive, worldwide, royalty-free
patent license under the contributor's essential patent claims, to
make, use, sell, offer for sale, import and otherwise run, modify and
propagate the contents of its contributor version.
In the following three paragraphs, a "patent license" is any express
agreement or commitment, however denominated, not to enforce a patent
(such as an express permission to practice a patent or covenant not to
sue for patent infringement). To "grant" such a patent license to a
party means to make such an agreement or commitment not to enforce a
patent against the party.
If you convey a covered work, knowingly relying on a patent license,
and the Corresponding Source of the work is not available for anyone
to copy, free of charge and under the terms of this License, through a
publicly available network server or other readily accessible means,
then you must either (1) cause the Corresponding Source to be so
available, or (2) arrange to deprive yourself of the benefit of the
patent license for this particular work, or (3) arrange, in a manner
consistent with the requirements of this License, to extend the patent
license to downstream recipients. "Knowingly relying" means you have
actual knowledge that, but for the patent license, your conveying the
covered work in a country, or your recipient's use of the covered work
in a country, would infringe one or more identifiable patents in that
country that you have reason to believe are valid.
If, pursuant to or in connection with a single transaction or
arrangement, you convey, or propagate by procuring conveyance of, a
covered work, and grant a patent license to some of the parties
receiving the covered work authorizing them to use, propagate, modify
or convey a specific copy of the covered work, then the patent license
you grant is automatically extended to all recipients of the covered
work and works based on it.
A patent license is "discriminatory" if it does not include within
the scope of its coverage, prohibits the exercise of, or is
conditioned on the non-exercise of one or more of the rights that are
specifically granted under this License. You may not convey a covered
work if you are a party to an arrangement with a third party that is
in the business of distributing software, under which you make payment
to the third party based on the extent of your activity of conveying
the work, and under which the third party grants, to any of the
parties who would receive the covered work from you, a discriminatory
patent license (a) in connection with copies of the covered work
conveyed by you (or copies made from those copies), or (b) primarily
for and in connection with specific products or compilations that
contain the covered work, unless you entered into that arrangement,
or that patent license was granted, prior to 28 March 2007.
Nothing in this License shall be construed as excluding or limiting
any implied license or other defenses to infringement that may
otherwise be available to you under applicable patent law.
12. No Surrender of Others' Freedom.
If conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot convey a
covered work so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you may
not convey it at all. For example, if you agree to terms that obligate you
to collect a royalty for further conveying from those to whom you convey
the Program, the only way you could satisfy both those terms and this
License would be to refrain entirely from conveying the Program.
13. Use with the GNU Affero General Public License.
Notwithstanding any other provision of this License, you have
permission to link or combine any covered work with a work licensed
under version 3 of the GNU Affero General Public License into a single
combined work, and to convey the resulting work. The terms of this
License will continue to apply to the part which is the covered work,
but the special requirements of the GNU Affero General Public License,
section 13, concerning interaction through a network will apply to the
combination as such.
14. Revised Versions of this License.
The Free Software Foundation may publish revised and/or new versions of
the GNU General Public License from time to time. Such new versions will
be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.
Each version is given a distinguishing version number. If the
Program specifies that a certain numbered version of the GNU General
Public License "or any later version" applies to it, you have the
option of following the terms and conditions either of that numbered
version or of any later version published by the Free Software
Foundation. If the Program does not specify a version number of the
GNU General Public License, you may choose any version ever published
by the Free Software Foundation.
If the Program specifies that a proxy can decide which future
versions of the GNU General Public License can be used, that proxy's
public statement of acceptance of a version permanently authorizes you
to choose that version for the Program.
Later license versions may give you additional or different
permissions. However, no additional obligations are imposed on any
author or copyright holder as a result of your choosing to follow a
later version.
15. Disclaimer of Warranty.
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
16. Limitation of Liability.
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
SUCH DAMAGES.
17. Interpretation of Sections 15 and 16.
If the disclaimer of warranty and limitation of liability provided
above cannot be given local legal effect according to their terms,
reviewing courts shall apply local law that most closely approximates
an absolute waiver of all civil liability in connection with the
Program, unless a warranty or assumption of liability accompanies a
copy of the Program in return for a fee.
END OF TERMS AND CONDITIONS
How to Apply These Terms to Your New Programs
If you develop a new program, and you want it to be of the greatest
possible use to the public, the best way to achieve this is to make it
free software which everyone can redistribute and change under these terms.
To do so, attach the following notices to the program. It is safest
to attach them to the start of each source file to most effectively
state the exclusion of warranty; and each file should have at least
the "copyright" line and a pointer to where the full notice is found.
<one line to give the program's name and a brief idea of what it does.>
Copyright (C) <year> <name of author>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
Also add information on how to contact you by electronic and paper mail.
If the program does terminal interaction, make it output a short
notice like this when it starts in an interactive mode:
<program> Copyright (C) <year> <name of author>
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
This is free software, and you are welcome to redistribute it
under certain conditions; type `show c' for details.
The hypothetical commands `show w' and `show c' should show the appropriate
parts of the General Public License. Of course, your program's commands
might be different; for a GUI interface, you would use an "about box".
You should also get your employer (if you work as a programmer) or school,
if any, to sign a "copyright disclaimer" for the program, if necessary.
For more information on this, and how to apply and follow the GNU GPL, see
<https://www.gnu.org/licenses/>.
The GNU General Public License does not permit incorporating your program
into proprietary programs. If your program is a subroutine library, you
may consider it more useful to permit linking proprietary applications with
the library. If this is what you want to do, use the GNU Lesser General
Public License instead of this License. But first, please read
<https://www.gnu.org/licenses/why-not-lgpl.html>.

39
README.md Normal file
View File

@@ -0,0 +1,39 @@
# Nextsync
A git-like command line tool to interact with Nextcloud.
This is **a work in progress**.
This should work pretty much like git, with some adaptations to make it more debuggable (for now) and easier to code. There is no history, and therefore no need to commit; to upload new files you simply add and push them.
## Features
- [x] Cloning
- [x] Status (new, deleted, modified, copied, moved)
- [x] Pushing updates (new, deleted, modified)
- [x] Using a .nextsyncignore to ignore files
- [ ] Pulling changes
- [x] Auth with a token
- [ ] Remember token
- [ ] Various optimisations
## Usage
```
USAGE:
nextsync [SUBCOMMAND]
FLAGS:
-h, --help Prints help information
-V, --version Prints version information
SUBCOMMANDS:
add Add changes to the index
clone Clone a repository into a new directory
config
help Prints this message or the help of the given subcommand(s)
init Create an empty Nextsync repository
push Push changes on nextcloud
reset Clear the index
status Show the working tree status
```

View File

@@ -0,0 +1,21 @@
# Conventions
## Path Variables
Considering cloning:
* ``https://nextcloud.example.com/remote.php/dav/files/grimhilt/dir/dir_to_clone``
We have (in ``ApiProps`` for example):
* ``host``: ``https://nextcloud.example.com``
* ``username``: ``grimhilt``
* ``root``: ``/dir/dir_to_clone``
Concerning paths we have:
* ``remote_p``: ``/remote.php/dav/files/grimhilt/dir/dir_to_clone/D1/D1_F1.md``
* ``ref_p``: ``/home/grimhilt/dir_cloned``
* ``relative_p``: ``D1/D1_F1.md``
* ``abs_p``: ``/home/grimhilt/dir_cloned/D1/D1_F1.md``
Use prefix:
* ``p`` for ``Path`` or ``PathBuf``
* ``ps`` for ``String``

View File

@@ -2,12 +2,14 @@
## Blob object
```
file_name timestamp size hash
file_name timestamp1 size timestamp2 hash
```
timestamp1: timestamp of file on server to know if the server has an update
timestamp2: timestamp of file locally to know when the file has changed on the system
## Tree object
```
folder_name timestamp
tree hash_path folder_name
blob hash_path file_name
```
```

View File

@@ -5,3 +5,7 @@ pub mod reset;
pub mod clone;
pub mod push;
pub mod config;
pub mod remote_diff;
pub mod remote;
pub mod pull;
pub mod credential;

View File

@@ -1,52 +1,121 @@
use clap::Values;
use crate::utils::{self, nextsyncignore};
use crate::store;
use std::path::{Path, PathBuf};
use std::io::Write;
use std::path::{Path, PathBuf};
use glob::glob;
use crate::store::{self, object::Object};
use crate::utils::{self, path};
use crate::store::object::object::{Obj, ObjMethods};
use crate::utils::nextsyncignore::{self, ignore_file};
use crate::utils::path::{normalize_relative, repo_root, path_buf_to_string};
pub struct AddArgs<'a> {
pub files: Values<'a>,
pub struct AddArgs {
pub files: Vec<String>,
pub force: bool,
pub all: bool,
}
// todo match deleted files
pub fn add(args: AddArgs) {
let mut index_file = store::index::open();
let mut added_files: Vec<String> = vec![];
let file_vec: Vec<&str> = args.files.collect();
let mut pattern: String;
let file_vec: Vec<String> = match args.all {
true => {
pattern = path_buf_to_string(repo_root());
pattern.push_str("/*");
vec![pattern]
},
false => args.files,
};
let mut added_files: Vec<String> = vec![];
let mut ignored_f = vec![];
let rules = nextsyncignore::get_rules();
for file in file_vec {
let path = Path::new(file);
let f = match normalize_relative(&file) {
Ok(f) => f,
Err(err) => {
eprintln!("err: {} {}", file, err);
continue;
}
};
let path = repo_root().join(Path::new(&f));
match path.exists() {
true => {
if path.is_dir() {
added_files.push(String::from(path.to_str().unwrap()));
add_folder_content(path.to_path_buf(), &mut added_files);
} else {
added_files.push(String::from(path.to_str().unwrap()));
let mut obj = Obj::from_path(f.clone());
if obj.has_changes() {
add_entry(path, args.force, &mut added_files, rules.clone(), &mut ignored_f);
}
},
false => {
// todo deleted file/folder verif if exists
added_files.push(String::from(path.to_str().unwrap()));
if Obj::from_path(file.clone()).exists_on_remote() {
// object is deleted so not present but can still be added for deletion
added_files.push(String::from(f));
} else {
// try globbing if nothing has been found
for entry in try_globbing(path) {
add_entry(entry, args.force, &mut added_files, rules.clone(), &mut ignored_f);
}
}
}
}
}
// check ignored file if not forced
if !args.force {
let (ignored, ignored_files) = nextsyncignore::ignore_files(&mut added_files);
if ignored {
// todo multiple nextsyncignore
println!("The following paths are ignored by your .nextsyncignore file:");
for file in ignored_files {
println!("{}", file);
}
}
print_ignored_files(ignored_f);
write_added_files(added_files);
}
fn add_entry(entry: PathBuf, force: bool, added_files: &mut Vec<String>, rules: Vec<String>, ignored_f: &mut Vec<String>) {
// ignore nextsync config files
if path::is_nextsync_config(entry.clone()) {
return;
}
// save all added_files in index
// check if the file must be ignored
if !force && ignore_file(&path_buf_to_string(entry.clone()), rules, ignored_f) {
return;
}
// add the parent if there is one and it is not already created
add_parent(entry.clone(), added_files);
added_files.push(path_buf_to_string(entry.strip_prefix(repo_root()).unwrap().to_path_buf()));
if entry.is_dir() {
add_folder_content(entry.to_path_buf(), added_files);
}
}
fn add_parent(entry: PathBuf, added_files: &mut Vec<String>) {
let test_parent = entry.strip_prefix(repo_root()).unwrap().parent();
if test_parent.is_none() || test_parent.unwrap() == PathBuf::new() {
return;
}
let parent = entry.parent().unwrap();
if !Obj::from_path(parent).exists_on_remote() {
add_parent(parent.to_path_buf(), added_files);
added_files.push(path_buf_to_string(parent.strip_prefix(repo_root()).unwrap().to_path_buf()));
}
}
fn print_ignored_files(ignored_files: Vec<String>) {
if ignored_files.len() > 0 {
// todo multiple nextsyncignore
println!("The following paths are ignored by your .nextsyncignore file:");
for file in ignored_files {
println!("{}", file);
}
}
}
fn write_added_files(added_files: Vec<String>) {
let mut index_file = store::index::open();
for file in added_files {
if store::index::alread_added(file.clone()) {
continue;
}
match writeln!(index_file, "{}", file) {
Ok(()) => (),
Err(err) => eprintln!("{}", err),
@@ -55,7 +124,25 @@ pub fn add(args: AddArgs) {
drop(index_file);
}
fn try_globbing(path: PathBuf) -> Vec<PathBuf> {
let mut paths: Vec<PathBuf> = vec![];
if let Ok(entries) = glob(path.to_str().unwrap()) {
for entry in entries {
match entry {
Ok(ppath) => paths.push(ppath),
Err(e) => {
eprintln!("err: {} incorrect pattern ({})", path.display(), e);
}
}
}
} else {
eprintln!("err: {} is not something you can add.", path.to_str().unwrap());
}
return paths;
}
fn add_folder_content(path: PathBuf, added_files: &mut Vec<String>) {
// todo check for changes
let mut folders: Vec<PathBuf> = vec![];
folders.push(path);
@@ -63,12 +150,16 @@ fn add_folder_content(path: PathBuf, added_files: &mut Vec<String>) {
if let Ok(entries) = utils::read::read_folder(folder.clone()) {
for entry in entries {
let path_entry = PathBuf::from(entry);
if path_entry.is_dir() {
folders.push(path_entry.clone());
if !path::is_nextsync_config(path_entry.clone())
{
if path_entry.is_dir() {
folders.push(path_entry.clone());
}
added_files.push(path_buf_to_string(path_entry.strip_prefix(repo_root()).unwrap().to_path_buf()));
}
added_files.push(String::from(path_entry.to_str().unwrap()));
}
}
}
}

View File

@@ -1,185 +1,127 @@
use std::fs::OpenOptions;
use std::fs::DirBuilder;
use std::io;
use std::io::prelude::*;
use std::io::{self, Cursor};
use std::fs::DirBuilder;
use std::path::{Path, PathBuf};
use clap::Values;
use regex::Regex;
use xml::reader::{EventReader, XmlEvent};
use crate::services::api::ApiError;
use crate::services::list_folders::ListFolders;
use crate::services::download_files::DownloadFiles;
use crate::store::object;
use crate::commands;
use crate::services::downloader::Downloader;
use crate::utils::api::ApiProps;
use crate::utils::path::path_buf_to_string;
use crate::utils::remote::{enumerate_remote, EnumerateOptions};
use crate::global::global::{DIR_PATH, set_dir_path};
use crate::services::api::ApiError;
use crate::services::api_call::ApiCall;
use crate::services::req_props::{ReqProps, ObjProps};
use crate::store::object::{tree::Tree, blob::Blob};
use crate::commands::config;
use crate::commands::init;
pub fn clone(remote: Values<'_>) {
pub const DEPTH: &str = "3";
pub struct CloneArgs {
pub remote: String,
pub depth: Option<String>,
}
pub fn clone(args: CloneArgs) {
let d = DIR_PATH.lock().unwrap().clone();
let url = remote.clone().next().unwrap();
let (domain, tmp_user, dist_path_str) = get_url_props(url);
let url = args.remote.clone();
let (host, tmp_user, dist_path_str) = get_url_props(&url);
let username = match tmp_user {
Some(u) => u,
Some(u) => u.to_string(),
None => {
eprintln!("No username found");
todo!();
""
println!("Please enter the username of the webdav instance: ");
let stdin = io::stdin();
stdin.lock().lines().next().unwrap().unwrap()
}
};
let api_props = ApiProps {
host: host.clone(),
username,
root: dist_path_str.to_string(),
};
let local_path = match d.clone() {
let ref_path = match d.clone() {
Some(dir) => Path::new(&dir).to_owned(),
None => {
let iter = Path::new(dist_path_str).iter();
let dest_dir = iter.last().unwrap();
let lp = std::env::current_dir().unwrap().join(dest_dir);
set_dir_path(lp.to_str().unwrap().to_string());
set_dir_path(path_buf_to_string(lp.clone()));
lp
},
};
let mut folders = vec![String::from(dist_path_str)];
let mut url_request;
let mut files: Vec<String> = vec![];
let mut first_iter = true;
while folders.len() > 0 {
let folder = folders.pop().unwrap();
url_request = String::from(domain.clone());
if first_iter {
url_request.push_str("/remote.php/dav/files/");
url_request.push_str(username);
}
url_request.push_str(folder.as_str());
// try to create root folder
if DirBuilder::new().recursive(true).create(ref_path.clone()).is_err() {
eprintln!("fatal: unable to create the destination directory");
std::process::exit(1);
} else {
init::init();
// request folder content
let mut body = Default::default();
tokio::runtime::Runtime::new().unwrap().block_on(async {
body = ListFolders::new(url_request.as_str())
.send_with_res()
.await;
// set remote origin in config file
let mut remote_url = api_props.username.clone();
remote_url.push_str("@");
remote_url.push_str(api_props.host.strip_prefix("https://").unwrap());
remote_url.push_str(&api_props.root);
if config::add_remote("origin", &remote_url).is_err()
{
eprintln!("err: not able to save remote");
}
}
let depth = &args.depth.clone().unwrap_or(DEPTH.to_string());
let (folders, files) = enumerate_remote(
|a| req(&api_props, depth, a),
None,
EnumerateOptions {
depth: Some(depth.to_owned()),
relative_s: None
});
for folder in folders {
// create folder
if first_iter {
if DirBuilder::new().create(local_path.clone()).is_err() {
eprintln!("fatal: directory already exist");
// destination path 'path' already exists and is not an empty directory.
//std::process::exit(1);
} else {
commands::init::init();
}
} else {
// create folder
let local_folder = get_local_path(folder, local_path.clone(), username, dist_path_str);
if let Err(err) = DirBuilder::new().recursive(true).create(local_folder.clone()) {
eprintln!("error: cannot create directory {}: {}", local_folder.display(), err);
}
// add tree
let path_folder = local_folder.strip_prefix(local_path.clone()).unwrap();
if object::add_tree(&path_folder).is_err() {
eprintln!("error: cannot store object {}", path_folder.display());
}
let p = ref_path.clone().join(Path::new(&folder.relative_s.unwrap()));
if let Err(err) = DirBuilder::new().recursive(true).create(p.clone()) {
eprintln!("err: cannot create directory {} ({})", p.display(), err);
}
// find folders and files in response
let objects = get_objects_xml(body);
let mut iter = objects.iter();
iter.next(); // jump first element which the folder fetched
for object in iter {
if object.chars().last().unwrap() == '/' {
folders.push(object.to_string());
} else {
files.push(object.to_string());
}
// add tree
let path_folder = p.strip_prefix(ref_path.clone()).unwrap();
let lastmodified = folder.lastmodified.unwrap().timestamp_millis();
if let Err(err) = Tree::from_path(path_folder.to_path_buf()).create(&lastmodified.to_string(), false) {
eprintln!("err: saving ref of {} ({})", path_folder.display(), err);
}
first_iter = false;
}
download_files(&domain, local_path.clone(), username, dist_path_str, files);
Downloader::new()
.set_api_props(api_props.clone())
.set_files(files)
.should_log()
.download(ref_path.clone(), Some(&save_blob));
}
/// Map a remote WebDAV href onto the local repository path.
///
/// `p` looks like "/remote.php/dav/files/<username><dist_p>/<rest>"; the
/// WebDAV prefix, the username and the remote root (`dist_p`, stored with a
/// leading '/') are stripped, and `<rest>` is joined onto `local_p`.
///
/// Panics if `p` does not carry the expected prefixes (callers only feed it
/// hrefs returned by the server, which always do).
fn get_local_path(p: String, local_p: PathBuf, username: &str, dist_p: &str) -> PathBuf {
    let rest = Path::new(p.as_str())
        .strip_prefix("/remote.php/dav/files/")
        .unwrap()
        // username is already a &str: no clone needed to strip it
        .strip_prefix(username)
        .unwrap();
    // the remote root is stored with a leading slash; drop it so it can be
    // stripped as a relative component
    let root = Path::new(dist_p).strip_prefix("/").unwrap();
    // join() borrows, so neither local_p nor rest needs cloning
    local_p.join(rest.strip_prefix(root).unwrap())
}
fn write_file(path: PathBuf, content: &Vec<u8>, local_p: PathBuf) -> io::Result<()> {
let mut f = OpenOptions::new()
.write(true)
.create(true)
.open(path.clone())?;
f.write_all(&content)?;
let relative_p = Path::new(&path).strip_prefix(local_p).unwrap();
object::add_blob(relative_p, "tmpdate")?;
Ok(())
}
fn download_files(domain: &str, local_p: PathBuf, username: &str, dist_p: &str, files: Vec<String>) {
for file in files {
let mut url_request = String::from(domain.clone());
url_request.push_str(file.as_str());
tokio::runtime::Runtime::new().unwrap().block_on(async {
match DownloadFiles::new(url_request.as_str()).send_with_err().await {
Ok(b) => {
let p_to_save = get_local_path(file.clone(), local_p.clone(), username, dist_p);
if let Err(_) = write_file(p_to_save, &b, local_p.clone()) {
eprintln!("error writing {}", file);
}
},
Err(ApiError::IncorrectRequest(err)) => {
eprintln!("fatal: {}", err.status());
std::process::exit(1);
},
Err(ApiError::EmptyError(_)) => eprintln!("Failed to get body"),
Err(ApiError::RequestError(err)) => {
eprintln!("fatal: {}", err);
std::process::exit(1);
}
}
});
/// Record a downloaded file in the ref store: a blob keyed by its relative
/// path, stamped with the remote's last-modified time in milliseconds.
fn save_blob(obj: ObjProps) {
    // obj is owned: move the fields out instead of cloning the whole struct
    // twice as before
    let relative_s = obj.relative_s.unwrap();
    let lastmodified = obj.lastmodified.unwrap().timestamp_millis();
    let relative_p = PathBuf::from(&relative_s);
    if let Err(err) = Blob::from_path(relative_p).create(&lastmodified.to_string(), false) {
        eprintln!("err: saving ref of {} ({})", relative_s, err);
    }
}
fn get_objects_xml(xml: String) -> Vec<String> {
let cursor = Cursor::new(xml);
let parser = EventReader::new(cursor);
let mut should_get = false;
let mut objects: Vec<String> = vec![];
for event in parser {
match event {
Ok(XmlEvent::StartElement { name, .. }) => {
should_get = name.local_name == "href";
}
Ok(XmlEvent::Characters(text)) => {
if !text.trim().is_empty() && should_get {
objects.push(text);
}
}
Ok(XmlEvent::EndElement { .. }) => {
should_get = false;
}
Err(e) => {
eprintln!("Error: {}", e);
break;
}
_ => {}
}
}
objects
/// Send a PROPFIND listing the remote entry at `relative_s`.
///
/// Requests href, content-length and last-modified for every entry down to
/// `depth`, returning one ObjProps per entry found.
fn req(api_props: &ApiProps, depth: &str, relative_s: &str) -> Result<Vec<ObjProps>, ApiError> {
    ReqProps::new()
        .set_request(relative_s, &api_props)
        .set_depth(depth)
        .gethref()
        .getcontentlength()
        .getlastmodified()
        .send_req_multiple()
}
// todo allow http
fn get_url_props(url: &str) -> (String, Option<&str>, &str) {
pub fn get_url_props(url: &str) -> (String, Option<&str>, &str) {
let mut username = None;
let mut domain = "";
let mut path = "";
@@ -193,7 +135,7 @@ fn get_url_props(url: &str) -> (String, Option<&str>, &str) {
}
None => (),
}
} else if url.find("?").is_some() {
} else if url.find("?").is_some() { // from browser url
let re = Regex::new(r"((https?://)?.+?)/.+dir=(.+?)&").unwrap();
match re.captures_iter(url).last() {
Some(cap) => {
@@ -218,9 +160,13 @@ fn get_url_props(url: &str) -> (String, Option<&str>, &str) {
}
let re = Regex::new(r"(^https?://)?").unwrap();
let secure_domain = re.replace(domain, "https://").to_string();
(secure_domain, username, path)
let re = Regex::new(r"^http://").unwrap();
if !re.is_match(domain) {
let re = Regex::new(r"(^https?://)?").unwrap();
let secure_domain = re.replace(domain, "https://").to_string();
return (secure_domain, username, path);
}
(domain.to_string(), username, path)
}
#[cfg(test)]
@@ -231,21 +177,22 @@ mod tests {
fn test_get_url_props() {
let p = "/foo/bar";
let u = Some("user");
let d = String::from("https://nextcloud.com");
let ld = String::from("https://nextcloud.example.com");
assert_eq!(get_url_props("user@nextcloud.com/remote.php/dav/files/user/foo/bar"), (d.clone(), u, p));
assert_eq!(get_url_props("user@nextcloud.com/foo/bar"), (d.clone(), u, p));
assert_eq!(get_url_props("user@nextcloud.example.com/remote.php/dav/files/user/foo/bar"), (ld.clone(), u, p));
assert_eq!(get_url_props("user@nextcloud.example.com/foo/bar"), (ld.clone(), u, p));
assert_eq!(get_url_props("https://nextcloud.example.com/apps/files/?dir=/foo/bar&fileid=166666"), (ld.clone(), None, p));
assert_eq!(get_url_props("https://nextcloud.com/apps/files/?dir=/foo/bar&fileid=166666"), (d.clone(), None, p));
// let d = String::from("http://nextcloud.com");
let sd = String::from("https://nextcloud.com");
let sld = String::from("https://nextcloud.example.com");
let ld = String::from("http://nextcloud.example.com");
assert_eq!(get_url_props("user@nextcloud.com/remote.php/dav/files/user/foo/bar"), (sd.clone(), u, p));
assert_eq!(get_url_props("user@nextcloud.com/foo/bar"), (sd.clone(), u, p));
assert_eq!(get_url_props("user@nextcloud.example.com/remote.php/dav/files/user/foo/bar"), (sld.clone(), u, p));
assert_eq!(get_url_props("user@nextcloud.example.com/foo/bar"), (sld.clone(), u, p));
assert_eq!(get_url_props("https://nextcloud.example.com/apps/files/?dir=/foo/bar&fileid=166666"), (sld.clone(), None, p));
assert_eq!(get_url_props("https://nextcloud.com/apps/files/?dir=/foo/bar&fileid=166666"), (sd.clone(), None, p));
assert_eq!(get_url_props("http://nextcloud.example.com/remote.php/dav/files/user/foo/bar"), (ld.clone(), u, p));
assert_eq!(get_url_props("https://nextcloud.example.com/remote.php/dav/files/user/foo/bar"), (ld.clone(), u, p));
assert_eq!(get_url_props("https://nextcloud.example.com/remote.php/dav/files/user/foo/bar"), (sld.clone(), u, p));
assert_eq!(get_url_props("http://nextcloud.example.com/remote.php/dav/files/user/foo/bar"), (ld.clone(), u, p));
assert_eq!(get_url_props("nextcloud.example.com/remote.php/dav/files/user/foo/bar"), (ld.clone(), u, p));
assert_eq!(get_url_props("https://nextcloud.example.com/foo/bar"), (ld.clone(), None, p));
assert_eq!(get_url_props("nextcloud.example.com/remote.php/dav/files/user/foo/bar"), (sld.clone(), u, p));
assert_eq!(get_url_props("https://nextcloud.example.com/foo/bar"), (sld.clone(), None, p));
assert_eq!(get_url_props("http://nextcloud.example.com/foo/bar"), (ld.clone(), None, p));
assert_eq!(get_url_props("nextcloud.example.com/foo/bar"), (ld.clone(), None, p));
assert_eq!(get_url_props("nextcloud.example.com/foo/bar"), (sld.clone(), None, p));
}
}

View File

@@ -1,52 +1,190 @@
use crate::utils::{path, read};
use std::fs::OpenOptions;
use std::io::{self, Write};
use std::io::{self, Write, BufRead, Seek, SeekFrom};
use crate::utils::{path, read};
use std::collections::HashMap;
pub struct ConfigSetArgs {
pub name: String,
pub value: String,
}
/// `nextsync config <name> <value>`: set a configuration option.
///
/// Each supported option belongs to a fixed config-file category; an unknown
/// option name aborts with an error.
pub fn config_set(args: ConfigSetArgs) {
    // known options and the category each one lives in
    let mut option_categories: HashMap<&str, &str> = HashMap::new();
    option_categories.insert("force_insecure", "core");
    option_categories.insert("token", "core");

    // get category of option
    let category = match option_categories.get(args.name.as_str()) {
        Some(cat) => cat,
        None => {
            eprintln!("fatal: '{}' is not a valid option.", args.name);
            std::process::exit(1);
        }
    };

    // report write failures instead of silently discarding the io::Result
    if let Err(err) = write_option_in_cat(category, &args.name, &args.value) {
        eprintln!("err: not able to write option ({})", err);
    }
}
/// Look up `option` inside the `[category]` section of the config file.
///
/// Returns the trimmed value of the first matching "option = value" line, or
/// None when the file, the section or the option is missing.
pub fn find_option_in_cat(category: &str, option: &str) -> Option<String> {
    let mut config = path::nextsync();
    config.push("config");

    let header = format!("[{}]", category);
    let mut inside = false;

    let lines = match read::read_lines(config) {
        Ok(lines) => lines,
        Err(_) => return None,
    };
    for line in lines.flatten() {
        let trimmed = line.trim();
        // section headers toggle whether we are in the wanted category
        if trimmed.starts_with('[') && trimmed.ends_with(']') {
            inside = trimmed == header;
            continue;
        }
        if !inside {
            continue;
        }
        // option lines have the form "key = value"
        if let Some((key, value)) = trimmed.split_once('=') {
            if key.trim() == option {
                return Some(value.trim().to_string());
            }
        }
    }
    None
}
/// Set `option = value` inside the `[category]` section of the config file,
/// creating the file, the section or the option as needed.
///
/// The content is rebuilt in a temporary file and copied back, so an
/// existing option is updated in place and every other line is preserved.
pub fn write_option_in_cat(category: &str, option: &str, value: &str) -> io::Result<()> {
    let mut config = path::nextsync();
    config.push("config");

    let mut file = OpenOptions::new()
        .read(true)
        .write(true)
        .create(true)
        .open(&config)?;

    let header = format!("[{}]", category);
    let mut in_target_category = false;
    // separate flag: in_target_category must be reset when another section
    // starts, otherwise options of later sections would be rewritten and the
    // option appended a second time at end of file
    let mut category_found = false;
    let mut option_found = false;

    // Go to the beginning of the file
    file.seek(SeekFrom::Start(0))?;

    // Build the modified content in a temporary file
    let mut tmp_file = tempfile::Builder::new()
        .prefix(".nextsyncconfig")
        .tempfile()?;

    let reader = io::BufReader::new(&file);
    for line in reader.lines() {
        let line = line?;
        let trimmed_line = line.trim();

        if trimmed_line.starts_with('[') && trimmed_line.ends_with(']') {
            // leaving the target category without having seen the option:
            // append it just before the next section header
            if in_target_category && !option_found {
                writeln!(&mut tmp_file, "\t{} = {}", option, value)?;
                option_found = true;
            }
            in_target_category = trimmed_line == header;
            category_found = category_found || in_target_category;
            writeln!(&mut tmp_file, "{}", line)?;
        } else if in_target_category
            && !option_found
            && trimmed_line.starts_with(&format!("{} =", option))
        {
            // Option already exists, update its value
            writeln!(&mut tmp_file, "\t{} = {}", option, value)?;
            option_found = true;
        } else {
            // Write the original line
            writeln!(&mut tmp_file, "{}", line)?;
        }
    }

    // target category was the last section of the file
    if in_target_category && !option_found {
        writeln!(&mut tmp_file, "\t{} = {}", option, value)?;
        option_found = true;
    }
    // the category did not exist anywhere: create it with the option
    // (checking category_found, not in_target_category, avoids duplicating a
    // section that existed earlier in the file)
    if !category_found {
        writeln!(&mut tmp_file, "[{}]", category)?;
        writeln!(&mut tmp_file, "\t{} = {}", option, value)?;
    }

    // Flush the temporary file so all data is written before copying
    tmp_file.flush()?;

    // Rewind both files and replace the original content; without set_len a
    // rewrite shorter than the old content would leave stale trailing bytes
    tmp_file.seek(SeekFrom::Start(0))?;
    file.seek(SeekFrom::Start(0))?;
    file.set_len(0)?;
    io::copy(&mut tmp_file, &mut file)?;
    Ok(())
}
pub fn add_remote(name: &str, url: &str) -> io::Result<()> {
let config = path::config();
// check if there is already a remote with this name
if get_remote(name).is_some()
{
eprintln!("error: remote {} already exists.", name);
std::process::exit(3);
}
pub fn set(var: &str, val: &str) -> io::Result<()> {
let mut root = match path::nextsync() {
Some(path) => path,
None => {
eprintln!("fatal: not a nextsync repository (or any of the parent directories): .nextsync");
std::process::exit(1);
}
};
root.push("config");
// todo check if exist
let mut file = OpenOptions::new()
.read(true)
.write(true)
.create(true)
.append(true)
.open(root)?;
.open(config)?;
writeln!(file, "[remote \"{}\"]", name)?;
writeln!(file, "\turl = {}", url)?;
let mut line = var.to_owned();
line.push_str(" ");
line.push_str(val);
writeln!(file, "{}", line)?;
Ok(())
}
pub fn get(var: &str) -> Option<String> {
let mut root = match path::nextsync() {
Some(path) => path,
None => {
eprintln!("fatal: not a nextsync repository (or any of the parent directories): .nextsync");
std::process::exit(1);
}
};
root.push("config");
if let Ok(lines) = read::read_lines(root) {
for line in lines {
if let Ok(l) = line {
dbg!(l.clone());
if l.starts_with(var.clone()) {
let (_, val) = l.split_once(" ").unwrap();
return Some(val.to_owned());
}
}
}
}
None
/// Return the url of remote `name` from the config file (read from the
/// [remote "name"] section), or None when it is not configured.
pub fn get_remote(name: &str) -> Option<String> {
    find_option_in_cat(&format!("remote \"{}\"", name), "url")
}
/// Return every remote found in the config file as (name, url) pairs
/// (e.g: ("origin", "https://example.com")).
pub fn get_all_remote() -> Vec<(String, String)> {
    let config = path::config();
    let mut remotes: Vec<(String, String)> = vec![];

    let mut in_remote = false;
    let mut remote_name = String::new();
    if let Ok(lines) = read::read_lines(config) {
        for line in lines.flatten() {
            let trimmed_line = line.trim();
            if trimmed_line.starts_with("[remote ") {
                // parse [remote "name"]; a malformed header is skipped
                // instead of panicking on unwrap as before
                match trimmed_line
                    .strip_prefix("[remote \"")
                    .and_then(|rest| rest.strip_suffix("\"]"))
                {
                    Some(name) => {
                        in_remote = true;
                        remote_name = name.to_string();
                    }
                    None => in_remote = false,
                }
            } else if trimmed_line.starts_with('[') {
                in_remote = false;
            } else if in_remote {
                // every "key = value" line of the section is reported as the
                // remote's url (sections are expected to hold only "url")
                if let Some((_, value)) = trimmed_line.split_once('=') {
                    remotes.push((remote_name.clone(), value.trim().to_string()));
                }
            }
        }
    }
    remotes
}
/// Return the value of `name` from the [core] section of the config file,
/// or None when it is not set.
pub fn get_core(name: &str) -> Option<String> {
    find_option_in_cat("core", name)
}

View File

@@ -0,0 +1,54 @@
use crate::commands::clone::get_url_props;
use crate::services::api::ApiError::RequestError;
use crate::services::login::Login;
use crate::services::api_call::ApiCall;
use crate::commands::config;
pub struct CredentialArgs {
pub username: String,
pub password: Option<String>,
}
/// `nextsync credential <username> [password]`: request an auth token from
/// the remote for these credentials and store it under [core] token.
///
/// Requires an "origin" remote (it provides the host to log in to); prompts
/// for the password when it is not given on the command line.
pub fn credential_add(args: CredentialArgs) {
    // get remote if exists
    let remote = match config::get_remote("origin") {
        None => {
            eprintln!("fatal: No remote origin, impossible to send request to get token");
            std::process::exit(1);
        },
        Some(remote) => remote
    };
    let (host, _, _) = get_url_props(&remote);

    // get username and password
    let username = args.username;
    let password = match args.password {
        // already owned: no mut / to_owned needed
        Some(pwd) => pwd,
        None => {
            println!("Please enter the password for {}: ", username);
            rpassword::read_password().unwrap()
        }
    };

    // get token; matching once avoids the move-then-unwrap pattern on the
    // Result that the previous version used
    let token = match Login::new()
        .set_auth(&username, &password)
        .set_host(Some(host))
        .send_login()
    {
        Ok(token) => token,
        Err(RequestError(err)) => {
            eprintln!("fatal: Failed to get token for these credential. ({})", err);
            std::process::exit(1);
        }
        Err(_) => {
            eprintln!("fatal: Failed to get token for these credential.");
            std::process::exit(1);
        }
    };

    // save token, reporting failure instead of discarding the result
    if let Err(err) = config::write_option_in_cat("core", "token", token.as_str()) {
        eprintln!("err: not able to save token ({})", err);
    }
}

View File

@@ -1,6 +1,6 @@
use std::env;
use std::fs::{DirBuilder, File};
use std::path::PathBuf;
use std::env;
use crate::global::global::DIR_PATH;
pub fn init() {
@@ -10,42 +10,61 @@ pub fn init() {
Some(dir) => PathBuf::from(dir),
None => env::current_dir().unwrap(),
};
let builder = DirBuilder::new();
// todo check if dir empty
// .nextsync folder
// todo
// check if dir is empty
// if let Ok(entries) = read_folder(path.clone()) {
// if entries.len() != 0 {
// eprintln!("fatal: destination path '{}' already exists and is not an empty directory.", path.display());
// std::process::exit(1);
// }
// } else {
// eprintln!("fatal: cannot open the destination directory");
// std::process::exit(1);
// }
let builder = DirBuilder::new();
path.push(".nextsync");
match builder.create(path.clone()) {
Ok(()) => println!("Directory successfuly created"),
Err(_) => println!("Error: cannot create directory"),
Ok(()) => (),
Err(err) => println!("Error: cannot create .nextsync ({})", err),
};
path.push("objects");
match builder.create(path.clone()) {
Ok(()) => println!("Directory successfuly created"),
Err(_) => println!("Error: cannot create directory"),
Ok(()) => (),
Err(_) => println!("Error: cannot create objects"),
};
path.pop();
path.push("refs");
match builder.create(path.clone()) {
Ok(()) => (),
Err(_) => println!("Error: cannot create refs"),
};
path.pop();
path.push("HEAD");
match File::create(path.clone()) {
Ok(_) => println!("File successfuly created"),
Err(_) => println!("Error: cannot create .nextsyncignore"),
Ok(_) => (),
Err(_) => println!("Error: cannot create HEAD"),
}
path.pop();
path.push("index");
match File::create(path.clone()) {
Ok(_) => println!("File successfuly created"),
Err(_) => println!("Error: cannot create .nextsyncignore"),
Ok(_) => (),
Err(_) => println!("Error: cannot create index"),
}
path.pop();
path.pop();
path.push(".nextsyncignore");
match File::create(path) {
Ok(_) => println!("File successfuly created"),
Err(_) => println!("Error: cannot create .nextsyncignore"),
}
// todo
// path.pop();
// path.pop();
// path.push(".nextsyncignore");
//
// match File::create(path) {
// Ok(_) => (),
// Err(_) => println!("Error: cannot create .nextsyncignore"),
// }
}

54
src/commands/pull.rs Normal file
View File

@@ -0,0 +1,54 @@
use std::path::PathBuf;
use std::fs::DirBuilder;
use crate::services::downloader::Downloader;
use crate::services::req_props::ObjProps;
use crate::store::object::blob::Blob;
use crate::store::object::tree::Tree;
use crate::utils::api::get_api_props;
use crate::utils::path;
use crate::commands::remote_diff::get_diff;
/// `nextsync pull`: fetch remote changes below the current directory.
///
/// Creates every missing local folder (registering a tree ref for each) and
/// then downloads the changed files, registering a blob ref per file.
pub fn pull() {
    // the diff is computed relative to the repository root
    let relative_p = path::current()
        .unwrap()
        .strip_prefix(path::repo_root())
        .unwrap()
        .to_path_buf();
    let (folders, files) = get_diff(relative_p);

    let root = path::repo_root();
    for folder in folders {
        let folder_path = root.join(PathBuf::from(folder.relative_s.unwrap()));
        if folder_path.exists() {
            continue;
        }
        // create the folder locally
        if let Err(err) = DirBuilder::new().recursive(true).create(folder_path.clone()) {
            eprintln!("err: cannot create directory {} ({})", folder_path.display(), err);
        }
        // register its tree ref, stamped with the remote modification time
        let path_folder = folder_path.strip_prefix(&root).unwrap();
        let lastmodified = folder.lastmodified.unwrap().timestamp_millis();
        if let Err(err) = Tree::from_path(path_folder).create(&lastmodified.to_string(), false) {
            eprintln!("err: saving ref of {} ({})", path_folder.display(), err);
        }
    }

    Downloader::new()
        .set_api_props(get_api_props())
        .set_files(files)
        .should_log()
        .download(root, Some(&update_blob));

    // todo look if need to download or update
}
/// Record a pulled file in the ref store: a blob keyed by its relative path,
/// stamped with the remote last-modified time in milliseconds.
fn update_blob(obj: ObjProps) {
    // obj is owned: move the fields out instead of cloning the whole struct
    // twice as before
    let relative_s = obj.relative_s.unwrap();
    let lastmodified = obj.lastmodified.unwrap().timestamp_millis();
    let relative_p = PathBuf::from(&relative_s);
    // todo update function
    if let Err(err) = Blob::from_path(relative_p).create(&lastmodified.to_string(), false) {
        eprintln!("err: saving ref of {} ({})", relative_s, err);
    }
}

View File

@@ -1,240 +1,87 @@
use std::path::PathBuf;
use crate::commands::{status, config};
use crate::services::req_props::ReqProps;
use crate::services::api::ApiError;
use crate::services::upload_file::UploadFile;
use crate::services::delete_path::DeletePath;
use crate::commands::status::{State, Obj};
use crate::store::object::{add_blob, rm_blob};
use crate::commands::push::push_factory::{PushFactory, PushState};
use crate::store::index;
pub fn push() {
dbg!(status::get_all_staged());
use super::status::LocalObj;
let remote = match config::get("remote") {
pub mod push_factory;
pub mod new;
pub mod new_dir;
pub mod rm_dir;
pub mod deleted;
pub mod modified;
pub mod moved;
pub mod copied;
pub fn push() {
let _remote = match config::get_remote("origin") {
Some(r) => r,
None => {
eprintln!("fatal: no remote set in configuration");
std::process::exit(1);
// todo debug
//std::process::exit(1);
String::new()
}
};
let staged_objs = status::get_all_staged();
// todo sort folder first
// exit if there is nothing to push
if staged_objs.len() == 0 {
println!("Everything up-to-date");
std::process::exit(0);
}
// path that certify that all its children can be push whithout hesistation
// (e.g. if remote dir has no changes since last sync all children
// can be pushed without verification)
let mut whitelist: Option<PathBuf> = None;
for obj in staged_objs {
if obj.otype == String::from("tree") {
dbg!("should push folder");
let push_factory = PushFactory.new_dir(obj.clone());
let res = push_factory.can_push(&mut whitelist);
match res {
PushState::Valid => {
match push_factory.push() {
Ok(()) => (),
Err(err) => {
eprintln!("err: pushing {}: {}", obj.name, err);
}
}
},
PushState::Done => (),
PushState::Conflict => {
println!("CONFLICT: {}", obj.clone().name);
},
_ => todo!(),
};
} else {
let push_factory = PushFactory.new(obj.clone());
match push_factory.can_push() {
PushState::Valid => push_factory.push(),
PushState::Done => (),
_ => todo!(),
}
}
}
// read index
// if dir upload dir
}
#[derive(Debug)]
enum PushState {
Done,
Valid,
Conflict,
Error,
}
trait PushChange {
fn can_push(&self) -> PushState;
fn push(&self);
}
struct New {
obj: Obj,
}
impl PushChange for New {
fn can_push(&self) -> PushState {
// check if exist on server
let file_infos = tokio::runtime::Runtime::new().unwrap().block_on(async {
let res = ReqProps::new()
.set_url(&self.obj.path.to_str().unwrap())
.getlastmodified()
.send_with_err()
.await;
match res {
Ok(data) => Ok(data),
Err(ApiError::IncorrectRequest(err)) => {
if err.status() == 404 {
Ok(vec![])
} else {
Err(())
match push_factory.can_push(&mut whitelist) {
PushState::Valid => {
match push_factory.push() {
Ok(()) => remove_obj_from_index(obj.clone()),
Err(err) => {
eprintln!("err: pushing {}: {}", obj.name, err);
}
}
},
Err(_) => Err(()),
}
});
if let Ok(infos) = file_infos {
if infos.len() == 0 {
// file doesn't exist on remote
PushState::Valid
} else {
// todo check date
PushState::Conflict
}
} else {
PushState::Error
}
}
fn push(&self) {
let obj = &self.obj;
tokio::runtime::Runtime::new().unwrap().block_on(async {
let res = UploadFile::new()
.set_url(obj.path.to_str().unwrap())
.set_file(obj.path.clone())
.send_with_err()
.await;
match res {
Err(ApiError::IncorrectRequest(err)) => {
eprintln!("fatal: error pushing file {}: {}", obj.name, err.status());
std::process::exit(1);
},
Err(ApiError::RequestError(_)) => {
eprintln!("fatal: request error pushing file {}", obj.name);
std::process::exit(1);
PushState::Done => remove_obj_from_index(obj.clone()),
PushState::Conflict => {
eprintln!("conflict when pushing blob");
// download file
}
_ => (),
PushState::Error => (eprintln!("error when pushing changes blob")),
}
});
// update tree
add_blob(&obj.path.clone(), "todo_date");
// remove index
index::rm_line(obj.path.to_str().unwrap());
}
}
struct Deleted {
obj: Obj,
}
impl PushChange for Deleted {
fn can_push(&self) -> PushState {
// check if exist on server
let file_infos = tokio::runtime::Runtime::new().unwrap().block_on(async {
let res = ReqProps::new()
.set_url(&self.obj.path.to_str().unwrap())
.getlastmodified()
.send_with_err()
.await;
match res {
Ok(data) => Ok(data),
Err(ApiError::IncorrectRequest(err)) => {
if err.status() == 404 {
Ok(vec![])
} else {
Err(())
}
},
Err(_) => Err(()),
}
});
if let Ok(infos) = file_infos {
if infos.len() == 0 {
// file doesn't exist on remote
PushState::Done
} else {
// todo check date
//PushState::Conflict
PushState::Valid
}
} else {
PushState::Error
}
}
fn push(&self) {
let obj = &self.obj;
tokio::runtime::Runtime::new().unwrap().block_on(async {
let res = DeletePath::new()
.set_url(obj.path.to_str().unwrap())
.send_with_err()
.await;
match res {
Err(ApiError::IncorrectRequest(err)) => {
eprintln!("fatal: error deleting file {}: {}", obj.name, err.status());
std::process::exit(1);
},
Err(ApiError::RequestError(_)) => {
eprintln!("fatal: request error deleting file {}", obj.name);
std::process::exit(1);
}
_ => (),
}
});
// update tree
rm_blob(&obj.path.clone());
// remove index
index::rm_line(obj.path.to_str().unwrap());
}
}
struct PushFactory;
impl PushFactory {
fn new(&self, obj: Obj) -> Box<dyn PushChange> {
match obj.state {
State::New => Box::new(New { obj: obj.clone() }),
State::Renamed => todo!(),
State::Modified => todo!(),
State::Deleted => Box::new(Deleted { obj: obj.clone() }),
State::Default => todo!(),
}
}
}
fn can_push_file(obj: Obj) -> PushState {
dbg!(obj.clone());
// check if exist on server
let file_infos = tokio::runtime::Runtime::new().unwrap().block_on(async {
let res = ReqProps::new()
.set_url(obj.path.to_str().unwrap())
.getlastmodified()
.send_with_err()
.await;
match res {
Ok(data) => Ok(data),
Err(ApiError::IncorrectRequest(err)) => {
if err.status() == 404 {
Ok(vec![])
} else {
Err(())
}
},
Err(_) => Err(()),
}
});
if let Ok(infos) = file_infos {
if infos.len() == 0 {
// file doesn't exist on remote
PushState::Valid
} else {
// check date
PushState::Conflict
}
} else {
PushState::Error
/// Drop a successfully pushed object from the staging index, logging (but
/// not aborting on) failure.
fn remove_obj_from_index(obj: LocalObj) {
    if let Err(err) = index::rm_line(obj.path.to_str().unwrap()) {
        eprintln!("err: removing {} from index: {}", obj.name, err);
    }
}

View File

@@ -0,0 +1,84 @@
use std::path::PathBuf;
use std::io;
use crate::services::api::ApiError;
use crate::services::r#copy::Copy;
use crate::services::api_call::ApiCall;
use crate::services::req_props::ReqProps;
use crate::commands::status::LocalObj;
use crate::commands::push::push_factory::{PushState, PushChange, PushFlowState};
use crate::store::object::blob::Blob;
use crate::utils::path::path_buf_to_string;
/// A staged file that was copied from another tracked file
/// (`obj.path_from` -> `obj.path`).
pub struct Copied {
    pub obj: LocalObj,
}

impl PushChange for Copied {
    /// Decide whether the copy can be pushed using the shared push flow;
    /// any timestamp divergence (remote OR local newer) is a conflict.
    fn can_push(&self, whitelist: &mut Option<PathBuf>) -> PushState {
        match self.flow(&self.obj, whitelist.clone()) {
            PushFlowState::Whitelisted => PushState::Done,
            PushFlowState::NotOnRemote => PushState::Valid,
            PushFlowState::RemoteIsNewer => PushState::Conflict,
            PushFlowState::LocalIsNewer => PushState::Conflict,
            PushFlowState::Error => PushState::Error,
        }
    }

    /// Perform a server-side COPY from `path_from` to `path`, then create a
    /// local blob ref for the destination stamped with the remote's
    /// resulting last-modified time.
    fn push(&self) -> io::Result<()> {
        let obj = &self.obj;
        let res = Copy::new()
            .set_url_copy(
                &path_buf_to_string(obj.path_from.clone().unwrap()),
                obj.path.to_str().unwrap())
            .send();

        // any server or transport failure aborts the whole push
        match res {
            Err(ApiError::IncorrectRequest(err)) => {
                eprintln!("fatal: error copying file {}: {}", obj.name, err.status());
                std::process::exit(1);
            },
            Err(ApiError::RequestError(_)) => {
                eprintln!("fatal: request error copying file {}", obj.name);
                std::process::exit(1);
            }
            _ => (),
        }

        // get lastmodified props to update it
        let props = ReqProps::new()
            .set_url(obj.path.to_str().unwrap())
            .getlastmodified()
            .send_req_single();

        let prop = match props {
            Ok(o) => o,
            Err(ApiError::IncorrectRequest(err)) => {
                eprintln!("fatal: {}", err.status());
                std::process::exit(1);
            },
            Err(ApiError::EmptyError(_)) => {
                eprintln!("Failed to get body");
                std::process::exit(1);
            }
            Err(ApiError::RequestError(err)) => {
                eprintln!("fatal: {}", err);
                std::process::exit(1);
            },
            Err(ApiError::Unexpected(_)) => todo!()
        };

        // remote time in milliseconds is the ref timestamp
        let lastmodified = prop.lastmodified.unwrap().timestamp_millis();

        // create destination blob
        if let Err(err) = Blob::from_path(obj.path.clone()).create(&lastmodified.to_string(), false) {
            eprintln!("err: creating ref of {}: {}", obj.name.clone(), err);
        }
        Ok(())
    }

    // download file with .distant at the end
    fn conflict(&self) {
        todo!()
    }
}

View File

@@ -0,0 +1,58 @@
use std::path::PathBuf;
use std::io;
use crate::services::api::ApiError;
use crate::services::api_call::ApiCall;
use crate::services::delete_path::DeletePath;
use crate::store::index;
use crate::store::object::blob::Blob;
use crate::commands::status::LocalObj;
use crate::commands::push::push_factory::{PushState, PushChange, PushFlowState};
use crate::store::object::object::ObjMethods;
/// A staged file that was deleted locally.
pub struct Deleted {
    pub obj: LocalObj
}

impl PushChange for Deleted {
    /// A deletion is pushable when the local state is newer; if the file is
    /// already gone on the remote there is nothing to do (Done).
    fn can_push(&self, whitelist: &mut Option<PathBuf>) -> PushState {
        match self.flow(&self.obj, whitelist.clone()) {
            PushFlowState::Whitelisted => PushState::Done,
            PushFlowState::NotOnRemote => PushState::Done,
            PushFlowState::RemoteIsNewer => PushState::Conflict,
            PushFlowState::LocalIsNewer => PushState::Valid,
            PushFlowState::Error => PushState::Error,
        }
    }

    /// DELETE the file on the remote, then drop its blob ref and its index
    /// entry locally.
    fn push(&self) -> io::Result<()> {
        let obj = &self.obj;
        let res = DeletePath::new()
            .set_url(obj.path.to_str().unwrap())
            .send();

        // any server or transport failure aborts the whole push
        match res {
            Err(ApiError::IncorrectRequest(err)) => {
                eprintln!("fatal: error deleting file {}: {}", obj.name, err.status());
                std::process::exit(1);
            },
            Err(ApiError::RequestError(_)) => {
                eprintln!("fatal: request error deleting file {}", obj.name);
                std::process::exit(1);
            }
            _ => (),
        }

        // update tree
        // todo date
        Blob::from_path(obj.path.clone()).rm_node()?;

        // remove index
        index::rm_line(obj.path.to_str().unwrap())?;
        Ok(())
    }

    // a remote-newer deletion is currently ignored rather than resolved
    fn conflict(&self) {
    }
}

View File

@@ -0,0 +1,80 @@
use std::path::PathBuf;
use std::io;
use crate::services::api::ApiError;
use crate::services::api_call::ApiCall;
use crate::services::req_props::ReqProps;
use crate::services::upload_file::UploadFile;
use crate::commands::status::LocalObj;
use crate::commands::push::push_factory::{PushState, PushChange, PushFlowState};
use crate::store::object::blob::Blob;
/// A staged file whose content changed locally.
pub struct Modified {
    pub obj: LocalObj,
}

impl PushChange for Modified {
    /// A modification is pushable when the remote copy is missing or older;
    /// a newer remote copy is a conflict.
    fn can_push(&self, whitelist: &mut Option<PathBuf>) -> PushState {
        match self.flow(&self.obj, whitelist.clone()) {
            PushFlowState::Whitelisted => PushState::Done,
            PushFlowState::NotOnRemote => PushState::Valid,
            PushFlowState::RemoteIsNewer => PushState::Conflict,
            PushFlowState::LocalIsNewer => PushState::Valid,
            PushFlowState::Error => PushState::Error,
        }
    }

    /// Upload the new content, then stamp the local blob ref with the
    /// remote's resulting last-modified time.
    fn push(&self) -> io::Result<()> {
        let obj = &self.obj;
        let res = UploadFile::new()
            .set_url(obj.path.to_str().unwrap())
            .set_file(obj.path.clone())
            .send();

        // any server or transport failure aborts the whole push
        match res {
            Err(ApiError::IncorrectRequest(err)) => {
                eprintln!("fatal: error pushing file {}: {}", obj.name, err.status());
                std::process::exit(1);
            },
            Err(ApiError::RequestError(_)) => {
                eprintln!("fatal: request error pushing file {}", obj.name);
                std::process::exit(1);
            }
            _ => (),
        }

        // get lastmodified props to update it
        let props = ReqProps::new()
            .set_url(obj.path.to_str().unwrap())
            .getlastmodified()
            .send_req_single();

        let prop = match props {
            Ok(o) => o,
            Err(ApiError::IncorrectRequest(err)) => {
                eprintln!("fatal: {}", err.status());
                std::process::exit(1);
            },
            Err(ApiError::EmptyError(_)) => {
                eprintln!("Failed to get body");
                std::process::exit(1);
            }
            Err(ApiError::RequestError(err)) => {
                eprintln!("fatal: {}", err);
                std::process::exit(1);
            },
            Err(ApiError::Unexpected(_)) => todo!()
        };

        // remote time in milliseconds is the ref timestamp
        let lastmodified = prop.lastmodified.unwrap().timestamp_millis();

        // update blob
        Blob::from_path(obj.path.clone()).update(&lastmodified.to_string())?;
        Ok(())
    }

    // download file with .distant at the end
    fn conflict(&self) {
        todo!()
    }
}

View File

@@ -0,0 +1,88 @@
use std::path::PathBuf;
use std::io;
use crate::services::api::ApiError;
use crate::services::api_call::ApiCall;
use crate::services::r#move::Move;
use crate::services::req_props::ReqProps;
use crate::commands::status::LocalObj;
use crate::commands::push::push_factory::{PushState, PushChange, PushFlowState};
use crate::store::object::blob::Blob;
use crate::utils::path::path_buf_to_string;
use crate::store::object::object::ObjMethods;
/// A staged file that was moved/renamed (`obj.path_from` -> `obj.path`).
pub struct Moved {
    pub obj: LocalObj,
}

impl PushChange for Moved {
    /// Decide whether the move can be pushed using the shared push flow; any
    /// timestamp divergence (remote OR local newer) is a conflict.
    fn can_push(&self, whitelist: &mut Option<PathBuf>) -> PushState {
        match self.flow(&self.obj, whitelist.clone()) {
            PushFlowState::Whitelisted => PushState::Done,
            PushFlowState::NotOnRemote => PushState::Valid,
            PushFlowState::RemoteIsNewer => PushState::Conflict,
            PushFlowState::LocalIsNewer => PushState::Conflict,
            PushFlowState::Error => PushState::Error,
        }
    }

    /// Perform a server-side MOVE from `path_from` to `path`, then create a
    /// blob ref for the destination (stamped with the remote's resulting
    /// last-modified time) and remove the ref of the source.
    fn push(&self) -> io::Result<()> {
        let obj = &self.obj;
        let res = Move::new()
            .set_url_move(
                &path_buf_to_string(obj.path_from.clone().unwrap()),
                obj.path.to_str().unwrap())
            .send();

        // any server or transport failure aborts the whole push
        match res {
            Err(ApiError::IncorrectRequest(err)) => {
                eprintln!("fatal: error moving file {}: {}", obj.name, err.status());
                std::process::exit(1);
            },
            Err(ApiError::RequestError(_)) => {
                eprintln!("fatal: request error moving file {}", obj.name);
                std::process::exit(1);
            }
            _ => (),
        }

        // get lastmodified props to update it
        let props = ReqProps::new()
            .set_url(obj.path.to_str().unwrap())
            .getlastmodified()
            .send_req_single();

        let prop = match props {
            Ok(o) => o,
            Err(ApiError::IncorrectRequest(err)) => {
                eprintln!("fatal: {}", err.status());
                std::process::exit(1);
            },
            Err(ApiError::EmptyError(_)) => {
                eprintln!("Failed to get body");
                std::process::exit(1);
            }
            Err(ApiError::RequestError(err)) => {
                eprintln!("fatal: {}", err);
                std::process::exit(1);
            },
            Err(ApiError::Unexpected(_)) => todo!()
        };

        // remote time in milliseconds is the ref timestamp
        let lastmodified = prop.lastmodified.unwrap().timestamp_millis();

        // delete source and create destination blob
        if let Err(err) = Blob::from_path(obj.path.clone()).create(&lastmodified.to_string(), false) {
            eprintln!("err: creating ref of {}: {}", obj.name.clone(), err);
        }
        if let Err(err) = Blob::from_path(obj.path_from.clone().unwrap()).rm() {
            eprintln!("err: removing ref of {}: {}", obj.name.clone(), err);
        }
        Ok(())
    }

    // download file with .distant at the end
    fn conflict(&self) {
        todo!()
    }
}

81
src/commands/push/new.rs Normal file
View File

@@ -0,0 +1,81 @@
use std::path::PathBuf;
use std::io;
use crate::services::api::ApiError;
use crate::services::api_call::ApiCall;
use crate::services::req_props::ReqProps;
use crate::services::upload_file::UploadFile;
use crate::store::object::blob::Blob;
use crate::commands::status::LocalObj;
use crate::commands::push::push_factory::{PushState, PushChange, PushFlowState};
/// Push-change for a file that exists locally but not in the last synced
/// state: the file must be uploaded to the remote.
pub struct New {
    // The local change being pushed.
    pub obj: LocalObj,
}
impl PushChange for New {
    /// A new file is pushable in every non-error case except when the
    /// remote copy is newer than the last sync (conflict).
    fn can_push(&self, whitelist: &mut Option<PathBuf>) -> PushState {
        match self.flow(&self.obj, whitelist.clone()) {
            PushFlowState::Whitelisted => PushState::Valid,
            PushFlowState::NotOnRemote => PushState::Valid,
            PushFlowState::RemoteIsNewer => PushState::Conflict,
            PushFlowState::LocalIsNewer => PushState::Valid,
            PushFlowState::Error => PushState::Error,
        }
    }

    /// Upload the file, then fetch the remote `lastmodified` property and
    /// store it in the local blob ref so future pushes can detect conflicts.
    ///
    /// Fatal API errors terminate the process with exit code 1.
    fn push(&self) -> io::Result<()> {
        let obj = &self.obj;
        let res = UploadFile::new()
            .set_url(obj.path.to_str().unwrap())
            .set_file(obj.path.clone())
            .send();

        match res {
            Err(ApiError::IncorrectRequest(err)) => {
                // removed leftover `dbg!(&err)` debug output
                eprintln!("fatal: error pushing file '{}': {}", obj.name, err.status());
                std::process::exit(1);
            },
            Err(ApiError::RequestError(_)) => {
                eprintln!("fatal: request error pushing file '{}'", obj.name);
                std::process::exit(1);
            }
            _ => (),
        }

        // get lastmodified props to update it
        let props = ReqProps::new()
            .set_url(obj.path.to_str().unwrap())
            .getlastmodified()
            .send_req_single();

        let prop = match props {
            Ok(o) => o,
            Err(ApiError::IncorrectRequest(err)) => {
                eprintln!("fatal: {}", err.status());
                std::process::exit(1);
            },
            Err(ApiError::EmptyError(_)) => {
                eprintln!("Failed to get body");
                std::process::exit(1);
            }
            Err(ApiError::RequestError(err)) => {
                eprintln!("fatal: {}", err);
                std::process::exit(1);
            },
            Err(ApiError::Unexpected(_)) => todo!()
        };

        let lastmodified = prop.lastmodified.unwrap().timestamp_millis();

        // create new blob
        Blob::from_path(obj.path.clone()).create(&lastmodified.to_string(), false)?;
        Ok(())
    }

    // download file with .distant at the end
    fn conflict(&self) {
        todo!()
    }
}

View File

@@ -0,0 +1,87 @@
use std::path::PathBuf;
use std::io;
use crate::services::api::ApiError;
use crate::services::api_call::ApiCall;
use crate::services::req_props::ReqProps;
use crate::services::create_folder::CreateFolder;
use crate::store::index;
use crate::store::object::tree::Tree;
use crate::commands::status::LocalObj;
use crate::commands::push::push_factory::{PushState, PushChange, PushFlowState};
/// Push-change for a directory that exists locally but not on the remote:
/// the folder must be created remotely.
pub struct NewDir {
    // The local change being pushed.
    pub obj: LocalObj
}
impl PushChange for NewDir {
    /// Classify this new directory against the remote. When the whole
    /// subtree is covered by this operation, the directory path is stored
    /// in `whitelist` so children are not processed individually.
    fn can_push(&self, whitelist: &mut Option<PathBuf>) -> PushState {
        match self.flow(&self.obj, whitelist.clone()) {
            PushFlowState::Whitelisted => PushState::Valid,
            PushFlowState::NotOnRemote => {
                // Creating this folder will cover everything below it.
                *whitelist = Some(self.obj.path.clone());
                PushState::Valid
            },
            PushFlowState::RemoteIsNewer => PushState::Conflict,
            PushFlowState::LocalIsNewer => {
                // Folder already exists remotely; nothing to create.
                *whitelist = Some(self.obj.path.clone());
                PushState::Done
            },
            PushFlowState::Error => PushState::Error,
        }
    }

    /// Create the folder on the remote, fetch its `lastmodified` property,
    /// record it in the local tree, and drop the path from the index.
    ///
    /// Fatal API errors terminate the process with exit code 1.
    fn push(&self) -> io::Result<()> {
        let obj = &self.obj;
        let res = CreateFolder::new()
            .set_url(obj.path.to_str().unwrap())
            .send();

        match res {
            Err(ApiError::IncorrectRequest(err)) => {
                eprintln!("fatal: error creating folder {}: {}", obj.name, err.status());
                std::process::exit(1);
            },
            Err(ApiError::RequestError(_)) => {
                eprintln!("fatal: request error creating folder {}", obj.name);
                std::process::exit(1);
            }
            _ => (),
        }

        // get lastmodified props to update it
        let props = ReqProps::new()
            .set_url(obj.path.to_str().unwrap())
            .getlastmodified()
            .send_req_single();

        let prop = match props {
            Ok(o) => o,
            Err(ApiError::IncorrectRequest(err)) => {
                eprintln!("fatal: {}", err.status());
                std::process::exit(1);
            },
            Err(ApiError::EmptyError(_)) => {
                eprintln!("Failed to get body");
                std::process::exit(1);
            }
            Err(ApiError::RequestError(err)) => {
                eprintln!("fatal: {}", err);
                std::process::exit(1);
            },
            Err(ApiError::Unexpected(_)) => todo!()
        };

        let lastmodified = prop.lastmodified.unwrap().timestamp_millis();

        // update tree
        Tree::from_path(obj.path.clone()).create(&lastmodified.to_string(), true)?;

        // remove index
        index::rm_line(obj.path.to_str().unwrap())?;

        Ok(())
    }

    // Directory creation has no conflict resolution to perform.
    fn conflict(&self) {}
}

View File

@@ -0,0 +1,121 @@
use std::path::PathBuf;
use std::io;
use crate::commands::status::{State, LocalObj};
use crate::services::api::ApiError;
use crate::services::api_call::ApiCall;
use crate::services::req_props::ReqProps;
use crate::commands::push::new::New;
use crate::commands::push::new_dir::NewDir;
use crate::commands::push::rm_dir::RmDir;
use crate::commands::push::deleted::Deleted;
use crate::commands::push::modified::Modified;
use crate::commands::push::moved::Moved;
use crate::commands::push::copied::Copied;
use crate::store::object::blob::Blob;
/// Outcome of `PushChange::can_push`: what should happen to this change.
#[derive(Debug)]
pub enum PushState {
    // Nothing left to do (already covered or already on remote).
    Done,
    // The change can be pushed as-is.
    Valid,
    // Local and remote diverged; needs conflict handling.
    Conflict,
    // Remote state could not be determined.
    Error,
}

/// Result of comparing a local object with its remote counterpart.
pub enum PushFlowState {
    // Path is under a directory already handled by a parent operation.
    Whitelisted,
    // Object does not exist on the remote (404).
    NotOnRemote,
    // Remote changed after the last recorded sync timestamp.
    RemoteIsNewer,
    // Local state is at least as recent as the remote.
    LocalIsNewer,
    // The remote could not be queried.
    Error,
}
/// Behavior shared by every kind of pushable change (new file, deletion,
/// move, …). Implementors provide the three required methods; `flow` and
/// `is_whitelisted` supply the common remote-comparison logic.
pub trait PushChange {
    /// Decide whether this change can be pushed. Implementations may set
    /// `whitelist` to a directory whose whole subtree is already handled.
    fn can_push(&self, whitelist: &mut Option<PathBuf>) -> PushState;
    /// Apply the change on the remote.
    fn push(&self) -> io::Result<()>;
    /// Resolve a local/remote conflict for this change.
    fn conflict(&self);

    /// True when `obj` lives under the whitelisted directory, i.e. it was
    /// already covered by a parent operation.
    fn is_whitelisted(&self, obj: &LocalObj, path: Option<PathBuf>) -> bool {
        match path {
            Some(p) => obj.path.starts_with(p),
            None => false,
        }
    }

    /// Compare `obj` with the remote and classify the situation
    /// (see `PushFlowState`). Network failures map to `Error`.
    fn flow(&self, obj: &LocalObj, whitelist: Option<PathBuf>) -> PushFlowState {
        // todo moved: from same file, destination doesn't exist but parent do
        if self.is_whitelisted(obj, whitelist) {
            return PushFlowState::Whitelisted;
        }

        // check if exist on server
        let res = ReqProps::new()
            .set_url(obj.path.to_str().unwrap())
            .getlastmodified()
            .send_req_single();

        let obj_data = match res {
            Ok(obj) => Ok(Some(obj)),
            Err(ApiError::IncorrectRequest(err)) => {
                if err.status() == 404 {
                    // 404 just means the object is not on the remote yet.
                    Ok(None)
                } else {
                    eprintln!("err: when requesting properties of {} ({})", obj.name, err.status());
                    Err(())
                }
            },
            Err(_) => Err(()),
        };

        let obj_data = match obj_data {
            Ok(Some(info)) => info,
            Ok(None) => return PushFlowState::NotOnRemote,
            Err(_) => return PushFlowState::Error,
        };

        // check if remote is newest
        let last_sync_ts = {
            // compare against &str directly — no need to allocate a String
            if obj.otype == "blob" {
                Blob::from_path(obj.path.clone())
                    .saved_remote_ts()
                    .parse::<i64>().unwrap()
            } else {
                // todo timestamp on tree: trees have no stored timestamp yet,
                // so use a far-future sentinel (local always wins).
                99999999999999
            }
        };

        let remote_ts = obj_data.lastmodified.unwrap().timestamp_millis();

        if last_sync_ts < remote_ts {
            PushFlowState::RemoteIsNewer
        } else {
            PushFlowState::LocalIsNewer
        }
    }
}
/// Factory turning a `LocalObj` into the matching `PushChange` strategy.
pub struct PushFactory;

impl PushFactory {
    /// Build the push strategy for a file change.
    ///
    /// Panics (`todo!`) for `State::Default`, which has no push semantics.
    pub fn new(&self, obj: LocalObj) -> Box<dyn PushChange> {
        match obj.state {
            State::New => Box::new(New { obj }),
            State::Modified => Box::new(Modified { obj }),
            State::Deleted => Box::new(Deleted { obj }),
            State::Moved => Box::new(Moved { obj }),
            State::Copied => Box::new(Copied { obj }),
            State::Default => todo!(),
        }
    }

    /// Build the push strategy for a directory change.
    ///
    /// Panics (`todo!`) for states not yet supported on directories
    /// (modified, moved, copied, default).
    pub fn new_dir(&self, obj: LocalObj) -> Box<dyn PushChange> {
        match obj.state {
            State::New => Box::new(NewDir { obj }),
            State::Modified => todo!(),
            State::Deleted => Box::new(RmDir { obj }),
            State::Default => todo!(),
            _ => todo!(),
        }
    }
}

View File

@@ -0,0 +1,62 @@
use std::path::PathBuf;
use std::io;
use crate::services::api::ApiError;
use crate::services::api_call::ApiCall;
use crate::services::delete_path::DeletePath;
use crate::store::index;
use crate::store::object::tree::Tree;
use crate::commands::status::LocalObj;
use crate::commands::push::push_factory::{PushState, PushChange, PushFlowState};
use crate::store::object::object::ObjMethods;
/// Push-change for a directory deleted locally: the remote folder must be
/// removed and the local tree/index cleaned up.
pub struct RmDir {
    // The local change being pushed.
    pub obj: LocalObj
}
impl PushChange for RmDir {
    /// Classify this deletion. Whenever the whole subtree is settled by
    /// this operation, its path is stored in `whitelist` so the children
    /// are not processed again individually.
    fn can_push(&self, whitelist: &mut Option<PathBuf>) -> PushState {
        match self.flow(&self.obj, whitelist.clone()) {
            PushFlowState::Whitelisted => PushState::Done,
            PushFlowState::NotOnRemote => {
                // Already absent remotely: nothing to delete.
                *whitelist = Some(self.obj.path.clone());
                PushState::Done
            }
            PushFlowState::RemoteIsNewer => PushState::Conflict,
            PushFlowState::LocalIsNewer => {
                *whitelist = Some(self.obj.path.clone());
                PushState::Valid
            }
            PushFlowState::Error => PushState::Error,
        }
    }

    /// Delete the folder remotely, then drop the local tree entry and the
    /// index line. Fatal API errors terminate the process.
    fn push(&self) -> io::Result<()> {
        let target = &self.obj;

        let outcome = DeletePath::new()
            .set_url(target.path.to_str().unwrap())
            .send();

        if let Err(api_err) = outcome {
            match api_err {
                ApiError::IncorrectRequest(err) => {
                    eprintln!("fatal: error deleting dir {}: {}", target.name, err.status());
                    std::process::exit(1);
                }
                ApiError::RequestError(_) => {
                    eprintln!("fatal: request error deleting dir {}", target.name);
                    std::process::exit(1);
                }
                _ => (),
            }
        }

        // update tree
        // todo update date
        Tree::from_path(target.path.clone()).rm()?;

        // remove index
        index::rm_line(target.path.to_str().unwrap())?;

        Ok(())
    }

    // Directory deletion has no conflict resolution to perform.
    fn conflict(&self) {}
}

27
src/commands/remote.rs Normal file
View File

@@ -0,0 +1,27 @@
use crate::commands::config;
use super::config::get_all_remote;
pub struct RemoteArgs {
pub name: String,
pub url: String,
}
/// Register a new named remote in the repository configuration.
///
/// Previously the `Result` of `add_remote` was silently discarded; a
/// failed write now reports to stderr instead of being swallowed.
pub fn remote_add(args: RemoteArgs) {
    if config::add_remote(&args.name, &args.url).is_err() {
        eprintln!("fatal: cannot save the remote");
    }
}
/// Print one line per configured remote: just the name, or `name url`
/// when `verbose` is set.
pub fn remote_list(verbose: bool) {
    for (name, url) in get_all_remote() {
        if verbose {
            println!("{} {}", name, url);
        } else {
            println!("{}", name);
        }
    }
}

View File

@@ -0,0 +1,62 @@
use crate::services::api::ApiError;
use crate::services::api_call::ApiCall;
use crate::services::req_props::{ReqProps, ObjProps};
use crate::store::object::Object;
use crate::utils::api::{ApiProps, get_api_props};
use crate::utils::path;
use crate::utils::remote::{enumerate_remote, EnumerateOptions};
use std::path::PathBuf;
// todo deletion
/// Print every remote folder and file that should be pulled, relative to
/// the directory the command is run from.
pub fn remote_diff() {
    let relative_p = path::current()
        .unwrap()
        .strip_prefix(path::repo_root()).unwrap().to_path_buf();
    let (folders, files) = get_diff(relative_p);

    // Consume the vectors directly: no need to clone each ObjProps just to
    // read its relative path (previous code did `folder.clone()` per item).
    for folder in folders {
        println!("should pull {}", folder.relative_s.unwrap());
    }

    for file in files {
        println!("should pull {}", file.relative_s.unwrap());
    }
}
/// Enumerate the remote tree rooted at `path` and return the (folders,
/// files) that differ from local state and should be pulled.
pub fn get_diff(path: PathBuf) -> (Vec<ObjProps>, Vec<ObjProps>) {
    // Fixed PROPFIND depth: limits round trips but deep trees need
    // several enumeration steps.
    let depth = "6"; // todo opti
    let api_props = get_api_props();
    enumerate_remote(
        |a| req(&api_props, depth, a),
        // skip subtrees whose local copy is already up to date
        Some(&should_skip),
        EnumerateOptions {
            depth: Some(depth.to_owned()),
            relative_s: Some(path.to_str().unwrap().to_owned())
        })
}
/// Decide whether a remote subtree can be skipped during enumeration.
/// Returns `false` (do not skip) when the object is missing locally or the
/// remote has newer changes than the local copy.
fn should_skip(obj: ObjProps) -> bool {
    // Borrow the relative path instead of cloning the whole ObjProps
    // (previous code cloned the entire struct to read one field).
    let mut o = Object::new(obj.relative_s.as_deref().unwrap());
    let exist = o.exists();

    // If it doesn't exist locally we need to pull it, so we cannot skip it.
    if !exist {
        return false;
    }

    // If the local copy is older, there are changes on the remote and we
    // cannot skip this folder.
    !o.read().is_older(obj.lastmodified.unwrap().timestamp())
}
/// Send a property request for `relative_s`, listing href, content length
/// and lastmodified for every entry down to `depth`.
fn req(api_props: &ApiProps, depth: &str, relative_s: &str) -> Result<Vec<ObjProps>, ApiError> {
    ReqProps::new()
        .set_request(relative_s, &api_props)
        .set_depth(depth)
        .gethref()
        .getcontentlength() // todo opti
        .getlastmodified()
        .send_req_multiple()
}

View File

@@ -2,14 +2,7 @@ use std::fs::File;
use crate::utils;
pub fn reset() {
let mut root = match utils::path::nextsync_root() {
Some(path) => path,
None => {
eprintln!("fatal: not a nextsync repository (or any of the parent directories): .nextsync");
std::process::exit(1);
}
};
root.push(".nextsync");
let mut root = utils::path::nextsync();
root.push("index");
if File::create(root).is_err() {
eprintln!("fatal: failed to reset");

View File

@@ -1,12 +1,19 @@
use std::fs::File;
use std::path::PathBuf;
use std::collections::HashMap;
use crypto::digest::Digest;
use crypto::sha1::Sha1;
use std::collections::{HashSet, HashMap};
use colored::Colorize;
use std::path::PathBuf;
use std::io::{self, Lines, BufReader};
use crate::utils;
use crate::store::{self, object};
use crate::utils::path::{self, path_buf_to_string};
use crate::store::object::blob::Blob;
use crate::store::object::object::Obj;
use crate::store::object::tree::Tree;
use crate::utils::read::read_folder;
use crate::store::index;
use crate::store::object::object::ObjMethods;
/// Options for the `status` command.
pub struct StatusArgs {
    // When true, print plain uncolored one-line-per-change output.
    pub nostyle: bool,
}
#[derive(PartialEq)]
enum RemoveSide {
@@ -19,111 +26,217 @@ enum RemoveSide {
pub enum State {
Default,
New,
Renamed,
Moved,
Copied,
Modified,
Deleted,
}
// todo: relative path, filename, get modified
pub fn status() {
let (mut new_objs, mut del_objs) = get_diff();
dbg!(get_diff());
let mut renamed_objs = get_renamed(&mut new_objs, &mut del_objs);
// get copy, modified
let mut objs = new_objs;
objs.append(&mut del_objs);
objs.append(&mut renamed_objs);
let staged_objs = get_staged(&mut objs);
print_status(staged_objs, objs);
// todo: relative path, filename
// todo: not catch added empty folder
/// Entry point of the `status` command: compute all local changes, split
/// out the staged ones, and print both groups (styled or plain).
pub fn status(args: StatusArgs) {
    let mut all_hashes = get_all_objs_hashes();
    // `get_staged` removes staged entries; what remains is unstaged.
    let staged_objs = get_staged(&mut all_hashes);
    let objs: Vec<LocalObj> = all_hashes.values().cloned().collect();

    if args.nostyle {
        print_status_nostyle(staged_objs, objs);
    } else {
        print_status(staged_objs, objs);
    }
}
/// Return every detected local change as a flat list.
pub fn get_all_objs() -> Vec<LocalObj> {
    // `.values().cloned()` replaces the tuple-indexing `.iter().map(|x| x.1.clone())`.
    get_all_objs_hashes().values().cloned().collect()
}
/// Gather every local change (new, deleted, moved/copied, modified) keyed
/// by the SHA-1 of its relative path string.
fn get_all_objs_hashes() -> HashMap<String, LocalObj> {
    let (mut new_objs_hashes, mut del_objs_hashes, objs_modified) = get_diff();
    let move_copy_hashes = get_move_copy_objs(&mut new_objs_hashes, &mut del_objs_hashes);

    let mut hasher = Sha1::new();
    let mut modified_objs_hashes = HashMap::new();
    for obj in objs_modified {
        hasher.input_str(&obj);
        let hash = hasher.result_str();
        hasher.reset();
        modified_objs_hashes.insert(hash, LocalObj {
            otype: get_otype(PathBuf::from(&obj)),
            // `obj` is already a String; the old `.clone().to_string()` was redundant
            name: obj.clone(),
            path: PathBuf::from(obj),
            path_from: None,
            state: State::Modified
        });
    }

    // Merge all groups; later extends overwrite earlier ones on key
    // collision, so modified entries take precedence.
    let mut all_hashes = HashMap::new();
    all_hashes.extend(move_copy_hashes);
    all_hashes.extend(del_objs_hashes);
    all_hashes.extend(new_objs_hashes);
    all_hashes.extend(modified_objs_hashes);

    all_hashes
}
/// Decide whether a "new" object should stay in the new-objects map.
/// Returns `false` (remove it) when the object is reclassified as a move
/// or a copy, in which case it is inserted into `move_copy_hashes` and —
/// for a move — its counterpart is removed from `del_objs_h`.
fn should_retain(hasher: &mut Sha1, key: String, obj: LocalObj, move_copy_hashes: &mut HashMap<String, LocalObj>, del_objs_h: &mut HashMap<String, LocalObj>) -> bool {
    // todo prevent copied or moved if file empty
    // todo deal with directories
    if obj.path.is_dir()
    {
        // directories are never reclassified here
        return true;
    }
    let mut blob = Blob::from_path(obj.path.clone());
    // `flag` stays true while the object is still considered "new"
    let mut flag = true;
    let identical_blobs = blob.get_all_identical_blobs();

    // try to find an identical blob among the deleted files (=moved)
    for obj_s in identical_blobs.clone() {
        if !flag { break; }
        // hash the candidate path the same way the deletion map is keyed
        hasher.input_str(&obj_s);
        let hash = hasher.result_str();
        hasher.reset();
        if del_objs_h.contains_key(&hash) {
            let mut new_move = obj.clone();
            let deleted = del_objs_h.get(&hash).unwrap().clone();
            // the deletion is consumed by the move
            del_objs_h.remove(&hash);
            new_move.path_from = Some(deleted.path);
            new_move.state = State::Moved;
            move_copy_hashes.insert(key.clone(), new_move.clone());
            flag = false;
        }
    }

    // if did not find anything before try to find a file with the same content (=copy)
    if flag {
        if let Some(rel_s) = identical_blobs.first() {
            let root = path::repo_root();
            let rel_p = PathBuf::from(rel_s.clone());
            let abs_p = root.join(rel_p.clone());
            // only a still-existing source file counts as a copy origin
            if abs_p.exists() {
                let mut new_copy = obj.clone();
                new_copy.path_from = Some(rel_p);
                new_copy.state = State::Copied;
                move_copy_hashes.insert(key, new_copy.clone());
                flag = false;
            }
        }
    }
    flag
}
/// Reclassify "new" objects as moves/copies where possible. Matched
/// entries are removed from `new_objs_h` (and, for moves, from
/// `del_objs_h`) and returned in a separate map.
fn get_move_copy_objs(new_objs_h: &mut HashMap<String, LocalObj>, del_objs_h: &mut HashMap<String, LocalObj>) -> HashMap<String, LocalObj> {
    let mut hasher = Sha1::new();
    let mut move_copy_hashes = HashMap::new();
    // retain() drops every entry should_retain reclassifies
    new_objs_h.retain(|key, obj| {
        should_retain(&mut hasher, key.to_owned(), obj.clone(), &mut move_copy_hashes, del_objs_h)
    });
    move_copy_hashes
}
#[derive(Debug, Clone)]
pub struct Obj {
pub struct LocalObj {
pub otype: String,
pub name: String,
pub path: PathBuf,
pub path_from: Option<PathBuf>, // origin path when state is move or copy
pub state: State,
}
pub fn get_all_staged() -> Vec<Obj> {
// todo opti getting staged and then finding differences ?
// todo opti return folder
let (mut new_objs, mut del_objs) = get_diff();
let mut renamed_objs = get_renamed(&mut new_objs, &mut del_objs);
// get copy, modified
let mut objs = new_objs;
objs.append(&mut del_objs);
objs.append(&mut renamed_objs);
let staged_objs = get_staged(&mut objs);
staged_objs
/// Return only the staged local changes (those listed in the index).
pub fn get_all_staged() -> Vec<LocalObj> {
    let mut all_hashes = get_all_objs_hashes();
    get_staged(&mut all_hashes)
}
fn get_renamed(new_obj: &mut Vec<Obj>, del_obj: &mut Vec<Obj>) -> Vec<Obj> {
// get hash of all new obj, compare to hash of all del
let renamed_objs = vec![];
fn get_staged(hashes: &mut HashMap<String, LocalObj>) -> Vec<LocalObj> {
let mut lines: Vec<String> = vec![];
renamed_objs
}
fn get_staged(objs: &mut Vec<Obj>) -> Vec<Obj> {
let mut indexes = HashSet::new();
let mut staged_objs: Vec<Obj> = vec![];
let nextsync_path = utils::path::nextsync().unwrap();
if let Ok(entries) = store::index::read_line(nextsync_path.clone()) {
if let Ok(entries) = index::read_line() {
for entry in entries {
indexes.insert(entry.unwrap());
lines.push(entry.unwrap());
}
}
let mut to_remove: Vec<usize> = vec![];
let mut index = 0;
for obj in &mut *objs {
dbg!(obj.clone().path.to_str().unwrap());
if indexes.contains(obj.clone().path.to_str().unwrap()) {
staged_objs.push(obj.clone());
to_remove.push(index);
}
index += 1;
}
let mut hasher = Sha1::new();
let mut staged_objs: Vec<LocalObj> = vec![];
let mut offset = 0;
for i in to_remove {
objs.remove(i + offset.clone());
offset += 1;
}
let ref_p = path::repo_root();
for obj in lines {
// hash the object
hasher.input_str(&obj);
let hash = hasher.result_str();
hasher.reset();
// find it on the list of hashes
if hashes.contains_key(&hash) {
staged_objs.push(hashes.get(&hash).unwrap().clone());
hashes.remove(&hash);
}else {
let mut t_path = ref_p.clone();
let relative_p = PathBuf::from(obj.clone());
t_path.push(relative_p.clone());
staged_objs.push(LocalObj {
otype: get_otype(t_path.clone()),
name: obj.to_string(),
path: relative_p.clone(),
path_from: None,
state: {
if t_path.exists() {
State::New
} else {
State::Deleted
}
},
});
}
}
staged_objs
}
fn get_diff() -> (Vec<Obj>, Vec<Obj>) {
/// Index every child of `tree` by its hashed path.
/// `_path` is kept for call-site compatibility but is currently unused
/// (renamed to silence the unused-variable warning).
fn read_tree_to_hashmap(tree: &mut Tree, hashes: &mut HashMap<String, LocalObj>, _path: PathBuf) {
    while let Some(child) = tree.next() {
        hashes.insert(String::from(child.get_hash_path()), child.get_local_obj());
    } // (stray trailing semicolon removed)
}
fn get_diff() -> (HashMap<String, LocalObj>, HashMap<String, LocalObj>, Vec<String>) {
let mut hashes = HashMap::new();
let mut objs: Vec<String> = vec![];
let mut objs_modified: Vec<String> = vec![];
let root = match utils::path::nextsync_root() {
Some(path) => path,
None => {
eprintln!("fatal: not a nextsync repository (or any of the parent directories): .nextsync");
std::process::exit(1);
}
};
let root = path::repo_root();
dbg!(utils::path::current());
let nextsync_path = utils::path::nextsync().unwrap();
let current_p = utils::path::current().unwrap();
let current_p = path::current().unwrap();
// todo use repo_root instead of current
let dist_path = current_p.strip_prefix(root.clone()).unwrap().to_path_buf();
if let Ok(lines) = read_head(nextsync_path.clone()) {
add_to_hashmap(lines, &mut hashes, dist_path.clone());
}
if let Ok(entries) = utils::read::read_folder(root.clone()) {
read_tree_to_hashmap(&mut Tree::from_head(), &mut hashes, dist_path.clone());
//if let Ok(lines) = read_lines(head::path()) {
// add_to_hashmap(lines, &mut hashes, dist_path.clone());
//}
if let Ok(entries) = read_folder(root.clone()) {
add_to_vec(entries, &mut objs, root.clone());
}
let mut obj_to_analyse = remove_duplicate(&mut hashes, &mut objs, RemoveSide::Both);
dbg!(obj_to_analyse.clone());
while obj_to_analyse.len() > 0 {
let cur_obj = obj_to_analyse.pop().unwrap();
@@ -132,45 +245,57 @@ fn get_diff() -> (Vec<Obj>, Vec<Obj>) {
let obj_path = root.clone().join(cur_path.clone());
if obj_path.is_dir() {
if let Some((_, lines)) = object::read_tree(cur_obj.clone()) {
add_to_hashmap(lines, &mut hashes, cur_path.clone());
}
// read virtual tree
read_tree_to_hashmap(&mut Tree::from_path(cur_obj.clone()), &mut hashes, dist_path.clone());
//let mut tree = Tree::from_path(cur_obj.clone());
//if let Some(lines) = tree.get_children() {
//add_to_hashmap(lines, &mut hashes, cur_path.clone());
//}
if let Ok(entries) = utils::read::read_folder(obj_path.clone()) {
// read physical tree
if let Ok(entries) = read_folder(obj_path.clone()) {
add_to_vec(entries, &mut objs, root.clone());
}
// remove duplicate
let diff = remove_duplicate(&mut hashes, &mut objs, RemoveSide::Both);
obj_to_analyse.append(&mut diff.clone());
} else {
// todo look for change
if Blob::from_path(cur_path).has_changes() {
objs_modified.push(cur_obj);
}
}
}
let del_objs: Vec<Obj> = hashes.iter().map(|x| {
Obj {
otype: x.1.otype.clone(),
name: x.1.name.clone(),
path: x.1.path.clone(),
state: State::Deleted
}
}).collect();
for (_, elt) in &mut hashes {
elt.state = State::Deleted;
}
let new_objs: Vec<Obj> = objs.iter().map(|x| {
let p = PathBuf::from(x.to_string());
let mut new_objs_hashes = HashMap::new();
let mut hasher = Sha1::new();
for obj in objs {
// hash the object
hasher.input_str(&obj);
let hash = hasher.result_str();
hasher.reset();
let p = PathBuf::from(obj.to_string());
let abs_p = path::repo_root().join(p.clone());
// todo name
Obj {
otype: get_type(p.clone()),
name: x.to_string(),
new_objs_hashes.insert(String::from(hash), LocalObj {
otype: get_otype(abs_p),
name: obj.to_string(),
path: p,
path_from: None,
state: State::New
}
}).collect();
(new_objs, del_objs)
});
}
(new_objs_hashes, hashes, objs_modified)
}
fn get_type(p: PathBuf) -> String {
fn get_otype(p: PathBuf) -> String {
if p.is_dir() {
String::from("tree")
} else {
@@ -178,27 +303,28 @@ fn get_type(p: PathBuf) -> String {
}
}
fn add_to_hashmap(lines: Lines<BufReader<File>>, hashes: &mut HashMap<String, Obj>, path: PathBuf) {
for line in lines {
if let Ok(ip) = line {
if ip.clone().len() > 5 {
let (ftype, hash, name) = object::parse_line(ip);
let mut p = path.clone();
p.push(name.clone());
hashes.insert(String::from(hash), Obj{
otype: String::from(ftype),
name: String::from(name),
path: p,
state: State::Default,
});
}
}
}
}
//fn add_to_hashmap(lines: Lines<BufReader<File>>, hashes: &mut HashMap<String, LocalObj>, path: PathBuf) {
// for line in lines {
// if let Ok(ip) = line {
// if ip.clone().len() > 5 {
// let (ftype, hash, name) = tree::parse_line(ip);
// let mut p = path.clone();
// p.push(name.clone());
// hashes.insert(String::from(hash), LocalObj{
// otype: String::from(ftype),
// name: String::from(name),
// path: p,
// path_from: None,
// state: State::Default,
// });
// }
// }
// }
//}
fn add_to_vec(entries: Vec<PathBuf>, objects: &mut Vec<String>, root: PathBuf) {
for entry in entries {
if !is_nextsync_config(entry.clone()) {
if !path::is_nextsync_config(entry.clone()) {
let object_path = entry.strip_prefix(root.clone()).unwrap();
objects.push(String::from(object_path.to_str().unwrap()));
}
@@ -206,9 +332,7 @@ fn add_to_vec(entries: Vec<PathBuf>, objects: &mut Vec<String>, root: PathBuf) {
}
fn print_status(staged_objs: Vec<Obj>, objs: Vec<Obj>) {
dbg!(staged_objs.clone());
dbg!(objs.clone());
fn print_status(staged_objs: Vec<LocalObj>, objs: Vec<LocalObj>) {
if staged_objs.len() == 0 && objs.len() == 0 {
println!("Nothing to push, working tree clean");
return;
@@ -226,7 +350,7 @@ fn print_status(staged_objs: Vec<Obj>, objs: Vec<Obj>) {
// not staged files
if objs.len() != 0 {
println!("Changes not staged for push:");
println!(" (Use\"nextsync add <file>...\" to update what will be pushed)");
println!(" (Use \"nextsync add <file>...\" to update what will be pushed)");
for object in objs {
print_object(object);
@@ -234,78 +358,88 @@ fn print_status(staged_objs: Vec<Obj>, objs: Vec<Obj>) {
}
}
fn print_object(obj: Obj) {
/// Plain-text status output: one unstyled line per staged change.
/// Unstaged `objs` are accepted for signature parity but not printed here.
fn print_status_nostyle(staged_objs: Vec<LocalObj>, objs: Vec<LocalObj>) {
    // todo sort
    if staged_objs.is_empty() && objs.is_empty() {
        return;
    }

    for obj in staged_objs {
        match obj.state {
            State::Deleted => println!("deleted: {}", obj.name),
            State::New => println!("new: {}", obj.name),
            State::Modified => println!("modified: {}", obj.name),
            State::Moved => println!(
                "moved: {} => {}",
                path_buf_to_string(obj.path_from.unwrap()),
                path_buf_to_string(obj.path)
            ),
            State::Copied => println!(
                "copied: {} => {}",
                path_buf_to_string(obj.path_from.unwrap()),
                path_buf_to_string(obj.path)
            ),
            // other states produce no output, matching the original if-chain
            _ => {}
        }
    }
}
fn print_object(obj: LocalObj) {
if obj.state == State::Deleted {
println!(" {} {}", String::from("deleted:").red(), obj.name.red());
} else if obj.state == State::Renamed {
println!(" {} {}", String::from("renamed:").red(), obj.name.red());
} else if obj.state == State::New {
println!(" {} {}", String::from("new file:").red(), obj.name.red());
println!(" {} {}", String::from("new:").red(), obj.name.red());
} else if obj.state == State::Modified {
println!(" {} {}", String::from("modified:").red(), obj.name.red());
} else if obj.state == State::Moved {
println!(" {} {} => {}", String::from("moved:").red(), path_buf_to_string(obj.path_from.unwrap()).red(), path_buf_to_string(obj.path).red());
} else if obj.state == State::Copied {
println!(" {} {} => {}", String::from("copied:").red(), path_buf_to_string(obj.path_from.unwrap()), path_buf_to_string(obj.path).red());
}
}
fn print_staged_object(obj: Obj) {
fn print_staged_object(obj: LocalObj) {
if obj.state == State::Deleted {
println!(" {} {}", String::from("deleted:").green(), obj.name.green());
} else if obj.state == State::Renamed {
println!(" {} {}", String::from("renamed:").green(), obj.name.green());
} else if obj.state == State::New {
println!(" {} {}", String::from("new file:").green(), obj.name.green());
println!(" {} {}", String::from("new:").green(), obj.name.green());
} else if obj.state == State::Modified {
println!(" {} {}", String::from("modified:").green(), obj.name.green());
} else if obj.state == State::Moved {
println!(" {} {} => {}", String::from("moved:").green(), path_buf_to_string(obj.path_from.unwrap()).green(), path_buf_to_string(obj.path).green());
} else if obj.state == State::Copied {
println!(" {} {} => {}", String::from("copied:"), path_buf_to_string(obj.path_from.unwrap()).green(), path_buf_to_string(obj.path).green());
}
}
fn remove_duplicate(hashes: &mut HashMap<String, Obj>, objects: &mut Vec<String>, remove_option: RemoveSide) -> Vec<String> {
fn remove_duplicate(hashes: &mut HashMap<String, LocalObj>, objects: &mut Vec<String>, remove_option: RemoveSide) -> Vec<String> {
let mut hasher = Sha1::new();
let mut to_remove: Vec<usize> = vec![];
let mut i = 0;
let mut duplicate = vec![];
for object in &mut *objects {
objects.retain(|obj| {
// hash the object
hasher.input_str(object);
hasher.input_str(obj);
let hash = hasher.result_str();
hasher.reset();
// find it on the list of hashes
if hashes.contains_key(&hash) {
duplicate.push(object.clone());
duplicate.push(obj.clone());
// remove from hashes
if remove_option == RemoveSide::Left || remove_option == RemoveSide::Both {
hashes.remove(&hash);
}
if remove_option == RemoveSide::Right || remove_option == RemoveSide::Both {
to_remove.push(i);
}
}
i += 1;
}
// remove all objects existing in the list of hashes
i = 0;
for index in to_remove {
objects.remove(index-i);
i += 1;
}
// remove from objects
remove_option != RemoveSide::Right && remove_option != RemoveSide::Both
} else {
true
}
});
duplicate
}
/// True when the path's final component is one of nextsync's own
/// configuration entries (the `.nextsync` folder or `.nextsyncignore`).
fn is_nextsync_config(path: PathBuf) -> bool {
    [".nextsync", ".nextsyncignore"]
        .iter()
        .any(|cfg| path.ends_with(cfg))
}
/// Open the HEAD file located directly under `path` and return a line
/// iterator over its contents.
fn read_head(path: PathBuf) -> io::Result<io::Lines<io::BufReader<File>>> {
    utils::read::read_lines(path.join("HEAD"))
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_remove_duplicate() {
let mut hasher = Sha1::new();
@@ -322,10 +456,11 @@ mod tests {
hasher.reset();
let mut hashes = HashMap::new();
let default_obj = Obj {
let default_obj = LocalObj {
otype: String::from("tree"),
name: String::from("test"),
path: PathBuf::from(""),
path_from: None,
state: State::Default,
};
hashes.insert(hash1.clone(), default_obj.clone());
@@ -337,8 +472,7 @@ mod tests {
objects.push(String::from("file2"));
objects.push(String::from("file3"));
remove_duplicate(&mut hashes, &mut objects, RemoveSide::Both);
dbg!(hashes.clone());
dbg!(objects.clone());
assert_eq!(hashes.contains_key(&hash4), true);
assert_eq!(hashes.len(), 1);
assert_eq!(objects, vec!["file3"]);

View File

@@ -1,5 +1,7 @@
use clap::{App, Arg, SubCommand};
use crate::commands::add::AddArgs;
use clap::Command;
mod subcommands;
mod commands;
mod utils;
mod services;
@@ -7,115 +9,41 @@ mod global;
mod store;
fn main() {
let matches = App::new("NextSync")
let app = Command::new("Nextsync")
.version("1.0")
.author("grimhilt")
.about("")
.subcommand(
SubCommand::with_name("init")
.arg(
Arg::with_name("directory")
.required(false)
.takes_value(true)
.value_name("DIRECTORY")
)
)
.subcommand(
SubCommand::with_name("status")
.arg(
Arg::with_name("directory")
.required(false)
.takes_value(true)
.value_name("DIRECTORY")
)
)
.subcommand(SubCommand::with_name("reset"))
.subcommand(SubCommand::with_name("push"))
.subcommand(
SubCommand::with_name("clone")
.arg(
Arg::with_name("remote")
.required(true)
.takes_value(true)
.value_name("REMOTE")
)
.arg(
Arg::with_name("directory")
.required(false)
.takes_value(true)
.value_name("DIRECTORY")
)
)
.subcommand(
SubCommand::with_name("add")
.arg(
Arg::with_name("files")
.required(true)
.multiple(true)
.takes_value(true)
.value_name("FILE")
.help("Files to add"),
)
.arg(
Arg::with_name("force")
.short("f")
.long("force")
.help("Allow adding otherwise ignored files."),
)
)
.subcommand(
SubCommand::with_name("config")
.arg(
Arg::with_name("variable")
.required(true)
.takes_value(true)
.value_name("VARIABLE")
)
.arg(
Arg::with_name("value")
.required(true)
.takes_value(true)
.value_name("VALUE")
)
)
.get_matches();
.about("A git-line command line tool to interact with nextcloud")
.subcommands([
subcommands::clone::create(),
subcommands::init::create(),
subcommands::status::create(),
subcommands::add::create(),
subcommands::push::create(),
subcommands::reset::create(),
subcommands::remote::create(),
subcommands::config::create(),
subcommands::remote_diff::create(),
subcommands::pull::create(),
subcommands::credential::create(),
]);
// .setting(clap::AppSettings::SubcommandRequiredElseHelp);
if let Some(matches) = matches.subcommand_matches("init") {
if let Some(val) = matches.values_of("directory") {
global::global::set_dir_path(String::from(val.clone().next().unwrap()));
}
commands::init::init();
} else if let Some(matches) = matches.subcommand_matches("status") {
if let Some(val) = matches.values_of("directory") {
global::global::set_dir_path(String::from(val.clone().next().unwrap()));
}
commands::status::status();
} else if let Some(matches) = matches.subcommand_matches("add") {
if let Some(files) = matches.values_of("files") {
commands::add::add(AddArgs {
files: files,
force: matches.is_present("force"),
});
}
} else if let Some(_) = matches.subcommand_matches("reset") {
commands::reset::reset();
} else if let Some(matches) = matches.subcommand_matches("clone") {
if let Some(val) = matches.values_of("directory") {
global::global::set_dir_path(String::from(val.clone().next().unwrap()));
}
if let Some(remote) = matches.values_of("remote") {
commands::clone::clone(remote);
}
} else if let Some(matches) = matches.subcommand_matches("push") {
commands::push::push();
} else if let Some(matches) = matches.subcommand_matches("config") {
if let Some(mut var) = matches.values_of("variable") {
if let Some(mut val) = matches.values_of("value") {
if commands::config::set(var.next().unwrap(), val.next().unwrap()).is_err() {
eprintln!("fatal: cannot save the value");
}
}
}
}
let matches = app.get_matches();
match matches.subcommand() {
Some(("init", args)) => subcommands::init::handler(args),
Some(("status", args)) => subcommands::status::handler(args),
Some(("add", args)) => subcommands::add::handler(args),
Some(("reset", _)) => commands::reset::reset(),
Some(("clone", args)) => subcommands::clone::handler(args),
Some(("push", _)) => commands::push::push(),
Some(("config", args)) => subcommands::config::handler(args),
Some(("remote-diff", args)) => subcommands::remote_diff::handler(args),
Some(("pull", args)) => subcommands::pull::handler(args),
Some(("remote", args)) => subcommands::remote::handler(args),
Some(("credential", args)) => subcommands::credential::handler(args),
Some((_, _)) => {},
None => {},
};
}

View File

@@ -1,6 +1,14 @@
// Service layer: one module per remote operation (WebDAV verbs, login,
// upload/download) plus shared request plumbing (api, request_manager,
// and the ApiCall trait).
pub mod api;
pub mod list_folders;
pub mod create_folder;
pub mod download_files;
pub mod req_props;
pub mod upload_file;
pub mod delete_path;
pub mod downloader;
pub mod r#move;
pub mod r#copy;
pub mod login;
pub mod request_manager;
pub mod api_call;
//pub mod auth;
//pub mod bulk_upload;

View File

@@ -1,19 +1,34 @@
use std::error::Error;
use lazy_static::lazy_static;
use std::sync::Mutex;
use reqwest::Client;
use reqwest::RequestBuilder;
use reqwest::{Response, Error, IntoUrl, Method};
use std::env;
use dotenv::dotenv;
use reqwest::multipart::Form;
use reqwest::{Response, Method};
use reqwest::header::{HeaderValue, CONTENT_TYPE, HeaderMap, IntoHeaderName};
use crate::utils::api::ApiProps;
use crate::commands::config;
use crate::commands::clone::get_url_props;
use crate::services::request_manager::get_request_manager;
lazy_static! {
static ref HTTP_TOKEN: Mutex<String> = Mutex::new(String::new());
}
#[derive(Debug)]
pub enum ApiError {
IncorrectRequest(reqwest::Response),
EmptyError(reqwest::Error),
RequestError(reqwest::Error),
Unexpected(String),
}
pub struct ApiBuilder {
client: Client,
request: Option<RequestBuilder>,
headers: Option<HeaderMap>,
auth_set: bool,
host: Option<String>,
}
impl ApiBuilder {
@@ -21,49 +36,88 @@ impl ApiBuilder {
ApiBuilder {
client: Client::new(),
request: None,
headers: None,
auth_set: false,
host: None,
}
}
pub fn set_request<U: IntoUrl>(mut self, method: Method, url: U) -> ApiBuilder {
self.request = Some(self.client.request(method, url));
pub fn set_url(&mut self, method: Method, url: &str) -> &mut ApiBuilder {
let mut new_url = url.to_owned();
if let Some(active) = config::get_core("force_insecure") {
if active == "true" {
new_url = url.replace("https", "http");
}
}
self.request = Some(self.client.request(method, new_url));
self
}
pub fn build_request(&mut self, method: Method, path: &str) -> &mut ApiBuilder {
dotenv().ok();
// todo remove env
let host = env::var("HOST").unwrap();
let username = env::var("USERNAME").unwrap();
let root = env::var("ROOT").unwrap();
let remote = match config::get_remote("origin") {
Some(r) => r,
None => {
eprintln!("fatal: unable to find a remote");
std::process::exit(1);
}
};
let (host, username, root) = get_url_props(&remote);
self.host = Some(host.clone());
let mut url = String::from(host);
url.push_str("/remote.php/dav/files/");
url.push_str(&username);
url.push_str("/");
url.push_str(username.unwrap());
url.push_str(&root);
url.push_str("/");
url.push_str(path);
dbg!(url.clone());
self.request = Some(self.client.request(method, url));
self
if path != "/" {
url.push_str(path);
}
self.set_url(method, &url)
}
fn set_auth(&mut self) -> &mut ApiBuilder {
// todo if not exist
dotenv().ok();
let password = env::var("PASSWORD").unwrap();
let username = env::var("USERNAME").unwrap();
pub fn set_req(&mut self, meth: Method, p: &str, api_props: &ApiProps) -> &mut ApiBuilder {
self.host = Some(api_props.clone().host.clone());
let mut url = String::from(&api_props.host);
url.push_str("/remote.php/dav/files/");
url.push_str("/");
url.push_str(&api_props.username);
url.push_str(&api_props.root);
url.push_str("/");
if p != "/" {
url.push_str(p);
}
self.set_url(meth, &url)
}
pub fn set_basic_auth(&mut self, login: String, pwd: String) -> &mut ApiBuilder {
match self.request.take() {
None => {
eprintln!("fatal: incorrect request");
std::process::exit(1);
},
Some(req) => {
self.request = Some(req.basic_auth(username, Some(password)));
self.request = Some(req.basic_auth(login, Some(pwd)));
}
}
self.auth_set = true;
self
}
pub fn set_token(&mut self, token: String) {
match self.request.take() {
None => {
eprintln!("fatal: incorrect request");
std::process::exit(1);
},
Some(req) => {
self.request = Some(req.bearer_auth(token));
}
}
self.auth_set = true;
}
pub fn set_xml(&mut self, xml_payload: String) -> &mut ApiBuilder {
match self.request.take() {
None => {
@@ -72,11 +126,32 @@ impl ApiBuilder {
},
Some(req) => {
self.request = Some(req.body(xml_payload));
self.set_header(CONTENT_TYPE, HeaderValue::from_static("application/xml"));
}
}
self
}
pub fn set_multipart(&mut self, form: Form) -> &mut ApiBuilder {
match self.request.take() {
None => {
eprintln!("fatal: incorrect request");
std::process::exit(1);
},
Some(req) => {
self.request = Some(req.multipart(form));
self.set_header(CONTENT_TYPE, HeaderValue::from_static("multipart/related"));
}
}
self
}
pub fn set_header<K: IntoHeaderName>(&mut self, key: K, val: HeaderValue) -> &mut ApiBuilder {
let map = self.headers.get_or_insert(HeaderMap::new());
map.insert(key, val);
self
}
pub fn set_body(&mut self, body: Vec<u8>) -> &mut ApiBuilder {
match self.request.take() {
None => {
@@ -88,17 +163,95 @@ impl ApiBuilder {
}
}
self
}
pub async fn send(&mut self) -> Result<Response, Error> {
self.set_auth();
fn set_request_manager(&mut self) {
let mut request_manager = get_request_manager().lock().unwrap();
let request_manager = request_manager.as_mut().unwrap();
if !self.host.is_none()
{
request_manager.set_host(self.host.clone().unwrap().replace("https://", ""));
}
if !self.auth_set {
self.set_token(request_manager.get_token());
//self.set_auth();
}
}
pub fn send(&mut self, need_text: bool) -> Result<Option<String>, ApiError> {
if !self.host.is_none() || !self.auth_set {
self.set_request_manager();
}
let res_req = tokio::runtime::Runtime::new().unwrap().block_on(async {
match self.request.take() {
None => {
eprintln!("fatal: incorrect request");
std::process::exit(1);
},
Some(req) => {
if let Some(headers) = &self.headers {
req.headers(headers.clone())
.send().await
} else {
req.send().await
}
},
}
});
// handle request error
let res = match res_req {
Err(err) => {
eprintln!("fatal: {}", err.source().unwrap());
std::process::exit(1);
},
Ok(res) => res,
};
if res.status().is_success() {
if need_text {
let body = tokio::runtime::Runtime::new().unwrap().block_on(async {
res.text().await
}).map_err(|err| ApiError::EmptyError(err))?;
Ok(Some(body))
} else {
Ok(None)
}
} else {
Err(ApiError::IncorrectRequest(res))
}
}
pub async fn old_send(&mut self) -> Result<Response, reqwest::Error> {
let mut request_manager = get_request_manager().lock().unwrap();
let request_manager = request_manager.as_mut().unwrap();
if !self.host.is_none()
{
request_manager.set_host(self.host.clone().unwrap());
}
if !self.auth_set {
//self.set_auth();
self.set_token(request_manager.get_token());
}
match self.request.take() {
None => {
eprintln!("fatal: incorrect request");
std::process::exit(1);
},
Some(req) => req.send().await.map_err(Error::from),
Some(req) => {
if let Some(headers) = &self.headers {
req.headers(headers.clone())
.send().await.map_err(reqwest::Error::from)
} else {
req.send().await.map_err(reqwest::Error::from)
}
},
}
}
}

13
src/services/api_call.rs Normal file
View File

@@ -0,0 +1,13 @@
use crate::services::api::ApiError;
/// Common interface for request wrappers (Copy, Move, Login, ReqProps, ...).
///
/// Each method has a default so implementors only override what they
/// actually support; the defaults panic (`unimplemented!`) or are no-ops.
pub trait ApiCall {
    /// Construct the call object. Panics unless overridden.
    /// `Self: Sized` keeps the trait object-safe despite returning `Self`.
    fn new() -> Self where Self: Sized {
        unimplemented!()
    }
    /// Set the target URL. Default is a no-op returning `self` for chaining.
    fn set_url(&mut self, _url: &str) -> &mut Self {
        self
    }
    /// Execute the request. `Some(body)` when a text body was requested,
    /// `None` otherwise. Panics unless overridden.
    fn send(&mut self) -> Result<Option<String>, ApiError> {
        unimplemented!()
    }
}

View File

@@ -1,2 +0,0 @@

53
src/services/copy.rs Normal file
View File

@@ -0,0 +1,53 @@
use reqwest::{Method, header::HeaderValue};
use crate::services::api::{ApiBuilder, ApiError};
use crate::commands::clone::get_url_props;
use crate::commands::config;
use crate::services::api_call::ApiCall;
/// WebDAV COPY call: duplicates a remote file/folder server-side.
pub struct Copy {
    // shared request builder doing auth, headers and sending
    api_builder: ApiBuilder,
}
impl ApiCall for Copy {
    fn new() -> Self {
        Copy {
            api_builder: ApiBuilder::new(),
        }
    }
    /// Send the COPY request; `true` asks for the response body as text.
    fn send(&mut self) -> Result<Option<String>, ApiError> {
        self.api_builder.send(true)
    }
}
impl Copy {
    /// Prepare a WebDAV COPY from `url` to `destination`, both relative to
    /// the remote root. The absolute destination URL is rebuilt from the
    /// "origin" remote and sent in the `Destination` header, as WebDAV
    /// requires.
    pub fn set_url_copy(&mut self, url: &str, destination: &str) -> &mut Copy {
        self.api_builder.build_request(Method::from_bytes(b"COPY").unwrap(), url);
        let remote = config::get_remote("origin").unwrap_or_else(|| {
            eprintln!("fatal: unable to find a remote");
            std::process::exit(1);
        });
        let (host, username, root) = get_url_props(&remote);
        // host + DAV files endpoint + username + root + "/"
        let mut dest = format!("{}/remote.php/dav/files/{}{}/", host, username.unwrap(), root);
        if destination != "/" {
            dest.push_str(destination);
        }
        self.api_builder.set_header("Destination", HeaderValue::from_str(&dest).unwrap());
        self
    }
    /// Set the WebDAV `Overwrite` header: "T" lets the copy replace an
    /// existing target, "F" forbids it.
    pub fn _overwrite(&mut self, overwrite: bool) -> &mut Copy {
        let flag = if overwrite { "T" } else { "F" };
        self.api_builder.set_header("Overwrite", HeaderValue::from_str(flag).unwrap());
        self
    }
}

View File

@@ -1,28 +1,24 @@
use reqwest::Method;
use crate::services::api::{ApiBuilder, ApiError};
use reqwest::{Method, IntoUrl, Response, Error};
use crate::services::api_call::ApiCall;
pub struct CreateFolder {
api_builder: ApiBuilder,
}
impl CreateFolder {
pub fn new<U: IntoUrl>(url: U) -> Self {
ListFolders {
api_builder: ApiBuilder::new()
.set_request(Method::from_bytes(b"MKCOL").unwrap(), url),
impl ApiCall for CreateFolder {
fn new() -> Self {
CreateFolder {
api_builder: ApiBuilder::new(),
}
}
pub async fn send(&mut self) -> Result<Response, Error> {
self.api_builder.send().await
fn set_url(&mut self, url: &str) -> &mut CreateFolder {
self.api_builder.build_request(Method::from_bytes(b"MKCOL").unwrap(), url);
self
}
pub async fn send_with_err(mut self) -> Result<(), ApiError> {
let res = self.send().await.map_err(ApiError::RequestError)?;
if res.status().is_success() {
Ok()
} else {
Err(ApiError::IncorrectRequest(res))
}
fn send(&mut self) -> Result<Option<String>, ApiError> {
self.api_builder.send(false)
}
}

View File

@@ -1,33 +1,24 @@
use reqwest::Method;
use crate::services::api::{ApiBuilder, ApiError};
use reqwest::{Method, Response, Error};
use crate::services::api_call::ApiCall;
pub struct DeletePath {
api_builder: ApiBuilder,
}
impl DeletePath {
pub fn new() -> Self {
impl ApiCall for DeletePath {
fn new() -> Self {
DeletePath {
api_builder: ApiBuilder::new(),
}
}
pub fn set_url(&mut self, url: &str) -> &mut DeletePath {
fn set_url(&mut self, url: &str) -> &mut DeletePath {
self.api_builder.build_request(Method::DELETE, url);
self
}
pub async fn send(&mut self) -> Result<Response, Error> {
self.api_builder.send().await
}
pub async fn send_with_err(&mut self) -> Result<String, ApiError> {
let res = self.send().await.map_err(ApiError::RequestError)?;
if res.status().is_success() {
let body = res.text().await.map_err(ApiError::EmptyError)?;
Ok(body)
} else {
Err(ApiError::IncorrectRequest(res))
}
fn send(&mut self) -> Result<Option<String>, ApiError> {
self.api_builder.send(true)
}
}

View File

@@ -1,29 +1,89 @@
use std::path::PathBuf;
use futures_util::StreamExt;
use std::fs::File;
use std::fs::OpenOptions;
use std::io::{self, Write};
use reqwest::{Method, Response, Error};
use crate::utils::api::ApiProps;
use crate::services::api::{ApiBuilder, ApiError};
use reqwest::{Method, IntoUrl, Response, Error};
use crate::services::api_call::ApiCall;
pub struct DownloadFiles {
api_builder: ApiBuilder,
relative_ps: String,
}
impl ApiCall for DownloadFiles {
fn new() -> Self {
DownloadFiles {
api_builder: ApiBuilder::new(),
relative_ps: String::new(),
}
}
}
impl DownloadFiles {
pub fn new<U: IntoUrl>(url: U) -> Self {
DownloadFiles {
api_builder: ApiBuilder::new()
.set_request(Method::GET, url),
}
// todo make it beautiful
pub fn set_url_download(&mut self, relative_ps: &str, api_props: &ApiProps) -> &mut DownloadFiles {
self.relative_ps = relative_ps.to_string();
self.api_builder.set_req(Method::GET, relative_ps, api_props);
self
}
pub async fn send(&mut self) -> Result<Response, Error> {
self.api_builder.send().await
pub async fn send_download(&mut self) -> Result<Response, Error> {
self.api_builder.old_send().await
}
pub async fn send_with_err(mut self) -> Result<Vec<u8>, ApiError> {
let res = self.send().await.map_err(ApiError::RequestError)?;
if res.status().is_success() {
let body = res.bytes().await.map_err(ApiError::EmptyError)?;
Ok(body.to_vec())
} else {
Err(ApiError::IncorrectRequest(res))
}
pub fn save_stream(&mut self, ref_p: PathBuf, callback: Option<impl Fn(u64)>) -> Result<(), ApiError> {
let abs_p = ref_p.join(PathBuf::from(self.relative_ps.clone()));
let mut file = File::create(abs_p).unwrap();
tokio::runtime::Runtime::new().unwrap().block_on(async {
let res = self.send_download().await.map_err(ApiError::RequestError)?;
if res.status().is_success() {
let mut stream = res.bytes_stream();
while let Some(chunk) = stream.next().await {
let unwrap_chunk = chunk.unwrap();
// save chunk inside file
if let Err(err) = file.write_all(&unwrap_chunk) {
return Err(ApiError::Unexpected(err.to_string()));
} else if let Some(fct) = &callback {
// call callback with size of this chunk
fct(unwrap_chunk.len().try_into().unwrap());
}
}
Ok(())
} else {
Err(ApiError::IncorrectRequest(res))
}
})
}
pub fn save(&mut self, ref_p: PathBuf) -> Result<(), ApiError> {
tokio::runtime::Runtime::new().unwrap().block_on(async {
let p = ref_p.join(PathBuf::from(self.relative_ps.clone()));
let res = self.send_download().await.map_err(ApiError::RequestError)?;
if res.status().is_success() {
let body = res.bytes().await.map_err(ApiError::EmptyError)?;
match Self::write_file(p, &body.to_vec()) {
Err(_) => Err(ApiError::Unexpected(String::new())),
Ok(_) => Ok(()),
}
} else {
Err(ApiError::IncorrectRequest(res))
}
})
}
fn write_file(path: PathBuf, content: &Vec<u8>) -> io::Result<()> {
let mut f = OpenOptions::new()
.write(true)
.create(true)
.open(path.clone())?;
f.write_all(&content)?;
Ok(())
}
}

168
src/services/downloader.rs Normal file
View File

@@ -0,0 +1,168 @@
use std::path::PathBuf;
use indicatif::{ProgressBar, MultiProgress, ProgressStyle, HumanBytes};
use crate::utils::api::ApiProps;
use crate::services::api_call::ApiCall;
use crate::services::api::ApiError;
use crate::services::download_files::DownloadFiles;
use crate::services::req_props::ObjProps;
const SIZE_TO_STREAM: u64 = 2 * 1024 * 1024;
/// Batch downloader: fetches a list of remote objects to disk, optionally
/// rendering progress bars (object count + bytes transferred).
pub struct Downloader {
    // remote objects to fetch
    files: Vec<ObjProps>,
    // when true, draw progress bars during download()
    should_log: bool,
    // connection info (host/username/root) used to build each request
    api_props: Option<ApiProps>,
    // [0]: object-count bar, [1]: bytes bar (populated by init_log)
    progress_bars: Vec<ProgressBar>,
    multi_progress: Option<MultiProgress>,
}
impl Downloader {
    /// Empty downloader; configure with the builder-style setters below.
    pub fn new() -> Self {
        Downloader {
            files: vec![],
            should_log: false,
            api_props: None,
            progress_bars: vec![],
            multi_progress: None,
        }
    }
    /// Enable progress-bar output.
    pub fn should_log(&mut self) -> &mut Downloader {
        self.should_log = true;
        self
    }
    /// Set connection properties used to build each download request.
    pub fn set_api_props(&mut self, api_props: ApiProps) -> &mut Downloader {
        self.api_props = Some(api_props);
        self
    }
    /// Replace the list of objects to download.
    pub fn set_files(&mut self, files: Vec<ObjProps>) -> &mut Downloader {
        self.files = files;
        self
    }
    /// Append a single object to the download list.
    pub fn _add_file(&mut self, file: ObjProps) -> &mut Downloader {
        self.files.push(file);
        self
    }
    /// Create the two progress bars: objects done (out of `nb_objs`) and
    /// bytes done (out of `total_size`).
    fn init_log(&mut self, nb_objs: u64, total_size: u64) {
        self.multi_progress = Some(MultiProgress::new());
        self.progress_bars.push(
            self.multi_progress
                .clone()
                .unwrap()
                .add(ProgressBar::new(nb_objs).with_message("Objects")));
        let msg = format!("0B/{}", HumanBytes(total_size).to_string());
        self.progress_bars.push(
            self.multi_progress
                .clone()
                .unwrap()
                .add(ProgressBar::new(total_size).with_message(msg)));
        self.progress_bars[0].set_style(
            ProgressStyle::with_template("{_:>10} [{bar:40}] {pos}/{len} {msg}")
                .unwrap()
                .progress_chars("=> "));
        self.progress_bars[1].set_style(
            ProgressStyle::with_template("[{elapsed_precise}] [{bar:40}] {msg}")
                .unwrap()
                .progress_chars("=> "));
        // tick() forces an initial draw before any progress is made
        self.progress_bars[0].tick();
        self.progress_bars[1].tick();
    }
    /// Advance the bytes bar by `size` and refresh its "done/total" label.
    fn update_bytes_bar(&self, size: u64) {
        let bytes_bar = &self.progress_bars[1];
        bytes_bar.inc(size);
        let msg = format!(
            "{}/{}",
            HumanBytes(bytes_bar.position()).to_string(),
            HumanBytes(bytes_bar.length().unwrap()).to_string());
        bytes_bar.set_message(msg);
    }
    /// Download every queued file under `ref_p`. Files larger than
    /// SIZE_TO_STREAM are streamed to disk chunk by chunk; smaller ones
    /// are fetched in one body. `callback` is invoked after each
    /// successful download. Fatal HTTP/transport errors exit the process.
    pub fn download(&mut self, ref_p: PathBuf, callback: Option<&dyn Fn(ObjProps)>) {
        if self.should_log {
            let mut total_size = 0;
            let nb_objs = self.files.len();
            // set the full size of the download
            self.files
                .iter()
                .for_each(|f|
                    if let Some(size) = f.contentlength {
                        total_size += size
                    }
                );
            self.init_log(nb_objs.try_into().unwrap(), total_size);
        }
        for file in self.files.clone() {
            let relative_s = &file.clone().relative_s.unwrap();
            let mut download = DownloadFiles::new();
            download.set_url_download(&relative_s, &self.api_props.clone().unwrap());
            // stream only when the advertised size exceeds the threshold
            let should_use_stream = {
                if let Some(size) = file.contentlength {
                    size > SIZE_TO_STREAM
                } else {
                    false
                }
            };
            // download
            let res = {
                if should_use_stream {
                    // streamed chunks report progress continuously
                    download.save_stream(ref_p.clone(), if self.should_log { Some(|a| self.update_bytes_bar(a)) } else { None })
                } else {
                    download.save(ref_p.clone())
                }
            };
            // deal with error
            match res {
                Ok(()) => {
                    if let Some(fct) = callback {
                        fct(file.clone());
                    }
                },
                Err(ApiError::Unexpected(_)) => {
                    // local write failure: report and continue with next file
                    eprintln!("err: writing {}", relative_s);
                },
                Err(ApiError::IncorrectRequest(err)) => {
                    eprintln!("fatal: {}", err.status());
                    std::process::exit(1);
                },
                Err(ApiError::EmptyError(_)) => eprintln!("Failed to get body"),
                Err(ApiError::RequestError(err)) => {
                    eprintln!("fatal: {}", err);
                    std::process::exit(1);
                }
            }
            // increment loading bars
            if self.should_log {
                self.progress_bars[0].inc(1); // increment object
                // increment bytes only if
                // not incremented continuously by stream
                if !should_use_stream {
                    self.update_bytes_bar(file.contentlength.unwrap());
                }
            }
        }
        // finish all bars
        for bar in &self.progress_bars {
            bar.finish();
        }
    }
}

View File

@@ -1,47 +0,0 @@
use crate::services::api::{ApiBuilder, ApiError};
use reqwest::{Method, IntoUrl, Response, Error};
pub struct ListFolders {
api_builder: ApiBuilder,
}
impl ListFolders {
pub fn new<U: IntoUrl>(url: U) -> Self {
ListFolders {
api_builder: ApiBuilder::new()
.set_request(Method::from_bytes(b"PROPFIND").unwrap(), url),
}
}
pub async fn send(&mut self) -> Result<Response, Error> {
self.api_builder.send().await
}
pub async fn send_with_err(mut self) -> Result<String, ApiError> {
let res = self.send().await.map_err(ApiError::RequestError)?;
if res.status().is_success() {
let body = res.text().await.map_err(ApiError::EmptyError)?;
Ok(body)
} else {
Err(ApiError::IncorrectRequest(res))
}
}
pub async fn send_with_res(self) -> String {
match self.send_with_err().await {
Ok(body) => body,
Err(ApiError::IncorrectRequest(err)) => {
eprintln!("fatal: {}", err.status());
std::process::exit(1);
},
Err(ApiError::EmptyError(_)) => {
eprintln!("Failed to get body");
String::from("")
}
Err(ApiError::RequestError(err)) => {
eprintln!("fatal: {}", err);
std::process::exit(1);
}
}
}
}

112
src/services/login.rs Normal file
View File

@@ -0,0 +1,112 @@
use std::io;
use std::io::Cursor;
use std::io::prelude::*;
use xml::reader::{EventReader, XmlEvent};
use reqwest::{header::HeaderValue, Method};
use rpassword;
use crate::services::api_call::ApiCall;
use crate::services::api::{ApiBuilder, ApiError};
/// OCS login call: exchanges user credentials for an app password.
pub struct Login {
    // shared request builder doing headers and sending
    api_builder: ApiBuilder,
    // username/email entered or set programmatically
    login: String,
    // plaintext password, only held for the duration of the request
    password: String,
    // target host; None falls back to a relative OCS path
    host: Option<String>,
}
impl ApiCall for Login {
    /// Fresh login call with empty credentials and no explicit host.
    fn new() -> Self {
        Login {
            api_builder: ApiBuilder::new(),
            login: String::new(),
            password: String::new(),
            host: None,
        }
    }
    /// Request an app password from the server's OCS endpoint using basic
    /// auth. Returns the raw XML response body.
    fn send(&mut self) -> Result<Option<String>, ApiError> {
        let url = match self.host.clone() {
            Some(h) => {
                // Prepend https:// unless the host already carries a scheme.
                // starts_with avoids the panic the previous byte-range
                // slices (&h[0..8]) caused on hosts shorter than the prefix.
                let mut u = if h.starts_with("https://") || h.starts_with("http://") {
                    String::new()
                } else {
                    String::from("https://")
                };
                u.push_str(&h);
                u.push_str("/ocs/v2.php/core/getapppassword");
                u
            },
            None => "/ocs/v2.php/core/getapppassword".to_owned(),
        };
        self.api_builder.set_url(Method::GET, &url);
        // OCS endpoints reject requests lacking this header
        self.api_builder.set_header("OCS-APIRequest", HeaderValue::from_str("true").unwrap());
        self.api_builder.set_header("USER-AGENT", HeaderValue::from_str("nextsync").unwrap());
        self.api_builder.set_basic_auth(self.login.clone(), self.password.clone());
        self.api_builder.send(true)
    }
}
impl Login {
    /// Interactively prompt for username (stdin) and password (no-echo).
    pub fn ask_auth(&mut self) -> &mut Login {
        println!("Please enter your username/email: ");
        let stdin = io::stdin();
        self.login = stdin.lock().lines().next().unwrap().unwrap();
        println!("Please enter your password: ");
        self.password = rpassword::read_password().unwrap();
        self
    }
    /// Set credentials programmatically (non-interactive callers).
    pub fn set_auth(&mut self, username: &str, password: &str) -> &mut Login {
        self.login = username.to_owned();
        self.password = password.to_owned();
        self
    }
    /// Target host; `None` makes send() use a relative OCS path.
    pub fn set_host(&mut self, host: Option<String>) -> &mut Login {
        self.host = host;
        self
    }
    /// Perform the login and extract the app password from the XML body.
    pub fn send_login(&mut self) -> Result<String, ApiError> {
        match self.send() {
            Ok(Some(body)) => Ok(self.parse(body)),
            Ok(None) => Err(ApiError::Unexpected(String::from("Empty after tested"))),
            Err(err) => Err(err),
        }
    }
    /// Pull the text content of the `<apppassword>` element out of the OCS
    /// XML response; returns an empty string when the element is missing.
    fn parse(&self, xml: String) -> String {
        let cursor = Cursor::new(xml);
        let parser = EventReader::new(cursor);
        let mut should_get = false;
        for event in parser {
            match event {
                Ok(XmlEvent::StartElement { name, .. }) => {
                    // only capture character data directly inside <apppassword>
                    should_get = name.local_name == "apppassword";
                }
                Ok(XmlEvent::Characters(text)) => {
                    if !text.trim().is_empty() && should_get {
                        return text;
                    }
                }
                //Ok(XmlEvent::EndElement { name, .. }) => {
                //}
                Err(e) => {
                    eprintln!("err: parsing xml: {}", e);
                    break;
                }
                _ => {}
            }
        }
        String::new()
    }
}

54
src/services/move.rs Normal file
View File

@@ -0,0 +1,54 @@
use reqwest::{Method, header::HeaderValue};
use crate::services::api::{ApiBuilder, ApiError};
use crate::commands::clone::get_url_props;
use crate::commands::config;
use crate::services::api_call::ApiCall;
/// WebDAV MOVE call: renames/moves a remote file or folder server-side.
pub struct Move {
    // shared request builder doing auth, headers and sending
    api_builder: ApiBuilder,
}
impl ApiCall for Move {
    fn new() -> Self {
        Move {
            api_builder: ApiBuilder::new(),
        }
    }
    /// Send the MOVE request; `false` means no response body is needed.
    fn send(&mut self) -> Result<Option<String>, ApiError> {
        self.api_builder.send(false)
    }
}
impl Move {
    /// Prepare a WebDAV MOVE of `url` to `destination`, both relative to
    /// the remote root. The absolute target URL is reconstructed from the
    /// "origin" remote and carried in the `Destination` header.
    pub fn set_url_move(&mut self, url: &str, destination: &str) -> &mut Move {
        self.api_builder.build_request(Method::from_bytes(b"MOVE").unwrap(), url);
        let remote = match config::get_remote("origin") {
            Some(r) => r,
            None => {
                eprintln!("fatal: unable to find a remote");
                std::process::exit(1);
            }
        };
        let (host, username, root) = get_url_props(&remote);
        // host + DAV files endpoint + username + root + "/"
        let mut dest = format!("{}/remote.php/dav/files/{}{}/", host, username.unwrap(), root);
        if destination != "/" {
            dest.push_str(destination);
        }
        self.api_builder.set_header("Destination", HeaderValue::from_str(&dest).unwrap());
        self
    }
    /// Set the WebDAV `Overwrite` header: "T" lets the move replace an
    /// existing target, "F" forbids it.
    pub fn _overwrite(&mut self, overwrite: bool) -> &mut Move {
        let flag = if overwrite { "T" } else { "F" };
        self.api_builder.set_header("Overwrite", HeaderValue::from_str(flag).unwrap());
        self
    }
}

View File

@@ -1,65 +1,151 @@
use crate::services::api::{ApiBuilder, ApiError};
use xml::reader::{EventReader, XmlEvent};
use std::io::Cursor;
use reqwest::{Method, Response, Error};
use chrono::{Utc, DateTime};
use reqwest::Method;
use xml::reader::{EventReader, XmlEvent};
use reqwest::header::HeaderValue;
use crate::commands::clone::get_url_props;
use crate::commands::config;
use crate::utils::time::parse_timestamp;
use crate::utils::api::{get_relative_s, ApiProps};
use crate::services::api::{ApiBuilder, ApiError};
use crate::services::api_call::ApiCall;
pub struct ReqProps {
api_builder: ApiBuilder,
xml_list: Vec<String>,
xml_payload: String,
/// Properties of one remote object extracted from a PROPFIND response.
#[derive(Debug)]
pub struct ObjProps {
    // raw DAV href of the object
    pub href: Option<String>,
    // path relative to the synced root, derived from href
    pub relative_s: Option<String>,
    // server-side last-modification time
    pub lastmodified: Option<DateTime<Utc>>,
    // size in bytes (absent for collections)
    pub contentlength: Option<u64>,
}
impl ReqProps {
// Field-by-field clone; equivalent to what #[derive(Clone)] would produce.
impl Clone for ObjProps {
    fn clone(&self) -> Self {
        ObjProps {
            href: self.href.clone(),
            relative_s: self.relative_s.clone(),
            lastmodified: self.lastmodified.clone(),
            contentlength: self.contentlength.clone(),
        }
    }
}
impl ObjProps {
pub fn new() -> Self {
ReqProps {
api_builder: ApiBuilder::new(),
xml_list: vec![],
xml_payload: String::new(),
ObjProps {
href: None,
relative_s: None,
lastmodified: None,
contentlength: None,
}
}
pub fn set_url(&mut self, url: &str) -> &mut ReqProps {
pub fn is_dir(&self) -> bool {
if let Some(href) = &self.href {
href.chars().last().unwrap() == '/'
} else {
eprintln!("err: cannot determine object type wihout href");
false
}
}
}
pub struct ReqProps {
api_builder: ApiBuilder,
xml_balises: Vec<String>,
xml_payload: String,
api_props: Option<ApiProps>
}
impl ApiCall for ReqProps {
fn new() -> Self {
ReqProps {
api_builder: ApiBuilder::new(),
xml_balises: vec![],
xml_payload: String::new(),
api_props: None,
}
}
fn set_url(&mut self, url: &str) -> &mut ReqProps {
let remote = match config::get_remote("origin") {
Some(r) => r,
None => {
eprintln!("fatal: unable to find a remote");
std::process::exit(1);
}
};
let (host, username, root) = get_url_props(&remote);
self.api_props = Some(ApiProps {
host,
username: username.unwrap().to_owned(),
root: root.to_owned(),
});
self.api_builder.build_request(Method::from_bytes(b"PROPFIND").unwrap(), url);
self
}
fn send(&mut self) -> Result<Option<String>, ApiError> {
self.validate_xml();
self.api_builder.send(true)
}
}
impl ReqProps {
pub fn set_request(&mut self, p: &str, api_props: &ApiProps) -> &mut ReqProps {
self.api_props = Some(api_props.clone());
self.api_builder.set_req(Method::from_bytes(b"PROPFIND").unwrap(), p, api_props);
self
}
pub fn gethref(&mut self) -> &mut ReqProps {
// not an actual property but used to prevent getting anything else
self.xml_balises.push(String::from("href"));
self
}
pub fn getlastmodified(&mut self) -> &mut ReqProps {
self.xml_list.push(String::from("getlastmodified"));
self.xml_balises.push(String::from("getlastmodified"));
self.xml_payload.push_str(r#"<d:getlastmodified/>"#);
self
}
pub fn getcontentlenght(&mut self) -> &mut ReqProps {
self.xml_list.push(String::from("getcontentlength"));
pub fn getcontentlength(&mut self) -> &mut ReqProps {
self.xml_balises.push(String::from("getcontentlength"));
self.xml_payload.push_str(r#"<d:getcontentlength/>"#);
self
}
pub fn getcontenttype(&mut self) -> &mut ReqProps {
self.xml_list.push(String::from("getcontenttype"));
pub fn _getcontenttype(&mut self) -> &mut ReqProps {
self.xml_balises.push(String::from("getcontenttype"));
self.xml_payload.push_str(r#"<d:getcontenttype/>"#);
self
}
pub fn getpermissions(&mut self) -> &mut ReqProps {
self.xml_list.push(String::from("permissions"));
pub fn _getpermissions(&mut self) -> &mut ReqProps {
self.xml_balises.push(String::from("permissions"));
self.xml_payload.push_str(r#"<oc:permissions/>"#);
self
}
pub fn getressourcetype(&mut self) -> &mut ReqProps {
self.xml_list.push(String::from("resourcetype"));
pub fn _getressourcetype(&mut self) -> &mut ReqProps {
self.xml_balises.push(String::from("resourcetype"));
self.xml_payload.push_str(r#"<d:resourcetype/>"#);
self
}
pub fn getetag(&mut self) -> &mut ReqProps {
self.xml_list.push(String::from("getetag"));
pub fn _getetag(&mut self) -> &mut ReqProps {
self.xml_balises.push(String::from("getetag"));
self.xml_payload.push_str(r#"<d:getetag/>"#);
self
}
pub fn set_depth(&mut self, depth: &str) -> &mut ReqProps {
self.api_builder.set_header("Depth", HeaderValue::from_str(depth).unwrap());
self
}
fn validate_xml(&mut self) -> &mut ReqProps {
self.gethref();
let mut xml = String::from(r#"<?xml version="1.0" encoding="UTF-8"?><d:propfind xmlns:d="DAV:" xmlns:oc="http://owncloud.org/ns" xmlns:nc="http://nextcloud.org/ns"><d:prop>"#);
xml.push_str(&self.xml_payload.clone());
xml.push_str(r#"</d:prop></d:propfind>"#);
@@ -67,50 +153,86 @@ impl ReqProps {
self
}
pub async fn send(&mut self) -> Result<Response, Error> {
self.validate_xml();
self.api_builder.send().await
}
pub async fn send_with_err(&mut self) -> Result<Vec<String>, ApiError> {
let res = self.send().await.map_err(ApiError::RequestError)?;
if res.status().is_success() {
let body = res.text().await.map_err(ApiError::EmptyError)?;
Ok(self.parse(body))
} else {
Err(ApiError::IncorrectRequest(res))
pub fn send_req_multiple(&mut self) -> Result<Vec<ObjProps>, ApiError> {
match self.send() {
Ok(Some(body)) => Ok(self.parse(body, true)),
Ok(None) => Err(ApiError::Unexpected(String::from("Empty after tested"))),
Err(err) => Err(err),
}
}
pub fn parse(&self, xml: String) -> Vec<String> {
pub fn send_req_single(&mut self) -> Result<ObjProps, ApiError> {
// set depth to 0 as we only need one element
self.set_depth("0");
match self.send() {
Ok(Some(body)) => {
let objs = self.parse(body, false);
let obj = objs[0].clone();
Ok(obj)
},
Ok(None) => Err(ApiError::Unexpected(String::from("Empty after tested"))),
Err(err) => Err(err),
}
}
fn parse(&self, xml: String, multiple: bool) -> Vec<ObjProps> {
let cursor = Cursor::new(xml);
let parser = EventReader::new(cursor);
let mut values: Vec<ObjProps> = vec![];
let mut should_get = false;
let mut values: Vec<String> = vec![];
let mut iter = self.xml_list.iter();
let mut val = iter.next();
let mut val: String = String::new();
let mut content = ObjProps::new();
for event in parser {
match event {
Ok(XmlEvent::StartElement { name, .. }) => {
if let Some(v) = val.clone() {
should_get = &name.local_name == v;
} else {
break;
}
should_get = {
if self.xml_balises.clone().contains(&name.local_name) {
val = name.local_name.clone();
true
} else {
false
}
};
}
Ok(XmlEvent::Characters(text)) => {
if !text.trim().is_empty() && should_get {
values.push(text);
val = iter.next()
match val.as_str() {
"href" => {
content.href = Some(text.clone());
content.relative_s = Some(
get_relative_s(text, &(self.api_props
.clone()
.unwrap())));
},
"getlastmodified" => {
content.lastmodified = Some(
parse_timestamp(&text).unwrap());
},
"getcontentlength" => {
content.contentlength = Some(
text.clone().parse().unwrap());
},
_ => (),
}
should_get = false;
}
}
Ok(XmlEvent::EndElement { .. }) => {
Ok(XmlEvent::EndElement { name, .. }) => {
if name.local_name == "response" {
values.push(content.clone());
if multiple {
content = ObjProps::new();
} else {
break;
}
}
should_get = false;
}
Err(e) => {
eprintln!("Error: {}", e);
eprintln!("err: parsing xml: {}", e);
break;
}
_ => {}

View File

@@ -0,0 +1,91 @@
use lazy_static::lazy_static;
use std::sync::Mutex;
use crate::services::login::Login;
use crate::commands::config;
use crate::store::gconfig;
use crate::commands::clone::get_url_props;
use crate::services::api_call::ApiCall;
lazy_static! {
static ref REQUEST_MANAGER: Mutex<Option<RequestManager>> = Mutex::new(None);
}
/// Global accessor for the lazily-created RequestManager singleton.
///
/// Initializes the slot under a single mutex guard. The previous version
/// locked twice (once for `is_none()`, once for the assignment), leaving a
/// window between the two locks where another thread could also observe
/// `None` and construct a second manager.
pub fn get_request_manager() -> &'static Mutex<Option<RequestManager>> {
    {
        let mut guard = REQUEST_MANAGER.lock().unwrap();
        if guard.is_none() {
            *guard = Some(RequestManager::new());
        }
    }
    &REQUEST_MANAGER
}
/// Caches the auth token and remote host for the process lifetime so each
/// is resolved (config lookup / interactive login) at most once.
pub struct RequestManager {
    // app password / bearer token, lazily resolved by get_token()
    token: Option<String>,
    // remote host, lazily resolved from the "origin" remote
    host: Option<String>,
}
impl RequestManager {
    /// Empty manager; host and token are resolved lazily on first use.
    pub fn new() -> Self {
        RequestManager {
            token: None,
            host: None,
        }
    }
    /// Explicitly set the remote host (skips lazy resolution).
    pub fn set_host(&mut self, host: String) {
        self.host = Some(host);
    }
    /// Return the remote host, resolving it from the "origin" remote on
    /// first call. Exits the process when no remote is configured.
    pub fn get_host(&mut self) -> String
    {
        if self.host.is_none()
        {
            let remote = match config::get_remote("origin") {
                Some(r) => r,
                None => {
                    // todo ask user instead
                    eprintln!("fatal: unable to find a remote");
                    std::process::exit(1);
                }
            };
            let (host, _, _) = get_url_props(&remote);
            self.host = Some(host.clone());
            // todo ask user
        }
        self.host.clone().unwrap()
    }
    /// Return the auth token, resolving it on first call in this order:
    /// global config file, local repo config, then an interactive login
    /// whose result is persisted back to the global config.
    pub fn get_token(&mut self) -> String {
        if self.token.is_none() {
            // look in global config
            if let Some(token) = gconfig::read_token() {
                if !token.is_empty() {
                    self.token = Some(token);
                    return self.token.clone().unwrap();
                }
            }
            // look in local config
            if let Some(token) = config::find_option_in_cat("core", "token")
            {
                if !token.is_empty() {
                    self.token = Some(token);
                    return self.token.clone().unwrap();
                }
            }
            // ask for a token
            let get_token = Login::new()
                .ask_auth()
                .set_host(Some(self.get_host()))
                .send_login();
            // todo deal with error cases
            self.token = Some(get_token.unwrap());
            // best-effort persistence: a write failure only logs a warning
            if let Err(err) = gconfig::write_token(&self.token.clone().unwrap()) {
                eprintln!("err: failed to write token ({})", err);
            }
        }
        self.token.clone().unwrap()
    }
}

View File

@@ -1,25 +1,32 @@
use std::fs::File;
use crate::services::api::{ApiBuilder, ApiError};
use std::io::Read;
use std::path::PathBuf;
use std::io::{Read};
use reqwest::{Method, Response, Error};
use reqwest::Method;
use crate::services::api::{ApiBuilder, ApiError};
use crate::services::api_call::ApiCall;
pub struct UploadFile {
api_builder: ApiBuilder,
}
impl UploadFile {
pub fn new() -> Self {
impl ApiCall for UploadFile {
fn new() -> Self {
UploadFile {
api_builder: ApiBuilder::new(),
}
}
pub fn set_url(&mut self, url: &str) -> &mut UploadFile {
fn set_url(&mut self, url: &str) -> &mut UploadFile {
self.api_builder.build_request(Method::PUT, url);
self
}
fn send(&mut self) -> Result<Option<String>, ApiError> {
self.api_builder.send(true)
}
}
impl UploadFile {
pub fn set_file(&mut self, path: PathBuf) -> &mut UploadFile {
// todo large file
// todo small files
@@ -29,18 +36,4 @@ impl UploadFile {
self.api_builder.set_body(buffer);
self
}
pub async fn send(&mut self) -> Result<Response, Error> {
self.api_builder.send().await
}
pub async fn send_with_err(&mut self) -> Result<String, ApiError> {
let res = self.send().await.map_err(ApiError::RequestError)?;
if res.status().is_success() {
let body = res.text().await.map_err(ApiError::EmptyError)?;
Ok(body)
} else {
Err(ApiError::IncorrectRequest(res))
}
}
}

View File

@@ -1,3 +1,4 @@
pub mod index;
pub mod head;
pub mod object;
pub mod gconfig;

54
src/store/gconfig.rs Normal file
View File

@@ -0,0 +1,54 @@
use std::env;
use std::path::PathBuf;
use std::fs::{self, OpenOptions};
use std::io::{self, Write};
use crate::utils::read;
/// Returns `$HOME/.nextsync`, or `None` when `HOME` is not set.
fn global_path() -> Option<PathBuf> {
    env::var_os("HOME").map(|home_dir| PathBuf::from(home_dir).join(".nextsync"))
}
/// Persists `token` to `$HOME/.nextsync/token`, creating the directory
/// on first use. A silent no-op when `HOME` is not set.
pub fn write_token(token: &str) -> io::Result<()> {
    if let Some(mut path_token) = global_path() {
        if !path_token.exists() {
            fs::create_dir_all(path_token.clone())?;
        }
        path_token.push("token");
        let mut file = OpenOptions::new()
            .write(true)
            .create(true)
            // truncate so a shorter token does not leave trailing bytes
            // of a previously stored, longer token in the file
            .truncate(true)
            .open(path_token)?;
        writeln!(file, "{}", token)?;
    }
    Ok(())
}
/// Reads the stored token from `$HOME/.nextsync/token`.
/// Returns the first readable line, or `None` when no token is stored.
pub fn read_token() -> Option<String> {
    let mut path_token = global_path()?;
    if !path_token.exists() {
        return None;
    }
    path_token.push("token");
    let lines = read::read_lines(path_token).ok()?;
    lines.flatten().next()
}

View File

@@ -1,38 +1,16 @@
use std::fs::{File, OpenOptions};
use std::fs::OpenOptions;
use std::path::PathBuf;
use crate::utils::{read, path};
use std::io::{self, Write};
use crate::utils::{read, path};
pub fn _read_only(mut path: PathBuf) -> File {
path.push("HEAD");
OpenOptions::new()
.read(true)
.open(path).expect("Cannot open HEAD file")
}
pub fn _open(mut path: PathBuf) -> File {
path.push("HEAD");
OpenOptions::new()
.read(true)
.write(true)
.append(true)
.create(true)
.open(path).expect("Cannot open HEAD file")
}
pub fn _read_line(mut path: PathBuf) -> io::Result<io::Lines<io::BufReader<File>>> {
path.push("HEAD");
read::read_lines(path)
/// Returns the location of the HEAD file inside the .nextsync store.
pub fn path() -> PathBuf {
    path::nextsync().join("HEAD")
}
pub fn add_line(line: String) -> io::Result<()> {
let mut root = match path::nextsync_root() {
Some(path) => path,
None => todo!(),
};
root.push(".nextsync");
root.push("HEAD");
let root = path();
let mut file = OpenOptions::new()
.read(true)
@@ -45,13 +23,7 @@ pub fn add_line(line: String) -> io::Result<()> {
}
pub fn rm_line(line: &str) -> io::Result<()> {
let mut root = match path::nextsync_root() {
Some(path) => path,
None => todo!(),
};
root.push(".nextsync");
root.push("HEAD");
let root = path();
read::rm_line(root, line)?;
Ok(())
}

View File

@@ -1,23 +1,17 @@
use std::fs::OpenOptions;
use std::fs::File;
use std::path::PathBuf;
use crate::utils::{read, path};
use std::io;
use std::path::PathBuf;
use std::fs::File;
use std::fs::OpenOptions;
use crate::utils::{read, path};
pub fn _read_only(mut path: PathBuf) -> File {
pub fn path() -> PathBuf {
let mut path = path::nextsync();
path.push("index");
OpenOptions::new()
.read(true)
.open(path).expect("Cannot open index file")
path
}
pub fn open() -> File {
let mut path = match path::nextsync() {
Some(p) => p,
None => todo!(),
};
path.push("index");
let path = path();
OpenOptions::new()
.read(true)
.write(true)
@@ -26,18 +20,28 @@ pub fn open() -> File {
.open(path).expect("Cannot open index file")
}
pub fn read_line(mut path: PathBuf) -> io::Result<io::Lines<io::BufReader<File>>> {
pub fn read_line() -> io::Result<io::Lines<io::BufReader<File>>> {
let mut path = path::nextsync();
path.push("index");
read::read_lines(path)
}
pub fn rm_line(line: &str) -> io::Result<()> {
let mut root = match path::nextsync() {
Some(path) => path,
None => todo!(),
};
let mut root = path::nextsync();
root.push("index");
read::rm_line(root, line)?;
Ok(())
}
/// Returns true when `file` is already listed in the index file.
pub fn alread_added(file: String) -> bool {
    match read_line() {
        Ok(lines) => lines.flatten().any(|l| l == file),
        Err(_) => false,
    }
}

View File

@@ -1,18 +1,95 @@
use std::io::{self, Write};
use std::path::{Path, PathBuf};
use crate::utils::{read, path};
use crate::store::head;
use std::fs::{self, OpenOptions};
use crypto::sha1::Sha1;
use crypto::digest::Digest;
use std::fs::{OpenOptions, self};
use std::io::{self, Write};
use std::fs::File;
use std::io::{Seek, SeekFrom, Read};
use crate::store::head;
use crate::utils::{read, path};
pub mod tree;
pub mod blob;
pub mod object;
/// A stored object (file or directory) addressed by the sha1 of its
/// relative path.
pub struct Object {
    path: PathBuf,  // relative path inside the repository
    hash: String,   // sha1 of the relative path (empty for the root)
    obj_p: PathBuf, // object file location under the objects store
    ts: Option<i64> // stored timestamp, populated by read() — units look like ms, see is_older; TODO confirm
}
impl Object {
    /// Builds an object from a relative path. A single trailing '/' is
    /// stripped so "dir/" and "dir" hash identically; the empty path
    /// maps to the repository root, whose object file is HEAD.
    pub fn new(path: &str) -> Object {
        // idiomatic trailing-slash strip (replaces the manual
        // chars()/next_back() dance)
        let path = path.strip_suffix('/').unwrap_or(path);
        if path == "" {
            return Object {
                path: PathBuf::from("/"),
                hash: String::new(),
                obj_p: head::path(),
                ts: None,
            }
        }
        let mut hasher = Sha1::new();
        hasher.input_str(path);
        let hash = hasher.result_str();
        // objects are sharded as <first-2-hex-chars>/<rest>
        let (dir, res) = hash.split_at(2);
        let mut obj_p = path::objects();
        obj_p.push(dir);
        obj_p.push(res);
        Object {
            path: PathBuf::from(path),
            hash,
            obj_p,
            ts: None,
        }
    }

    /// Reads the object file and caches the timestamp stored as the
    /// second field of its first line.
    pub fn read(&mut self) -> &mut Object {
        match read::read_lines(&self.obj_p) {
            Ok(mut reader) => {
                if let Some(Ok(line)) = reader.next() {
                    let mut data = line.rsplit(' ').collect::<Vec<_>>();
                    data.reverse();
                    // len() borrows; the previous clone of the whole Vec was needless
                    if data.len() >= 2 {
                        self.ts = Some(data[1].parse::<i64>().unwrap())
                    }
                }
            },
            Err(err) => {
                eprintln!("error reading object {}: {}", self.obj_p.display(), err);
            },
        };
        self
    }

    /// True when the object file exists locally.
    pub fn exists(&mut self) -> bool {
        self.obj_p.exists()
    }

    /// return true if the local file is older than the remote one
    pub fn is_older(&mut self, ts: i64) -> bool {
        // todo be aware of the diff of ts format
        // stored ts appears to be in milliseconds (divided by 1000) — TODO confirm
        ts > self.ts.expect("Should be read before used") / 1000
    }
}
/// Returns (line, hash, name)
///
/// # Examples
/// Input: /foo/bar
/// Result: ("tree hash(/foo/bar) bar", hash(/foo/bar), bar)
fn parse_path(path: &Path, is_blob: bool) -> (String, String, String) {
pub fn parse_path(path: PathBuf, is_blob: bool) -> (String, String, String) {
let file_name = path.file_name().unwrap().to_str().unwrap();
let mut hasher = Sha1::new();
@@ -27,83 +104,6 @@ fn parse_path(path: &Path, is_blob: bool) -> (String, String, String) {
(line, hash, String::from(file_name))
}
/// Splits an object line "type hash name" into (type, hash, name).
/// Exits the process when the line does not hold exactly three fields.
pub fn parse_line(line: String) -> (String, String, String) {
    let fields: Vec<&str> = line.rsplit(' ').collect();
    if fields.len() != 3 {
        eprintln!("fatal: invalid object(s)");
        std::process::exit(1);
    }
    // rsplit yields fields right-to-left: name, hash, type
    let (name, hash, ftype) = (fields[0], fields[1], fields[2]);
    (ftype.to_string(), hash.to_string(), name.to_string())
}
/// Registers a tree object: adds its line to the parent (HEAD for a
/// top-level directory) and creates the tree object file.
pub fn add_tree(path: &Path) -> io::Result<()> {
    // `.clone()` on a `&Path` was a no-op; pass the reference directly
    let (line, hash, name) = parse_path(path, false);
    // add tree reference to parent
    if path.iter().count() == 1 {
        head::add_line(line)?;
    } else {
        add_node(path.parent().unwrap(), &line)?;
    }
    // create tree object
    create_object(hash, &name)?;
    Ok(())
}
/// Removes a blob: drops its reference from the parent (HEAD for a
/// top-level file) and deletes the blob object file.
pub fn rm_blob(path: &Path) -> io::Result<()> {
    // name is not needed here; bind it as _name to document that
    let (line, hash, _name) = parse_path(path, true);
    // remove blob reference to parent
    if path.iter().count() == 1 {
        head::rm_line(&line)?;
    } else {
        rm_node(path.parent().unwrap(), &line)?;
    }
    // remove blob object, sharded as <2-char dir>/<rest of hash>
    let mut root = match path::objects() {
        Some(path) => path,
        None => todo!(),
    };
    // split_at only borrows, so no clone of the hash is needed
    let (dir, rest) = hash.split_at(2);
    root.push(dir);
    root.push(rest);
    fs::remove_file(root)?;
    Ok(())
}
/// Registers a blob: adds its line to the parent (HEAD for a top-level
/// file) and creates the blob object containing "name tmp_hash date".
pub fn add_blob(path: &Path, date: &str) -> io::Result<()> {
    // `.clone()` on a `&Path` was a no-op; pass the reference directly
    let (line, hash, name) = parse_path(path, true);
    // add blob reference to parent
    if path.iter().count() == 1 {
        head::add_line(line)?;
    } else {
        add_node(path.parent().unwrap(), &line)?;
    }
    // single format! replaces the manual push_str chain
    let content = format!("{} tmp_hash {}", name, date);
    // create blob object
    create_object(hash, &content)?;
    Ok(())
}
fn hash_obj(obj: &str) -> (String, String) {
let mut hasher = Sha1::new();
hasher.input_str(obj);
@@ -112,52 +112,27 @@ fn hash_obj(obj: &str) -> (String, String) {
(String::from(dir), String::from(res))
}
fn object_path(obj: &str) -> PathBuf {
let mut root = match path::objects() {
Some(path) => path,
None => todo!(),
};
fn _object_path(obj: &str) -> PathBuf {
let mut root = path::objects();
let (dir, res) = hash_obj(&obj);
root.push(dir);
root.push(res);
root
}
pub fn read_tree(tree: String) -> Option<(String, io::Lines<io::BufReader<File>>)> {
let mut obj_p = match path::objects() {
Some(path) => path,
None => todo!(),
};
let (dir, res) = hash_obj(&tree);
obj_p.push(dir);
obj_p.push(res);
match read::read_lines(obj_p) {
Ok(mut reader) => {
let name = match reader.next() {
Some(Ok(line)) => line,
_ => String::from(""),
};
Some((name, reader))
},
Err(err) => {
eprintln!("error reading tree: {}", err);
None
},
}
/// Deletes the object file identified by `hash` (sharded layout:
/// first two hex chars are the directory, the rest the filename).
fn rm(hash: &str) -> io::Result<()> {
    let (dir, rest) = hash.split_at(2);
    let mut obj_path = path::objects();
    obj_path.push(dir);
    obj_path.push(rest);
    fs::remove_file(obj_path)?;
    Ok(())
}
fn rm_node(path: &Path, node: &str) -> io::Result<()> {
let mut root = match path::objects() {
Some(path) => path,
None => todo!(),
};
let (dir, rest) = hash_obj(path.clone().to_str().unwrap());
let mut root = path::objects();
let (dir, rest) = hash_obj(path.to_str().unwrap());
root.push(dir);
root.push(rest);
@@ -167,16 +142,13 @@ fn rm_node(path: &Path, node: &str) -> io::Result<()> {
}
fn add_node(path: &Path, node: &str) -> io::Result<()> {
let mut root = match path::objects() {
Some(path) => path,
None => todo!(),
};
let mut root = path::objects();
let (dir, rest) = hash_obj(path.clone().to_str().unwrap());
let (dir, rest) = hash_obj(path.to_str().unwrap());
root.push(dir);
if !root.exists() {
todo!();
//todo!();
}
root.push(rest);
@@ -190,11 +162,50 @@ fn add_node(path: &Path, node: &str) -> io::Result<()> {
Ok(())
}
fn create_object(name: String, content: &str) -> io::Result<()> {
let mut root = match path::objects() {
Some(path) => path,
None => todo!(),
};
/// Propagates `date` to the object of every ancestor of `path`.
fn update_dates(mut path: PathBuf, date: &str) -> io::Result<()> {
    let objects_root = path::objects();
    // walk up the hierarchy, one component at a time
    while path.pop() {
        let (dir, res) = hash_obj(path.to_str().unwrap());
        // build a fresh object path instead of push/push/pop/pop
        let mut obj_p = objects_root.clone();
        obj_p.push(dir);
        obj_p.push(res);
        update_date(obj_p, date)?;
    }
    Ok(())
}
/// Overwrites the date field of an object file in place: the bytes
/// directly after the first space (i.e. after the name field) are
/// replaced with `date`.
pub fn update_date(path: PathBuf, date: &str) -> io::Result<()> {
    let mut file = OpenOptions::new()
        .read(true)
        .write(true)
        // path is owned, move it instead of cloning
        .open(path)?;
    let mut buffer = [0; 1];
    // the cursor starts at 0 after open; the explicit seek was redundant
    // scan byte-by-byte until the first space
    loop {
        let bytes_read = file.read(&mut buffer)?;
        if bytes_read == 0 {
            // Reached the end of the file without finding a space
            break;
        }
        if buffer[0] == b' ' {
            break;
        }
    }
    // the write cursor now sits right after the space; as_bytes()
    // already yields &[u8], the extra borrow was needless
    file.write_all(date.as_bytes())?;
    Ok(())
}
fn create_obj(name: String, content: &str) -> io::Result<()> {
let mut root = path::objects();
let c = name.clone();
let (dir, rest) = c.split_at(2);
@@ -212,3 +223,4 @@ fn create_object(name: String, content: &str) -> io::Result<()> {
writeln!(file, "{}", content)?;
Ok(())
}

330
src/store/object/blob.rs Normal file
View File

@@ -0,0 +1,330 @@
use std::io::{self, Read};
use std::fs::{self, File};
use std::io::Write;
use std::fs::OpenOptions;
use std::path::PathBuf;
use std::time::SystemTime;
use crate::commands::status::State;
use crate::utils::into::IntoPathBuf;
use crate::utils::{path, read};
use crate::store::object::update_dates;
use crate::store::object::object::ObjMethods;
use crate::store::object::object::Obj;
const HASH_EMPTY: &str = "d41d8cd98f00b204e9800998ecf8427e";
/// A tracked file, backed by a generic `Obj` plus the parsed content of
/// its ref file and a lazily computed hash of the file's content.
pub struct Blob {
    pub obj: Obj,
    data: Vec<String>, // content of the ref file
    file_hash: Option<String>, // hash of the file's content (md5, computed lazily)
}
//pub struct Blob {
// r_path: PathBuf, // relative path
// a_path: PathBuf, // absolute path
// hash: String, // hash of relative path
// file_hash: Option<String>,
// obj_p: PathBuf, // path of the object file
// data: Vec<String>, // content of the blob
//}
impl Blob {
    /// Wraps an already-built `Obj` into a blob with empty caches.
    pub fn new(obj: Obj) -> Self {
        Self {
            obj,
            data: vec![],
            file_hash: None,
        }
    }

    /// Builds a blob from a path relative to the repository root.
    pub fn from_path<S>(r_path: S) -> Blob where S: IntoPathBuf {
        let r_path = r_path.into();
        Self {
            obj: Obj::from_path(r_path),
            data: vec![],
            file_hash: None,
        }
    }

    /// md5 of the file's current content, computed once and cached.
    fn get_file_hash(&mut self) -> String {
        if self.file_hash.is_none() {
            let bytes = std::fs::read(self.get_file_path()).unwrap();
            let hash = md5::compute(&bytes);
            self.file_hash = Some(format!("{:x}", hash))
        }
        self.file_hash.clone().unwrap()
    }

    /// read line of blob to get all informations and store them in self.data
    pub fn read_data(&mut self) {
        if self.data.len() == 0 {
            if let Ok(mut file) = File::open(self.get_obj_path()) {
                let mut buffer = String::new();
                let _ = file.read_to_string(&mut buffer);
                // rsplit + reverse restores left-to-right field order
                let data = buffer.rsplit(' ').collect::<Vec<_>>();
                for e in data {
                    self.data.push(String::from(e));
                }
                self.data.reverse();
                // remove \n of last element
                if let Some(last) = self.data.last_mut() {
                    if last.ends_with("\n") {
                        last.pop();
                    }
                }
            }
        }
    }

    /// Field at `index` in the parsed ref line, or "" when missing.
    fn get_data_index(&mut self, index: usize) -> String {
        self.read_data();
        if self.data.len() >= index + 1 {
            self.data[index].clone()
        } else {
            String::new()
        }
    }

    // ref line layout (see create_blob_ref):
    // name ts_remote size ts_local hash
    fn saved_filename(&mut self) -> String {
        self.get_data_index(0)
    }

    pub fn saved_remote_ts(&mut self) -> String {
        self.get_data_index(1)
    }

    fn saved_local_size(&mut self) -> String {
        self.get_data_index(2)
    }

    fn saved_local_ts(&mut self) -> u64 {
        match self.get_data_index(3).as_str() {
            "" => 0,
            str => str.parse::<u64>().unwrap()
        }
    }

    fn saved_hash(&mut self) -> String {
        self.get_data_index(4)
    }

    /// Compares the file's current size with the stored one; a missing
    /// file or missing stored size counts as "same".
    fn has_same_size(&mut self) -> bool {
        let metadata = match fs::metadata(self.get_file_path()) {
            Ok(m) => m,
            Err(_) => return true,
        };
        if self.saved_local_size() == String::new() { return true; }
        metadata.len().to_string() == self.saved_local_size()
    }

    /// True when the file's mtime is newer than the stored local ts
    /// (or when no local ts / no file metadata is available).
    fn is_newer(&mut self) -> bool {
        let metadata = match fs::metadata(self.get_file_path()) {
            Ok(m) => m,
            Err(_) => return true,
        };
        let secs = metadata
            .modified()
            .unwrap()
            .duration_since(SystemTime::UNIX_EPOCH)
            .unwrap()
            .as_secs();
        if self.saved_local_ts() == 0 { return true; }
        secs > self.saved_local_ts()
    }

    /// True when the file's current content hash equals the stored one.
    fn has_same_hash(&mut self) -> bool {
        if self.saved_hash() == String::new() { return false; }
        let file_hash = self.get_file_hash().clone();
        self.saved_hash() == file_hash
    }

    /// True when the file differs from its stored state: size changed,
    /// or touched more recently with different content.
    pub fn has_changes(&mut self) -> bool {
        !self.has_same_size() || (self.is_newer() && !self.has_same_hash())
    }

    /// Lines listed under this blob's ref, used to find other tracked
    /// paths with identical content (move/copy detection).
    pub fn get_all_identical_blobs(&mut self) -> Vec<String> {
        // an empty file is a new file not the copy of another empty file
        if self.get_file_hash() == HASH_EMPTY {
            return vec![];
        }
        let refs_p = self.get_obj_path();
        let mut blobs: Vec<String> = vec![];
        if let Ok(lines) = read::read_lines(refs_p) {
            for line in lines {
                if let Ok(l) = line {
                    blobs.push(l.clone());
                }
            }
        }
        blobs
    }

    /// Determines the local state of this blob; for Moved/Copied files
    /// `path_from` receives the origin path.
    pub fn status(&mut self, path_from: &mut Option<PathBuf>) -> State {
        let has_obj_ref = self.get_obj_path().exists();
        let blob_exists = self.get_file_path().exists();
        if has_obj_ref && !blob_exists {
            State::Deleted
        } else if !has_obj_ref && blob_exists {
            // no ref yet: either brand new, or the move/copy of a file
            // with identical content
            let identical_blobs = self.get_all_identical_blobs();
            if identical_blobs.len() != 0 {
                let identical_blob = Blob::from_path(identical_blobs[0].clone()).get_local_obj();
                if identical_blob.state == State::Deleted {
                    *path_from = Some(identical_blob.path);
                    State::Moved
                } else if identical_blob.state == State::Default {
                    *path_from = Some(identical_blob.path);
                    State::Copied
                } else {
                    State::New
                }
            } else {
                State::New
            }
        } else if !has_obj_ref && !blob_exists {
            State::Default
        } else if self.has_changes() {
            State::Modified
        } else {
            State::Default
        }
    }

    /// Writes the blob's ref file: "name ts_remote size ts_local hash".
    fn create_blob_ref(&mut self, ts_remote: &str) -> io::Result<()> {
        let metadata = fs::metadata(self.get_file_path())?;
        let secs = metadata
            .modified()
            .unwrap()
            .duration_since(SystemTime::UNIX_EPOCH)
            .unwrap()
            .as_secs();
        // build line with all needed properties
        let content = format!("{} {} {} {} {}",
            self.get_name(),
            ts_remote,
            metadata.len().to_string(),
            secs.to_string(),
            self.get_file_hash());
        // create parent dir if needed
        let mut obj_path = self.get_obj_path();
        obj_path.pop();
        if !obj_path.exists() {
            fs::create_dir_all(obj_path)?;
        }
        // open ref file
        let mut file = OpenOptions::new()
            .create_new(true)
            .write(true)
            .open(self.get_obj_path())?;
        writeln!(file, "{}", content)?;
        Ok(())
    }

    /// Path of the content-hash ref file, creating its shard dir.
    fn get_file_ref(&mut self) -> PathBuf {
        let mut refs_p = path::refs();
        let file_hash = self.get_file_hash().clone();
        let (dir, res) = file_hash.split_at(2);
        refs_p.push(dir);
        if !refs_p.exists() {
            let _ = fs::create_dir_all(refs_p.clone());
        }
        refs_p.push(res);
        refs_p
    }

    // create a file in .nextsync/refs with the hash of this blob that
    // redirect to the relative path
    fn create_hash_ref(&mut self) -> io::Result<()> {
        // todo check if the file has been modified for moved and copy
        let refs_p = self.get_file_ref();
        let mut file = OpenOptions::new()
            .create(true)
            .write(true)
            .open(refs_p)?;
        // todo deal with duplicate content
        writeln!(file, "{}", self.get_relative_file_path().to_str().unwrap())?;
        Ok(())
    }

    /// Registers the blob: parent ref, blob ref, content-hash ref, and
    /// optionally refreshes the dates of all parent trees.
    pub fn create(&mut self, ts_remote: &str, up_parent: bool) -> io::Result<()> {
        // add blob reference to parent
        let _ = self.add_ref_to_parent();
        if let Err(err) = self.create_blob_ref(ts_remote.clone()) {
            eprintln!("err: saving blob ref of {}: {}", self.get_relative_file_path().display(), err);
        }
        if let Err(err) = self.create_hash_ref() {
            eprintln!("err: saving hash ref of {}: {}", self.get_relative_file_path().display(), err);
        }
        // update date for all parent
        if up_parent {
            if let Err(err) = update_dates(self.get_relative_file_path(), ts_remote) {
                eprintln!("err: updating parent date of {}: {}", self.get_relative_file_path().display(), err);
            }
        }
        Ok(())
    }

    /// Refreshes the stored state of the blob after a push.
    /// NOTE(review): the whole body is currently disabled and this is a
    /// no-op returning Ok(()).
    pub fn update(&mut self, ts_remote: &str) -> io::Result<()> {
        // // remove old hash ref
        // let mut refs_p = path::refs();
        // let binding = self.saved_hash();
        // let (dir, res) = binding.split_at(2);
        // refs_p.push(dir);
        // refs_p.push(res);
        // if let Err(err) = fs::remove_file(refs_p) {
        //     eprintln!("err: removing hash ref of {}: {}", self.r_path.clone().display(), err);
        // }
        //
        // // creating new hash ref
        // if let Err(err) = self.create_hash_ref() {
        //     eprintln!("err: saving hash ref of {}: {}", self.r_path.clone().display(), err);
        // }
        //
        // // updating content of blob's ref
        // let metadata = fs::metadata(self.a_path.clone())?;
        // let secs = metadata
        //     .modified()
        //     .unwrap()
        //     .duration_since(SystemTime::UNIX_EPOCH)
        //     .unwrap()
        //     .as_secs();
        //
        // let mut content = self.saved_filename();
        // content.push_str(" ");
        // content.push_str(ts_remote);
        // content.push_str(" ");
        // content.push_str(&metadata.len().to_string());
        // content.push_str(" ");
        // content.push_str(&secs.to_string());
        // content.push_str(" ");
        // content.push_str(&self.get_file_hash());
        //
        // let mut file = OpenOptions::new()
        //     .write(true)
        //     .open(self.obj_p.clone())?;
        //
        // writeln!(file, "{}", &content)?;
        Ok(())
    }
}

415
src/store/object/object.rs Normal file
View File

@@ -0,0 +1,415 @@
use std::io;
use std::fs;
use std::path::PathBuf;
use crate::utils::path;
use crate::store::head;
use crate::store::object::{add_node, rm_node};
use crypto::sha1::Sha1;
use crypto::digest::Digest;
use crate::utils::into::IntoPathBuf;
use crate::store::object::{blob::Blob, tree::Tree};
use crate::commands::status::{State, LocalObj};
/// Kind of a stored object.
#[derive(Clone, Copy)]
pub enum ObjType {
    TREE,    // a directory
    BLOB,    // a regular file
    DEFAULT  // unknown / not present on disk
}
/// Common behaviour shared by `Obj`, `Blob` and `Tree`.
pub trait ObjMethods {
    fn get_type(&self) -> ObjType;
    // location of the object file in the store
    fn get_obj_path(&self) -> PathBuf;
    // absolute path of the tracked file/directory
    fn get_file_path(&self) -> PathBuf;
    fn get_relative_file_path(&self) -> PathBuf;
    fn get_repo_file_path(&self) -> PathBuf;
    fn get_name(&self) -> String;
    // hash of the relative path
    fn get_hash_path(&self) -> String;
    fn get_local_obj(&self) -> LocalObj;
    // line used to reference this object in its parent
    fn get_line(&self, obj_type: ObjType) -> String;
    fn add_ref_to_parent(&self) -> io::Result<()>;
    // remove refs and the file itself
    fn rm(&mut self) -> io::Result<()>;
    // remove refs (incl. the parent's reference to self)
    fn rm_node(&mut self) -> io::Result<()>;
    // remove own/children objects but keep the parent reference
    fn rm_node_down(&mut self) -> io::Result<()>;
    fn exists_on_remote(&mut self) -> bool;
    fn has_changes(&mut self) -> bool;
}
/// Base representation of a tracked entry (file or directory).
pub struct Obj {
    name: String,
    obj_path: PathBuf,
    obj_type: ObjType,
    file_path: PathBuf, // file here is used as both file and directory
    relative_file_path: PathBuf,
    repo_file_path: PathBuf,
    hash_path: String, // hash of the relative path of the file
}
/// Base implementation; `Blob` and `Tree` delegate most calls here.
impl ObjMethods for Obj {
    fn get_type(&self) -> ObjType {
        self.obj_type
    }
    fn get_obj_path(&self) -> PathBuf {
        self.obj_path.clone()
    }
    fn get_file_path(&self) -> PathBuf {
        self.file_path.clone()
    }
    fn get_relative_file_path(&self) -> PathBuf {
        self.relative_file_path.clone()
    }
    fn get_repo_file_path(&self) -> PathBuf {
        self.repo_file_path.clone()
    }
    /// Snapshot of this object as a `LocalObj`; state defaults to New.
    fn get_local_obj(&self) -> LocalObj {
        LocalObj {
            otype: match self.obj_type {
                ObjType::BLOB => String::from("blob"),
                ObjType::TREE => String::from("tree"),
                ObjType::DEFAULT => String::from("default"),
            },
            name: self.get_name(),
            path: self.get_repo_file_path(),
            path_from: None,
            state: State::New
        }
    }
    fn get_name(&self) -> String {
        self.name.clone()
    }
    fn get_hash_path(&self) -> String {
        self.hash_path.clone()
    }
    // build line for parent reference
    fn get_line(&self, obj_type: ObjType) -> String {
        let type_str = match obj_type {
            ObjType::BLOB => "blob",
            ObjType::TREE => "tree",
            ObjType::DEFAULT => "default",
        };
        format!("{} {} {}", type_str, self.get_hash_path(), self.get_name())
    }
    /// Adds this object's line to its parent (HEAD for top-level paths).
    fn add_ref_to_parent(&self) -> io::Result<()> {
        let line = self.get_line(self.obj_type);
        if self.get_relative_file_path().iter().count() == 1 {
            head::add_line(line)?;
        } else {
            add_node(self.get_relative_file_path().parent().unwrap(), &line)?;
        }
        Ok(())
    }
    fn rm_node(&mut self) -> io::Result<()> {
        // remove parent reference to self
        let line = self.get_line(self.obj_type);
        if self.get_relative_file_path().iter().count() == 1 {
            head::rm_line(&line)?;
        } else {
            rm_node(self.get_relative_file_path().parent().unwrap(), &line)?;
        }
        Ok(())
    }
    // fallback: only meaningful on Blob/Tree
    fn rm_node_down(&mut self) -> io::Result<()> {
        eprintln!("rm_node_down: tried to do this on Obj");
        Ok(())
    }
    // fallback: only meaningful on Blob/Tree
    fn rm(&mut self) -> io::Result<()> {
        eprintln!("rm: tried to do this on Obj");
        Ok(())
    }
    fn exists_on_remote(&mut self) -> bool {
        self.obj_path.exists()
    }
    /// Dispatches change detection to the concrete Blob/Tree type.
    fn has_changes(&mut self) -> bool {
        if !self.obj_path.exists() {
            return true;
        }
        match self.obj_type {
            ObjType::BLOB => Blob::from_path(self.relative_file_path.clone()).has_changes(),
            ObjType::TREE => Tree::from_path(self.relative_file_path.clone()).has_changes(),
            ObjType::DEFAULT => {
                unreachable!();
            }
        }
    }
}
/// Mostly delegates to the inner `Obj`; overrides removal so the blob's
/// own object file and the tracked file get deleted.
impl ObjMethods for Blob {
    fn get_type(&self) -> ObjType {
        self.obj.get_type()
    }
    fn get_obj_path(&self) -> PathBuf {
        self.obj.get_obj_path()
    }
    fn get_file_path(&self) -> PathBuf {
        self.obj.get_file_path()
    }
    fn get_relative_file_path(&self) -> PathBuf {
        self.obj.get_relative_file_path()
    }
    fn get_repo_file_path(&self) -> PathBuf {
        self.obj.get_repo_file_path()
    }
    fn get_local_obj(&self) -> LocalObj {
        self.obj.get_local_obj()
    }
    fn get_name(&self) -> String {
        self.obj.get_name()
    }
    fn get_hash_path(&self) -> String {
        self.obj.get_hash_path()
    }
    // always a blob line, whatever type the caller passes
    fn get_line(&self, _: ObjType) -> String {
        self.obj.get_line(ObjType::BLOB)
    }
    fn add_ref_to_parent(&self) -> io::Result<()> {
        self.obj.add_ref_to_parent()
    }
    fn rm_node(&mut self) -> io::Result<()> {
        // remove self object and children object
        let _ = self.rm_node_down();
        self.obj.rm_node()
    }
    fn rm_node_down(&mut self) -> io::Result<()> {
        // remove reference to self
        fs::remove_file(self.get_obj_path())?;
        Ok(())
    }
    fn rm(&mut self) -> io::Result<()> {
        // remove all references, including children's one
        self.rm_node()?;
        // remove file
        fs::remove_file(self.get_file_path())?;
        Ok(())
    }
    fn exists_on_remote(&mut self) -> bool {
        self.obj.exists_on_remote()
    }
    fn has_changes(&mut self) -> bool {
        self.obj.has_changes()
    }
}
/// Mostly delegates to the inner `Obj`; overrides removal so children
/// objects and the directory itself get deleted recursively.
impl ObjMethods for Tree {
    fn get_type(&self) -> ObjType {
        self.obj.get_type()
    }
    fn get_obj_path(&self) -> PathBuf {
        self.obj.get_obj_path()
    }
    fn get_file_path(&self) -> PathBuf {
        self.obj.get_file_path()
    }
    fn get_relative_file_path(&self) -> PathBuf {
        self.obj.get_relative_file_path()
    }
    fn get_repo_file_path(&self) -> PathBuf {
        self.obj.get_repo_file_path()
    }
    fn get_local_obj(&self) -> LocalObj {
        self.obj.get_local_obj()
    }
    fn get_name(&self) -> String {
        self.obj.get_name()
    }
    fn get_hash_path(&self) -> String {
        self.obj.get_hash_path()
    }
    // always a tree line, whatever type the caller passes
    fn get_line(&self, _: ObjType) -> String {
        self.obj.get_line(ObjType::TREE)
    }
    fn add_ref_to_parent(&self) -> io::Result<()> {
        self.obj.add_ref_to_parent()
    }
    fn rm_node(&mut self) -> io::Result<()> {
        // remove self object and children object
        let _ = self.rm_node_down();
        self.obj.rm_node()
    }
    /// remove objects and children but not parent reference to self
    fn rm_node_down(&mut self) -> io::Result<()> {
        // remove children
        while let Some(mut child) = self.next() {
            match child.get_type() {
                ObjType::TREE => child.rm_node_down(),
                ObjType::BLOB => child.rm_node_down(),
                _ => Ok(())
            }?;
        };
        // remove reference to self
        fs::remove_file(self.get_obj_path())?;
        Ok(())
    }
    fn rm(&mut self) -> io::Result<()> {
        // remove all references, including children's one
        self.rm_node()?;
        // remove directory and all subfiles
        fs::remove_dir_all(self.get_file_path())?;
        Ok(())
    }
    fn exists_on_remote(&mut self) -> bool {
        self.obj.exists_on_remote()
    }
    fn has_changes(&mut self) -> bool {
        self.obj.has_changes()
    }
}
impl Obj {
    /// Empty placeholder object.
    fn new() -> Self {
        Obj {
            name: String::new(),
            obj_path: PathBuf::new(),
            file_path: PathBuf::new(),
            obj_type: ObjType::DEFAULT,
            hash_path: String::new(),
            relative_file_path: PathBuf::new(),
            repo_file_path: PathBuf::new()
        }
    }

    /// Builds an object from a repository-relative path; the type is
    /// inferred from the filesystem (dir → TREE, file → BLOB,
    /// missing → DEFAULT).
    pub fn from_path<S>(path: S) -> Obj where S: IntoPathBuf {
        let path = path.into();
        // object location: sha1(relative path), sharded <2 chars>/<rest>
        let mut hasher = Sha1::new();
        hasher.input_str(path.to_str().unwrap());
        let hash = hasher.result_str();
        let (dir, res) = hash.split_at(2);
        let mut obj_path = path::objects();
        obj_path.push(dir);
        obj_path.push(res);
        // set to absolute path if not already
        let root = path::repo_root();
        let abs_path = match path.clone().starts_with(root.clone()) {
            true => path.clone(),
            false => root.join(path.clone())
        };
        Obj {
            name: match abs_path.file_name() {
                None => String::new(),
                Some(name) => name.to_str().unwrap().to_owned()
            },
            obj_path,
            obj_type: match path.exists() {
                true => match path.is_dir() {
                    true => ObjType::TREE,
                    false => ObjType::BLOB
                },
                false => ObjType::DEFAULT
            },
            file_path: abs_path,
            relative_file_path: path.clone(),
            repo_file_path: path,
            hash_path: hash,
        }
    }

    /// load from the information line stored in the object
    /// ("type hash name"); exits on a malformed line. The concrete
    /// Tree/Blob wrapper is chosen from the stored type.
    pub fn from_line(line: String, base_dir: Option<PathBuf>) -> Box<dyn ObjMethods> {
        let mut split = line.trim().rsplit(' ');
        if split.clone().count() != 3 {
            eprintln!("fatal: invalid object(s) ({})", line.trim());
            std::process::exit(1);
        }
        // rsplit yields fields right-to-left
        let name = split.next().unwrap();
        let hash_path = split.next().unwrap();
        let obj_type = split.next().unwrap();
        let (dir, res) = hash_path.split_at(2);
        let mut obj_path = path::objects();
        obj_path.push(dir);
        obj_path.push(res);
        let path = match base_dir {
            Some(dir) => dir.join(name),
            None => PathBuf::from(name),
        };
        let root = path::repo_root();
        let abs_path = root.join(path.clone());
        let obj = Obj {
            name: String::from(name),
            obj_path,
            obj_type: match obj_type {
                "tree" => ObjType::TREE,
                "blob" => ObjType::BLOB,
                _ => ObjType::DEFAULT
            },
            file_path: abs_path,
            relative_file_path: path.clone(),
            repo_file_path: path,
            hash_path: String::from(hash_path),
        };
        match obj.obj_type {
            ObjType::TREE => Box::new(Tree::new(obj)),
            ObjType::BLOB => Box::new(Blob::new(obj)),
            // unknown type falls back to a Tree wrapper
            ObjType::DEFAULT => Box::new(Tree::new(obj))
        }
    }

    /// Root object of the repository, whose object file is HEAD.
    pub fn from_head() -> Self {
        Obj {
            name: String::new(),
            obj_path: head::path(),
            obj_type: ObjType::TREE,
            file_path: PathBuf::new(),
            relative_file_path: PathBuf::new(),
            repo_file_path: PathBuf::new(),
            hash_path: String::new(),
        }
    }
}

119
src/store/object/tree.rs Normal file
View File

@@ -0,0 +1,119 @@
use crate::utils::into::IntoPathBuf;
use crate::store::object::object::Obj;
use crate::store::object::update_dates;
use crate::store::object::object::ObjMethods;
use std::fs::{self, File, OpenOptions};
use std::io::{self, BufRead, BufReader, Write};
/// A tracked directory; its children are iterated via `next()` by
/// reading the tree's object file line by line.
pub struct Tree {
    pub obj: Obj,
    pub buf_reader: Option<BufReader<File>>, // lazily opened object-file reader
    is_head: bool, // true when this tree is the repository root (HEAD)
}
impl Tree {
    /// Wraps an already-built `Obj` as a tree.
    pub fn new(obj: Obj) -> Self {
        Tree {
            obj,
            buf_reader: None,
            is_head: false,
        }
    }

    /// Tree for the repository root, backed by the HEAD file.
    pub fn from_head() -> Self {
        Tree {
            obj: Obj::from_head(),
            buf_reader: None,
            is_head: true,
        }
    }

    /// Builds a tree from a repository-relative path.
    pub fn from_path<S>(r_path: S) -> Tree where S: IntoPathBuf {
        Tree {
            obj: Obj::from_path(r_path.into()),
            buf_reader: None,
            is_head: false,
        }
    }

    /// Lazily opens the tree's object file for iteration.
    pub fn read(&mut self) {
        if self.buf_reader.is_none() {
            if let Ok(file) = File::open(self.get_obj_path()) {
                self.buf_reader = Some(BufReader::new(file));
                // skip first line (declaration) if is not head
                if !self.is_head {
                    let mut line = String::new();
                    self.buf_reader.as_mut().unwrap().read_line(&mut line);
                }
            }
        }
    }

    /// NOTE(review): unimplemented stub — currently panics via todo!();
    /// the trailing `return true` is unreachable.
    pub fn has_changes(&mut self) -> bool {
        todo!();
        return true;
    }

    /// Returns the next child object of this tree, or None at EOF or
    /// on a read error.
    pub fn next(&mut self) -> Option<Box<dyn ObjMethods>> {
        self.read();
        //if let Some(ref mut file) = self.buf_reader {
        //    let mut line = String::new();
        //    match file.read_line(&mut line) {
        //        Ok(0) => Ok(None), // End of file
        //        Ok(_) => Ok(Some(line.trim_end().len())), // Return length of line
        //        Err(e) => Err(e),
        //    }
        //} else {
        //    Ok(None) // If file is None, return None
        //}
        match self.buf_reader {
            Some(ref mut file) => {
                let mut line = String::new();
                match file.read_line(&mut line) {
                    Ok(0) => None,
                    Ok(_) => Some(Obj::from_line(line, Some(self.get_relative_file_path()))),
                    Err(e) => {
                        eprintln!("tree::next: failed to read next line: {}", e);
                        None
                    }
                }
            },
            None => None
        }
    }

    /// Registers the tree: adds its line to the parent and writes its
    /// object file containing "name date".
    pub fn create(&self, date: &str, up_parent: bool) -> io::Result<()> {
        // add tree reference to parent
        let _ = self.add_ref_to_parent();
        // create tree object
        let content = format!("{} {}", self.get_name(), date);
        // create parent dir if needed
        let mut obj_path = self.get_obj_path();
        obj_path.pop();
        if !obj_path.exists() {
            fs::create_dir_all(obj_path)?;
        }
        // open ref file
        let mut file = OpenOptions::new()
            .create_new(true)
            .write(true)
            .open(self.get_obj_path())?;
        // update date for all parent
        // if up_parent {
        //     if let Err(err) = update_dates(self.get_relative_file_path(), date) {
        //         eprintln!("err: updating parent date of {}: {}", self.get_relative_file_path().display(), err);
        //     }
        // }
        writeln!(file, "{}", content)?;
        Ok(())
    }
}

11
src/subcommands.rs Normal file
View File

@@ -0,0 +1,11 @@
pub mod init;
pub mod status;
pub mod add;
pub mod reset;
pub mod clone;
pub mod push;
pub mod config;
pub mod remote_diff;
pub mod pull;
pub mod remote;
pub mod credential;

42
src/subcommands/add.rs Normal file
View File

@@ -0,0 +1,42 @@
use clap::{Arg, ArgMatches, Command, ArgAction};
use crate::commands;
use crate::commands::add::AddArgs;
/// Declares the `add` subcommand: positional FILE... (or --all, which
/// conflicts with it) plus the --force flag.
pub fn create() -> Command {
    Command::new("add")
        .arg(
            Arg::new("files")
                .required_unless_present("all")
                .conflicts_with("all")
                .num_args(1..)
                .value_name("FILE")
                .help("Files to add"),
        )
        .arg(
            Arg::new("force")
                .short('f')
                .long("force")
                .action(ArgAction::SetTrue)
                .help("Allow adding otherwise ignored files."),
        )
        .arg(
            Arg::new("all")
                .short('A')
                .long("all")
                .action(ArgAction::SetTrue)
                .help("This adds, modifies, and removes index entries to match the working tree"),
        )
        .about("Add changes to the index")
}
/// Maps the parsed CLI arguments onto `AddArgs` and runs the add command.
pub fn handler(args: &ArgMatches) {
    let files: Vec<String> = args
        .get_many::<String>("files")
        .map(|vals| vals.cloned().collect())
        .unwrap_or_default();
    commands::add::add(AddArgs {
        files,
        force: *args.get_one::<bool>("force").unwrap(),
        all: *args.get_one::<bool>("all").unwrap(),
    });
}

52
src/subcommands/clone.rs Normal file
View File

@@ -0,0 +1,52 @@
use clap::{Arg, Command, ArgMatches};
// use textwrap::{fill, Options};
use crate::commands::clone::CloneArgs;
use crate::global;
use crate::commands;
// fn sized_str<'a>(content: &'a str) -> &'a str {
// fill(content, Options::new(70).width).as_str();
// "ok"
// }
/// Declares the `clone` subcommand: required REMOTE, optional --depth
/// and optional target DIRECTORY.
pub fn create() -> Command {
    // let remote_desc = sized_str(&format!("The repository to clone from. See the NEXTSYNC URLS section below for more information on specifying repositories."));
    // let depth_desc = sized_str(&format!("Depth of the recursive fetch of object properties. This value should be lower when there are a lot of files per directory and higher when there are a lot of subdirectories with fewer files. (Default: {})", clone::DEPTH));
    Command::new("clone")
        .arg(
            Arg::new("remote")
                .required(true)
                .num_args(1)
                .value_name("REMOTE")
                //.help(_desc)
        )
        .arg(
            Arg::new("depth")
                .short('d')
                .long("depth")
                .required(false)
                .num_args(1)
                //.help(&depth_desc)
        )
        .arg(
            Arg::new("directory")
                .required(false)
                .num_args(1)
                .value_name("DIRECTORY")
        )
        .about("Clone a repository into a new directory")
        .after_help("NEXTSYNC URLS\nThe following syntaxes may be used:\n\t- user@host.xz/path/to/repo\n\t- http[s]://host.xz/apps/files/?dir=/path/to/repo&fileid=111111\n\t- [http[s]://]host.xz/remote.php/dav/files/user/path/to/repo\n")
}
pub fn handler(args: &ArgMatches) {
if let Some(val) = args.get_one::<String>("directory") {
global::global::set_dir_path(String::from(val.to_string()));
}
if let Some(remote) = args.get_one::<String>("remote") {
commands::clone::clone(CloneArgs {
remote: remote.to_string(),
depth: args.get_one::<String>("depth").cloned(),
});
}
}

48
src/subcommands/config.rs Normal file
View File

@@ -0,0 +1,48 @@
use clap::{Arg, Command, ArgMatches};
use crate::commands::config::ConfigSetArgs;
use crate::commands;
pub fn create() -> Command {
Command::new("config")
.about("Get and set repository or global options")
.subcommand(
Command::new("get")
.about("Get the value of a configuration variable")
.arg(
Arg::new("name")
.help("The name of the configuration variable")
.required(true)
.index(1)
)
)
.subcommand(
Command::new("set")
.about("Set a configuration variable")
.arg(
Arg::new("name")
.help("The name of the configuration variable")
.required(true)
.index(1)
)
.arg(
Arg::new("value")
.help("The value to set")
.required(true)
.index(2)
)
)
}
pub fn handler(args: &ArgMatches) {
match args.subcommand() {
Some(("set", set_matches)) => {
commands::config::config_set(ConfigSetArgs {
name: set_matches.get_one::<String>("name").unwrap().to_string(),
value: set_matches.get_one::<String>("value").unwrap().to_string(),
});
}
_ => println!("Invalid or missing subcommand for 'config'"),
}
}

View File

@@ -0,0 +1,39 @@
use clap::{Arg, Command, ArgMatches};
use crate::commands;
use crate::commands::credential::CredentialArgs;
pub fn create() -> Command {
Command::new("credential")
.about("Manage set of credentials")
.subcommand(
Command::new("add")
.arg(
Arg::new("username")
.required(true)
.num_args(1)
.value_name("NAME")
.help("The username used to connect to nextcloud"),
)
.arg(
Arg::new("password")
.required(false)
.num_args(1)
.value_name("PASSWORD")
.help("The passowd used to connect to nextcloud (optional)"),
)
.about("Add a new set of credential")
)
}
pub fn handler(args: &ArgMatches) {
match args.subcommand() {
Some(("add", add_matches)) => {
commands::credential::credential_add(CredentialArgs {
username: add_matches.get_one::<String>("username").unwrap().to_string(),
password: add_matches.get_one::<String>("password").cloned(),
});
}
_ => println!("Invalid or missing subcommand for 'credential'"),
}
}

23
src/subcommands/init.rs Normal file
View File

@@ -0,0 +1,23 @@
use clap::{Arg, Command, ArgMatches};
use crate::global;
use crate::commands;
pub fn create() -> Command {
Command::new("init")
.arg(
Arg::new("directory")
.required(false)
.num_args(1)
.value_name("DIRECTORY")
)
.about("Create an empty Nextsync repository")
// Create an empty nextsync repository or reinitialize an existing one
}
pub fn handler(args: &ArgMatches) {
if let Some(val) = args.get_one::<String>("directory") {
global::global::set_dir_path(val.to_string());
}
commands::init::init();
}

23
src/subcommands/pull.rs Normal file
View File

@@ -0,0 +1,23 @@
use clap::{Arg, Command, ArgMatches};
use crate::global;
use crate::commands;
pub fn create() -> Command {
Command::new("pull")
.arg(
Arg::new("path")
.required(false)
.num_args(1)
.value_name("PATH")
.help("The path to pull."),
)
.about("Fetch and integrate changes from the nextcloud server.")
}
pub fn handler(args: &ArgMatches) {
if let Some(val) = args.get_one::<String>("path") {
global::global::set_dir_path(val.to_string());
}
commands::pull::pull();
}

6
src/subcommands/push.rs Normal file
View File

@@ -0,0 +1,6 @@
use clap::Command;
pub fn create() -> Command {
Command::new("push")
.about("Push changes on nextcloud")
}

46
src/subcommands/remote.rs Normal file
View File

@@ -0,0 +1,46 @@
use clap::{Arg, Command, ArgMatches, ArgAction};
use crate::commands;
use crate::commands::remote::RemoteArgs;
pub fn create() -> Command {
Command::new("remote")
.about("Manage set of tracked repositories")
.subcommand(
Command::new("add")
.arg(
Arg::new("name")
.required(true)
.index(1)
.help("The name of the remote"),
)
.arg(
Arg::new("url")
.required(true)
.index(2)
.help("The url of the remote"),
)
.about("Add a new remote to this repository")
)
.arg(
Arg::new("verbose")
.short('v')
.long("verbose")
.action(ArgAction::SetTrue)
.help("Be a little more verbose and show remote url after name.")
)
}
pub fn handler(args: &ArgMatches) {
match args.subcommand() {
Some(("add", add_matches)) => {
commands::remote::remote_add(RemoteArgs {
name: add_matches.get_one::<String>("name").unwrap().to_string(),
url: add_matches.get_one::<String>("url").unwrap().to_string(),
});
}
_ => {
commands::remote::remote_list(*args.get_one::<bool>("verbose").unwrap());
}
}
}

View File

@@ -0,0 +1,24 @@
use clap::{Arg, Command, ArgMatches};
use crate::global;
use crate::commands;
pub fn create() -> Command {
Command::new("remote-diff")
.arg(
Arg::new("path")
.required(false)
.num_args(1)
.value_name("PATH")
.help("The path to pull."),
)
.about("Fetch changes from the nextcloud server.")
}
pub fn handler(args: &ArgMatches) {
if let Some(val) = args.get_one::<String>("path") {
global::global::set_dir_path(val.to_string());
}
commands::remote_diff::remote_diff();
}

6
src/subcommands/reset.rs Normal file
View File

@@ -0,0 +1,6 @@
use clap::Command;
pub fn create() -> Command {
Command::new("reset")
.about("Clear the index")
}

30
src/subcommands/status.rs Normal file
View File

@@ -0,0 +1,30 @@
use clap::{Arg, Command, ArgMatches};
use crate::global;
use crate::commands;
use crate::commands::status::StatusArgs;
pub fn create() -> Command {
Command::new("status")
.arg(
Arg::new("directory")
.num_args(1)
.value_name("DIRECTORY")
)
.arg(
Arg::new("nostyle")
.long("nostyle")
.help("Status with minium information and style"),
)
.about("Show the working tree status")
}
pub fn handler(args: &ArgMatches) {
if let Some(val) = args.get_one::<String>("directory") {
global::global::set_dir_path(val.to_string());
}
commands::status::status(StatusArgs {
nostyle: args.contains_id("nostyle"),
});
}

View File

@@ -1,3 +1,7 @@
pub mod path;
pub mod read;
pub mod nextsyncignore;
pub mod api;
pub mod time;
pub mod remote;
pub mod into;

46
src/utils/api.rs Normal file
View File

@@ -0,0 +1,46 @@
use crate::commands::{clone::get_url_props, config};
#[derive(Debug)]
pub struct ApiProps {
pub host: String, // nextcloud.example.com
pub username: String,
pub root: String, // /dir/cloned
}
impl Clone for ApiProps {
fn clone(&self) -> Self {
ApiProps {
host: self.host.to_string(),
username: self.username.to_string(),
root: self.root.to_string(),
}
}
}
pub fn get_api_props() -> ApiProps {
let remote = match config::get_remote("origin") {
Some(r) => r,
None => {
eprintln!("fatal: unable to find a remote");
std::process::exit(1);
}
};
let (host, username, root) = get_url_props(&remote);
ApiProps {
host,
username: username.unwrap().to_owned(),
root: root.to_owned(),
}
}
pub fn get_relative_s(p: String, api_props: &ApiProps) -> String {
let mut final_p = p.clone();
final_p = final_p.strip_prefix("/remote.php/dav/files/").unwrap().to_string();
final_p = final_p.strip_prefix(&api_props.username).unwrap().to_string();
final_p = final_p.strip_prefix(&api_props.root).unwrap().to_string();
if final_p.starts_with("/") {
final_p = final_p.strip_prefix("/").unwrap().to_string();
}
final_p
}

30
src/utils/into.rs Normal file
View File

@@ -0,0 +1,30 @@
use std::path::{PathBuf, Path};
pub trait IntoPathBuf {
fn into(self) -> PathBuf;
}
impl IntoPathBuf for PathBuf {
fn into(self) -> PathBuf {
self
}
}
impl IntoPathBuf for &Path {
fn into(self) -> PathBuf {
PathBuf::from(self)
}
}
impl IntoPathBuf for String {
fn into(self) -> PathBuf {
PathBuf::from(self)
}
}
impl IntoPathBuf for &str {
fn into(self) -> PathBuf {
PathBuf::from(self)
}
}

View File

@@ -1,9 +1,9 @@
use crate::utils::{read, path};
use regex::Regex;
use std::fs::File;
use std::io::{Cursor, Lines, BufReader, empty, BufRead};
use std::io::{BufReader, BufRead};
use regex::Regex;
use crate::utils::path;
fn read_lines() -> Result<Vec<String>, ()> {
pub fn read_lines() -> Result<Vec<String>, ()> {
if let Some(path) = path::nextsyncignore() {
let file = match File::open(path) {
Ok(buffer) => buffer,
@@ -23,12 +23,17 @@ fn read_lines() -> Result<Vec<String>, ()> {
Ok(vec![])
}
pub fn ignore_files(files: &mut Vec<String>) -> (bool, Vec<String>) {
pub fn get_rules() -> Vec<String> {
match read_lines() {
Ok(r) => r,
Err(_) => vec![],
}
}
pub fn _ignore_files(files: &mut Vec<String>) -> (bool, Vec<String>) {
let mut ignored_f = vec![];
if let Some(path) = path::nextsyncignore() {
if let Ok(lines) = read_lines() {
files.retain(|file| !ignore_file(file, lines.clone(), &mut ignored_f));
}
if let Ok(lines) = read_lines() {
files.retain(|file| !ignore_file(file, lines.clone(), &mut ignored_f));
}
(ignored_f.len() > 0, ignored_f)
}
@@ -56,7 +61,7 @@ fn normalize_rule(l: String) -> String {
pub fn ignore_file(path: &String, lines: Vec<String>, ignored_f: &mut Vec<String>) -> bool {
let mut ignored = false;
for mut line in lines {
for line in lines {
if line.starts_with("!") {
if !ignored {
continue;
@@ -82,6 +87,7 @@ pub fn ignore_file(path: &String, lines: Vec<String>, ignored_f: &mut Vec<String
#[cfg(test)]
mod tests {
use super::*;
use std::io::Cursor;
#[test]
fn test_ignore_files() {

View File

@@ -1,7 +1,60 @@
use std::env;
use std::path::{PathBuf, Path};
use crate::global::global::DIR_PATH;
use std::fs::canonicalize;
use std::path::{PathBuf, Path, Component};
use crate::global::global::DIR_PATH;
/// Improve the path to try remove and solve .. token.
/// Taken from https://stackoverflow.com/questions/68231306/stdfscanonicalize-for-files-that-dont-exist
///
/// This assumes that `a/b/../c` is `a/c` which might be different from
/// what the OS would have chosen when b is a link. This is OK
/// for broot verb arguments but can't be generally used elsewhere
///
/// This function ensures a given path ending with '/' still
/// ends with '/' after normalization.
pub fn normalize_path<P: AsRef<Path>>(path: P) -> PathBuf {
let ends_with_slash = path.as_ref()
.to_str()
.map_or(false, |s| s.ends_with('/'));
let mut normalized = PathBuf::new();
for component in path.as_ref().components() {
match &component {
Component::ParentDir => {
if !normalized.pop() {
normalized.push(component);
}
}
_ => {
normalized.push(component);
}
}
}
if ends_with_slash {
normalized.push("");
}
normalized
}
pub fn normalize_relative(file: &str) -> Result<String, String> {
let current = match current() {
Some(p) => p,
None => {
return Err("cannot find current location".to_owned());
}
};
let p = {
let tmp_p = current.join(PathBuf::from(file));
normalize_path(tmp_p)
};
let relative_p = match p.strip_prefix(repo_root()) {
Ok(p) => p,
Err(_) => return Err("is not in a nextsync repo or doesn't exist".to_owned()),
};
Ok(relative_p.to_str().unwrap().to_owned())
}
pub fn current() -> Option<PathBuf> {
let d = DIR_PATH.lock().unwrap();
@@ -23,7 +76,7 @@ pub fn current() -> Option<PathBuf> {
}
}
pub fn nextsync_root() -> Option<PathBuf> {
pub fn repo_root_without_err() -> Option<PathBuf> {
let mut path = current()?;
let root = loop {
@@ -41,32 +94,61 @@ pub fn nextsync_root() -> Option<PathBuf> {
root
}
pub fn nextsync() -> Option<PathBuf> {
if let Some(mut path) = nextsync_root() {
path.push(".nextsync");
return Some(path);
pub fn repo_root() -> PathBuf {
match repo_root_without_err() {
Some(p) => p,
None => {
eprintln!("fatal: not a nextsync repository (or any of the parent directories): .nextsync");
std::process::exit(1);
}
}
None
}
pub fn objects() -> Option<PathBuf> {
if let Some(mut path) = nextsync_root() {
pub fn is_nextsync_config(path: PathBuf) -> bool {
path.ends_with(".nextsync") || path.starts_with(".nextsync")
}
pub fn nextsync() -> PathBuf {
let mut path = repo_root();
path.push(".nextsync");
path.push("objects");
return Some(path);
}
None
path
}
pub fn config() -> PathBuf {
let mut path = repo_root();
path.push(".nextsync");
path.push("config");
path
}
pub fn objects() -> PathBuf {
let mut path = repo_root();
path.push(".nextsync");
path.push("objects");
path
}
pub fn refs() -> PathBuf {
let mut path = repo_root();
path.push(".nextsync");
path.push("refs");
path
}
pub fn nextsyncignore() -> Option<PathBuf> {
if let Some(mut path) = nextsync_root() {
path.push(".nextsyncignore");
if path.exists() {
return Some(path);
} else {
return None;
}
let mut path = repo_root();
path.push(".nextsyncignore");
if path.exists() {
Some(path)
} else {
None
}
}
pub fn path_buf_to_string(p: PathBuf) -> String {
if let Some(str) = p.to_str() {
str.to_string()
} else {
String::new()
}
None
}

View File

@@ -1,6 +1,6 @@
use std::path::{Path, PathBuf};
use std::io::{self, BufRead, BufReader, Write};
use std::fs::{self, File, OpenOptions};
use std::io::{self, BufRead, BufReader, Write};
pub fn read_lines<P>(filename: P) -> io::Result<io::Lines<io::BufReader<File>>>
where P: AsRef<Path>, {

258
src/utils/remote.rs Normal file
View File

@@ -0,0 +1,258 @@
use std::path::PathBuf;
use crate::{services::{req_props::ObjProps, api::ApiError}, store::object::{blob::Blob, Object}, commands::status::State};
use std::collections::HashMap;
use super::{path::{path_buf_to_string, self}, read};
pub struct EnumerateOptions {
pub depth: Option<String>,
pub relative_s: Option<String>,
}
pub fn enumerate_remote(
req: impl Fn(&str) -> Result<Vec<ObjProps>, ApiError>,
should_skip: Option<&dyn Fn(ObjProps) -> bool>,
options: EnumerateOptions
) -> (Vec<ObjProps>, Vec<ObjProps>) {
let mut folders: Vec<ObjProps> = vec![ObjProps::new()];
let mut all_folders: Vec<ObjProps> = vec![];
let mut deleted: Vec<PathBuf> = vec![];
let mut files: Vec<ObjProps> = vec![];
let mut objs_hashmap: HashMap<String, Vec<String>> = HashMap::new();
objs_hashmap.insert(
options.relative_s.clone().unwrap_or(String::new()),
Vec::new());
while folders.len() > 0 {
let folder = folders.pop().unwrap();
let relative_s = match folder.relative_s {
Some(relative_s) => relative_s,
None => options.relative_s.clone().unwrap_or(String::new())
};
// request folder content
let res = req(relative_s.as_str());
let objs = match res {
Ok(o) => o,
Err(ApiError::IncorrectRequest(err)) => {
eprintln!("fatal: {}", err.status());
std::process::exit(1);
},
Err(ApiError::EmptyError(_)) => {
eprintln!("Failed to get body");
vec![]
}
Err(ApiError::RequestError(err)) => {
eprintln!("fatal: {}", err);
std::process::exit(1);
},
Err(ApiError::Unexpected(_)) => todo!()
};
// separate folders and files in response
let d = options.depth.clone()
.unwrap_or("0".to_owned())
.parse::<u16>()
.unwrap();
// first element is not used as it is the fetched folder
if let Some(should_skip_fct) = should_skip.clone() {
iter_with_skip_fct(
objs,
d,
&mut files,
&mut folders,
should_skip_fct,
&mut objs_hashmap,
&mut all_folders);
// check for deletion only when folder are not empty
// as the folder's content may not have been fetched yet
for (key, children) in objs_hashmap.clone() {
if children.len() != 0 {
get_deleted(key.clone(), children, &mut deleted);
objs_hashmap.remove(&key);
}
}
} else {
iter_without_skip_fct(
objs,
d,
&mut files,
&mut folders,
&mut all_folders);
}
}
// go through all folders not checked for deletion before
// as they were empty
if let Some(_) = should_skip.clone() {
for (key, children) in objs_hashmap.clone() {
get_deleted(key.clone(), children, &mut deleted);
objs_hashmap.remove(&key);
}
}
(all_folders, files)
}
fn calc_depth(obj: &ObjProps) -> u16 {
let path = obj.relative_s.clone().unwrap_or(String::new());
path.split("/").count() as u16
}
fn iter_with_skip_fct(
objs: Vec<ObjProps>,
d: u16,
files: &mut Vec<ObjProps>,
folders: &mut Vec<ObjProps>,
should_skip: &dyn Fn(ObjProps) -> bool,
objs_hashmap: &mut HashMap<String, Vec<String>>,
all_folders: &mut Vec<ObjProps>) {
let mut iter = objs.iter();
let default_depth = calc_depth(iter.next().unwrap());
let mut skip_depth = 0;
for object in iter {
let current_depth = calc_depth(object);
if object.is_dir() {
// add folder to parent folder only if exists
let mut r_path = PathBuf::from(object.relative_s.clone().unwrap());
r_path.pop();
let r_ps = path_buf_to_string(r_path);
if let Some(values) = objs_hashmap.get_mut(&r_ps.clone()) {
values.push(object.relative_s.clone().unwrap());
}
// skip children of skiped folder
if skip_depth != 0 && skip_depth < current_depth {
continue;
}
let should_skip = should_skip(object.clone());
if should_skip {
skip_depth = current_depth;
} else {
// if this folder is not skipped then we initialised its vector
let r_ps_dir = object.relative_s.clone().unwrap();
let mut r_ps_key = r_ps_dir.chars();
r_ps_key.next_back();
objs_hashmap.insert(r_ps_key.as_str().to_owned(), Vec::new());
skip_depth = 0;
all_folders.push(object.clone());
}
// should get content of this folder if it is not already in this reponse
if current_depth - default_depth == d && !should_skip {
folders.push(object.clone());
}
} else {
// add file to parent folder only if exists
let mut r_path = PathBuf::from(object.relative_s.clone().unwrap());
r_path.pop();
let r_ps = path_buf_to_string(r_path);
if let Some(values) = objs_hashmap.get_mut(&r_ps.clone()) {
values.push(object.relative_s.clone().unwrap());
}
// skip children of skiped folder
if skip_depth != 0 && skip_depth < current_depth {
continue;
}
if !should_skip(object.clone()) {
skip_depth = 0;
files.push(object.clone());
}
}
}
}
fn iter_without_skip_fct(
objs: Vec<ObjProps>,
d: u16,
files: &mut Vec<ObjProps>,
folders: &mut Vec<ObjProps>,
all_folders: &mut Vec<ObjProps>) {
let mut iter = objs.iter();
let default_depth = calc_depth(iter.next().unwrap());
for object in iter {
if object.is_dir() {
// should get content of this folder if it is not already in this reponse
let current_depth = calc_depth(object);
if current_depth - default_depth == d {
folders.push(object.clone());
}
all_folders.push(object.clone());
} else {
files.push(object.clone());
}
}
}
fn get_non_new_local_element(iter: &mut dyn Iterator<Item = &PathBuf>) -> Option<PathBuf> {
let mut el = iter.next();
while !el.is_none() && {
if el.unwrap().is_dir() {
// ignore newly created directory (not sync)
!Object::new(el.unwrap().clone().to_str().unwrap()).exists()
} else {
// ignore newly created file (not sync)
Blob::from_path(el.unwrap().clone()).status(&mut None) == State::New
}
} {
el = iter.next();
}
match el {
Some(e) => Some(e.to_owned()),
None => None
}
}
fn get_deleted(source: String, children: Vec<String>, deleted: &mut Vec<PathBuf>) {
let root = path::repo_root();
let abs_p = root.join(PathBuf::from(source.clone()));
let folder_read = read::read_folder(abs_p.clone());
if let Ok(mut local_objs) = folder_read {
// set path to be ref one not abs
local_objs.iter_mut().for_each(|e| {
*e = e.strip_prefix(path_buf_to_string(root.clone())).unwrap().to_path_buf();
});
let mut iter = local_objs.iter();
let mut local_element = get_non_new_local_element(&mut iter);
while let Some(local) = local_element {
if let None = children.iter().position(|child| {
let child_compared = {
// remove traling / of directory
if child.ends_with("/") {
let t = child.clone();
let mut ts = t.chars();
ts.next_back();
ts.as_str().to_owned()
} else {
child.clone()
}
};
child_compared == path_buf_to_string(local.clone())
}) {
deleted.push(local.clone());
}
local_element = get_non_new_local_element(&mut iter);
}
}
}

6
src/utils/time.rs Normal file
View File

@@ -0,0 +1,6 @@
use chrono::{DateTime, TimeZone, Utc, ParseError};
pub fn parse_timestamp(timestamp: &str) -> Result<DateTime<Utc>, ParseError> {
let format = "%a, %d %b %Y %H:%M:%S %Z";
Utc.datetime_from_str(timestamp, format)
}

116
tests/add.rs Normal file
View File

@@ -0,0 +1,116 @@
use std::str;
mod utils;
use utils::{utils::*, client::ClientTest};
fn line_should_contains(lines: &Vec<String>, nb: usize, str: &str) {
if lines[nb].find(str).is_none()
{
eprintln!("'{}' not found in '{}'", str, lines[nb]);
dbg!(lines);
}
assert!(lines[nb].find(str).is_some());
}
fn lines_should_not_contains(lines: Vec<String>, str: &str) {
for line in lines {
if line.find("Changes not staged for push").is_some() {
return;
}
if line.find(str).is_some() {
eprintln!("'{}' found in '{}'", str, line);
}
assert!(line.find(str).is_none());
}
}
fn collect_status_lines(client: &mut ClientTest) -> Vec<String> {
let out = client.run_cmd("status");
str::from_utf8(&out.stdout)
.unwrap()
.split("\n")
.map(|s| s.to_owned())
.collect()
}
#[cfg(test)]
mod add_tests {
use crate::utils::{server::ServerTest, status_utils::status_should_be_empty};
use super::*;
#[test]
fn simple_add() {
let id = get_random_test_id();
let mut client = ClientTest::new(id).init();
let _ = client.add_file("file1", "foo");
client.run_cmd_ok("add file1");
let lines = collect_status_lines(&mut client);
// test
line_should_contains(&lines, 2, "file1");
client.clean();
}
#[test]
fn add_config_file() {
let id = get_random_test_id();
let mut client = ClientTest::new(id).init();
let _ = client.add_file("file1", "foo");
client.run_cmd_ok("add .nextsync -f");
let lines = collect_status_lines(&mut client);
// test
lines_should_not_contains(lines, ".nextsync");
client.clean();
}
#[test]
fn add_dir_implicit() {
let id = get_random_test_id();
let mut client = ClientTest::new(id).init();
let _ = client.add_dir("dir");
let _ = client.add_file("dir/file1", "foo");
// adding the file should add the dir
client.run_cmd_ok("add dir/file1");
let lines = collect_status_lines(&mut client);
// tests
line_should_contains(&lines, 2, "dir");
line_should_contains(&lines, 3, "dir/file1");
client.clean();
}
#[test]
fn add_file_no_changes() {
// add a file push it and add it again
let (mut client, mut server) = init_test();
let _ = client.add_file("file1", "foo");
client.run_cmd_ok("add file1");
client.run_cmd_ok("push");
status_should_be_empty(&mut client);
client.run_cmd_ok("add file1");
status_should_be_empty(&mut client);
clean_test(client, &mut server)
}
}

36
tests/pull.rs Normal file
View File

@@ -0,0 +1,36 @@
mod utils;
use utils::{utils::*};
#[cfg(test)]
mod pull_tests {
use super::*;
#[test]
fn simple_pull() {
let (mut client, mut server) = init_test();
let _ = server.add_file("file1", "foo");
client.run_cmd_ok("pull");
// tests
assert!(client.has_file("file1", "foo"));
clean_test(client, &mut server);
}
#[test]
fn simple_pull_directory() {
let (mut client, mut server) = init_test();
let _ = server.add_dir("dir");
let _ = server.add_file("dir/file1", "foo");
client.run_cmd_ok("pull");
// tests
assert!(client.has_file("dir/file1", "foo"));
clean_test(client, &mut server);
}
}

168
tests/push.rs Normal file
View File

@@ -0,0 +1,168 @@
mod utils;
use utils::{utils::*, status_utils::*};
#[cfg(test)]
mod push_tests {
use super::*;
#[test]
fn simple_push() {
let (mut client, mut server) = init_test();
let _ = client.add_file("file1", "foo");
client.run_cmd_ok("add file1");
client.run_cmd_ok("push");
// tests
assert!(server.has_file("file1", "foo"));
let (staged, not_staged) = client.get_status();
lines_should_not_contains(staged, "file1");
lines_should_not_contains(not_staged, "file1");
clean_test(client, &mut server);
}
#[test]
fn push_update() {
let (mut client, mut server) = init_test();
// init content of file1
let _ = client.add_file("file1", "foo");
client.run_cmd_ok("add file1");
client.run_cmd_ok("push");
// tests
assert!(server.has_file("file1", "foo"));
let (staged, not_staged) = client.get_status();
lines_should_not_contains(staged, "file1");
lines_should_not_contains(not_staged, "file1");
// change content of file1
let _ = client.add_file("file1", "bar");
client.run_cmd_ok("add file1");
client.run_cmd_ok("push");
// tests
assert!(server.has_file("file1", "bar"));
let (staged, not_staged) = client.get_status();
lines_should_not_contains(staged, "file1");
lines_should_not_contains(not_staged, "file1");
clean_test(client, &mut server);
}
#[test]
fn push_dir_explicit() {
let (mut client, mut server) = init_test();
let _ = client.add_dir("dir");
let _ = client.add_file("dir/file2", "bar");
// push dir and file2
client.run_cmd_ok("add dir");
client.run_cmd_ok("push");
// tests
assert!(server.has_file("dir/file2", "bar"));
let (staged, not_staged) = client.get_status();
lines_should_not_contains(staged.clone(), "file2");
lines_should_not_contains(staged, "foo");
lines_should_not_contains(not_staged.clone(), "file2");
lines_should_not_contains(not_staged, "foo");
clean_test(client, &mut server);
}
#[test]
fn push_dir_implicit() {
let (mut client, mut server) = init_test();
let _ = client.add_dir("dir");
let _ = client.add_file("dir/file2", "bar");
// push dir and file2
client.run_cmd_ok("add dir/file2");
client.run_cmd_ok("push");
// tests
assert!(server.has_file("dir/file2", "bar"));
let (staged, not_staged) = client.get_status();
lines_should_not_contains(staged.clone(), "file2");
lines_should_not_contains(staged, "foo");
lines_should_not_contains(not_staged.clone(), "file2");
lines_should_not_contains(not_staged, "foo");
clean_test(client, &mut server);
}
#[test]
fn push_all() {
let (mut client, mut server) = init_test();
let _ = client.add_file("file1", "foo");
let _ = client.add_dir("dir");
let _ = client.add_file("dir/file2", "bar");
// push dir and file2
client.run_cmd_ok("add *");
client.run_cmd_ok("push");
// tests
assert!(server.has_file("file1", "foo"));
assert!(server.has_file("dir/file2", "bar"));
let (staged, not_staged) = client.get_status();
assert!(staged.len() == 0);
assert!(not_staged.len() == 0);
clean_test(client, &mut server);
}
#[test]
fn push_file_deletion() {
let (mut client, mut server) = init_test();
let _ = client.add_file("file1", "foo");
// push file1
client.run_cmd_ok("add file1");
client.run_cmd_ok("push");
// tests
assert!(server.has_file("file1", "foo"));
status_should_be_empty(&mut client);
// remove it
let _ = client.remove_file("file1");
client.run_cmd_ok("add file1");
client.run_cmd_ok("push");
// tests
assert!(server.has_not_file("file1"));
status_should_be_empty(&mut client);
clean_test(client, &mut server);
}
#[test]
fn push_dir_deletion() {
let (mut client, mut server) = init_test();
// push dir and file2
let _ = client.add_dir("dir");
let _ = client.add_file("dir/file2", "bar");
client.run_cmd_ok("add dir");
client.run_cmd_ok("push");
// tests
assert!(server.has_file("dir/file2", "bar"));
// push deletion
let _ = client.remove_dir("dir");
client.run_cmd_ok("add dir");
client.run_cmd_ok("push");
assert!(server.has_not_dir("dir"));
clean_test(client, &mut server);
}
}

14
tests/utils.rs Normal file
View File

@@ -0,0 +1,14 @@
#[path = "utils/server.rs"]
pub mod server;
#[path = "utils/client.rs"]
pub mod client;
#[path = "utils/utils.rs"]
pub mod utils;
#[path = "utils/status_utils.rs"]
pub mod status_utils;
#[path = "utils/files_utils.rs"]
pub mod files_utils;

162
tests/utils/client.rs Normal file
View File

@@ -0,0 +1,162 @@
use std::str;
use std::process::{Command, Output};
use std::fs::{self, File};
use std::io::Write;
use std::env;
use std::path::PathBuf;
use super::files_utils::has_files;
#[cfg(test)]
pub struct ClientTest {
user: String, // the nextcloud user
volume: String, // temp dir for the test
pub test_id: String, // name of the test (e.g nextsync_rand)
exe_path: PathBuf, // absolute path of nextsync executable
}
#[cfg(test)]
impl ClientTest {
pub fn new(id: String) -> Self {
// create a directory in /tmp with the given id
let mut vol = String::from("/tmp/");
vol.push_str(&id);
let _ = fs::create_dir(vol.clone());
// get nextsync path
let mut exe_path = env::current_dir().unwrap();
exe_path = exe_path.join("target/debug/nextsync");
// build the client
ClientTest {
user: String::from("admin"),
volume: vol,
test_id: id,
exe_path
}
}
pub fn init(mut self) -> Self {
self.run_cmd_ok("init");
// set remote url
let url = String::from(format!("{}@nextcloud.local/{}", self.user, self.test_id));
self.run_cmd_ok(&format!("remote add origin {}", url));
// set force_unsecure as debug server has not certificate
self.run_cmd_ok("config set force_insecure true");
// set token for request
self.run_cmd_ok(&format!("credential add {} {}", self.user, self.user));
self
}
pub fn clean(self) -> Self {
let _ = fs::remove_dir_all(&self.volume);
self
}
pub fn run_cmd_ok(&mut self, args: &str) -> Output {
let output = self.run_cmd(args);
if !output.status.success() {
println!("id: {}", self.test_id.clone());
println!("Failed to execute: '{}'", args);
println!("stderr: {}", String::from_utf8_lossy(&output.stderr));
println!("stdout: {}", String::from_utf8_lossy(&output.stdout));
}
assert!(output.status.success());
output
}
pub fn run_cmd(&mut self, args: &str) -> Output {
let output = Command::new(self.exe_path.to_str().unwrap())
.current_dir(self.volume.clone())
.args(args.split(" "))
.output()
.expect("Could not execute nextsync command");
return output;
}
pub fn add_dir(&mut self, name: &str) -> std::io::Result<()> {
let mut path = self.volume.clone();
path.push_str("/");
path.push_str(name);
let _ = fs::create_dir_all(path)?;
Ok(())
}
pub fn add_file(&mut self, name: &str, content: &str) -> std::io::Result<()> {
let mut path = self.volume.clone();
path.push_str("/");
path.push_str(name);
let mut file = File::create(path)?;
file.write_all(content.as_bytes())?;
Ok(())
}
pub fn remove_file(&mut self, name: &str) -> std::io::Result<()> {
let mut path = self.volume.clone();
path.push_str("/");
path.push_str(name);
fs::remove_file(path)?;
Ok(())
}
pub fn remove_dir(&mut self, name: &str) -> std::io::Result<()> {
let mut path = self.volume.clone();
path.push_str("/");
path.push_str(name);
fs::remove_dir_all(path)?;
Ok(())
}
pub fn has_file(&mut self, file: &str, content: &str) -> bool {
let full_path = PathBuf::from(self.volume.clone()).join(file);
has_files(full_path, file, content, self.test_id.clone())
}
/// get the files given by the status command in two vector (staged and not staged)
pub fn get_status(&mut self) -> (Vec<String>, Vec<String>) {
let out = self.run_cmd("status");
let lines: Vec<String> = str::from_utf8(&out.stdout)
.unwrap()
.split("\n")
.map(|s| s.to_owned())
.collect();
let mut staged = vec![];
let mut not_staged = vec![];
let mut in_staged = true;
let mut counter = 0;
for line in lines {
if line.find("not staged").is_some() {
in_staged = false;
counter = 1;
continue;
}
// skip two first line as there are not files
if counter < 2 {
counter += 1;
continue;
}
if line == String::from("") {
continue;
}
if in_staged {
staged.push(line);
} else {
not_staged.push(line);
}
}
return (staged, not_staged);
}
}

View File

@@ -0,0 +1,50 @@
use std::io::{BufReader, BufRead};
use std::fs::File;
use std::path::PathBuf;
/// Check that `full_path` exists and that its first line equals `content`.
///
/// Only the first readable line is compared (test fixtures are single-line
/// files); prints the test id and a diagnostic on mismatch or missing file.
#[cfg(test)]
pub fn has_files(full_path: PathBuf, file: &str, content: &str, test_id: String) -> bool
{
    if !full_path.exists() {
        println!("id: {}", test_id);
        eprintln!("File '{}' doesn't exists", file);
        return false;
    }
    let f = File::open(full_path).unwrap();
    // The original loop returned on the first Ok line either way, so a
    // single `.next()` is equivalent; an empty file counts as a match.
    if let Some(Ok(line)) = BufReader::new(f).lines().next() {
        if line != content {
            println!("id: {}", test_id);
            eprintln!("File '{}' is not equal, {} != {}", file, line, content);
            return false;
        }
    }
    true
}
/// Check that `full_path` does NOT exist; prints the test id and a
/// diagnostic naming `file` when it unexpectedly does.
#[cfg(test)]
pub fn has_not_file(full_path: PathBuf, file: &str, test_id: String) -> bool
{
    if full_path.exists() {
        // test_id is not used again: no clone needed.
        println!("id: {}", test_id);
        eprintln!("File '{}' exists but it shouldn't", file);
        return false;
    }
    true
}
/// Check that the directory at `full_path` does NOT exist; prints the test
/// id and a diagnostic naming `dir` when it unexpectedly does.
#[cfg(test)]
pub fn has_not_dir(full_path: PathBuf, dir: &str, test_id: String) -> bool
{
    if full_path.exists() {
        // test_id is not used again: no clone needed.
        println!("id: {}", test_id);
        eprintln!("Dir '{}' exists but it shouldn't", dir);
        return false;
    }
    true
}

123
tests/utils/server.rs Normal file
View File

@@ -0,0 +1,123 @@
use std::process::Command;
use std::os::unix::fs::PermissionsExt;
use std::fs::{self, File, Permissions};
use std::io::Write;
use std::env;
use std::path::PathBuf;
use super::files_utils::{self, has_files};
/// Test fixture driving the server side of a sync test: a directory inside
/// the nextcloud data volume, rescanned via `occ files:scan` in docker.
#[cfg(test)]
pub struct ServerTest {
    // Nextcloud user owning the files volume (always "admin" — see new()).
    user: String,
    // Absolute path of the volume this fixture operates on; scoped to the
    // per-test directory by init().
    volume: PathBuf,
    // Unique id naming this test's working directory.
    pub test_id: String
}
#[cfg(test)]
impl ServerTest {
    /// Build a fixture rooted at tests/data/admin/files under the current
    /// working directory, for the "admin" nextcloud user.
    pub fn new(id: String) -> Self {
        let volume = env::current_dir().unwrap().join("tests/data/admin/files");
        ServerTest {
            user: String::from("admin"),
            volume,
            test_id: id
        }
    }

    /// Create this test's directory, scope `volume` to it, and make
    /// nextcloud rescan the volume root.
    pub fn init(&mut self) -> &mut ServerTest {
        self.add_dir(&self.test_id.clone());
        self.volume = self.volume.join(self.test_id.clone());
        self.sync_root();
        self
    }

    /// Remove this test's directory and rescan the volume root.
    pub fn clean(&mut self) -> &mut ServerTest {
        self.remove_dir(self.test_id.clone());
        self.sync_root();
        self
    }

    /// Create a directory inside the volume, make it world-writable so the
    /// nextcloud container can use it, then trigger a rescan of the test dir.
    pub fn add_dir(&mut self, path: &str) -> &mut ServerTest {
        let full_path = self.volume.join(path);
        match fs::create_dir(&full_path) {
            Ok(_) => {
                // Set permissions to 777 to allow nextcloud to access it
                // (workaround avoiding to set group and owner to www-data)
                if let Err(e) = fs::set_permissions(&full_path, Permissions::from_mode(0o777)) {
                    eprintln!("Error setting permissions: {}", e);
                }
            },
            Err(e) => eprintln!("Error creating directory: {}", e),
        }
        // do not sync test directory when creating it (test ids end with
        // "_nextsync"; init() rescans the root afterwards)
        if !path.ends_with("_nextsync") {
            self.sync_test();
        }
        self
    }

    /// Write `content` to a file inside the volume and rescan the test dir.
    pub fn add_file(&mut self, name: &str, content: &str) -> std::io::Result<()> {
        let mut file = File::create(self.volume.join(name))?;
        file.write_all(content.as_bytes())?;
        self.sync_test();
        Ok(())
    }

    /// Recursively delete a directory inside the volume (best effort) and
    /// rescan the test dir.
    pub fn remove_dir(&mut self, path: String) -> &mut ServerTest {
        let _ = fs::remove_dir_all(self.volume.join(path));
        self.sync_test();
        self
    }

    /// Rescan the whole user volume.
    fn sync_root(&self) -> &Self {
        self.sync("")
    }

    /// Rescan only this test's directory.
    fn sync_test(&self) -> &Self {
        let test_id = self.test_id.clone();
        self.sync(&test_id)
    }

    /// Run `occ files:scan` inside the nextcloud docker container so the
    /// server notices files changed directly on disk.
    fn sync(&self, path: &str) -> &Self {
        let nextcloud_docker = "master-nextcloud-1";
        let args = format!("exec -t --user www-data {} /var/www/html/occ files:scan --path=/{}/files/{}", nextcloud_docker, &self.user, path);
        let _output = Command::new("docker")
            .args(args.split(" "))
            .output()
            .expect("Could not execute docker exec command");
        self
    }

    /// True when `file` exists under the test dir with exactly `content`.
    pub fn has_file(&mut self, file: &str, content: &str) -> bool {
        has_files(self.volume.join(file), file, content, self.test_id.clone())
    }

    /// True when `file` does not exist under the test dir.
    pub fn has_not_file(&mut self, file: &str) -> bool {
        files_utils::has_not_file(self.volume.join(file), file, self.test_id.clone())
    }

    /// True when `dir` does not exist under the test dir.
    pub fn has_not_dir(&mut self, dir: &str) -> bool {
        // Fix: delegate to the directory helper (this previously called
        // has_not_file, which mislabeled the diagnostic) and drop the
        // committed dbg! call.
        files_utils::has_not_dir(self.volume.join(dir), dir, self.test_id.clone())
    }
}

View File

@@ -0,0 +1,27 @@
use super::client::ClientTest;
/// Assert that no line in `lines` contains the substring `str`.
///
/// Prints the offending line before panicking so failures are debuggable.
#[cfg(test)]
pub fn lines_should_not_contains(lines: Vec<String>, str: &str) {
    for line in lines {
        // `contains` replaces the un-idiomatic `find(..).is_some()`.
        if line.contains(str) {
            eprintln!("'{}' found in '{}'", str, line);
        }
        assert!(!line.contains(str));
    }
}
/// Assert that both the staged and not-staged status lists are empty,
/// printing the test id and the offending count before failing.
#[cfg(test)]
pub fn status_should_be_empty(client: &mut ClientTest) {
    let (staged, not_staged) = client.get_status();
    if !staged.is_empty() {
        eprintln!("id: {}", client.test_id);
        eprintln!("Staged should be empty but has '{}' line(s)", staged.len());
        assert!(staged.is_empty());
    }
    // Bug fix: this guard previously re-checked `staged`, so a dirty
    // not-staged list was never reported (nor asserted on).
    if !not_staged.is_empty() {
        eprintln!("id: {}", client.test_id);
        eprintln!("Not Staged should be empty but has '{}' line(s)", not_staged.len());
        assert!(not_staged.is_empty());
    }
}

31
tests/utils/utils.rs Normal file
View File

@@ -0,0 +1,31 @@
use rand::{distributions::Alphanumeric, Rng};
use super::client::ClientTest;
use super::server::ServerTest;
/// Generate a unique test id: 7 random alphanumeric chars plus the
/// "_nextsync" suffix (the suffix keeps ServerTest::add_dir from syncing
/// the test directory as it is created).
#[cfg(test)]
pub fn get_random_test_id() -> String {
    let mut id: String = rand::thread_rng()
        .sample_iter(&Alphanumeric)
        .take(7)
        .map(char::from)
        .collect();
    id.push_str("_nextsync");
    // `id` is already owned: the former `id.to_owned()` cloned it for nothing.
    id
}
/// Spin up matching server and client fixtures sharing one random test id.
#[cfg(test)]
pub fn init_test() -> (ClientTest, ServerTest) {
    let test_id = get_random_test_id();
    // The server directory must exist before the client initializes
    // against it.
    let mut server = ServerTest::new(test_id.clone());
    server.init();
    (ClientTest::new(test_id).init(), server)
}
/// Tear down both fixtures at the end of a test: the client first, then
/// the server directory (which also triggers a rescan of the volume root).
#[cfg(test)]
pub fn clean_test(client: ClientTest, server: &mut ServerTest) {
    client.clean();
    server.clean();
}