Compare commits
98 Commits
a1aeb65600
...
fix-update
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
3420634bea | ||
|
|
1aa02a24af | ||
|
|
5e43800d6c | ||
|
|
dc7df00ac9 | ||
|
|
a1b9cde71a | ||
|
|
7180647d26 | ||
|
|
d5891a1a93 | ||
|
|
3207391fdb | ||
|
|
fa65b6b071 | ||
|
|
34dee1ceb6 | ||
|
|
fe628ffc9f | ||
|
|
6b7a82bec6 | ||
|
|
fdcd4633e5 | ||
|
|
06bb51476b | ||
|
|
d8b2116aeb | ||
|
|
8ed86a05ea | ||
|
|
7951ad0520 | ||
|
|
faf7341525 | ||
|
|
642c358737 | ||
|
|
e67082b85a | ||
|
|
211e3702a3 | ||
|
|
a2f746d7f6 | ||
|
|
69614b0c9f | ||
|
|
a5c5f4a713 | ||
|
|
eaacff0e55 | ||
|
|
287953c086 | ||
|
|
6a11bb494b | ||
|
|
1c60560c6e | ||
|
|
c6534cfd40 | ||
|
|
7719e27fe8 | ||
|
|
fc8e976c9c | ||
|
|
53b103af9e | ||
|
|
81c24b5e3c | ||
|
|
22b9351862 | ||
|
|
0c925bc4f4 | ||
|
|
d34b9bab5e | ||
|
|
56234eaa3d | ||
|
|
fd477a8139 | ||
|
|
559316e756 | ||
|
|
f4a905c57f | ||
|
|
c6cf8a9730 | ||
|
|
f6db6992a0 | ||
|
|
908ead5b11 | ||
|
|
9ea1d01c27 | ||
|
|
07f6405b26 | ||
|
|
dadf00f4a5 | ||
|
|
a35c7b20d8 | ||
|
|
863e3bd68a | ||
|
|
57647e5df2 | ||
|
|
41c4796555 | ||
|
|
aced8b992a | ||
|
|
d323ae3070 | ||
|
|
d476622305 | ||
|
|
498fada9ec | ||
|
|
f64d719b31 | ||
|
|
dcf137667b | ||
|
|
5b46b1e2f1 | ||
|
|
4b12edbe5c | ||
|
|
16dbd25168 | ||
|
|
91a29480df | ||
|
|
ce047eba12 | ||
|
|
94220be935 | ||
|
|
d5097727cb | ||
|
|
cb43a46456 | ||
|
|
4c34df7cfe | ||
|
|
29def4967c | ||
|
|
2775c77c55 | ||
|
|
4e20ec94f9 | ||
|
|
f01983b29d | ||
|
|
0832100d83 | ||
|
|
30004ebd8b | ||
|
|
dfd42389f3 | ||
|
|
80d497d47c | ||
|
|
70fb733b05 | ||
|
|
60e0bf76a0 | ||
|
|
b4cb78c676 | ||
|
|
0922066baa | ||
|
|
2d4905f506 | ||
|
|
5bc9ef0035 | ||
|
|
f56dcd30b8 | ||
|
|
ddf2169950 | ||
|
|
5ccce48381 | ||
|
|
4ec389b6cc | ||
|
|
ed599d1399 | ||
|
|
8173bbc7d5 | ||
|
|
95bd5e7366 | ||
|
|
b08e6d3898 | ||
|
|
723ceb2655 | ||
|
|
2a0fe6d1d1 | ||
|
|
45f8d486d8 | ||
|
|
f812ad411e | ||
|
|
601f176198 | ||
|
|
da3d605baa | ||
|
|
b74c5c176b | ||
|
|
d3592a5209 | ||
|
|
39d1032c14 | ||
|
|
20926514d2 | ||
|
|
b07e3062b7 |
15
.gitignore
vendored
15
.gitignore
vendored
@@ -1,7 +1,10 @@
|
|||||||
|
*
|
||||||
|
!/**/
|
||||||
|
!*.rs
|
||||||
|
!.gitignore
|
||||||
|
!README.md
|
||||||
|
!LICENSE
|
||||||
|
|
||||||
target
|
target
|
||||||
*.test
|
tests/nextcloud-docker-dev
|
||||||
.env
|
tests/data
|
||||||
todo
|
|
||||||
.nextsync
|
|
||||||
.nextsyncignore
|
|
||||||
test
|
|
||||||
|
|||||||
829
Cargo.lock
generated
829
Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
25
Cargo.toml
25
Cargo.toml
@@ -6,14 +6,25 @@ edition = "2021"
|
|||||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||||
|
|
||||||
[dependencies]
|
[dependencies]
|
||||||
reqwest = { version = "0.11", features = ["blocking", "json", "multipart"] }
|
rustc-serialize="0.3.25"
|
||||||
tokio = { version = "1", features = ["full"] }
|
reqwest = { version = "0.12", features = ["stream", "json", "multipart"] }
|
||||||
|
tokio = { version = "1.37", features = ["full"] }
|
||||||
dotenv ="0.15.0"
|
dotenv ="0.15.0"
|
||||||
clap = "2.33"
|
clap = "4.5.4"
|
||||||
rust-crypto = "0.2.36"
|
rust-crypto = "0.2.36"
|
||||||
colored = "2.0.0"
|
colored = "2.1.0"
|
||||||
xml-rs = "0.8.0"
|
xml-rs = "0.8.19"
|
||||||
regex = "1.8.3"
|
regex = "1.10.4"
|
||||||
lazy_static = "1.4.0"
|
lazy_static = "1.4.0"
|
||||||
glob = "0.3.1"
|
glob = "0.3.1"
|
||||||
chrono = "0.4.26"
|
textwrap = "0.16.1"
|
||||||
|
chrono = "0.4.37"
|
||||||
|
indicatif = "0.17.8"
|
||||||
|
md5 = "0.7.0"
|
||||||
|
futures-util = "0.3.30"
|
||||||
|
rpassword = "7.3.1"
|
||||||
|
rand = "0.8.5"
|
||||||
|
tempfile = "3.10.1"
|
||||||
|
|
||||||
|
[profile.release]
|
||||||
|
debug = true
|
||||||
|
|||||||
674
LICENSE
Normal file
674
LICENSE
Normal file
@@ -0,0 +1,674 @@
|
|||||||
|
GNU GENERAL PUBLIC LICENSE
|
||||||
|
Version 3, 29 June 2007
|
||||||
|
|
||||||
|
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
|
||||||
|
Everyone is permitted to copy and distribute verbatim copies
|
||||||
|
of this license document, but changing it is not allowed.
|
||||||
|
|
||||||
|
Preamble
|
||||||
|
|
||||||
|
The GNU General Public License is a free, copyleft license for
|
||||||
|
software and other kinds of works.
|
||||||
|
|
||||||
|
The licenses for most software and other practical works are designed
|
||||||
|
to take away your freedom to share and change the works. By contrast,
|
||||||
|
the GNU General Public License is intended to guarantee your freedom to
|
||||||
|
share and change all versions of a program--to make sure it remains free
|
||||||
|
software for all its users. We, the Free Software Foundation, use the
|
||||||
|
GNU General Public License for most of our software; it applies also to
|
||||||
|
any other work released this way by its authors. You can apply it to
|
||||||
|
your programs, too.
|
||||||
|
|
||||||
|
When we speak of free software, we are referring to freedom, not
|
||||||
|
price. Our General Public Licenses are designed to make sure that you
|
||||||
|
have the freedom to distribute copies of free software (and charge for
|
||||||
|
them if you wish), that you receive source code or can get it if you
|
||||||
|
want it, that you can change the software or use pieces of it in new
|
||||||
|
free programs, and that you know you can do these things.
|
||||||
|
|
||||||
|
To protect your rights, we need to prevent others from denying you
|
||||||
|
these rights or asking you to surrender the rights. Therefore, you have
|
||||||
|
certain responsibilities if you distribute copies of the software, or if
|
||||||
|
you modify it: responsibilities to respect the freedom of others.
|
||||||
|
|
||||||
|
For example, if you distribute copies of such a program, whether
|
||||||
|
gratis or for a fee, you must pass on to the recipients the same
|
||||||
|
freedoms that you received. You must make sure that they, too, receive
|
||||||
|
or can get the source code. And you must show them these terms so they
|
||||||
|
know their rights.
|
||||||
|
|
||||||
|
Developers that use the GNU GPL protect your rights with two steps:
|
||||||
|
(1) assert copyright on the software, and (2) offer you this License
|
||||||
|
giving you legal permission to copy, distribute and/or modify it.
|
||||||
|
|
||||||
|
For the developers' and authors' protection, the GPL clearly explains
|
||||||
|
that there is no warranty for this free software. For both users' and
|
||||||
|
authors' sake, the GPL requires that modified versions be marked as
|
||||||
|
changed, so that their problems will not be attributed erroneously to
|
||||||
|
authors of previous versions.
|
||||||
|
|
||||||
|
Some devices are designed to deny users access to install or run
|
||||||
|
modified versions of the software inside them, although the manufacturer
|
||||||
|
can do so. This is fundamentally incompatible with the aim of
|
||||||
|
protecting users' freedom to change the software. The systematic
|
||||||
|
pattern of such abuse occurs in the area of products for individuals to
|
||||||
|
use, which is precisely where it is most unacceptable. Therefore, we
|
||||||
|
have designed this version of the GPL to prohibit the practice for those
|
||||||
|
products. If such problems arise substantially in other domains, we
|
||||||
|
stand ready to extend this provision to those domains in future versions
|
||||||
|
of the GPL, as needed to protect the freedom of users.
|
||||||
|
|
||||||
|
Finally, every program is threatened constantly by software patents.
|
||||||
|
States should not allow patents to restrict development and use of
|
||||||
|
software on general-purpose computers, but in those that do, we wish to
|
||||||
|
avoid the special danger that patents applied to a free program could
|
||||||
|
make it effectively proprietary. To prevent this, the GPL assures that
|
||||||
|
patents cannot be used to render the program non-free.
|
||||||
|
|
||||||
|
The precise terms and conditions for copying, distribution and
|
||||||
|
modification follow.
|
||||||
|
|
||||||
|
TERMS AND CONDITIONS
|
||||||
|
|
||||||
|
0. Definitions.
|
||||||
|
|
||||||
|
"This License" refers to version 3 of the GNU General Public License.
|
||||||
|
|
||||||
|
"Copyright" also means copyright-like laws that apply to other kinds of
|
||||||
|
works, such as semiconductor masks.
|
||||||
|
|
||||||
|
"The Program" refers to any copyrightable work licensed under this
|
||||||
|
License. Each licensee is addressed as "you". "Licensees" and
|
||||||
|
"recipients" may be individuals or organizations.
|
||||||
|
|
||||||
|
To "modify" a work means to copy from or adapt all or part of the work
|
||||||
|
in a fashion requiring copyright permission, other than the making of an
|
||||||
|
exact copy. The resulting work is called a "modified version" of the
|
||||||
|
earlier work or a work "based on" the earlier work.
|
||||||
|
|
||||||
|
A "covered work" means either the unmodified Program or a work based
|
||||||
|
on the Program.
|
||||||
|
|
||||||
|
To "propagate" a work means to do anything with it that, without
|
||||||
|
permission, would make you directly or secondarily liable for
|
||||||
|
infringement under applicable copyright law, except executing it on a
|
||||||
|
computer or modifying a private copy. Propagation includes copying,
|
||||||
|
distribution (with or without modification), making available to the
|
||||||
|
public, and in some countries other activities as well.
|
||||||
|
|
||||||
|
To "convey" a work means any kind of propagation that enables other
|
||||||
|
parties to make or receive copies. Mere interaction with a user through
|
||||||
|
a computer network, with no transfer of a copy, is not conveying.
|
||||||
|
|
||||||
|
An interactive user interface displays "Appropriate Legal Notices"
|
||||||
|
to the extent that it includes a convenient and prominently visible
|
||||||
|
feature that (1) displays an appropriate copyright notice, and (2)
|
||||||
|
tells the user that there is no warranty for the work (except to the
|
||||||
|
extent that warranties are provided), that licensees may convey the
|
||||||
|
work under this License, and how to view a copy of this License. If
|
||||||
|
the interface presents a list of user commands or options, such as a
|
||||||
|
menu, a prominent item in the list meets this criterion.
|
||||||
|
|
||||||
|
1. Source Code.
|
||||||
|
|
||||||
|
The "source code" for a work means the preferred form of the work
|
||||||
|
for making modifications to it. "Object code" means any non-source
|
||||||
|
form of a work.
|
||||||
|
|
||||||
|
A "Standard Interface" means an interface that either is an official
|
||||||
|
standard defined by a recognized standards body, or, in the case of
|
||||||
|
interfaces specified for a particular programming language, one that
|
||||||
|
is widely used among developers working in that language.
|
||||||
|
|
||||||
|
The "System Libraries" of an executable work include anything, other
|
||||||
|
than the work as a whole, that (a) is included in the normal form of
|
||||||
|
packaging a Major Component, but which is not part of that Major
|
||||||
|
Component, and (b) serves only to enable use of the work with that
|
||||||
|
Major Component, or to implement a Standard Interface for which an
|
||||||
|
implementation is available to the public in source code form. A
|
||||||
|
"Major Component", in this context, means a major essential component
|
||||||
|
(kernel, window system, and so on) of the specific operating system
|
||||||
|
(if any) on which the executable work runs, or a compiler used to
|
||||||
|
produce the work, or an object code interpreter used to run it.
|
||||||
|
|
||||||
|
The "Corresponding Source" for a work in object code form means all
|
||||||
|
the source code needed to generate, install, and (for an executable
|
||||||
|
work) run the object code and to modify the work, including scripts to
|
||||||
|
control those activities. However, it does not include the work's
|
||||||
|
System Libraries, or general-purpose tools or generally available free
|
||||||
|
programs which are used unmodified in performing those activities but
|
||||||
|
which are not part of the work. For example, Corresponding Source
|
||||||
|
includes interface definition files associated with source files for
|
||||||
|
the work, and the source code for shared libraries and dynamically
|
||||||
|
linked subprograms that the work is specifically designed to require,
|
||||||
|
such as by intimate data communication or control flow between those
|
||||||
|
subprograms and other parts of the work.
|
||||||
|
|
||||||
|
The Corresponding Source need not include anything that users
|
||||||
|
can regenerate automatically from other parts of the Corresponding
|
||||||
|
Source.
|
||||||
|
|
||||||
|
The Corresponding Source for a work in source code form is that
|
||||||
|
same work.
|
||||||
|
|
||||||
|
2. Basic Permissions.
|
||||||
|
|
||||||
|
All rights granted under this License are granted for the term of
|
||||||
|
copyright on the Program, and are irrevocable provided the stated
|
||||||
|
conditions are met. This License explicitly affirms your unlimited
|
||||||
|
permission to run the unmodified Program. The output from running a
|
||||||
|
covered work is covered by this License only if the output, given its
|
||||||
|
content, constitutes a covered work. This License acknowledges your
|
||||||
|
rights of fair use or other equivalent, as provided by copyright law.
|
||||||
|
|
||||||
|
You may make, run and propagate covered works that you do not
|
||||||
|
convey, without conditions so long as your license otherwise remains
|
||||||
|
in force. You may convey covered works to others for the sole purpose
|
||||||
|
of having them make modifications exclusively for you, or provide you
|
||||||
|
with facilities for running those works, provided that you comply with
|
||||||
|
the terms of this License in conveying all material for which you do
|
||||||
|
not control copyright. Those thus making or running the covered works
|
||||||
|
for you must do so exclusively on your behalf, under your direction
|
||||||
|
and control, on terms that prohibit them from making any copies of
|
||||||
|
your copyrighted material outside their relationship with you.
|
||||||
|
|
||||||
|
Conveying under any other circumstances is permitted solely under
|
||||||
|
the conditions stated below. Sublicensing is not allowed; section 10
|
||||||
|
makes it unnecessary.
|
||||||
|
|
||||||
|
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
|
||||||
|
|
||||||
|
No covered work shall be deemed part of an effective technological
|
||||||
|
measure under any applicable law fulfilling obligations under article
|
||||||
|
11 of the WIPO copyright treaty adopted on 20 December 1996, or
|
||||||
|
similar laws prohibiting or restricting circumvention of such
|
||||||
|
measures.
|
||||||
|
|
||||||
|
When you convey a covered work, you waive any legal power to forbid
|
||||||
|
circumvention of technological measures to the extent such circumvention
|
||||||
|
is effected by exercising rights under this License with respect to
|
||||||
|
the covered work, and you disclaim any intention to limit operation or
|
||||||
|
modification of the work as a means of enforcing, against the work's
|
||||||
|
users, your or third parties' legal rights to forbid circumvention of
|
||||||
|
technological measures.
|
||||||
|
|
||||||
|
4. Conveying Verbatim Copies.
|
||||||
|
|
||||||
|
You may convey verbatim copies of the Program's source code as you
|
||||||
|
receive it, in any medium, provided that you conspicuously and
|
||||||
|
appropriately publish on each copy an appropriate copyright notice;
|
||||||
|
keep intact all notices stating that this License and any
|
||||||
|
non-permissive terms added in accord with section 7 apply to the code;
|
||||||
|
keep intact all notices of the absence of any warranty; and give all
|
||||||
|
recipients a copy of this License along with the Program.
|
||||||
|
|
||||||
|
You may charge any price or no price for each copy that you convey,
|
||||||
|
and you may offer support or warranty protection for a fee.
|
||||||
|
|
||||||
|
5. Conveying Modified Source Versions.
|
||||||
|
|
||||||
|
You may convey a work based on the Program, or the modifications to
|
||||||
|
produce it from the Program, in the form of source code under the
|
||||||
|
terms of section 4, provided that you also meet all of these conditions:
|
||||||
|
|
||||||
|
a) The work must carry prominent notices stating that you modified
|
||||||
|
it, and giving a relevant date.
|
||||||
|
|
||||||
|
b) The work must carry prominent notices stating that it is
|
||||||
|
released under this License and any conditions added under section
|
||||||
|
7. This requirement modifies the requirement in section 4 to
|
||||||
|
"keep intact all notices".
|
||||||
|
|
||||||
|
c) You must license the entire work, as a whole, under this
|
||||||
|
License to anyone who comes into possession of a copy. This
|
||||||
|
License will therefore apply, along with any applicable section 7
|
||||||
|
additional terms, to the whole of the work, and all its parts,
|
||||||
|
regardless of how they are packaged. This License gives no
|
||||||
|
permission to license the work in any other way, but it does not
|
||||||
|
invalidate such permission if you have separately received it.
|
||||||
|
|
||||||
|
d) If the work has interactive user interfaces, each must display
|
||||||
|
Appropriate Legal Notices; however, if the Program has interactive
|
||||||
|
interfaces that do not display Appropriate Legal Notices, your
|
||||||
|
work need not make them do so.
|
||||||
|
|
||||||
|
A compilation of a covered work with other separate and independent
|
||||||
|
works, which are not by their nature extensions of the covered work,
|
||||||
|
and which are not combined with it such as to form a larger program,
|
||||||
|
in or on a volume of a storage or distribution medium, is called an
|
||||||
|
"aggregate" if the compilation and its resulting copyright are not
|
||||||
|
used to limit the access or legal rights of the compilation's users
|
||||||
|
beyond what the individual works permit. Inclusion of a covered work
|
||||||
|
in an aggregate does not cause this License to apply to the other
|
||||||
|
parts of the aggregate.
|
||||||
|
|
||||||
|
6. Conveying Non-Source Forms.
|
||||||
|
|
||||||
|
You may convey a covered work in object code form under the terms
|
||||||
|
of sections 4 and 5, provided that you also convey the
|
||||||
|
machine-readable Corresponding Source under the terms of this License,
|
||||||
|
in one of these ways:
|
||||||
|
|
||||||
|
a) Convey the object code in, or embodied in, a physical product
|
||||||
|
(including a physical distribution medium), accompanied by the
|
||||||
|
Corresponding Source fixed on a durable physical medium
|
||||||
|
customarily used for software interchange.
|
||||||
|
|
||||||
|
b) Convey the object code in, or embodied in, a physical product
|
||||||
|
(including a physical distribution medium), accompanied by a
|
||||||
|
written offer, valid for at least three years and valid for as
|
||||||
|
long as you offer spare parts or customer support for that product
|
||||||
|
model, to give anyone who possesses the object code either (1) a
|
||||||
|
copy of the Corresponding Source for all the software in the
|
||||||
|
product that is covered by this License, on a durable physical
|
||||||
|
medium customarily used for software interchange, for a price no
|
||||||
|
more than your reasonable cost of physically performing this
|
||||||
|
conveying of source, or (2) access to copy the
|
||||||
|
Corresponding Source from a network server at no charge.
|
||||||
|
|
||||||
|
c) Convey individual copies of the object code with a copy of the
|
||||||
|
written offer to provide the Corresponding Source. This
|
||||||
|
alternative is allowed only occasionally and noncommercially, and
|
||||||
|
only if you received the object code with such an offer, in accord
|
||||||
|
with subsection 6b.
|
||||||
|
|
||||||
|
d) Convey the object code by offering access from a designated
|
||||||
|
place (gratis or for a charge), and offer equivalent access to the
|
||||||
|
Corresponding Source in the same way through the same place at no
|
||||||
|
further charge. You need not require recipients to copy the
|
||||||
|
Corresponding Source along with the object code. If the place to
|
||||||
|
copy the object code is a network server, the Corresponding Source
|
||||||
|
may be on a different server (operated by you or a third party)
|
||||||
|
that supports equivalent copying facilities, provided you maintain
|
||||||
|
clear directions next to the object code saying where to find the
|
||||||
|
Corresponding Source. Regardless of what server hosts the
|
||||||
|
Corresponding Source, you remain obligated to ensure that it is
|
||||||
|
available for as long as needed to satisfy these requirements.
|
||||||
|
|
||||||
|
e) Convey the object code using peer-to-peer transmission, provided
|
||||||
|
you inform other peers where the object code and Corresponding
|
||||||
|
Source of the work are being offered to the general public at no
|
||||||
|
charge under subsection 6d.
|
||||||
|
|
||||||
|
A separable portion of the object code, whose source code is excluded
|
||||||
|
from the Corresponding Source as a System Library, need not be
|
||||||
|
included in conveying the object code work.
|
||||||
|
|
||||||
|
A "User Product" is either (1) a "consumer product", which means any
|
||||||
|
tangible personal property which is normally used for personal, family,
|
||||||
|
or household purposes, or (2) anything designed or sold for incorporation
|
||||||
|
into a dwelling. In determining whether a product is a consumer product,
|
||||||
|
doubtful cases shall be resolved in favor of coverage. For a particular
|
||||||
|
product received by a particular user, "normally used" refers to a
|
||||||
|
typical or common use of that class of product, regardless of the status
|
||||||
|
of the particular user or of the way in which the particular user
|
||||||
|
actually uses, or expects or is expected to use, the product. A product
|
||||||
|
is a consumer product regardless of whether the product has substantial
|
||||||
|
commercial, industrial or non-consumer uses, unless such uses represent
|
||||||
|
the only significant mode of use of the product.
|
||||||
|
|
||||||
|
"Installation Information" for a User Product means any methods,
|
||||||
|
procedures, authorization keys, or other information required to install
|
||||||
|
and execute modified versions of a covered work in that User Product from
|
||||||
|
a modified version of its Corresponding Source. The information must
|
||||||
|
suffice to ensure that the continued functioning of the modified object
|
||||||
|
code is in no case prevented or interfered with solely because
|
||||||
|
modification has been made.
|
||||||
|
|
||||||
|
If you convey an object code work under this section in, or with, or
|
||||||
|
specifically for use in, a User Product, and the conveying occurs as
|
||||||
|
part of a transaction in which the right of possession and use of the
|
||||||
|
User Product is transferred to the recipient in perpetuity or for a
|
||||||
|
fixed term (regardless of how the transaction is characterized), the
|
||||||
|
Corresponding Source conveyed under this section must be accompanied
|
||||||
|
by the Installation Information. But this requirement does not apply
|
||||||
|
if neither you nor any third party retains the ability to install
|
||||||
|
modified object code on the User Product (for example, the work has
|
||||||
|
been installed in ROM).
|
||||||
|
|
||||||
|
The requirement to provide Installation Information does not include a
|
||||||
|
requirement to continue to provide support service, warranty, or updates
|
||||||
|
for a work that has been modified or installed by the recipient, or for
|
||||||
|
the User Product in which it has been modified or installed. Access to a
|
||||||
|
network may be denied when the modification itself materially and
|
||||||
|
adversely affects the operation of the network or violates the rules and
|
||||||
|
protocols for communication across the network.
|
||||||
|
|
||||||
|
Corresponding Source conveyed, and Installation Information provided,
|
||||||
|
in accord with this section must be in a format that is publicly
|
||||||
|
documented (and with an implementation available to the public in
|
||||||
|
source code form), and must require no special password or key for
|
||||||
|
unpacking, reading or copying.
|
||||||
|
|
||||||
|
7. Additional Terms.
|
||||||
|
|
||||||
|
"Additional permissions" are terms that supplement the terms of this
|
||||||
|
License by making exceptions from one or more of its conditions.
|
||||||
|
Additional permissions that are applicable to the entire Program shall
|
||||||
|
be treated as though they were included in this License, to the extent
|
||||||
|
that they are valid under applicable law. If additional permissions
|
||||||
|
apply only to part of the Program, that part may be used separately
|
||||||
|
under those permissions, but the entire Program remains governed by
|
||||||
|
this License without regard to the additional permissions.
|
||||||
|
|
||||||
|
When you convey a copy of a covered work, you may at your option
|
||||||
|
remove any additional permissions from that copy, or from any part of
|
||||||
|
it. (Additional permissions may be written to require their own
|
||||||
|
removal in certain cases when you modify the work.) You may place
|
||||||
|
additional permissions on material, added by you to a covered work,
|
||||||
|
for which you have or can give appropriate copyright permission.
|
||||||
|
|
||||||
|
Notwithstanding any other provision of this License, for material you
|
||||||
|
add to a covered work, you may (if authorized by the copyright holders of
|
||||||
|
that material) supplement the terms of this License with terms:
|
||||||
|
|
||||||
|
a) Disclaiming warranty or limiting liability differently from the
|
||||||
|
terms of sections 15 and 16 of this License; or
|
||||||
|
|
||||||
|
b) Requiring preservation of specified reasonable legal notices or
|
||||||
|
author attributions in that material or in the Appropriate Legal
|
||||||
|
Notices displayed by works containing it; or
|
||||||
|
|
||||||
|
c) Prohibiting misrepresentation of the origin of that material, or
|
||||||
|
requiring that modified versions of such material be marked in
|
||||||
|
reasonable ways as different from the original version; or
|
||||||
|
|
||||||
|
d) Limiting the use for publicity purposes of names of licensors or
|
||||||
|
authors of the material; or
|
||||||
|
|
||||||
|
e) Declining to grant rights under trademark law for use of some
|
||||||
|
trade names, trademarks, or service marks; or
|
||||||
|
|
||||||
|
f) Requiring indemnification of licensors and authors of that
|
||||||
|
material by anyone who conveys the material (or modified versions of
|
||||||
|
it) with contractual assumptions of liability to the recipient, for
|
||||||
|
any liability that these contractual assumptions directly impose on
|
||||||
|
those licensors and authors.
|
||||||
|
|
||||||
|
All other non-permissive additional terms are considered "further
|
||||||
|
restrictions" within the meaning of section 10. If the Program as you
|
||||||
|
received it, or any part of it, contains a notice stating that it is
|
||||||
|
governed by this License along with a term that is a further
|
||||||
|
restriction, you may remove that term. If a license document contains
|
||||||
|
a further restriction but permits relicensing or conveying under this
|
||||||
|
License, you may add to a covered work material governed by the terms
|
||||||
|
of that license document, provided that the further restriction does
|
||||||
|
not survive such relicensing or conveying.
|
||||||
|
|
||||||
|
If you add terms to a covered work in accord with this section, you
|
||||||
|
must place, in the relevant source files, a statement of the
|
||||||
|
additional terms that apply to those files, or a notice indicating
|
||||||
|
where to find the applicable terms.
|
||||||
|
|
||||||
|
Additional terms, permissive or non-permissive, may be stated in the
|
||||||
|
form of a separately written license, or stated as exceptions;
|
||||||
|
the above requirements apply either way.
|
||||||
|
|
||||||
|
8. Termination.
|
||||||
|
|
||||||
|
You may not propagate or modify a covered work except as expressly
|
||||||
|
provided under this License. Any attempt otherwise to propagate or
|
||||||
|
modify it is void, and will automatically terminate your rights under
|
||||||
|
this License (including any patent licenses granted under the third
|
||||||
|
paragraph of section 11).
|
||||||
|
|
||||||
|
However, if you cease all violation of this License, then your
|
||||||
|
license from a particular copyright holder is reinstated (a)
|
||||||
|
provisionally, unless and until the copyright holder explicitly and
|
||||||
|
finally terminates your license, and (b) permanently, if the copyright
|
||||||
|
holder fails to notify you of the violation by some reasonable means
|
||||||
|
prior to 60 days after the cessation.
|
||||||
|
|
||||||
|
Moreover, your license from a particular copyright holder is
|
||||||
|
reinstated permanently if the copyright holder notifies you of the
|
||||||
|
violation by some reasonable means, this is the first time you have
|
||||||
|
received notice of violation of this License (for any work) from that
|
||||||
|
copyright holder, and you cure the violation prior to 30 days after
|
||||||
|
your receipt of the notice.
|
||||||
|
|
||||||
|
Termination of your rights under this section does not terminate the
|
||||||
|
licenses of parties who have received copies or rights from you under
|
||||||
|
this License. If your rights have been terminated and not permanently
|
||||||
|
reinstated, you do not qualify to receive new licenses for the same
|
||||||
|
material under section 10.
|
||||||
|
|
||||||
|
9. Acceptance Not Required for Having Copies.
|
||||||
|
|
||||||
|
You are not required to accept this License in order to receive or
|
||||||
|
run a copy of the Program. Ancillary propagation of a covered work
|
||||||
|
occurring solely as a consequence of using peer-to-peer transmission
|
||||||
|
to receive a copy likewise does not require acceptance. However,
|
||||||
|
nothing other than this License grants you permission to propagate or
|
||||||
|
modify any covered work. These actions infringe copyright if you do
|
||||||
|
not accept this License. Therefore, by modifying or propagating a
|
||||||
|
covered work, you indicate your acceptance of this License to do so.
|
||||||
|
|
||||||
|
10. Automatic Licensing of Downstream Recipients.
|
||||||
|
|
||||||
|
Each time you convey a covered work, the recipient automatically
|
||||||
|
receives a license from the original licensors, to run, modify and
|
||||||
|
propagate that work, subject to this License. You are not responsible
|
||||||
|
for enforcing compliance by third parties with this License.
|
||||||
|
|
||||||
|
An "entity transaction" is a transaction transferring control of an
|
||||||
|
organization, or substantially all assets of one, or subdividing an
|
||||||
|
organization, or merging organizations. If propagation of a covered
|
||||||
|
work results from an entity transaction, each party to that
|
||||||
|
transaction who receives a copy of the work also receives whatever
|
||||||
|
licenses to the work the party's predecessor in interest had or could
|
||||||
|
give under the previous paragraph, plus a right to possession of the
|
||||||
|
Corresponding Source of the work from the predecessor in interest, if
|
||||||
|
the predecessor has it or can get it with reasonable efforts.
|
||||||
|
|
||||||
|
You may not impose any further restrictions on the exercise of the
|
||||||
|
rights granted or affirmed under this License. For example, you may
|
||||||
|
not impose a license fee, royalty, or other charge for exercise of
|
||||||
|
rights granted under this License, and you may not initiate litigation
|
||||||
|
(including a cross-claim or counterclaim in a lawsuit) alleging that
|
||||||
|
any patent claim is infringed by making, using, selling, offering for
|
||||||
|
sale, or importing the Program or any portion of it.
|
||||||
|
|
||||||
|
11. Patents.
|
||||||
|
|
||||||
|
A "contributor" is a copyright holder who authorizes use under this
|
||||||
|
License of the Program or a work on which the Program is based. The
|
||||||
|
work thus licensed is called the contributor's "contributor version".
|
||||||
|
|
||||||
|
A contributor's "essential patent claims" are all patent claims
|
||||||
|
owned or controlled by the contributor, whether already acquired or
|
||||||
|
hereafter acquired, that would be infringed by some manner, permitted
|
||||||
|
by this License, of making, using, or selling its contributor version,
|
||||||
|
but do not include claims that would be infringed only as a
|
||||||
|
consequence of further modification of the contributor version. For
|
||||||
|
purposes of this definition, "control" includes the right to grant
|
||||||
|
patent sublicenses in a manner consistent with the requirements of
|
||||||
|
this License.
|
||||||
|
|
||||||
|
Each contributor grants you a non-exclusive, worldwide, royalty-free
|
||||||
|
patent license under the contributor's essential patent claims, to
|
||||||
|
make, use, sell, offer for sale, import and otherwise run, modify and
|
||||||
|
propagate the contents of its contributor version.
|
||||||
|
|
||||||
|
In the following three paragraphs, a "patent license" is any express
|
||||||
|
agreement or commitment, however denominated, not to enforce a patent
|
||||||
|
(such as an express permission to practice a patent or covenant not to
|
||||||
|
sue for patent infringement). To "grant" such a patent license to a
|
||||||
|
party means to make such an agreement or commitment not to enforce a
|
||||||
|
patent against the party.
|
||||||
|
|
||||||
|
If you convey a covered work, knowingly relying on a patent license,
|
||||||
|
and the Corresponding Source of the work is not available for anyone
|
||||||
|
to copy, free of charge and under the terms of this License, through a
|
||||||
|
publicly available network server or other readily accessible means,
|
||||||
|
then you must either (1) cause the Corresponding Source to be so
|
||||||
|
available, or (2) arrange to deprive yourself of the benefit of the
|
||||||
|
patent license for this particular work, or (3) arrange, in a manner
|
||||||
|
consistent with the requirements of this License, to extend the patent
|
||||||
|
license to downstream recipients. "Knowingly relying" means you have
|
||||||
|
actual knowledge that, but for the patent license, your conveying the
|
||||||
|
covered work in a country, or your recipient's use of the covered work
|
||||||
|
in a country, would infringe one or more identifiable patents in that
|
||||||
|
country that you have reason to believe are valid.
|
||||||
|
|
||||||
|
If, pursuant to or in connection with a single transaction or
|
||||||
|
arrangement, you convey, or propagate by procuring conveyance of, a
|
||||||
|
covered work, and grant a patent license to some of the parties
|
||||||
|
receiving the covered work authorizing them to use, propagate, modify
|
||||||
|
or convey a specific copy of the covered work, then the patent license
|
||||||
|
you grant is automatically extended to all recipients of the covered
|
||||||
|
work and works based on it.
|
||||||
|
|
||||||
|
A patent license is "discriminatory" if it does not include within
|
||||||
|
the scope of its coverage, prohibits the exercise of, or is
|
||||||
|
conditioned on the non-exercise of one or more of the rights that are
|
||||||
|
specifically granted under this License. You may not convey a covered
|
||||||
|
work if you are a party to an arrangement with a third party that is
|
||||||
|
in the business of distributing software, under which you make payment
|
||||||
|
to the third party based on the extent of your activity of conveying
|
||||||
|
the work, and under which the third party grants, to any of the
|
||||||
|
parties who would receive the covered work from you, a discriminatory
|
||||||
|
patent license (a) in connection with copies of the covered work
|
||||||
|
conveyed by you (or copies made from those copies), or (b) primarily
|
||||||
|
for and in connection with specific products or compilations that
|
||||||
|
contain the covered work, unless you entered into that arrangement,
|
||||||
|
or that patent license was granted, prior to 28 March 2007.
|
||||||
|
|
||||||
|
Nothing in this License shall be construed as excluding or limiting
|
||||||
|
any implied license or other defenses to infringement that may
|
||||||
|
otherwise be available to you under applicable patent law.
|
||||||
|
|
||||||
|
12. No Surrender of Others' Freedom.
|
||||||
|
|
||||||
|
If conditions are imposed on you (whether by court order, agreement or
|
||||||
|
otherwise) that contradict the conditions of this License, they do not
|
||||||
|
excuse you from the conditions of this License. If you cannot convey a
|
||||||
|
covered work so as to satisfy simultaneously your obligations under this
|
||||||
|
License and any other pertinent obligations, then as a consequence you may
|
||||||
|
not convey it at all. For example, if you agree to terms that obligate you
|
||||||
|
to collect a royalty for further conveying from those to whom you convey
|
||||||
|
the Program, the only way you could satisfy both those terms and this
|
||||||
|
License would be to refrain entirely from conveying the Program.
|
||||||
|
|
||||||
|
13. Use with the GNU Affero General Public License.
|
||||||
|
|
||||||
|
Notwithstanding any other provision of this License, you have
|
||||||
|
permission to link or combine any covered work with a work licensed
|
||||||
|
under version 3 of the GNU Affero General Public License into a single
|
||||||
|
combined work, and to convey the resulting work. The terms of this
|
||||||
|
License will continue to apply to the part which is the covered work,
|
||||||
|
but the special requirements of the GNU Affero General Public License,
|
||||||
|
section 13, concerning interaction through a network will apply to the
|
||||||
|
combination as such.
|
||||||
|
|
||||||
|
14. Revised Versions of this License.
|
||||||
|
|
||||||
|
The Free Software Foundation may publish revised and/or new versions of
|
||||||
|
the GNU General Public License from time to time. Such new versions will
|
||||||
|
be similar in spirit to the present version, but may differ in detail to
|
||||||
|
address new problems or concerns.
|
||||||
|
|
||||||
|
Each version is given a distinguishing version number. If the
|
||||||
|
Program specifies that a certain numbered version of the GNU General
|
||||||
|
Public License "or any later version" applies to it, you have the
|
||||||
|
option of following the terms and conditions either of that numbered
|
||||||
|
version or of any later version published by the Free Software
|
||||||
|
Foundation. If the Program does not specify a version number of the
|
||||||
|
GNU General Public License, you may choose any version ever published
|
||||||
|
by the Free Software Foundation.
|
||||||
|
|
||||||
|
If the Program specifies that a proxy can decide which future
|
||||||
|
versions of the GNU General Public License can be used, that proxy's
|
||||||
|
public statement of acceptance of a version permanently authorizes you
|
||||||
|
to choose that version for the Program.
|
||||||
|
|
||||||
|
Later license versions may give you additional or different
|
||||||
|
permissions. However, no additional obligations are imposed on any
|
||||||
|
author or copyright holder as a result of your choosing to follow a
|
||||||
|
later version.
|
||||||
|
|
||||||
|
15. Disclaimer of Warranty.
|
||||||
|
|
||||||
|
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
|
||||||
|
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
|
||||||
|
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
|
||||||
|
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
|
||||||
|
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
|
||||||
|
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
|
||||||
|
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
|
||||||
|
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
|
||||||
|
|
||||||
|
16. Limitation of Liability.
|
||||||
|
|
||||||
|
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
|
||||||
|
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
|
||||||
|
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
|
||||||
|
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
|
||||||
|
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
|
||||||
|
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
|
||||||
|
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
|
||||||
|
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
|
||||||
|
SUCH DAMAGES.
|
||||||
|
|
||||||
|
17. Interpretation of Sections 15 and 16.
|
||||||
|
|
||||||
|
If the disclaimer of warranty and limitation of liability provided
|
||||||
|
above cannot be given local legal effect according to their terms,
|
||||||
|
reviewing courts shall apply local law that most closely approximates
|
||||||
|
an absolute waiver of all civil liability in connection with the
|
||||||
|
Program, unless a warranty or assumption of liability accompanies a
|
||||||
|
copy of the Program in return for a fee.
|
||||||
|
|
||||||
|
END OF TERMS AND CONDITIONS
|
||||||
|
|
||||||
|
How to Apply These Terms to Your New Programs
|
||||||
|
|
||||||
|
If you develop a new program, and you want it to be of the greatest
|
||||||
|
possible use to the public, the best way to achieve this is to make it
|
||||||
|
free software which everyone can redistribute and change under these terms.
|
||||||
|
|
||||||
|
To do so, attach the following notices to the program. It is safest
|
||||||
|
to attach them to the start of each source file to most effectively
|
||||||
|
state the exclusion of warranty; and each file should have at least
|
||||||
|
the "copyright" line and a pointer to where the full notice is found.
|
||||||
|
|
||||||
|
<one line to give the program's name and a brief idea of what it does.>
|
||||||
|
Copyright (C) <year> <name of author>
|
||||||
|
|
||||||
|
This program is free software: you can redistribute it and/or modify
|
||||||
|
it under the terms of the GNU General Public License as published by
|
||||||
|
the Free Software Foundation, either version 3 of the License, or
|
||||||
|
(at your option) any later version.
|
||||||
|
|
||||||
|
This program is distributed in the hope that it will be useful,
|
||||||
|
but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
GNU General Public License for more details.
|
||||||
|
|
||||||
|
You should have received a copy of the GNU General Public License
|
||||||
|
along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
Also add information on how to contact you by electronic and paper mail.
|
||||||
|
|
||||||
|
If the program does terminal interaction, make it output a short
|
||||||
|
notice like this when it starts in an interactive mode:
|
||||||
|
|
||||||
|
<program> Copyright (C) <year> <name of author>
|
||||||
|
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
|
||||||
|
This is free software, and you are welcome to redistribute it
|
||||||
|
under certain conditions; type `show c' for details.
|
||||||
|
|
||||||
|
The hypothetical commands `show w' and `show c' should show the appropriate
|
||||||
|
parts of the General Public License. Of course, your program's commands
|
||||||
|
might be different; for a GUI interface, you would use an "about box".
|
||||||
|
|
||||||
|
You should also get your employer (if you work as a programmer) or school,
|
||||||
|
if any, to sign a "copyright disclaimer" for the program, if necessary.
|
||||||
|
For more information on this, and how to apply and follow the GNU GPL, see
|
||||||
|
<https://www.gnu.org/licenses/>.
|
||||||
|
|
||||||
|
The GNU General Public License does not permit incorporating your program
|
||||||
|
into proprietary programs. If your program is a subroutine library, you
|
||||||
|
may consider it more useful to permit linking proprietary applications with
|
||||||
|
the library. If this is what you want to do, use the GNU Lesser General
|
||||||
|
Public License instead of this License. But first, please read
|
||||||
|
<https://www.gnu.org/licenses/why-not-lgpl.html>.
|
||||||
39
README.md
39
README.md
@@ -1,2 +1,39 @@
|
|||||||
## NextSync
|
# Nextsync
|
||||||
|
|
||||||
|
A git-like command line tool to interact with Nextcloud.
|
||||||
|
|
||||||
|
This is **in working progress**.
|
||||||
|
|
||||||
|
This should work pretty much like git with some adaptations to be more debuggable (for now) and easier to code. There is no history and with that no need to commit, to upload new files you have to add and push them.
|
||||||
|
|
||||||
|
## Features
|
||||||
|
|
||||||
|
- [x] Cloning
|
||||||
|
- [x] Status (new, deleted, modified, copied, moved)
|
||||||
|
- [x] Pushing updates (new, deleted, modified)
|
||||||
|
- [x] Using a .nextsyncignore to ignore files
|
||||||
|
- [ ] Pulling changes
|
||||||
|
- [x] Auth with a token
|
||||||
|
- [ ] Remember token
|
||||||
|
- [ ] Various optimisation
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
```
|
||||||
|
USAGE:
|
||||||
|
nextsync [SUBCOMMAND]
|
||||||
|
|
||||||
|
FLAGS:
|
||||||
|
-h, --help Prints help information
|
||||||
|
-V, --version Prints version information
|
||||||
|
|
||||||
|
SUBCOMMANDS:
|
||||||
|
add Add changes to the index
|
||||||
|
clone Clone a repository into a new directory
|
||||||
|
config
|
||||||
|
help Prints this message or the help of the given subcommand(s)
|
||||||
|
init Create an empty Nextsync repository
|
||||||
|
push Push changes on nextcloud
|
||||||
|
reset Clear the index
|
||||||
|
status Show the working tree status
|
||||||
|
```
|
||||||
|
|||||||
@@ -2,8 +2,10 @@
|
|||||||
## Blob object
|
## Blob object
|
||||||
|
|
||||||
```
|
```
|
||||||
file_name timestamp size hash
|
file_name timestamp1 size timestamp2 hash
|
||||||
```
|
```
|
||||||
|
timestamp1: timestamp of file on server to know if the server has an update
|
||||||
|
timestamp2: timestamp of file locally to know when the file has changed on the system
|
||||||
|
|
||||||
## Tree object
|
## Tree object
|
||||||
```
|
```
|
||||||
|
|||||||
@@ -5,3 +5,7 @@ pub mod reset;
|
|||||||
pub mod clone;
|
pub mod clone;
|
||||||
pub mod push;
|
pub mod push;
|
||||||
pub mod config;
|
pub mod config;
|
||||||
|
pub mod remote_diff;
|
||||||
|
pub mod remote;
|
||||||
|
pub mod pull;
|
||||||
|
pub mod credential;
|
||||||
|
|||||||
@@ -1,58 +1,118 @@
|
|||||||
use std::io::Write;
|
use std::io::Write;
|
||||||
use std::path::{Path, PathBuf};
|
use std::path::{Path, PathBuf};
|
||||||
use clap::Values;
|
use glob::glob;
|
||||||
use crate::store;
|
use crate::store::{self, object::Object};
|
||||||
use crate::utils::{self};
|
use crate::utils::{self, path};
|
||||||
|
use crate::store::object::object::{Obj, ObjMethods};
|
||||||
use crate::utils::nextsyncignore::{self, ignore_file};
|
use crate::utils::nextsyncignore::{self, ignore_file};
|
||||||
|
use crate::utils::path::{normalize_relative, repo_root, path_buf_to_string};
|
||||||
|
|
||||||
pub struct AddArgs<'a> {
|
pub struct AddArgs {
|
||||||
pub files: Values<'a>,
|
pub files: Vec<String>,
|
||||||
pub force: bool,
|
pub force: bool,
|
||||||
|
pub all: bool,
|
||||||
}
|
}
|
||||||
|
|
||||||
// todo match deleted files
|
// todo match deleted files
|
||||||
// todo match weird reg expression
|
|
||||||
pub fn add(args: AddArgs) {
|
pub fn add(args: AddArgs) {
|
||||||
let mut index_file = store::index::open();
|
|
||||||
let mut added_files: Vec<String> = vec![];
|
let mut pattern: String;
|
||||||
let rules = match nextsyncignore::read_lines() {
|
let file_vec: Vec<String> = match args.all {
|
||||||
Ok(r) => r,
|
true => {
|
||||||
Err(_) => vec![],
|
pattern = path_buf_to_string(repo_root());
|
||||||
|
pattern.push_str("/*");
|
||||||
|
vec![pattern]
|
||||||
|
},
|
||||||
|
false => args.files,
|
||||||
};
|
};
|
||||||
|
|
||||||
|
let mut added_files: Vec<String> = vec![];
|
||||||
let mut ignored_f = vec![];
|
let mut ignored_f = vec![];
|
||||||
let file_vec: Vec<&str> = args.files.collect();
|
let rules = nextsyncignore::get_rules();
|
||||||
|
|
||||||
for file in file_vec {
|
for file in file_vec {
|
||||||
if !args.force && ignore_file(&file.to_string(), rules.clone(), &mut ignored_f) {
|
let f = match normalize_relative(&file) {
|
||||||
|
Ok(f) => f,
|
||||||
|
Err(err) => {
|
||||||
|
eprintln!("err: {} {}", file, err);
|
||||||
continue;
|
continue;
|
||||||
}
|
}
|
||||||
let path = Path::new(file);
|
};
|
||||||
|
|
||||||
|
let path = repo_root().join(Path::new(&f));
|
||||||
match path.exists() {
|
match path.exists() {
|
||||||
true => {
|
true => {
|
||||||
if path.is_dir() {
|
add_entry(path, args.force, &mut added_files, rules.clone(), &mut ignored_f);
|
||||||
added_files.push(String::from(path.to_str().unwrap()));
|
|
||||||
add_folder_content(path.to_path_buf(), &mut added_files);
|
|
||||||
} else {
|
|
||||||
added_files.push(String::from(path.to_str().unwrap()));
|
|
||||||
}
|
|
||||||
},
|
},
|
||||||
false => {
|
false => {
|
||||||
// todo deleted file/folder verif if exists
|
if Object::new(path.to_str().unwrap()).exists() {
|
||||||
added_files.push(String::from(path.to_str().unwrap()));
|
// object is deleted so not present but can still be added for deletion
|
||||||
|
added_files.push(String::from(f));
|
||||||
|
} else {
|
||||||
|
// try globbing if nothing has been found
|
||||||
|
for entry in try_globbing(path) {
|
||||||
|
add_entry(entry, args.force, &mut added_files, rules.clone(), &mut ignored_f);
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if ignored_f.len() > 0 {
|
print_ignored_files(ignored_f);
|
||||||
|
write_added_files(added_files);
|
||||||
|
}
|
||||||
|
|
||||||
|
fn add_entry(entry: PathBuf, force: bool, added_files: &mut Vec<String>, rules: Vec<String>, ignored_f: &mut Vec<String>) {
|
||||||
|
// ignore nextsync config files
|
||||||
|
if path::is_nextsync_config(entry.clone()) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// check if the file must be ignored
|
||||||
|
if !force && ignore_file(&path_buf_to_string(entry.clone()), rules, ignored_f) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// add the parent if there is one and it is not already created
|
||||||
|
add_parent(entry.clone(), added_files);
|
||||||
|
|
||||||
|
added_files.push(path_buf_to_string(entry.strip_prefix(repo_root()).unwrap().to_path_buf()));
|
||||||
|
if entry.is_dir() {
|
||||||
|
add_folder_content(entry.to_path_buf(), added_files);
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
fn add_parent(entry: PathBuf, added_files: &mut Vec<String>) {
|
||||||
|
let test_parent = entry.strip_prefix(repo_root()).unwrap().parent();
|
||||||
|
if test_parent.is_none() || test_parent.unwrap() == PathBuf::new() {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
let parent = entry.parent().unwrap();
|
||||||
|
|
||||||
|
if !Obj::from_path(parent).exists_on_remote() {
|
||||||
|
add_parent(parent.to_path_buf(), added_files);
|
||||||
|
added_files.push(path_buf_to_string(parent.strip_prefix(repo_root()).unwrap().to_path_buf()));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn print_ignored_files(ignored_files: Vec<String>) {
|
||||||
|
if ignored_files.len() > 0 {
|
||||||
// todo multiple nextsyncignore
|
// todo multiple nextsyncignore
|
||||||
println!("The following paths are ignored by your .nextsyncignore file:");
|
println!("The following paths are ignored by your .nextsyncignore file:");
|
||||||
for file in ignored_f {
|
for file in ignored_files {
|
||||||
println!("{}", file);
|
println!("{}", file);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// save all added_files in index
|
fn write_added_files(added_files: Vec<String>) {
|
||||||
// todo avoid duplication
|
let mut index_file = store::index::open();
|
||||||
for file in added_files {
|
for file in added_files {
|
||||||
|
if store::index::alread_added(file.clone()) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
match writeln!(index_file, "{}", file) {
|
match writeln!(index_file, "{}", file) {
|
||||||
Ok(()) => (),
|
Ok(()) => (),
|
||||||
Err(err) => eprintln!("{}", err),
|
Err(err) => eprintln!("{}", err),
|
||||||
@@ -61,7 +121,25 @@ pub fn add(args: AddArgs) {
|
|||||||
drop(index_file);
|
drop(index_file);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn try_globbing(path: PathBuf) -> Vec<PathBuf> {
|
||||||
|
let mut paths: Vec<PathBuf> = vec![];
|
||||||
|
if let Ok(entries) = glob(path.to_str().unwrap()) {
|
||||||
|
for entry in entries {
|
||||||
|
match entry {
|
||||||
|
Ok(ppath) => paths.push(ppath),
|
||||||
|
Err(e) => {
|
||||||
|
eprintln!("err: {} incorrect pattern ({})", path.display(), e);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
eprintln!("err: {} is not something you can add.", path.to_str().unwrap());
|
||||||
|
}
|
||||||
|
return paths;
|
||||||
|
}
|
||||||
|
|
||||||
fn add_folder_content(path: PathBuf, added_files: &mut Vec<String>) {
|
fn add_folder_content(path: PathBuf, added_files: &mut Vec<String>) {
|
||||||
|
// todo check for changes
|
||||||
let mut folders: Vec<PathBuf> = vec![];
|
let mut folders: Vec<PathBuf> = vec![];
|
||||||
folders.push(path);
|
folders.push(path);
|
||||||
|
|
||||||
@@ -69,12 +147,16 @@ fn add_folder_content(path: PathBuf, added_files: &mut Vec<String>) {
|
|||||||
if let Ok(entries) = utils::read::read_folder(folder.clone()) {
|
if let Ok(entries) = utils::read::read_folder(folder.clone()) {
|
||||||
for entry in entries {
|
for entry in entries {
|
||||||
let path_entry = PathBuf::from(entry);
|
let path_entry = PathBuf::from(entry);
|
||||||
|
if !path::is_nextsync_config(path_entry.clone())
|
||||||
|
{
|
||||||
if path_entry.is_dir() {
|
if path_entry.is_dir() {
|
||||||
folders.push(path_entry.clone());
|
folders.push(path_entry.clone());
|
||||||
}
|
}
|
||||||
added_files.push(String::from(path_entry.to_str().unwrap()));
|
added_files.push(path_buf_to_string(path_entry.strip_prefix(repo_root()).unwrap().to_path_buf()));
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -1,30 +1,43 @@
|
|||||||
|
use std::io;
|
||||||
|
use std::io::prelude::*;
|
||||||
use std::fs::DirBuilder;
|
use std::fs::DirBuilder;
|
||||||
use std::path::{Path, PathBuf};
|
use std::path::{Path, PathBuf};
|
||||||
use clap::Values;
|
|
||||||
use regex::Regex;
|
use regex::Regex;
|
||||||
|
use crate::services::downloader::Downloader;
|
||||||
use crate::utils::api::ApiProps;
|
use crate::utils::api::ApiProps;
|
||||||
|
use crate::utils::path::path_buf_to_string;
|
||||||
|
use crate::utils::remote::{enumerate_remote, EnumerateOptions};
|
||||||
use crate::global::global::{DIR_PATH, set_dir_path};
|
use crate::global::global::{DIR_PATH, set_dir_path};
|
||||||
use crate::services::api::ApiError;
|
use crate::services::api::ApiError;
|
||||||
|
use crate::services::api_call::ApiCall;
|
||||||
use crate::services::req_props::{ReqProps, ObjProps};
|
use crate::services::req_props::{ReqProps, ObjProps};
|
||||||
use crate::services::download_files::DownloadFiles;
|
use crate::store::object::{tree::Tree, blob::Blob};
|
||||||
use crate::store::object::{tree, blob};
|
use crate::commands::config;
|
||||||
use crate::commands::init;
|
use crate::commands::init;
|
||||||
|
|
||||||
pub fn clone(remote: Values<'_>) {
|
pub const DEPTH: &str = "3";
|
||||||
|
|
||||||
|
pub struct CloneArgs {
|
||||||
|
pub remote: String,
|
||||||
|
pub depth: Option<String>,
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn clone(args: CloneArgs) {
|
||||||
let d = DIR_PATH.lock().unwrap().clone();
|
let d = DIR_PATH.lock().unwrap().clone();
|
||||||
|
|
||||||
let url = remote.clone().next().unwrap();
|
let url = args.remote.clone();
|
||||||
let (host, tmp_user, dist_path_str) = get_url_props(url);
|
let (host, tmp_user, dist_path_str) = get_url_props(&url);
|
||||||
let username = match tmp_user {
|
let username = match tmp_user {
|
||||||
Some(u) => u,
|
Some(u) => u.to_string(),
|
||||||
None => {
|
None => {
|
||||||
eprintln!("No username found");
|
println!("Please enter the username of the webdav instance: ");
|
||||||
todo!();
|
let stdin = io::stdin();
|
||||||
|
stdin.lock().lines().next().unwrap().unwrap()
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
let api_props = ApiProps {
|
let api_props = ApiProps {
|
||||||
host: host.clone(),
|
host: host.clone(),
|
||||||
username: username.to_string(),
|
username,
|
||||||
root: dist_path_str.to_string(),
|
root: dist_path_str.to_string(),
|
||||||
};
|
};
|
||||||
|
|
||||||
@@ -34,58 +47,42 @@ pub fn clone(remote: Values<'_>) {
|
|||||||
let iter = Path::new(dist_path_str).iter();
|
let iter = Path::new(dist_path_str).iter();
|
||||||
let dest_dir = iter.last().unwrap();
|
let dest_dir = iter.last().unwrap();
|
||||||
let lp = std::env::current_dir().unwrap().join(dest_dir);
|
let lp = std::env::current_dir().unwrap().join(dest_dir);
|
||||||
set_dir_path(lp.to_str().unwrap().to_string());
|
set_dir_path(path_buf_to_string(lp.clone()));
|
||||||
lp
|
lp
|
||||||
},
|
},
|
||||||
};
|
};
|
||||||
|
|
||||||
let mut folders: Vec<ObjProps> = vec![ObjProps::new()];
|
// try to create root folder
|
||||||
let mut files: Vec<ObjProps> = vec![];
|
if DirBuilder::new().recursive(true).create(ref_path.clone()).is_err() {
|
||||||
let mut first_iter = true;
|
eprintln!("fatal: unable to create the destination directory");
|
||||||
while folders.len() > 0 {
|
|
||||||
let folder = folders.pop().unwrap();
|
|
||||||
|
|
||||||
let relative_s = match folder.relative_s {
|
|
||||||
Some(relative_s) => relative_s,
|
|
||||||
None => String::from(""),
|
|
||||||
};
|
|
||||||
|
|
||||||
// request folder content
|
|
||||||
let res = ReqProps::new()
|
|
||||||
.set_request(relative_s.as_str(), &api_props)
|
|
||||||
.gethref()
|
|
||||||
.getlastmodified()
|
|
||||||
.send_req_multiple();
|
|
||||||
|
|
||||||
let mut objs = match res {
|
|
||||||
Ok(o) => o,
|
|
||||||
Err(ApiError::IncorrectRequest(err)) => {
|
|
||||||
eprintln!("fatal: {}", err.status());
|
|
||||||
std::process::exit(1);
|
std::process::exit(1);
|
||||||
},
|
|
||||||
Err(ApiError::EmptyError(_)) => {
|
|
||||||
eprintln!("Failed to get body");
|
|
||||||
vec![]
|
|
||||||
}
|
|
||||||
Err(ApiError::RequestError(err)) => {
|
|
||||||
eprintln!("fatal: {}", err);
|
|
||||||
std::process::exit(1);
|
|
||||||
},
|
|
||||||
Err(ApiError::Unexpected(_)) => todo!()
|
|
||||||
};
|
|
||||||
|
|
||||||
// create folder
|
|
||||||
if first_iter {
|
|
||||||
if DirBuilder::new().create(ref_path.clone()).is_err() {
|
|
||||||
eprintln!("fatal: directory already exist");
|
|
||||||
// destination path 'path' already exists and is not an empty directory.
|
|
||||||
//std::process::exit(1);
|
|
||||||
} else {
|
} else {
|
||||||
init::init();
|
init::init();
|
||||||
|
|
||||||
|
// set remote origin in config file
|
||||||
|
let mut remote_url = api_props.username.clone();
|
||||||
|
remote_url.push_str("@");
|
||||||
|
remote_url.push_str(api_props.host.strip_prefix("https://").unwrap());
|
||||||
|
remote_url.push_str(&api_props.root);
|
||||||
|
|
||||||
|
if config::add_remote("origin", &remote_url).is_err()
|
||||||
|
{
|
||||||
|
eprintln!("err: not able to save remote");
|
||||||
}
|
}
|
||||||
} else {
|
}
|
||||||
|
|
||||||
|
let depth = &args.depth.clone().unwrap_or(DEPTH.to_string());
|
||||||
|
let (folders, files) = enumerate_remote(
|
||||||
|
|a| req(&api_props, depth, a),
|
||||||
|
None,
|
||||||
|
EnumerateOptions {
|
||||||
|
depth: Some(depth.to_owned()),
|
||||||
|
relative_s: None
|
||||||
|
});
|
||||||
|
|
||||||
|
for folder in folders {
|
||||||
// create folder
|
// create folder
|
||||||
let p = ref_path.clone().join(Path::new(&relative_s));
|
let p = ref_path.clone().join(Path::new(&folder.relative_s.unwrap()));
|
||||||
if let Err(err) = DirBuilder::new().recursive(true).create(p.clone()) {
|
if let Err(err) = DirBuilder::new().recursive(true).create(p.clone()) {
|
||||||
eprintln!("err: cannot create directory {} ({})", p.display(), err);
|
eprintln!("err: cannot create directory {} ({})", p.display(), err);
|
||||||
}
|
}
|
||||||
@@ -93,59 +90,38 @@ pub fn clone(remote: Values<'_>) {
|
|||||||
// add tree
|
// add tree
|
||||||
let path_folder = p.strip_prefix(ref_path.clone()).unwrap();
|
let path_folder = p.strip_prefix(ref_path.clone()).unwrap();
|
||||||
let lastmodified = folder.lastmodified.unwrap().timestamp_millis();
|
let lastmodified = folder.lastmodified.unwrap().timestamp_millis();
|
||||||
if let Err(err) = tree::add(&path_folder, &lastmodified.to_string()) {
|
if let Err(err) = Tree::from_path(path_folder.to_path_buf()).create(&lastmodified.to_string(), false) {
|
||||||
eprintln!("err: saving ref of {} ({})", path_folder.display(), err);
|
eprintln!("err: saving ref of {} ({})", path_folder.display(), err);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// find folders and files in response
|
Downloader::new()
|
||||||
let mut iter = objs.iter();
|
.set_api_props(api_props.clone())
|
||||||
iter.next(); // jump first element which is the folder cloned
|
.set_files(files)
|
||||||
for object in iter {
|
.should_log()
|
||||||
if object.href.clone().unwrap().chars().last().unwrap() == '/' {
|
.download(ref_path.clone(), Some(&save_blob));
|
||||||
folders.push(object.clone());
|
|
||||||
} else {
|
|
||||||
files.push(object.clone());
|
|
||||||
}
|
|
||||||
}
|
|
||||||
first_iter = false;
|
|
||||||
}
|
|
||||||
|
|
||||||
download_files(ref_path.clone(), files, &api_props);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn download_files(ref_p: PathBuf, files: Vec<ObjProps>, api_props: &ApiProps) {
|
fn save_blob(obj: ObjProps) {
|
||||||
for obj in files {
|
|
||||||
let relative_s = &obj.clone().relative_s.unwrap();
|
let relative_s = &obj.clone().relative_s.unwrap();
|
||||||
let res = DownloadFiles::new()
|
let relative_p = PathBuf::from(&relative_s);
|
||||||
.set_url(&relative_s, api_props)
|
|
||||||
.save(ref_p.clone());
|
|
||||||
|
|
||||||
match res {
|
|
||||||
Ok(()) => {
|
|
||||||
let relative_p = Path::new(&relative_s);
|
|
||||||
let lastmodified = obj.clone().lastmodified.unwrap().timestamp_millis();
|
let lastmodified = obj.clone().lastmodified.unwrap().timestamp_millis();
|
||||||
if let Err(err) = blob::add(relative_p, &lastmodified.to_string()) {
|
if let Err(err) = Blob::from_path(relative_p).create(&lastmodified.to_string(), false) {
|
||||||
eprintln!("err: saving ref of {} ({})", relative_s.clone(), err);
|
eprintln!("err: saving ref of {} ({})", relative_s.clone(), err);
|
||||||
}
|
}
|
||||||
},
|
|
||||||
Err(ApiError::Unexpected(_)) => {
|
|
||||||
eprintln!("err: writing {}", relative_s);
|
|
||||||
},
|
|
||||||
Err(ApiError::IncorrectRequest(err)) => {
|
|
||||||
eprintln!("fatal: {}", err.status());
|
|
||||||
std::process::exit(1);
|
|
||||||
},
|
|
||||||
Err(ApiError::EmptyError(_)) => eprintln!("Failed to get body"),
|
|
||||||
Err(ApiError::RequestError(err)) => {
|
|
||||||
eprintln!("fatal: {}", err);
|
|
||||||
std::process::exit(1);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn get_url_props(url: &str) -> (String, Option<&str>, &str) {
|
fn req(api_props: &ApiProps, depth: &str, relative_s: &str) -> Result<Vec<ObjProps>, ApiError> {
|
||||||
|
ReqProps::new()
|
||||||
|
.set_request(relative_s, &api_props)
|
||||||
|
.set_depth(depth)
|
||||||
|
.gethref()
|
||||||
|
.getcontentlength()
|
||||||
|
.getlastmodified()
|
||||||
|
.send_req_multiple()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_url_props(url: &str) -> (String, Option<&str>, &str) {
|
||||||
let mut username = None;
|
let mut username = None;
|
||||||
let mut domain = "";
|
let mut domain = "";
|
||||||
let mut path = "";
|
let mut path = "";
|
||||||
@@ -201,7 +177,7 @@ mod tests {
|
|||||||
fn test_get_url_props() {
|
fn test_get_url_props() {
|
||||||
let p = "/foo/bar";
|
let p = "/foo/bar";
|
||||||
let u = Some("user");
|
let u = Some("user");
|
||||||
let d = String::from("http://nextcloud.com");
|
// let d = String::from("http://nextcloud.com");
|
||||||
let sd = String::from("https://nextcloud.com");
|
let sd = String::from("https://nextcloud.com");
|
||||||
let sld = String::from("https://nextcloud.example.com");
|
let sld = String::from("https://nextcloud.example.com");
|
||||||
let ld = String::from("http://nextcloud.example.com");
|
let ld = String::from("http://nextcloud.example.com");
|
||||||
|
|||||||
@@ -1,51 +1,190 @@
|
|||||||
use std::fs::OpenOptions;
|
use std::fs::OpenOptions;
|
||||||
use std::io::{self, Write};
|
use std::io::{self, Write, BufRead, Seek, SeekFrom};
|
||||||
use crate::utils::{path, read};
|
use crate::utils::{path, read};
|
||||||
|
use std::collections::HashMap;
|
||||||
|
|
||||||
pub fn set(var: &str, val: &str) -> io::Result<()> {
|
pub struct ConfigSetArgs {
|
||||||
let mut root = match path::nextsync() {
|
pub name: String,
|
||||||
Some(path) => path,
|
pub value: String,
|
||||||
None => {
|
}
|
||||||
eprintln!("fatal: not a nextsync repository (or any of the parent directories): .nextsync");
|
|
||||||
|
pub fn config_set(args: ConfigSetArgs) {
|
||||||
|
// configure possible options and their associated category
|
||||||
|
let mut option_categories: HashMap<&str, &str> = HashMap::new();
|
||||||
|
option_categories.insert("force_insecure", "core");
|
||||||
|
option_categories.insert("token", "core");
|
||||||
|
|
||||||
|
// get category of option
|
||||||
|
let category = option_categories.get(args.name.as_str());
|
||||||
|
if category.is_none() {
|
||||||
|
eprintln!("fatal: '{}' is not a valid option.", args.name.clone());
|
||||||
std::process::exit(1);
|
std::process::exit(1);
|
||||||
}
|
}
|
||||||
};
|
|
||||||
root.push("config");
|
|
||||||
|
|
||||||
// todo check if exist
|
let _ = write_option_in_cat(category.unwrap(), &args.name, &args.value);
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
pub fn find_option_in_cat(category: &str, option: &str) -> Option<String> {
|
||||||
|
let mut config = path::nextsync();
|
||||||
|
config.push("config");
|
||||||
|
|
||||||
|
let mut in_target_category = false;
|
||||||
|
if let Ok(lines) = read::read_lines(config) {
|
||||||
|
|
||||||
|
for line in lines {
|
||||||
|
if let Ok(line) = line {
|
||||||
|
let trimmed_line = line.trim();
|
||||||
|
|
||||||
|
if trimmed_line.starts_with('[') && trimmed_line.ends_with(']') {
|
||||||
|
in_target_category = trimmed_line == format!("[{}]", category);
|
||||||
|
} else if in_target_category {
|
||||||
|
let parts: Vec<&str> = trimmed_line.splitn(2, '=').collect();
|
||||||
|
if parts.len() == 2 && parts[0].trim() == option {
|
||||||
|
return Some(parts[1].trim().to_string());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
None
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn write_option_in_cat(category: &str, option: &str, value: &str) -> io::Result<()> {
|
||||||
|
let mut config = path::nextsync();
|
||||||
|
config.push("config");
|
||||||
|
|
||||||
|
let mut file = OpenOptions::new()
|
||||||
|
.read(true)
|
||||||
|
.write(true)
|
||||||
|
.create(true)
|
||||||
|
.open(&config)?;
|
||||||
|
|
||||||
|
let mut in_target_category = false;
|
||||||
|
let mut option_found = false;
|
||||||
|
|
||||||
|
// Go to the beginning of the file
|
||||||
|
file.seek(SeekFrom::Start(0))?;
|
||||||
|
|
||||||
|
// Create a temporary file to hold the modified content
|
||||||
|
let mut tmp_file = tempfile::Builder::new()
|
||||||
|
.prefix(".nextsyncconfig")
|
||||||
|
.tempfile()?;
|
||||||
|
|
||||||
|
let reader = io::BufReader::new(&file);
|
||||||
|
for line in reader.lines() {
|
||||||
|
let line = line?;
|
||||||
|
let trimmed_line = line.trim();
|
||||||
|
|
||||||
|
if trimmed_line.starts_with('[') && trimmed_line.ends_with(']') {
|
||||||
|
// if we were already in target category we are now leaving it
|
||||||
|
// add option only if not found before
|
||||||
|
if in_target_category && !option_found {
|
||||||
|
writeln!(&mut tmp_file, "\t{} = {}", option, value)?;
|
||||||
|
} else if !in_target_category {
|
||||||
|
in_target_category = trimmed_line == format!("[{}]", category);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if in_target_category && !option_found && trimmed_line.starts_with(&format!("{} =", option)) {
|
||||||
|
// Option already exists, update its value
|
||||||
|
writeln!(&mut tmp_file, "\t{} = {}", option, value)?;
|
||||||
|
option_found = true;
|
||||||
|
} else {
|
||||||
|
// Write the original line
|
||||||
|
writeln!(&mut tmp_file, "{}", line)?;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// add to last category
|
||||||
|
if in_target_category && !option_found {
|
||||||
|
writeln!(&mut tmp_file, "\t{} = {}", option, value)?;
|
||||||
|
}
|
||||||
|
|
||||||
|
// if the category didn't exist create it and add the option
|
||||||
|
if !in_target_category {
|
||||||
|
writeln!(&mut tmp_file, "[{}]", category)?;
|
||||||
|
writeln!(&mut tmp_file, "\t{} = {}", option, value)?;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Flush and sync the temporary file to ensure data is written to disk
|
||||||
|
tmp_file.flush()?;
|
||||||
|
|
||||||
|
// Go back to the beginning of the file
|
||||||
|
tmp_file.seek(SeekFrom::Start(0))?;
|
||||||
|
file.seek(SeekFrom::Start(0))?;
|
||||||
|
|
||||||
|
// Copy the contents of the temporary file to the original file
|
||||||
|
io::copy(&mut tmp_file, &mut file)?;
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn add_remote(name: &str, url: &str) -> io::Result<()> {
|
||||||
|
let config = path::config();
|
||||||
|
|
||||||
|
// check if there is already a remote with this name
|
||||||
|
if get_remote(name).is_some()
|
||||||
|
{
|
||||||
|
eprintln!("error: remote {} already exists.", name);
|
||||||
|
std::process::exit(3);
|
||||||
|
}
|
||||||
|
|
||||||
let mut file = OpenOptions::new()
|
let mut file = OpenOptions::new()
|
||||||
.read(true)
|
.read(true)
|
||||||
.write(true)
|
.write(true)
|
||||||
.create(true)
|
.create(true)
|
||||||
.append(true)
|
.append(true)
|
||||||
.open(root)?;
|
.open(config)?;
|
||||||
|
|
||||||
|
writeln!(file, "[remote \"{}\"]", name)?;
|
||||||
|
writeln!(file, "\turl = {}", url)?;
|
||||||
|
|
||||||
let mut line = var.to_owned();
|
|
||||||
line.push_str(" ");
|
|
||||||
line.push_str(val);
|
|
||||||
writeln!(file, "{}", line)?;
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn get(var: &str) -> Option<String> {
|
pub fn get_remote(name: &str) -> Option<String> {
|
||||||
let mut root = match path::nextsync() {
|
find_option_in_cat(&format!("remote \"{}\"", name), "url")
|
||||||
Some(path) => path,
|
}
|
||||||
None => {
|
|
||||||
eprintln!("fatal: not a nextsync repository (or any of the parent directories): .nextsync");
|
/// return a vector of remote found in config file (e.g: ("origin", "https://example.com"))
|
||||||
std::process::exit(1);
|
pub fn get_all_remote() -> Vec<(String, String)> {
|
||||||
}
|
let config = path::config();
|
||||||
};
|
|
||||||
root.push("config");
|
let mut remotes: Vec<(String, String)> = vec![];
|
||||||
|
|
||||||
if let Ok(lines) = read::read_lines(root) {
|
let mut in_remote = false;
|
||||||
for line in lines {
|
let mut remote_name = String::new();
|
||||||
if let Ok(l) = line {
|
if let Ok(lines) = read::read_lines(config) {
|
||||||
if l.starts_with(var.clone()) {
|
|
||||||
let (_, val) = l.split_once(" ").unwrap();
|
for line in lines {
|
||||||
return Some(val.to_owned());
|
if let Ok(line) = line {
|
||||||
}
|
let trimmed_line = line.trim();
|
||||||
}
|
|
||||||
}
|
if trimmed_line.starts_with("[remote ") {
|
||||||
}
|
in_remote = true;
|
||||||
None
|
remote_name = trimmed_line.strip_prefix("[remote \"").unwrap().strip_suffix("\"]").unwrap().to_string();
|
||||||
|
}
|
||||||
|
else if trimmed_line.starts_with('[')
|
||||||
|
{
|
||||||
|
in_remote = false;
|
||||||
|
}
|
||||||
|
else if in_remote {
|
||||||
|
let parts: Vec<&str> = trimmed_line.splitn(2, '=').collect();
|
||||||
|
if parts.len() == 2 {
|
||||||
|
remotes.push((remote_name.to_string(), parts[1].trim().to_string()))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
remotes
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_core(name: &str) -> Option<String> {
|
||||||
|
find_option_in_cat("core", name)
|
||||||
}
|
}
|
||||||
|
|||||||
54
src/commands/credential.rs
Normal file
54
src/commands/credential.rs
Normal file
@@ -0,0 +1,54 @@
|
|||||||
|
use crate::commands::clone::get_url_props;
|
||||||
|
use crate::services::api::ApiError::RequestError;
|
||||||
|
|
||||||
|
use crate::services::login::Login;
|
||||||
|
use crate::services::api_call::ApiCall;
|
||||||
|
use crate::commands::config;
|
||||||
|
|
||||||
|
pub struct CredentialArgs {
|
||||||
|
pub username: String,
|
||||||
|
pub password: Option<String>,
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn credential_add(args: CredentialArgs) {
|
||||||
|
// get remote if exists
|
||||||
|
let remote = match config::get_remote("origin") {
|
||||||
|
None => {
|
||||||
|
eprintln!("fatal: No remote origin, impossible to send request to get token");
|
||||||
|
std::process::exit(1);
|
||||||
|
},
|
||||||
|
Some(remote) => remote
|
||||||
|
};
|
||||||
|
let (host, _, _) = get_url_props(&remote);
|
||||||
|
|
||||||
|
// get username and password
|
||||||
|
let username = args.username.to_owned();
|
||||||
|
let password = match args.password {
|
||||||
|
Some(mut pwd) => pwd.to_owned(),
|
||||||
|
None => {
|
||||||
|
println!("Please enter the password for {}: ", username);
|
||||||
|
rpassword::read_password().unwrap()
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
// get token
|
||||||
|
let get_token = Login::new()
|
||||||
|
.set_auth(&username, &password)
|
||||||
|
.set_host(Some(host))
|
||||||
|
.send_login();
|
||||||
|
|
||||||
|
// deal with error
|
||||||
|
if let Err(err) = get_token {
|
||||||
|
if let RequestError(err) = err {
|
||||||
|
eprintln!("fatal: Failed to get token for these credential. ({})", err);
|
||||||
|
}
|
||||||
|
else {
|
||||||
|
eprintln!("fatal: Failed to get token for these credential.");
|
||||||
|
}
|
||||||
|
std::process::exit(1);
|
||||||
|
}
|
||||||
|
|
||||||
|
// save token
|
||||||
|
let _ = config::write_option_in_cat("core", "token", get_token.unwrap().as_str());
|
||||||
|
}
|
||||||
|
|
||||||
@@ -10,36 +10,55 @@ pub fn init() {
|
|||||||
Some(dir) => PathBuf::from(dir),
|
Some(dir) => PathBuf::from(dir),
|
||||||
None => env::current_dir().unwrap(),
|
None => env::current_dir().unwrap(),
|
||||||
};
|
};
|
||||||
let builder = DirBuilder::new();
|
|
||||||
// todo check if dir empty
|
|
||||||
|
|
||||||
// .nextsync folder
|
// todo
|
||||||
|
// check if dir is empty
|
||||||
|
// if let Ok(entries) = read_folder(path.clone()) {
|
||||||
|
// if entries.len() != 0 {
|
||||||
|
// eprintln!("fatal: destination path '{}' already exists and is not an empty directory.", path.display());
|
||||||
|
// std::process::exit(1);
|
||||||
|
// }
|
||||||
|
// } else {
|
||||||
|
// eprintln!("fatal: cannot open the destination directory");
|
||||||
|
// std::process::exit(1);
|
||||||
|
// }
|
||||||
|
|
||||||
|
let builder = DirBuilder::new();
|
||||||
|
|
||||||
path.push(".nextsync");
|
path.push(".nextsync");
|
||||||
match builder.create(path.clone()) {
|
match builder.create(path.clone()) {
|
||||||
Ok(()) => (),
|
Ok(()) => (),
|
||||||
Err(_) => println!("Error: cannot create directory"),
|
Err(_) => println!("Error: cannot create .nextsync"),
|
||||||
};
|
};
|
||||||
|
|
||||||
path.push("objects");
|
path.push("objects");
|
||||||
match builder.create(path.clone()) {
|
match builder.create(path.clone()) {
|
||||||
Ok(()) => (),
|
Ok(()) => (),
|
||||||
Err(_) => println!("Error: cannot create directory"),
|
Err(_) => println!("Error: cannot create objects"),
|
||||||
|
};
|
||||||
|
path.pop();
|
||||||
|
|
||||||
|
path.push("refs");
|
||||||
|
match builder.create(path.clone()) {
|
||||||
|
Ok(()) => (),
|
||||||
|
Err(_) => println!("Error: cannot create refs"),
|
||||||
};
|
};
|
||||||
path.pop();
|
path.pop();
|
||||||
|
|
||||||
path.push("HEAD");
|
path.push("HEAD");
|
||||||
match File::create(path.clone()) {
|
match File::create(path.clone()) {
|
||||||
Ok(_) => (),
|
Ok(_) => (),
|
||||||
Err(_) => println!("Error: cannot create .nextsyncignore"),
|
Err(_) => println!("Error: cannot create HEAD"),
|
||||||
}
|
}
|
||||||
|
|
||||||
path.pop();
|
path.pop();
|
||||||
path.push("index");
|
path.push("index");
|
||||||
match File::create(path.clone()) {
|
match File::create(path.clone()) {
|
||||||
Ok(_) => (),
|
Ok(_) => (),
|
||||||
Err(_) => println!("Error: cannot create .nextsyncignore"),
|
Err(_) => println!("Error: cannot create index"),
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// todo
|
||||||
path.pop();
|
path.pop();
|
||||||
path.pop();
|
path.pop();
|
||||||
path.push(".nextsyncignore");
|
path.push(".nextsyncignore");
|
||||||
|
|||||||
54
src/commands/pull.rs
Normal file
54
src/commands/pull.rs
Normal file
@@ -0,0 +1,54 @@
|
|||||||
|
use std::path::PathBuf;
|
||||||
|
use std::fs::DirBuilder;
|
||||||
|
|
||||||
|
use crate::services::downloader::Downloader;
|
||||||
|
use crate::services::req_props::ObjProps;
|
||||||
|
use crate::store::object::blob::Blob;
|
||||||
|
use crate::store::object::tree::Tree;
|
||||||
|
use crate::utils::api::get_api_props;
|
||||||
|
use crate::utils::path;
|
||||||
|
use crate::commands::remote_diff::get_diff;
|
||||||
|
|
||||||
|
|
||||||
|
pub fn pull() {
|
||||||
|
let relative_p = path::current()
|
||||||
|
.unwrap()
|
||||||
|
.strip_prefix(path::repo_root()).unwrap().to_path_buf();
|
||||||
|
let (folders, files) = get_diff(relative_p);
|
||||||
|
|
||||||
|
let root = path::repo_root();
|
||||||
|
|
||||||
|
for folder in folders {
|
||||||
|
let p = root.clone().join(PathBuf::from(folder.relative_s.unwrap()));
|
||||||
|
if !p.exists() {
|
||||||
|
// create folder
|
||||||
|
if let Err(err) = DirBuilder::new().recursive(true).create(p.clone()) {
|
||||||
|
eprintln!("err: cannot create directory {} ({})", p.display(), err);
|
||||||
|
}
|
||||||
|
|
||||||
|
// add tree
|
||||||
|
let path_folder = p.strip_prefix(root.clone()).unwrap();
|
||||||
|
let lastmodified = folder.lastmodified.unwrap().timestamp_millis();
|
||||||
|
if let Err(err) = Tree::from_path(path_folder).create(&lastmodified.to_string(), false) {
|
||||||
|
eprintln!("err: saving ref of {} ({})", path_folder.display(), err);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Downloader::new()
|
||||||
|
.set_api_props(get_api_props())
|
||||||
|
.set_files(files)
|
||||||
|
.should_log()
|
||||||
|
.download(root, Some(&update_blob));
|
||||||
|
// todo look if need to download or update
|
||||||
|
}
|
||||||
|
|
||||||
|
fn update_blob(obj: ObjProps) {
|
||||||
|
let relative_s = &obj.clone().relative_s.unwrap();
|
||||||
|
let relative_p = PathBuf::from(&relative_s);
|
||||||
|
let lastmodified = obj.clone().lastmodified.unwrap().timestamp_millis();
|
||||||
|
// todo update function
|
||||||
|
if let Err(err) = Blob::from_path(relative_p).create(&lastmodified.to_string(), false) {
|
||||||
|
eprintln!("err: saving ref of {} ({})", relative_s.clone(), err);
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,65 +1,86 @@
|
|||||||
use std::path::Path;
|
use std::path::PathBuf;
|
||||||
use crate::services::api::ApiError;
|
|
||||||
use crate::services::upload_file::UploadFile;
|
|
||||||
use crate::services::delete_path::DeletePath;
|
|
||||||
use crate::services::req_props::{ReqProps, ObjProps};
|
|
||||||
use crate::store::index;
|
|
||||||
use crate::store::object::blob;
|
|
||||||
use crate::commands::{status, config};
|
use crate::commands::{status, config};
|
||||||
use crate::commands::status::{State, LocalObj};
|
|
||||||
use crate::commands::push::push_factory::{PushFactory, PushState};
|
use crate::commands::push::push_factory::{PushFactory, PushState};
|
||||||
|
use crate::store::index;
|
||||||
|
|
||||||
|
use super::status::LocalObj;
|
||||||
|
|
||||||
pub mod push_factory;
|
pub mod push_factory;
|
||||||
pub mod new;
|
pub mod new;
|
||||||
//pub mod new_dir;
|
pub mod new_dir;
|
||||||
//pub mod deleted;
|
pub mod rm_dir;
|
||||||
|
pub mod deleted;
|
||||||
|
pub mod modified;
|
||||||
|
pub mod moved;
|
||||||
|
pub mod copied;
|
||||||
|
|
||||||
pub fn push() {
|
pub fn push() {
|
||||||
dbg!(status::get_all_staged());
|
let _remote = match config::get_remote("origin") {
|
||||||
|
|
||||||
let remote = match config::get("remote") {
|
|
||||||
Some(r) => r,
|
Some(r) => r,
|
||||||
None => {
|
None => {
|
||||||
eprintln!("fatal: no remote set in configuration");
|
eprintln!("fatal: no remote set in configuration");
|
||||||
|
// todo debug
|
||||||
//std::process::exit(1);
|
//std::process::exit(1);
|
||||||
String::from("")
|
String::new()
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
let staged_objs = status::get_all_staged();
|
let staged_objs = status::get_all_staged();
|
||||||
// todo sort folder first
|
|
||||||
|
// exit if there is nothing to push
|
||||||
|
if staged_objs.len() == 0 {
|
||||||
|
println!("Everything up-to-date");
|
||||||
|
std::process::exit(0);
|
||||||
|
}
|
||||||
|
|
||||||
// path that certify that all its children can be push whithout hesistation
|
// path that certify that all its children can be push whithout hesistation
|
||||||
// (e.g if remote dir has no changes since last sync all children
|
// (e.g. if remote dir has no changes since last sync all children
|
||||||
// can be pushed without verification)
|
// can be pushed without verification)
|
||||||
let whitelist: Option<&Path> = None;
|
let mut whitelist: Option<PathBuf> = None;
|
||||||
|
|
||||||
for obj in staged_objs {
|
for obj in staged_objs {
|
||||||
if obj.otype == String::from("tree") {
|
if obj.otype == String::from("tree") {
|
||||||
//let push_factory = PushFactory.new_dir(obj.clone());
|
let push_factory = PushFactory.new_dir(obj.clone());
|
||||||
//let res = match push_factory.can_push(whitelist.clone()) {
|
let res = push_factory.can_push(&mut whitelist);
|
||||||
// PushState::Valid => push_factory.push(),
|
match res {
|
||||||
// PushState::Done => (),
|
PushState::Valid => {
|
||||||
// PushState::Conflict => (),
|
match push_factory.push() {
|
||||||
// _ => todo!(),
|
Ok(()) => (),
|
||||||
//};
|
Err(err) => {
|
||||||
|
eprintln!("err: pushing {}: {}", obj.name, err);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
PushState::Done => (),
|
||||||
|
PushState::Conflict => {
|
||||||
|
println!("CONFLICT: {}", obj.clone().name);
|
||||||
|
},
|
||||||
|
_ => todo!(),
|
||||||
|
};
|
||||||
|
|
||||||
//match res {
|
|
||||||
//
|
|
||||||
//}
|
|
||||||
//dbg!("should push folder");
|
|
||||||
} else {
|
} else {
|
||||||
let push_factory = PushFactory.new(obj.clone());
|
let push_factory = PushFactory.new(obj.clone());
|
||||||
match push_factory.can_push(whitelist.clone()) {
|
match push_factory.can_push(&mut whitelist) {
|
||||||
PushState::Valid => push_factory.push(),
|
PushState::Valid => {
|
||||||
PushState::Done => (),
|
match push_factory.push() {
|
||||||
|
Ok(()) => remove_obj_from_index(obj.clone()),
|
||||||
|
Err(err) => {
|
||||||
|
eprintln!("err: pushing {}: {}", obj.name, err);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
PushState::Done => remove_obj_from_index(obj.clone()),
|
||||||
PushState::Conflict => {
|
PushState::Conflict => {
|
||||||
// download file
|
// download file
|
||||||
}
|
}
|
||||||
_ => todo!(),
|
PushState::Error => (),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
// read index
|
}
|
||||||
// if dir upload dir
|
|
||||||
|
fn remove_obj_from_index(obj: LocalObj) {
|
||||||
|
if let Err(err) = index::rm_line(obj.path.to_str().unwrap()) {
|
||||||
|
eprintln!("err: removing {} from index: {}", obj.name, err);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
84
src/commands/push/copied.rs
Normal file
84
src/commands/push/copied.rs
Normal file
@@ -0,0 +1,84 @@
|
|||||||
|
use std::path::PathBuf;
|
||||||
|
use std::io;
|
||||||
|
use crate::services::api::ApiError;
|
||||||
|
use crate::services::r#copy::Copy;
|
||||||
|
use crate::services::api_call::ApiCall;
|
||||||
|
use crate::services::req_props::ReqProps;
|
||||||
|
use crate::commands::status::LocalObj;
|
||||||
|
use crate::commands::push::push_factory::{PushState, PushChange, PushFlowState};
|
||||||
|
use crate::store::object::blob::Blob;
|
||||||
|
use crate::utils::path::path_buf_to_string;
|
||||||
|
|
||||||
|
pub struct Copied {
|
||||||
|
pub obj: LocalObj,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl PushChange for Copied {
|
||||||
|
fn can_push(&self, whitelist: &mut Option<PathBuf>) -> PushState {
|
||||||
|
match self.flow(&self.obj, whitelist.clone()) {
|
||||||
|
PushFlowState::Whitelisted => PushState::Done,
|
||||||
|
PushFlowState::NotOnRemote => PushState::Valid,
|
||||||
|
PushFlowState::RemoteIsNewer => PushState::Conflict,
|
||||||
|
PushFlowState::LocalIsNewer => PushState::Conflict,
|
||||||
|
PushFlowState::Error => PushState::Error,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn push(&self) -> io::Result<()> {
|
||||||
|
let obj = &self.obj;
|
||||||
|
let res = Copy::new()
|
||||||
|
.set_url_copy(
|
||||||
|
&path_buf_to_string(obj.path_from.clone().unwrap()),
|
||||||
|
obj.path.to_str().unwrap())
|
||||||
|
.send();
|
||||||
|
|
||||||
|
match res {
|
||||||
|
Err(ApiError::IncorrectRequest(err)) => {
|
||||||
|
eprintln!("fatal: error copying file {}: {}", obj.name, err.status());
|
||||||
|
std::process::exit(1);
|
||||||
|
},
|
||||||
|
Err(ApiError::RequestError(_)) => {
|
||||||
|
eprintln!("fatal: request error copying file {}", obj.name);
|
||||||
|
std::process::exit(1);
|
||||||
|
}
|
||||||
|
_ => (),
|
||||||
|
}
|
||||||
|
|
||||||
|
// get lastmodified props to update it
|
||||||
|
let props = ReqProps::new()
|
||||||
|
.set_url(obj.path.to_str().unwrap())
|
||||||
|
.getlastmodified()
|
||||||
|
.send_req_single();
|
||||||
|
|
||||||
|
let prop = match props {
|
||||||
|
Ok(o) => o,
|
||||||
|
Err(ApiError::IncorrectRequest(err)) => {
|
||||||
|
eprintln!("fatal: {}", err.status());
|
||||||
|
std::process::exit(1);
|
||||||
|
},
|
||||||
|
Err(ApiError::EmptyError(_)) => {
|
||||||
|
eprintln!("Failed to get body");
|
||||||
|
std::process::exit(1);
|
||||||
|
}
|
||||||
|
Err(ApiError::RequestError(err)) => {
|
||||||
|
eprintln!("fatal: {}", err);
|
||||||
|
std::process::exit(1);
|
||||||
|
},
|
||||||
|
Err(ApiError::Unexpected(_)) => todo!()
|
||||||
|
};
|
||||||
|
|
||||||
|
let lastmodified = prop.lastmodified.unwrap().timestamp_millis();
|
||||||
|
|
||||||
|
// create destination blob
|
||||||
|
if let Err(err) = Blob::from_path(obj.path.clone()).create(&lastmodified.to_string(), false) {
|
||||||
|
eprintln!("err: creating ref of {}: {}", obj.name.clone(), err);
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
// download file with .distant at the end
|
||||||
|
fn conflict(&self) {
|
||||||
|
todo!()
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,19 +1,21 @@
|
|||||||
use std::path::Path;
|
use std::path::PathBuf;
|
||||||
|
use std::io;
|
||||||
use crate::services::api::ApiError;
|
use crate::services::api::ApiError;
|
||||||
use crate::services::req_props::ReqProps;
|
use crate::services::api_call::ApiCall;
|
||||||
use crate::services::delete_path::DeletePath;
|
use crate::services::delete_path::DeletePath;
|
||||||
use crate::store::index;
|
use crate::store::index;
|
||||||
use crate::store::object::blob;
|
use crate::store::object::blob::Blob;
|
||||||
use crate::commands::status::LocalObj;
|
use crate::commands::status::LocalObj;
|
||||||
use crate::commands::push::push_factory::{PushState, PushChange, PushFlowState};
|
use crate::commands::push::push_factory::{PushState, PushChange, PushFlowState};
|
||||||
|
use crate::store::object::object::ObjMethods;
|
||||||
|
|
||||||
pub struct Deleted {
|
pub struct Deleted {
|
||||||
pub obj: LocalObj
|
pub obj: LocalObj
|
||||||
}
|
}
|
||||||
|
|
||||||
impl PushChange for Deleted {
|
impl PushChange for Deleted {
|
||||||
fn can_push(&self, whitelist: Option<&Path>) -> PushState {
|
fn can_push(&self, whitelist: &mut Option<PathBuf>) -> PushState {
|
||||||
match self.flow(&self.obj, whitelist) {
|
match self.flow(&self.obj, whitelist.clone()) {
|
||||||
PushFlowState::Whitelisted => PushState::Done,
|
PushFlowState::Whitelisted => PushState::Done,
|
||||||
PushFlowState::NotOnRemote => PushState::Done,
|
PushFlowState::NotOnRemote => PushState::Done,
|
||||||
PushFlowState::RemoteIsNewer => PushState::Conflict,
|
PushFlowState::RemoteIsNewer => PushState::Conflict,
|
||||||
@@ -22,11 +24,11 @@ impl PushChange for Deleted {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn push(&self) {
|
fn push(&self) -> io::Result<()> {
|
||||||
let obj = &self.obj;
|
let obj = &self.obj;
|
||||||
let res = DeletePath::new()
|
let res = DeletePath::new()
|
||||||
.set_url(obj.path.to_str().unwrap())
|
.set_url(obj.path.to_str().unwrap())
|
||||||
.send_with_err();
|
.send();
|
||||||
|
|
||||||
match res {
|
match res {
|
||||||
Err(ApiError::IncorrectRequest(err)) => {
|
Err(ApiError::IncorrectRequest(err)) => {
|
||||||
@@ -41,9 +43,13 @@ impl PushChange for Deleted {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// update tree
|
// update tree
|
||||||
blob::rm(&obj.path.clone());
|
// todo date
|
||||||
|
Blob::from_path(obj.path.clone()).rm()?;
|
||||||
|
|
||||||
// remove index
|
// remove index
|
||||||
index::rm_line(obj.path.to_str().unwrap());
|
index::rm_line(obj.path.to_str().unwrap())?;
|
||||||
|
|
||||||
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
fn conflict(&self) {
|
fn conflict(&self) {
|
||||||
|
|||||||
80
src/commands/push/modified.rs
Normal file
80
src/commands/push/modified.rs
Normal file
@@ -0,0 +1,80 @@
|
|||||||
|
use std::path::PathBuf;
|
||||||
|
use std::io;
|
||||||
|
use crate::services::api::ApiError;
|
||||||
|
use crate::services::api_call::ApiCall;
|
||||||
|
use crate::services::req_props::ReqProps;
|
||||||
|
use crate::services::upload_file::UploadFile;
|
||||||
|
use crate::commands::status::LocalObj;
|
||||||
|
use crate::commands::push::push_factory::{PushState, PushChange, PushFlowState};
|
||||||
|
use crate::store::object::blob::Blob;
|
||||||
|
|
||||||
|
pub struct Modified {
|
||||||
|
pub obj: LocalObj,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl PushChange for Modified {
|
||||||
|
fn can_push(&self, whitelist: &mut Option<PathBuf>) -> PushState {
|
||||||
|
match self.flow(&self.obj, whitelist.clone()) {
|
||||||
|
PushFlowState::Whitelisted => PushState::Done,
|
||||||
|
PushFlowState::NotOnRemote => PushState::Valid,
|
||||||
|
PushFlowState::RemoteIsNewer => PushState::Conflict,
|
||||||
|
PushFlowState::LocalIsNewer => PushState::Valid,
|
||||||
|
PushFlowState::Error => PushState::Error,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn push(&self) -> io::Result<()> {
|
||||||
|
let obj = &self.obj;
|
||||||
|
let res = UploadFile::new()
|
||||||
|
.set_url(obj.path.to_str().unwrap())
|
||||||
|
.set_file(obj.path.clone())
|
||||||
|
.send();
|
||||||
|
|
||||||
|
match res {
|
||||||
|
Err(ApiError::IncorrectRequest(err)) => {
|
||||||
|
eprintln!("fatal: error pushing file {}: {}", obj.name, err.status());
|
||||||
|
std::process::exit(1);
|
||||||
|
},
|
||||||
|
Err(ApiError::RequestError(_)) => {
|
||||||
|
eprintln!("fatal: request error pushing file {}", obj.name);
|
||||||
|
std::process::exit(1);
|
||||||
|
}
|
||||||
|
_ => (),
|
||||||
|
}
|
||||||
|
|
||||||
|
// get lastmodified props to update it
|
||||||
|
let props = ReqProps::new()
|
||||||
|
.set_url(obj.path.to_str().unwrap())
|
||||||
|
.getlastmodified()
|
||||||
|
.send_req_single();
|
||||||
|
|
||||||
|
let prop = match props {
|
||||||
|
Ok(o) => o,
|
||||||
|
Err(ApiError::IncorrectRequest(err)) => {
|
||||||
|
eprintln!("fatal: {}", err.status());
|
||||||
|
std::process::exit(1);
|
||||||
|
},
|
||||||
|
Err(ApiError::EmptyError(_)) => {
|
||||||
|
eprintln!("Failed to get body");
|
||||||
|
std::process::exit(1);
|
||||||
|
}
|
||||||
|
Err(ApiError::RequestError(err)) => {
|
||||||
|
eprintln!("fatal: {}", err);
|
||||||
|
std::process::exit(1);
|
||||||
|
},
|
||||||
|
Err(ApiError::Unexpected(_)) => todo!()
|
||||||
|
};
|
||||||
|
|
||||||
|
let lastmodified = prop.lastmodified.unwrap().timestamp_millis();
|
||||||
|
|
||||||
|
// update blob
|
||||||
|
Blob::from_path(obj.path.clone()).update(&lastmodified.to_string())?;
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
// download file with .distant at the end
|
||||||
|
fn conflict(&self) {
|
||||||
|
todo!()
|
||||||
|
}
|
||||||
|
}
|
||||||
88
src/commands/push/moved.rs
Normal file
88
src/commands/push/moved.rs
Normal file
@@ -0,0 +1,88 @@
|
|||||||
|
use std::path::PathBuf;
|
||||||
|
use std::io;
|
||||||
|
use crate::services::api::ApiError;
|
||||||
|
use crate::services::api_call::ApiCall;
|
||||||
|
use crate::services::r#move::Move;
|
||||||
|
use crate::services::req_props::ReqProps;
|
||||||
|
use crate::commands::status::LocalObj;
|
||||||
|
use crate::commands::push::push_factory::{PushState, PushChange, PushFlowState};
|
||||||
|
use crate::store::object::blob::Blob;
|
||||||
|
use crate::utils::path::path_buf_to_string;
|
||||||
|
use crate::store::object::object::ObjMethods;
|
||||||
|
|
||||||
|
pub struct Moved {
|
||||||
|
pub obj: LocalObj,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl PushChange for Moved {
|
||||||
|
fn can_push(&self, whitelist: &mut Option<PathBuf>) -> PushState {
|
||||||
|
match self.flow(&self.obj, whitelist.clone()) {
|
||||||
|
PushFlowState::Whitelisted => PushState::Done,
|
||||||
|
PushFlowState::NotOnRemote => PushState::Valid,
|
||||||
|
PushFlowState::RemoteIsNewer => PushState::Conflict,
|
||||||
|
PushFlowState::LocalIsNewer => PushState::Conflict,
|
||||||
|
PushFlowState::Error => PushState::Error,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn push(&self) -> io::Result<()> {
|
||||||
|
let obj = &self.obj;
|
||||||
|
let res = Move::new()
|
||||||
|
.set_url_move(
|
||||||
|
&path_buf_to_string(obj.path_from.clone().unwrap()),
|
||||||
|
obj.path.to_str().unwrap())
|
||||||
|
.send();
|
||||||
|
|
||||||
|
match res {
|
||||||
|
Err(ApiError::IncorrectRequest(err)) => {
|
||||||
|
eprintln!("fatal: error moving file {}: {}", obj.name, err.status());
|
||||||
|
std::process::exit(1);
|
||||||
|
},
|
||||||
|
Err(ApiError::RequestError(_)) => {
|
||||||
|
eprintln!("fatal: request error moving file {}", obj.name);
|
||||||
|
std::process::exit(1);
|
||||||
|
}
|
||||||
|
_ => (),
|
||||||
|
}
|
||||||
|
|
||||||
|
// get lastmodified props to update it
|
||||||
|
let props = ReqProps::new()
|
||||||
|
.set_url(obj.path.to_str().unwrap())
|
||||||
|
.getlastmodified()
|
||||||
|
.send_req_single();
|
||||||
|
|
||||||
|
let prop = match props {
|
||||||
|
Ok(o) => o,
|
||||||
|
Err(ApiError::IncorrectRequest(err)) => {
|
||||||
|
eprintln!("fatal: {}", err.status());
|
||||||
|
std::process::exit(1);
|
||||||
|
},
|
||||||
|
Err(ApiError::EmptyError(_)) => {
|
||||||
|
eprintln!("Failed to get body");
|
||||||
|
std::process::exit(1);
|
||||||
|
}
|
||||||
|
Err(ApiError::RequestError(err)) => {
|
||||||
|
eprintln!("fatal: {}", err);
|
||||||
|
std::process::exit(1);
|
||||||
|
},
|
||||||
|
Err(ApiError::Unexpected(_)) => todo!()
|
||||||
|
};
|
||||||
|
|
||||||
|
let lastmodified = prop.lastmodified.unwrap().timestamp_millis();
|
||||||
|
|
||||||
|
// delete source and create destination blob
|
||||||
|
if let Err(err) = Blob::from_path(obj.path.clone()).create(&lastmodified.to_string(), false) {
|
||||||
|
eprintln!("err: creating ref of {}: {}", obj.name.clone(), err);
|
||||||
|
}
|
||||||
|
if let Err(err) = Blob::from_path(obj.path_from.clone().unwrap()).rm() {
|
||||||
|
eprintln!("err: removing ref of {}: {}", obj.name.clone(), err);
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
// download file with .distant at the end
|
||||||
|
fn conflict(&self) {
|
||||||
|
todo!()
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,8 +1,10 @@
|
|||||||
use std::path::Path;
|
use std::path::PathBuf;
|
||||||
|
use std::io;
|
||||||
use crate::services::api::ApiError;
|
use crate::services::api::ApiError;
|
||||||
|
use crate::services::api_call::ApiCall;
|
||||||
|
use crate::services::req_props::ReqProps;
|
||||||
use crate::services::upload_file::UploadFile;
|
use crate::services::upload_file::UploadFile;
|
||||||
use crate::store::index;
|
use crate::store::object::blob::Blob;
|
||||||
use crate::store::object::blob;
|
|
||||||
use crate::commands::status::LocalObj;
|
use crate::commands::status::LocalObj;
|
||||||
use crate::commands::push::push_factory::{PushState, PushChange, PushFlowState};
|
use crate::commands::push::push_factory::{PushState, PushChange, PushFlowState};
|
||||||
|
|
||||||
@@ -11,8 +13,8 @@ pub struct New {
|
|||||||
}
|
}
|
||||||
|
|
||||||
impl PushChange for New {
|
impl PushChange for New {
|
||||||
fn can_push(&self, whitelist: Option<&Path>) -> PushState {
|
fn can_push(&self, whitelist: &mut Option<PathBuf>) -> PushState {
|
||||||
match self.flow(&self.obj, whitelist) {
|
match self.flow(&self.obj, whitelist.clone()) {
|
||||||
PushFlowState::Whitelisted => PushState::Valid,
|
PushFlowState::Whitelisted => PushState::Valid,
|
||||||
PushFlowState::NotOnRemote => PushState::Valid,
|
PushFlowState::NotOnRemote => PushState::Valid,
|
||||||
PushFlowState::RemoteIsNewer => PushState::Conflict,
|
PushFlowState::RemoteIsNewer => PushState::Conflict,
|
||||||
@@ -21,30 +23,55 @@ impl PushChange for New {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn push(&self) {
|
fn push(&self) -> io::Result<()> {
|
||||||
let obj = &self.obj;
|
let obj = &self.obj;
|
||||||
let res = UploadFile::new()
|
let res = UploadFile::new()
|
||||||
.set_url(obj.path.to_str().unwrap())
|
.set_url(obj.path.to_str().unwrap())
|
||||||
.set_file(obj.path.clone())
|
.set_file(obj.path.clone())
|
||||||
.send_with_err();
|
.send();
|
||||||
|
|
||||||
match res {
|
match res {
|
||||||
Err(ApiError::IncorrectRequest(err)) => {
|
Err(ApiError::IncorrectRequest(err)) => {
|
||||||
eprintln!("fatal: error pushing file {}: {}", obj.name, err.status());
|
dbg!(&err);
|
||||||
|
eprintln!("fatal: error pushing file '{}': {}", obj.name, err.status());
|
||||||
std::process::exit(1);
|
std::process::exit(1);
|
||||||
},
|
},
|
||||||
Err(ApiError::RequestError(_)) => {
|
Err(ApiError::RequestError(_)) => {
|
||||||
eprintln!("fatal: request error pushing file {}", obj.name);
|
eprintln!("fatal: request error pushing file '{}'", obj.name);
|
||||||
std::process::exit(1);
|
std::process::exit(1);
|
||||||
}
|
}
|
||||||
_ => (),
|
_ => (),
|
||||||
}
|
}
|
||||||
|
|
||||||
// update tree
|
// get lastmodified props to update it
|
||||||
blob::add(&obj.path.clone(), "todo_date");
|
let props = ReqProps::new()
|
||||||
|
.set_url(obj.path.to_str().unwrap())
|
||||||
|
.getlastmodified()
|
||||||
|
.send_req_single();
|
||||||
|
|
||||||
// remove index
|
let prop = match props {
|
||||||
index::rm_line(obj.path.to_str().unwrap());
|
Ok(o) => o,
|
||||||
|
Err(ApiError::IncorrectRequest(err)) => {
|
||||||
|
eprintln!("fatal: {}", err.status());
|
||||||
|
std::process::exit(1);
|
||||||
|
},
|
||||||
|
Err(ApiError::EmptyError(_)) => {
|
||||||
|
eprintln!("Failed to get body");
|
||||||
|
std::process::exit(1);
|
||||||
|
}
|
||||||
|
Err(ApiError::RequestError(err)) => {
|
||||||
|
eprintln!("fatal: {}", err);
|
||||||
|
std::process::exit(1);
|
||||||
|
},
|
||||||
|
Err(ApiError::Unexpected(_)) => todo!()
|
||||||
|
};
|
||||||
|
|
||||||
|
let lastmodified = prop.lastmodified.unwrap().timestamp_millis();
|
||||||
|
|
||||||
|
// create new blob
|
||||||
|
Blob::from_path(obj.path.clone()).create(&lastmodified.to_string(), false)?;
|
||||||
|
|
||||||
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
// download file with .distant at the end
|
// download file with .distant at the end
|
||||||
|
|||||||
87
src/commands/push/new_dir.rs
Normal file
87
src/commands/push/new_dir.rs
Normal file
@@ -0,0 +1,87 @@
|
|||||||
|
use std::path::PathBuf;
|
||||||
|
use std::io;
|
||||||
|
use crate::services::api::ApiError;
|
||||||
|
use crate::services::api_call::ApiCall;
|
||||||
|
use crate::services::req_props::ReqProps;
|
||||||
|
use crate::services::create_folder::CreateFolder;
|
||||||
|
use crate::store::index;
|
||||||
|
use crate::store::object::tree::Tree;
|
||||||
|
use crate::commands::status::LocalObj;
|
||||||
|
use crate::commands::push::push_factory::{PushState, PushChange, PushFlowState};
|
||||||
|
|
||||||
|
pub struct NewDir {
|
||||||
|
pub obj: LocalObj
|
||||||
|
}
|
||||||
|
|
||||||
|
impl PushChange for NewDir {
|
||||||
|
fn can_push(&self, whitelist: &mut Option<PathBuf>) -> PushState {
|
||||||
|
match self.flow(&self.obj, whitelist.clone()) {
|
||||||
|
PushFlowState::Whitelisted => PushState::Valid,
|
||||||
|
PushFlowState::NotOnRemote => {
|
||||||
|
*whitelist = Some(self.obj.path.clone());
|
||||||
|
PushState::Valid
|
||||||
|
},
|
||||||
|
PushFlowState::RemoteIsNewer => PushState::Conflict,
|
||||||
|
PushFlowState::LocalIsNewer => {
|
||||||
|
*whitelist = Some(self.obj.path.clone());
|
||||||
|
PushState::Done
|
||||||
|
},
|
||||||
|
PushFlowState::Error => PushState::Error,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn push(&self) -> io::Result<()> {
|
||||||
|
let obj = &self.obj;
|
||||||
|
let res = CreateFolder::new()
|
||||||
|
.set_url(obj.path.to_str().unwrap())
|
||||||
|
.send();
|
||||||
|
|
||||||
|
match res {
|
||||||
|
Err(ApiError::IncorrectRequest(err)) => {
|
||||||
|
eprintln!("fatal: error creating folder {}: {}", obj.name, err.status());
|
||||||
|
std::process::exit(1);
|
||||||
|
},
|
||||||
|
Err(ApiError::RequestError(_)) => {
|
||||||
|
eprintln!("fatal: request error creating folder {}", obj.name);
|
||||||
|
std::process::exit(1);
|
||||||
|
}
|
||||||
|
_ => (),
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
// get lastmodified props to update it
|
||||||
|
let props = ReqProps::new()
|
||||||
|
.set_url(obj.path.to_str().unwrap())
|
||||||
|
.getlastmodified()
|
||||||
|
.send_req_single();
|
||||||
|
|
||||||
|
let prop = match props {
|
||||||
|
Ok(o) => o,
|
||||||
|
Err(ApiError::IncorrectRequest(err)) => {
|
||||||
|
eprintln!("fatal: {}", err.status());
|
||||||
|
std::process::exit(1);
|
||||||
|
},
|
||||||
|
Err(ApiError::EmptyError(_)) => {
|
||||||
|
eprintln!("Failed to get body");
|
||||||
|
std::process::exit(1);
|
||||||
|
}
|
||||||
|
Err(ApiError::RequestError(err)) => {
|
||||||
|
eprintln!("fatal: {}", err);
|
||||||
|
std::process::exit(1);
|
||||||
|
},
|
||||||
|
Err(ApiError::Unexpected(_)) => todo!()
|
||||||
|
};
|
||||||
|
|
||||||
|
let lastmodified = prop.lastmodified.unwrap().timestamp_millis();
|
||||||
|
|
||||||
|
// update tree
|
||||||
|
Tree::from_path(obj.path.clone()).create(&lastmodified.to_string(), true)?;
|
||||||
|
|
||||||
|
// remove index
|
||||||
|
index::rm_line(obj.path.to_str().unwrap())?;
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn conflict(&self) {}
|
||||||
|
}
|
||||||
@@ -1,11 +1,17 @@
|
|||||||
use std::path::Path;
|
use std::path::PathBuf;
|
||||||
|
use std::io;
|
||||||
use crate::commands::status::{State, LocalObj};
|
use crate::commands::status::{State, LocalObj};
|
||||||
use crate::services::api::ApiError;
|
use crate::services::api::ApiError;
|
||||||
use crate::store::object;
|
use crate::services::api_call::ApiCall;
|
||||||
use crate::services::req_props::{ObjProps, ReqProps};
|
use crate::services::req_props::ReqProps;
|
||||||
use crate::commands::push::new::New;
|
use crate::commands::push::new::New;
|
||||||
//use crate::commands::push::new_dir::NewDir;
|
use crate::commands::push::new_dir::NewDir;
|
||||||
//use crate::commands::push::deleted::Deleted;
|
use crate::commands::push::rm_dir::RmDir;
|
||||||
|
use crate::commands::push::deleted::Deleted;
|
||||||
|
use crate::commands::push::modified::Modified;
|
||||||
|
use crate::commands::push::moved::Moved;
|
||||||
|
use crate::commands::push::copied::Copied;
|
||||||
|
use crate::store::object::blob::Blob;
|
||||||
|
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
pub enum PushState {
|
pub enum PushState {
|
||||||
@@ -24,18 +30,19 @@ pub enum PushFlowState {
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub trait PushChange {
|
pub trait PushChange {
|
||||||
fn can_push(&self, whitelist: Option<&Path>) -> PushState;
|
fn can_push(&self, whitelist: &mut Option<PathBuf>) -> PushState;
|
||||||
fn try_push(&self, whitelist: Option<&Path>);
|
fn push(&self) -> io::Result<()>;
|
||||||
fn push(&self);
|
fn conflict(&self);
|
||||||
|
|
||||||
fn is_whitelisted(&self, obj: &LocalObj, path: Option<&Path>) -> bool {
|
fn is_whitelisted(&self, obj: &LocalObj, path: Option<PathBuf>) -> bool {
|
||||||
match path {
|
match path {
|
||||||
Some(p) => obj.path.starts_with(p),
|
Some(p) => obj.path.starts_with(p),
|
||||||
None => false,
|
None => false,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn flow(&self, obj: &LocalObj, whitelist: Option<&Path>) -> PushFlowState {
|
fn flow(&self, obj: &LocalObj, whitelist: Option<PathBuf>) -> PushFlowState {
|
||||||
|
// todo moved: from same file, destination doesn't exist but parent do
|
||||||
if self.is_whitelisted(obj, whitelist) {
|
if self.is_whitelisted(obj, whitelist) {
|
||||||
return PushFlowState::Whitelisted;
|
return PushFlowState::Whitelisted;
|
||||||
}
|
}
|
||||||
@@ -52,6 +59,7 @@ pub trait PushChange {
|
|||||||
if err.status() == 404 {
|
if err.status() == 404 {
|
||||||
Ok(None)
|
Ok(None)
|
||||||
} else {
|
} else {
|
||||||
|
eprintln!("err: when requesting properties of {} ({})", obj.name, err.status());
|
||||||
Err(())
|
Err(())
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
@@ -65,7 +73,9 @@ pub trait PushChange {
|
|||||||
};
|
};
|
||||||
|
|
||||||
// check if remote is newest
|
// check if remote is newest
|
||||||
let last_sync_ts = object::get_timestamp(obj.path.to_str().unwrap().to_string()).unwrap();
|
let last_sync_ts = Blob::from_path(obj.path.clone())
|
||||||
|
.saved_remote_ts()
|
||||||
|
.parse::<i64>().unwrap();
|
||||||
let remote_ts = obj_data.lastmodified.unwrap().timestamp_millis();
|
let remote_ts = obj_data.lastmodified.unwrap().timestamp_millis();
|
||||||
|
|
||||||
if last_sync_ts < remote_ts {
|
if last_sync_ts < remote_ts {
|
||||||
@@ -82,22 +92,21 @@ impl PushFactory {
|
|||||||
pub fn new(&self, obj: LocalObj) -> Box<dyn PushChange> {
|
pub fn new(&self, obj: LocalObj) -> Box<dyn PushChange> {
|
||||||
match obj.state {
|
match obj.state {
|
||||||
State::New => Box::new(New { obj }),
|
State::New => Box::new(New { obj }),
|
||||||
State::Renamed => todo!(),
|
State::Modified => Box::new(Modified { obj }),
|
||||||
State::Modified => todo!(),
|
State::Deleted => Box::new(Deleted { obj }),
|
||||||
State::Deleted => todo!(),
|
State::Moved => Box::new(Moved { obj }),
|
||||||
//State::Deleted => Box::new(Deleted {}),
|
State::Copied => Box::new(Copied { obj }),
|
||||||
State::Default => todo!(),
|
State::Default => todo!(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn new_dir(&self, obj: LocalObj) -> Box<dyn PushChange> {
|
pub fn new_dir(&self, obj: LocalObj) -> Box<dyn PushChange> {
|
||||||
match obj.state {
|
match obj.state {
|
||||||
//State::New => Box::new(NewDir {}),
|
State::New => Box::new(NewDir { obj }),
|
||||||
State::New => todo!(),
|
|
||||||
State::Renamed => todo!(),
|
|
||||||
State::Modified => todo!(),
|
State::Modified => todo!(),
|
||||||
State::Deleted => todo!(),
|
State::Deleted => Box::new(RmDir { obj }),
|
||||||
State::Default => todo!(),
|
State::Default => todo!(),
|
||||||
|
_ => todo!(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
62
src/commands/push/rm_dir.rs
Normal file
62
src/commands/push/rm_dir.rs
Normal file
@@ -0,0 +1,62 @@
|
|||||||
|
use std::path::PathBuf;
|
||||||
|
use std::io;
|
||||||
|
use crate::services::api::ApiError;
|
||||||
|
use crate::services::api_call::ApiCall;
|
||||||
|
use crate::services::delete_path::DeletePath;
|
||||||
|
use crate::store::index;
|
||||||
|
use crate::store::object::tree::Tree;
|
||||||
|
use crate::commands::status::LocalObj;
|
||||||
|
use crate::commands::push::push_factory::{PushState, PushChange, PushFlowState};
|
||||||
|
use crate::store::object::object::ObjMethods;
|
||||||
|
|
||||||
|
pub struct RmDir {
|
||||||
|
pub obj: LocalObj
|
||||||
|
}
|
||||||
|
|
||||||
|
impl PushChange for RmDir {
|
||||||
|
fn can_push(&self, whitelist: &mut Option<PathBuf>) -> PushState {
|
||||||
|
match self.flow(&self.obj, whitelist.clone()) {
|
||||||
|
PushFlowState::Whitelisted => PushState::Done,
|
||||||
|
PushFlowState::NotOnRemote => {
|
||||||
|
*whitelist = Some(self.obj.path.clone());
|
||||||
|
PushState::Done
|
||||||
|
},
|
||||||
|
PushFlowState::RemoteIsNewer => PushState::Conflict,
|
||||||
|
PushFlowState::LocalIsNewer => {
|
||||||
|
*whitelist = Some(self.obj.path.clone());
|
||||||
|
PushState::Valid
|
||||||
|
},
|
||||||
|
PushFlowState::Error => PushState::Error,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn push(&self) -> io::Result<()> {
|
||||||
|
let obj = &self.obj;
|
||||||
|
let res = DeletePath::new()
|
||||||
|
.set_url(obj.path.to_str().unwrap())
|
||||||
|
.send();
|
||||||
|
|
||||||
|
match res {
|
||||||
|
Err(ApiError::IncorrectRequest(err)) => {
|
||||||
|
eprintln!("fatal: error deleting dir {}: {}", obj.name, err.status());
|
||||||
|
std::process::exit(1);
|
||||||
|
},
|
||||||
|
Err(ApiError::RequestError(_)) => {
|
||||||
|
eprintln!("fatal: request error deleting dir {}", obj.name);
|
||||||
|
std::process::exit(1);
|
||||||
|
}
|
||||||
|
_ => (),
|
||||||
|
}
|
||||||
|
|
||||||
|
// update tree
|
||||||
|
// todo update date
|
||||||
|
Tree::from_path(obj.path.clone()).rm()?;
|
||||||
|
|
||||||
|
// remove index
|
||||||
|
index::rm_line(obj.path.to_str().unwrap())?;
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn conflict(&self) {}
|
||||||
|
}
|
||||||
27
src/commands/remote.rs
Normal file
27
src/commands/remote.rs
Normal file
@@ -0,0 +1,27 @@
|
|||||||
|
use crate::commands::config;
|
||||||
|
|
||||||
|
use super::config::get_all_remote;
|
||||||
|
|
||||||
|
pub struct RemoteArgs {
|
||||||
|
pub name: String,
|
||||||
|
pub url: String,
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn remote_add(args: RemoteArgs) {
|
||||||
|
let _ = config::add_remote(&args.name, &args.url);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn remote_list(verbose: bool) {
|
||||||
|
let remotes = get_all_remote();
|
||||||
|
for remote in remotes {
|
||||||
|
if verbose
|
||||||
|
{
|
||||||
|
println!("{} {}", remote.0, remote.1);
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
println!("{}", remote.0);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
62
src/commands/remote_diff.rs
Normal file
62
src/commands/remote_diff.rs
Normal file
@@ -0,0 +1,62 @@
|
|||||||
|
use crate::services::api::ApiError;
|
||||||
|
use crate::services::api_call::ApiCall;
|
||||||
|
use crate::services::req_props::{ReqProps, ObjProps};
|
||||||
|
use crate::store::object::Object;
|
||||||
|
use crate::utils::api::{ApiProps, get_api_props};
|
||||||
|
use crate::utils::path;
|
||||||
|
use crate::utils::remote::{enumerate_remote, EnumerateOptions};
|
||||||
|
use std::path::PathBuf;
|
||||||
|
|
||||||
|
// todo deletion
|
||||||
|
pub fn remote_diff() {
|
||||||
|
let relative_p = path::current()
|
||||||
|
.unwrap()
|
||||||
|
.strip_prefix(path::repo_root()).unwrap().to_path_buf();
|
||||||
|
let (folders, files) = get_diff(relative_p);
|
||||||
|
|
||||||
|
for folder in folders {
|
||||||
|
println!("should pull {}", folder.clone().relative_s.unwrap());
|
||||||
|
}
|
||||||
|
for file in files {
|
||||||
|
println!("should pull {}", file.clone().relative_s.unwrap());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_diff(path: PathBuf) -> (Vec<ObjProps>, Vec<ObjProps>) {
|
||||||
|
|
||||||
|
let depth = "6"; // todo opti
|
||||||
|
let api_props = get_api_props();
|
||||||
|
|
||||||
|
enumerate_remote(
|
||||||
|
|a| req(&api_props, depth, a),
|
||||||
|
Some(&should_skip),
|
||||||
|
EnumerateOptions {
|
||||||
|
depth: Some(depth.to_owned()),
|
||||||
|
relative_s: Some(path.to_str().unwrap().to_owned())
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
fn should_skip(obj: ObjProps) -> bool {
|
||||||
|
let mut o = Object::new(&obj.clone().relative_s.unwrap());
|
||||||
|
let exist = o.exists();
|
||||||
|
|
||||||
|
// if doesn't exist locally when cannot skip it as we need to pull it
|
||||||
|
if !exist {
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
// if local directory is older there is changes on the remote we cannot
|
||||||
|
// skip this folder
|
||||||
|
!o.read().is_older(obj.lastmodified.unwrap().timestamp())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn req(api_props: &ApiProps, depth: &str, relative_s: &str) -> Result<Vec<ObjProps>, ApiError> {
|
||||||
|
ReqProps::new()
|
||||||
|
.set_request(relative_s, &api_props)
|
||||||
|
.set_depth(depth)
|
||||||
|
.gethref()
|
||||||
|
.getcontentlength() // todo opti
|
||||||
|
.getlastmodified()
|
||||||
|
.send_req_multiple()
|
||||||
|
}
|
||||||
|
|
||||||
@@ -2,14 +2,7 @@ use std::fs::File;
|
|||||||
use crate::utils;
|
use crate::utils;
|
||||||
|
|
||||||
pub fn reset() {
|
pub fn reset() {
|
||||||
let mut root = match utils::path::nextsync_root() {
|
let mut root = utils::path::nextsync();
|
||||||
Some(path) => path,
|
|
||||||
None => {
|
|
||||||
eprintln!("fatal: not a nextsync repository (or any of the parent directories): .nextsync");
|
|
||||||
std::process::exit(1);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
root.push(".nextsync");
|
|
||||||
root.push("index");
|
root.push("index");
|
||||||
if File::create(root).is_err() {
|
if File::create(root).is_err() {
|
||||||
eprintln!("fatal: failed to reset");
|
eprintln!("fatal: failed to reset");
|
||||||
|
|||||||
@@ -1,13 +1,19 @@
|
|||||||
use std::fs::File;
|
|
||||||
use std::path::PathBuf;
|
use std::path::PathBuf;
|
||||||
use std::io::{self, Lines, BufReader};
|
use std::collections::HashMap;
|
||||||
use std::collections::{HashSet, HashMap};
|
|
||||||
use crypto::digest::Digest;
|
use crypto::digest::Digest;
|
||||||
use crypto::sha1::Sha1;
|
use crypto::sha1::Sha1;
|
||||||
use colored::Colorize;
|
use colored::Colorize;
|
||||||
use crate::utils;
|
use crate::utils::path::{self, path_buf_to_string};
|
||||||
use crate::store::object::tree;
|
use crate::store::object::blob::Blob;
|
||||||
|
use crate::store::object::object::Obj;
|
||||||
|
use crate::store::object::tree::Tree;
|
||||||
|
use crate::utils::read::read_folder;
|
||||||
use crate::store::index;
|
use crate::store::index;
|
||||||
|
use crate::store::object::object::ObjMethods;
|
||||||
|
|
||||||
|
pub struct StatusArgs {
|
||||||
|
pub nostyle: bool,
|
||||||
|
}
|
||||||
|
|
||||||
#[derive(PartialEq)]
|
#[derive(PartialEq)]
|
||||||
enum RemoveSide {
|
enum RemoveSide {
|
||||||
@@ -20,22 +26,128 @@ enum RemoveSide {
|
|||||||
pub enum State {
|
pub enum State {
|
||||||
Default,
|
Default,
|
||||||
New,
|
New,
|
||||||
Renamed,
|
Moved,
|
||||||
|
Copied,
|
||||||
Modified,
|
Modified,
|
||||||
Deleted,
|
Deleted,
|
||||||
}
|
}
|
||||||
|
|
||||||
// todo: relative path, filename, get modified
|
// todo: relative path, filename
|
||||||
pub fn status() {
|
// todo: not catch added empty folder
|
||||||
let (mut new_objs, mut del_objs) = get_diff();
|
pub fn status(args: StatusArgs) {
|
||||||
dbg!(get_diff());
|
let mut all_hashes = get_all_objs_hashes();
|
||||||
let mut renamed_objs = get_renamed(&mut new_objs, &mut del_objs);
|
let staged_objs = get_staged(&mut all_hashes);
|
||||||
// get copy, modified
|
|
||||||
let mut objs = new_objs;
|
let objs: Vec<LocalObj> = all_hashes.iter().map(|x| {
|
||||||
objs.append(&mut del_objs);
|
x.1.clone()
|
||||||
objs.append(&mut renamed_objs);
|
}).collect();
|
||||||
let staged_objs = get_staged(&mut objs);
|
|
||||||
|
if args.nostyle
|
||||||
|
{
|
||||||
|
print_status_nostyle(staged_objs, objs);
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
print_status(staged_objs, objs);
|
print_status(staged_objs, objs);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_all_objs() -> Vec<LocalObj> {
|
||||||
|
let all_hashes = get_all_objs_hashes();
|
||||||
|
all_hashes.iter().map(|x| {
|
||||||
|
x.1.clone()
|
||||||
|
}).collect()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_all_objs_hashes() -> HashMap<String, LocalObj> {
|
||||||
|
let (mut new_objs_hashes, mut del_objs_hashes, objs_modified) = get_diff();
|
||||||
|
let move_copy_hashes = get_move_copy_objs(&mut new_objs_hashes, &mut del_objs_hashes);
|
||||||
|
|
||||||
|
let mut hasher = Sha1::new();
|
||||||
|
let mut modified_objs_hashes = HashMap::new();
|
||||||
|
for obj in objs_modified {
|
||||||
|
hasher.input_str(&obj);
|
||||||
|
let hash = hasher.result_str();
|
||||||
|
hasher.reset();
|
||||||
|
|
||||||
|
modified_objs_hashes.insert(hash, LocalObj {
|
||||||
|
// todo otype
|
||||||
|
otype: get_otype(PathBuf::from(obj.clone())),
|
||||||
|
name: obj.clone().to_string(),
|
||||||
|
path: PathBuf::from(obj),
|
||||||
|
path_from: None,
|
||||||
|
state: State::Modified
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut all_hashes = HashMap::new();
|
||||||
|
all_hashes.extend(move_copy_hashes);
|
||||||
|
all_hashes.extend(del_objs_hashes);
|
||||||
|
all_hashes.extend(new_objs_hashes);
|
||||||
|
all_hashes.extend(modified_objs_hashes);
|
||||||
|
|
||||||
|
all_hashes
|
||||||
|
}
|
||||||
|
|
||||||
|
fn should_retain(hasher: &mut Sha1, key: String, obj: LocalObj, move_copy_hashes: &mut HashMap<String, LocalObj>, del_objs_h: &mut HashMap<String, LocalObj>) -> bool {
|
||||||
|
// todo prevent copied or moved if file empty
|
||||||
|
// todo deal with directories
|
||||||
|
if obj.path.is_dir()
|
||||||
|
{
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
let mut blob = Blob::from_path(obj.path.clone());
|
||||||
|
let mut flag = true;
|
||||||
|
let identical_blobs = blob.get_all_identical_blobs();
|
||||||
|
|
||||||
|
// try to find an identical blob among the deleted files (=moved)
|
||||||
|
for obj_s in identical_blobs.clone() {
|
||||||
|
if !flag { break; }
|
||||||
|
|
||||||
|
hasher.input_str(&obj_s);
|
||||||
|
let hash = hasher.result_str();
|
||||||
|
hasher.reset();
|
||||||
|
|
||||||
|
if del_objs_h.contains_key(&hash) {
|
||||||
|
let mut new_move = obj.clone();
|
||||||
|
|
||||||
|
let deleted = del_objs_h.get(&hash).unwrap().clone();
|
||||||
|
del_objs_h.remove(&hash);
|
||||||
|
|
||||||
|
new_move.path_from = Some(deleted.path);
|
||||||
|
new_move.state = State::Moved;
|
||||||
|
move_copy_hashes.insert(key.clone(), new_move.clone());
|
||||||
|
flag = false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// if did not find anything before try to find a file with the same content (=copy)
|
||||||
|
if flag {
|
||||||
|
if let Some(rel_s) = identical_blobs.first() {
|
||||||
|
let root = path::repo_root();
|
||||||
|
let rel_p = PathBuf::from(rel_s.clone());
|
||||||
|
let abs_p = root.join(rel_p.clone());
|
||||||
|
|
||||||
|
if abs_p.exists() {
|
||||||
|
let mut new_copy = obj.clone();
|
||||||
|
new_copy.path_from = Some(rel_p);
|
||||||
|
new_copy.state = State::Copied;
|
||||||
|
move_copy_hashes.insert(key, new_copy.clone());
|
||||||
|
flag = false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
flag
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_move_copy_objs(new_objs_h: &mut HashMap<String, LocalObj>, del_objs_h: &mut HashMap<String, LocalObj>) -> HashMap<String, LocalObj> {
|
||||||
|
let mut hasher = Sha1::new();
|
||||||
|
let mut move_copy_hashes = HashMap::new();
|
||||||
|
|
||||||
|
new_objs_h.retain(|key, obj| {
|
||||||
|
should_retain(&mut hasher, key.to_owned(), obj.clone(), &mut move_copy_hashes, del_objs_h)
|
||||||
|
});
|
||||||
|
move_copy_hashes
|
||||||
}
|
}
|
||||||
|
|
||||||
#[derive(Debug, Clone)]
|
#[derive(Debug, Clone)]
|
||||||
@@ -43,77 +155,99 @@ pub struct LocalObj {
|
|||||||
pub otype: String,
|
pub otype: String,
|
||||||
pub name: String,
|
pub name: String,
|
||||||
pub path: PathBuf,
|
pub path: PathBuf,
|
||||||
|
pub path_from: Option<PathBuf>, // origin path when state is move or copy
|
||||||
pub state: State,
|
pub state: State,
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn get_all_staged() -> Vec<LocalObj> {
|
pub fn get_all_staged() -> Vec<LocalObj> {
|
||||||
// todo opti getting staged and then finding differences ?
|
let mut staged_objs = vec![];
|
||||||
// todo opti return folder
|
|
||||||
let (mut new_objs, mut del_objs) = get_diff();
|
if let Ok(entries) = index::read_line() {
|
||||||
let mut renamed_objs = get_renamed(&mut new_objs, &mut del_objs);
|
for line in entries {
|
||||||
// get copy, modified
|
|
||||||
let mut objs = new_objs;
|
let obj = Obj::from_path(line.unwrap()).get_local_obj();
|
||||||
objs.append(&mut del_objs);
|
if obj.state != State::Default {
|
||||||
objs.append(&mut renamed_objs);
|
staged_objs.push(obj);
|
||||||
let staged_objs = get_staged(&mut objs);
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
staged_objs
|
staged_objs
|
||||||
}
|
}
|
||||||
|
|
||||||
fn get_renamed(_new_obj: &mut Vec<LocalObj>, _del_obj: &mut Vec<LocalObj>) -> Vec<LocalObj> {
|
fn get_staged(hashes: &mut HashMap<String, LocalObj>) -> Vec<LocalObj> {
|
||||||
// get hash of all new obj, compare to hash of all del
|
let mut lines: Vec<String> = vec![];
|
||||||
let renamed_objs = vec![];
|
|
||||||
renamed_objs
|
|
||||||
}
|
|
||||||
|
|
||||||
fn get_staged(objs: &mut Vec<LocalObj>) -> Vec<LocalObj> {
|
|
||||||
let mut indexes = HashSet::new();
|
|
||||||
let mut staged_objs: Vec<LocalObj> = vec![];
|
|
||||||
|
|
||||||
if let Ok(entries) = index::read_line() {
|
if let Ok(entries) = index::read_line() {
|
||||||
for entry in entries {
|
for entry in entries {
|
||||||
indexes.insert(entry.unwrap());
|
lines.push(entry.unwrap());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
objs.retain(|obj| {
|
let mut hasher = Sha1::new();
|
||||||
if indexes.contains(obj.clone().path.to_str().unwrap()) {
|
let mut staged_objs: Vec<LocalObj> = vec![];
|
||||||
staged_objs.push(obj.clone());
|
|
||||||
false
|
let ref_p = path::repo_root();
|
||||||
|
for obj in lines {
|
||||||
|
// hash the object
|
||||||
|
hasher.input_str(&obj);
|
||||||
|
let hash = hasher.result_str();
|
||||||
|
hasher.reset();
|
||||||
|
|
||||||
|
// find it on the list of hashes
|
||||||
|
if hashes.contains_key(&hash) {
|
||||||
|
staged_objs.push(hashes.get(&hash).unwrap().clone());
|
||||||
|
hashes.remove(&hash);
|
||||||
|
}else {
|
||||||
|
let mut t_path = ref_p.clone();
|
||||||
|
let relative_p = PathBuf::from(obj.clone());
|
||||||
|
t_path.push(relative_p.clone());
|
||||||
|
staged_objs.push(LocalObj {
|
||||||
|
otype: get_otype(t_path.clone()),
|
||||||
|
name: obj.to_string(),
|
||||||
|
path: relative_p.clone(),
|
||||||
|
path_from: None,
|
||||||
|
state: {
|
||||||
|
if t_path.exists() {
|
||||||
|
State::New
|
||||||
} else {
|
} else {
|
||||||
true
|
State::Deleted
|
||||||
}
|
}
|
||||||
|
},
|
||||||
});
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
staged_objs
|
staged_objs
|
||||||
}
|
}
|
||||||
|
|
||||||
fn get_diff() -> (Vec<LocalObj>, Vec<LocalObj>) {
|
fn read_tree_to_hashmap(tree: &mut Tree, hashes: &mut HashMap<String, LocalObj>, path: PathBuf) {
|
||||||
|
while let Some(child) = tree.next() {
|
||||||
|
hashes.insert(String::from(child.get_hash_path()), child.get_local_obj());
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_diff() -> (HashMap<String, LocalObj>, HashMap<String, LocalObj>, Vec<String>) {
|
||||||
let mut hashes = HashMap::new();
|
let mut hashes = HashMap::new();
|
||||||
let mut objs: Vec<String> = vec![];
|
let mut objs: Vec<String> = vec![];
|
||||||
|
let mut objs_modified: Vec<String> = vec![];
|
||||||
|
|
||||||
let root = match utils::path::nextsync_root() {
|
let root = path::repo_root();
|
||||||
Some(path) => path,
|
|
||||||
None => {
|
|
||||||
eprintln!("fatal: not a nextsync repository (or any of the parent directories): .nextsync");
|
|
||||||
std::process::exit(1);
|
|
||||||
}
|
|
||||||
};
|
|
||||||
|
|
||||||
dbg!(utils::path::current());
|
let current_p = path::current().unwrap();
|
||||||
let nextsync_path = utils::path::nextsync().unwrap();
|
// todo use repo_root instead of current
|
||||||
let current_p = utils::path::current().unwrap();
|
|
||||||
let dist_path = current_p.strip_prefix(root.clone()).unwrap().to_path_buf();
|
let dist_path = current_p.strip_prefix(root.clone()).unwrap().to_path_buf();
|
||||||
|
|
||||||
if let Ok(lines) = read_head(nextsync_path.clone()) {
|
read_tree_to_hashmap(&mut Tree::from_head(), &mut hashes, dist_path.clone());
|
||||||
add_to_hashmap(lines, &mut hashes, dist_path.clone());
|
//if let Ok(lines) = read_lines(head::path()) {
|
||||||
}
|
// add_to_hashmap(lines, &mut hashes, dist_path.clone());
|
||||||
|
//}
|
||||||
|
|
||||||
if let Ok(entries) = utils::read::read_folder(root.clone()) {
|
if let Ok(entries) = read_folder(root.clone()) {
|
||||||
add_to_vec(entries, &mut objs, root.clone());
|
add_to_vec(entries, &mut objs, root.clone());
|
||||||
}
|
}
|
||||||
|
|
||||||
let mut obj_to_analyse = remove_duplicate(&mut hashes, &mut objs, RemoveSide::Both);
|
let mut obj_to_analyse = remove_duplicate(&mut hashes, &mut objs, RemoveSide::Both);
|
||||||
dbg!(obj_to_analyse.clone());
|
|
||||||
|
|
||||||
while obj_to_analyse.len() > 0 {
|
while obj_to_analyse.len() > 0 {
|
||||||
let cur_obj = obj_to_analyse.pop().unwrap();
|
let cur_obj = obj_to_analyse.pop().unwrap();
|
||||||
@@ -122,45 +256,57 @@ fn get_diff() -> (Vec<LocalObj>, Vec<LocalObj>) {
|
|||||||
let obj_path = root.clone().join(cur_path.clone());
|
let obj_path = root.clone().join(cur_path.clone());
|
||||||
|
|
||||||
if obj_path.is_dir() {
|
if obj_path.is_dir() {
|
||||||
if let Some((_, lines)) = tree::read(cur_obj.clone()) {
|
// read virtual tree
|
||||||
add_to_hashmap(lines, &mut hashes, cur_path.clone());
|
read_tree_to_hashmap(&mut Tree::from_path(cur_obj.clone()), &mut hashes, dist_path.clone());
|
||||||
}
|
//let mut tree = Tree::from_path(cur_obj.clone());
|
||||||
|
//if let Some(lines) = tree.get_children() {
|
||||||
|
//add_to_hashmap(lines, &mut hashes, cur_path.clone());
|
||||||
|
//}
|
||||||
|
|
||||||
if let Ok(entries) = utils::read::read_folder(obj_path.clone()) {
|
// read physical tree
|
||||||
|
if let Ok(entries) = read_folder(obj_path.clone()) {
|
||||||
add_to_vec(entries, &mut objs, root.clone());
|
add_to_vec(entries, &mut objs, root.clone());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// remove duplicate
|
||||||
let diff = remove_duplicate(&mut hashes, &mut objs, RemoveSide::Both);
|
let diff = remove_duplicate(&mut hashes, &mut objs, RemoveSide::Both);
|
||||||
obj_to_analyse.append(&mut diff.clone());
|
obj_to_analyse.append(&mut diff.clone());
|
||||||
} else {
|
} else {
|
||||||
// todo look for change
|
if Blob::from_path(cur_path).has_changes() {
|
||||||
|
objs_modified.push(cur_obj);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
let del_objs: Vec<LocalObj> = hashes.iter().map(|x| {
|
for (_, elt) in &mut hashes {
|
||||||
LocalObj {
|
elt.state = State::Deleted;
|
||||||
otype: x.1.otype.clone(),
|
|
||||||
name: x.1.name.clone(),
|
|
||||||
path: x.1.path.clone(),
|
|
||||||
state: State::Deleted
|
|
||||||
}
|
}
|
||||||
}).collect();
|
|
||||||
|
|
||||||
let new_objs: Vec<LocalObj> = objs.iter().map(|x| {
|
let mut new_objs_hashes = HashMap::new();
|
||||||
let p = PathBuf::from(x.to_string());
|
let mut hasher = Sha1::new();
|
||||||
|
for obj in objs {
|
||||||
|
// hash the object
|
||||||
|
hasher.input_str(&obj);
|
||||||
|
let hash = hasher.result_str();
|
||||||
|
hasher.reset();
|
||||||
|
|
||||||
|
let p = PathBuf::from(obj.to_string());
|
||||||
|
let abs_p = path::repo_root().join(p.clone());
|
||||||
// todo name
|
// todo name
|
||||||
LocalObj {
|
new_objs_hashes.insert(String::from(hash), LocalObj {
|
||||||
otype: get_type(p.clone()),
|
otype: get_otype(abs_p),
|
||||||
name: x.to_string(),
|
name: obj.to_string(),
|
||||||
path: p,
|
path: p,
|
||||||
|
path_from: None,
|
||||||
state: State::New
|
state: State::New
|
||||||
|
});
|
||||||
}
|
}
|
||||||
}).collect();
|
|
||||||
(new_objs, del_objs)
|
(new_objs_hashes, hashes, objs_modified)
|
||||||
}
|
}
|
||||||
|
|
||||||
fn get_type(p: PathBuf) -> String {
|
fn get_otype(p: PathBuf) -> String {
|
||||||
if p.is_dir() {
|
if p.is_dir() {
|
||||||
String::from("tree")
|
String::from("tree")
|
||||||
} else {
|
} else {
|
||||||
@@ -168,27 +314,28 @@ fn get_type(p: PathBuf) -> String {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn add_to_hashmap(lines: Lines<BufReader<File>>, hashes: &mut HashMap<String, LocalObj>, path: PathBuf) {
|
//fn add_to_hashmap(lines: Lines<BufReader<File>>, hashes: &mut HashMap<String, LocalObj>, path: PathBuf) {
|
||||||
for line in lines {
|
// for line in lines {
|
||||||
if let Ok(ip) = line {
|
// if let Ok(ip) = line {
|
||||||
if ip.clone().len() > 5 {
|
// if ip.clone().len() > 5 {
|
||||||
let (ftype, hash, name) = tree::parse_line(ip);
|
// let (ftype, hash, name) = tree::parse_line(ip);
|
||||||
let mut p = path.clone();
|
// let mut p = path.clone();
|
||||||
p.push(name.clone());
|
// p.push(name.clone());
|
||||||
hashes.insert(String::from(hash), LocalObj{
|
// hashes.insert(String::from(hash), LocalObj{
|
||||||
otype: String::from(ftype),
|
// otype: String::from(ftype),
|
||||||
name: String::from(name),
|
// name: String::from(name),
|
||||||
path: p,
|
// path: p,
|
||||||
state: State::Default,
|
// path_from: None,
|
||||||
});
|
// state: State::Default,
|
||||||
}
|
// });
|
||||||
}
|
// }
|
||||||
}
|
// }
|
||||||
}
|
// }
|
||||||
|
//}
|
||||||
|
|
||||||
fn add_to_vec(entries: Vec<PathBuf>, objects: &mut Vec<String>, root: PathBuf) {
|
fn add_to_vec(entries: Vec<PathBuf>, objects: &mut Vec<String>, root: PathBuf) {
|
||||||
for entry in entries {
|
for entry in entries {
|
||||||
if !is_nextsync_config(entry.clone()) {
|
if !path::is_nextsync_config(entry.clone()) {
|
||||||
let object_path = entry.strip_prefix(root.clone()).unwrap();
|
let object_path = entry.strip_prefix(root.clone()).unwrap();
|
||||||
objects.push(String::from(object_path.to_str().unwrap()));
|
objects.push(String::from(object_path.to_str().unwrap()));
|
||||||
}
|
}
|
||||||
@@ -197,8 +344,6 @@ fn add_to_vec(entries: Vec<PathBuf>, objects: &mut Vec<String>, root: PathBuf) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn print_status(staged_objs: Vec<LocalObj>, objs: Vec<LocalObj>) {
|
fn print_status(staged_objs: Vec<LocalObj>, objs: Vec<LocalObj>) {
|
||||||
dbg!(staged_objs.clone());
|
|
||||||
dbg!(objs.clone());
|
|
||||||
if staged_objs.len() == 0 && objs.len() == 0 {
|
if staged_objs.len() == 0 && objs.len() == 0 {
|
||||||
println!("Nothing to push, working tree clean");
|
println!("Nothing to push, working tree clean");
|
||||||
return;
|
return;
|
||||||
@@ -216,7 +361,7 @@ fn print_status(staged_objs: Vec<LocalObj>, objs: Vec<LocalObj>) {
|
|||||||
// not staged files
|
// not staged files
|
||||||
if objs.len() != 0 {
|
if objs.len() != 0 {
|
||||||
println!("Changes not staged for push:");
|
println!("Changes not staged for push:");
|
||||||
println!(" (Use\"nextsync add <file>...\" to update what will be pushed)");
|
println!(" (Use \"nextsync add <file>...\" to update what will be pushed)");
|
||||||
|
|
||||||
for object in objs {
|
for object in objs {
|
||||||
print_object(object);
|
print_object(object);
|
||||||
@@ -224,27 +369,52 @@ fn print_status(staged_objs: Vec<LocalObj>, objs: Vec<LocalObj>) {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn print_status_nostyle(staged_objs: Vec<LocalObj>, objs: Vec<LocalObj>) {
|
||||||
|
// todo sort
|
||||||
|
if staged_objs.len() == 0 && objs.len() == 0 {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
for obj in staged_objs {
|
||||||
|
if obj.state == State::Deleted {
|
||||||
|
println!("deleted: {}", obj.name);
|
||||||
|
} else if obj.state == State::New {
|
||||||
|
println!("new: {}", obj.name);
|
||||||
|
} else if obj.state == State::Modified {
|
||||||
|
println!("modified: {}", obj.name);
|
||||||
|
} else if obj.state == State::Moved {
|
||||||
|
println!("moved: {} => {}", path_buf_to_string(obj.path_from.unwrap()), path_buf_to_string(obj.path));
|
||||||
|
} else if obj.state == State::Copied {
|
||||||
|
println!("copied: {} => {}", path_buf_to_string(obj.path_from.unwrap()), path_buf_to_string(obj.path));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
fn print_object(obj: LocalObj) {
|
fn print_object(obj: LocalObj) {
|
||||||
if obj.state == State::Deleted {
|
if obj.state == State::Deleted {
|
||||||
println!(" {} {}", String::from("deleted:").red(), obj.name.red());
|
println!(" {} {}", String::from("deleted:").red(), obj.name.red());
|
||||||
} else if obj.state == State::Renamed {
|
|
||||||
println!(" {} {}", String::from("renamed:").red(), obj.name.red());
|
|
||||||
} else if obj.state == State::New {
|
} else if obj.state == State::New {
|
||||||
println!(" {} {}", String::from("new:").red(), obj.name.red());
|
println!(" {} {}", String::from("new:").red(), obj.name.red());
|
||||||
} else if obj.state == State::Modified {
|
} else if obj.state == State::Modified {
|
||||||
println!(" {} {}", String::from("modified:").red(), obj.name.red());
|
println!(" {} {}", String::from("modified:").red(), obj.name.red());
|
||||||
|
} else if obj.state == State::Moved {
|
||||||
|
println!(" {} {} => {}", String::from("moved:").red(), path_buf_to_string(obj.path_from.unwrap()).red(), path_buf_to_string(obj.path).red());
|
||||||
|
} else if obj.state == State::Copied {
|
||||||
|
println!(" {} {} => {}", String::from("copied:").red(), path_buf_to_string(obj.path_from.unwrap()), path_buf_to_string(obj.path).red());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
fn print_staged_object(obj: LocalObj) {
|
fn print_staged_object(obj: LocalObj) {
|
||||||
if obj.state == State::Deleted {
|
if obj.state == State::Deleted {
|
||||||
println!(" {} {}", String::from("deleted:").green(), obj.name.green());
|
println!(" {} {}", String::from("deleted:").green(), obj.name.green());
|
||||||
} else if obj.state == State::Renamed {
|
|
||||||
println!(" {} {}", String::from("renamed:").green(), obj.name.green());
|
|
||||||
} else if obj.state == State::New {
|
} else if obj.state == State::New {
|
||||||
println!(" {} {}", String::from("new:").green(), obj.name.green());
|
println!(" {} {}", String::from("new:").green(), obj.name.green());
|
||||||
} else if obj.state == State::Modified {
|
} else if obj.state == State::Modified {
|
||||||
println!(" {} {}", String::from("modified:").green(), obj.name.green());
|
println!(" {} {}", String::from("modified:").green(), obj.name.green());
|
||||||
|
} else if obj.state == State::Moved {
|
||||||
|
println!(" {} {} => {}", String::from("moved:").green(), path_buf_to_string(obj.path_from.unwrap()).green(), path_buf_to_string(obj.path).green());
|
||||||
|
} else if obj.state == State::Copied {
|
||||||
|
println!(" {} {} => {}", String::from("copied:"), path_buf_to_string(obj.path_from.unwrap()).green(), path_buf_to_string(obj.path).green());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -277,15 +447,6 @@ fn remove_duplicate(hashes: &mut HashMap<String, LocalObj>, objects: &mut Vec<St
|
|||||||
duplicate
|
duplicate
|
||||||
}
|
}
|
||||||
|
|
||||||
fn is_nextsync_config(path: PathBuf) -> bool {
|
|
||||||
path.ends_with(".nextsync") || path.ends_with(".nextsyncignore")
|
|
||||||
}
|
|
||||||
|
|
||||||
fn read_head(mut path: PathBuf) -> io::Result<io::Lines<io::BufReader<File>>> {
|
|
||||||
path.push("HEAD");
|
|
||||||
utils::read::read_lines(path)
|
|
||||||
}
|
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod tests {
|
mod tests {
|
||||||
use super::*;
|
use super::*;
|
||||||
@@ -310,6 +471,7 @@ mod tests {
|
|||||||
otype: String::from("tree"),
|
otype: String::from("tree"),
|
||||||
name: String::from("test"),
|
name: String::from("test"),
|
||||||
path: PathBuf::from(""),
|
path: PathBuf::from(""),
|
||||||
|
path_from: None,
|
||||||
state: State::Default,
|
state: State::Default,
|
||||||
};
|
};
|
||||||
hashes.insert(hash1.clone(), default_obj.clone());
|
hashes.insert(hash1.clone(), default_obj.clone());
|
||||||
@@ -321,8 +483,7 @@ mod tests {
|
|||||||
objects.push(String::from("file2"));
|
objects.push(String::from("file2"));
|
||||||
objects.push(String::from("file3"));
|
objects.push(String::from("file3"));
|
||||||
remove_duplicate(&mut hashes, &mut objects, RemoveSide::Both);
|
remove_duplicate(&mut hashes, &mut objects, RemoveSide::Both);
|
||||||
dbg!(hashes.clone());
|
|
||||||
dbg!(objects.clone());
|
|
||||||
assert_eq!(hashes.contains_key(&hash4), true);
|
assert_eq!(hashes.contains_key(&hash4), true);
|
||||||
assert_eq!(hashes.len(), 1);
|
assert_eq!(hashes.len(), 1);
|
||||||
assert_eq!(objects, vec!["file3"]);
|
assert_eq!(objects, vec!["file3"]);
|
||||||
|
|||||||
145
src/main.rs
145
src/main.rs
@@ -1,5 +1,6 @@
|
|||||||
use clap::{App, Arg, SubCommand};
|
use clap::Command;
|
||||||
use crate::commands::add::AddArgs;
|
|
||||||
|
mod subcommands;
|
||||||
|
|
||||||
mod commands;
|
mod commands;
|
||||||
mod utils;
|
mod utils;
|
||||||
@@ -8,115 +9,41 @@ mod global;
|
|||||||
mod store;
|
mod store;
|
||||||
|
|
||||||
fn main() {
|
fn main() {
|
||||||
let matches = App::new("NextSync")
|
let app = Command::new("Nextsync")
|
||||||
.version("1.0")
|
.version("1.0")
|
||||||
.author("grimhilt")
|
.author("grimhilt")
|
||||||
.about("")
|
.about("A git-line command line tool to interact with nextcloud")
|
||||||
.subcommand(
|
.subcommands([
|
||||||
SubCommand::with_name("init")
|
subcommands::clone::create(),
|
||||||
.arg(
|
subcommands::init::create(),
|
||||||
Arg::with_name("directory")
|
subcommands::status::create(),
|
||||||
.required(false)
|
subcommands::add::create(),
|
||||||
.takes_value(true)
|
subcommands::push::create(),
|
||||||
.value_name("DIRECTORY")
|
subcommands::reset::create(),
|
||||||
)
|
subcommands::remote::create(),
|
||||||
)
|
subcommands::config::create(),
|
||||||
.subcommand(
|
subcommands::remote_diff::create(),
|
||||||
SubCommand::with_name("status")
|
subcommands::pull::create(),
|
||||||
.arg(
|
subcommands::credential::create(),
|
||||||
Arg::with_name("directory")
|
]);
|
||||||
.required(false)
|
// .setting(clap::AppSettings::SubcommandRequiredElseHelp);
|
||||||
.takes_value(true)
|
|
||||||
.value_name("DIRECTORY")
|
|
||||||
)
|
|
||||||
)
|
|
||||||
.subcommand(SubCommand::with_name("reset"))
|
|
||||||
.subcommand(SubCommand::with_name("push"))
|
|
||||||
.subcommand(
|
|
||||||
SubCommand::with_name("clone")
|
|
||||||
.arg(
|
|
||||||
Arg::with_name("remote")
|
|
||||||
.required(true)
|
|
||||||
.takes_value(true)
|
|
||||||
.value_name("REMOTE")
|
|
||||||
)
|
|
||||||
.arg(
|
|
||||||
Arg::with_name("directory")
|
|
||||||
.required(false)
|
|
||||||
.takes_value(true)
|
|
||||||
.value_name("DIRECTORY")
|
|
||||||
)
|
|
||||||
)
|
|
||||||
.subcommand(
|
|
||||||
SubCommand::with_name("add")
|
|
||||||
.arg(
|
|
||||||
Arg::with_name("files")
|
|
||||||
.required(true)
|
|
||||||
.multiple(true)
|
|
||||||
.takes_value(true)
|
|
||||||
.value_name("FILE")
|
|
||||||
.help("Files to add"),
|
|
||||||
)
|
|
||||||
.arg(
|
|
||||||
Arg::with_name("force")
|
|
||||||
.short("f")
|
|
||||||
.long("force")
|
|
||||||
.help("Allow adding otherwise ignored files."),
|
|
||||||
)
|
|
||||||
)
|
|
||||||
.subcommand(
|
|
||||||
SubCommand::with_name("config")
|
|
||||||
.arg(
|
|
||||||
Arg::with_name("variable")
|
|
||||||
.required(true)
|
|
||||||
.takes_value(true)
|
|
||||||
.value_name("VARIABLE")
|
|
||||||
)
|
|
||||||
.arg(
|
|
||||||
Arg::with_name("value")
|
|
||||||
.required(true)
|
|
||||||
.takes_value(true)
|
|
||||||
.value_name("VALUE")
|
|
||||||
)
|
|
||||||
)
|
|
||||||
.get_matches();
|
|
||||||
|
|
||||||
if let Some(matches) = matches.subcommand_matches("init") {
|
let matches = app.get_matches();
|
||||||
if let Some(val) = matches.values_of("directory") {
|
|
||||||
global::global::set_dir_path(String::from(val.clone().next().unwrap()));
|
match matches.subcommand() {
|
||||||
}
|
Some(("init", args)) => subcommands::init::handler(args),
|
||||||
commands::init::init();
|
Some(("status", args)) => subcommands::status::handler(args),
|
||||||
} else if let Some(matches) = matches.subcommand_matches("status") {
|
Some(("add", args)) => subcommands::add::handler(args),
|
||||||
if let Some(val) = matches.values_of("directory") {
|
Some(("reset", _)) => commands::reset::reset(),
|
||||||
global::global::set_dir_path(String::from(val.clone().next().unwrap()));
|
Some(("clone", args)) => subcommands::clone::handler(args),
|
||||||
}
|
Some(("push", _)) => commands::push::push(),
|
||||||
commands::status::status();
|
Some(("config", args)) => subcommands::config::handler(args),
|
||||||
} else if let Some(matches) = matches.subcommand_matches("add") {
|
Some(("remote-diff", args)) => subcommands::remote_diff::handler(args),
|
||||||
if let Some(files) = matches.values_of("files") {
|
Some(("pull", args)) => subcommands::pull::handler(args),
|
||||||
commands::add::add(AddArgs {
|
Some(("remote", args)) => subcommands::remote::handler(args),
|
||||||
files: files,
|
Some(("credential", args)) => subcommands::credential::handler(args),
|
||||||
force: matches.is_present("force"),
|
Some((_, _)) => {},
|
||||||
});
|
None => {},
|
||||||
}
|
};
|
||||||
} else if let Some(_) = matches.subcommand_matches("reset") {
|
|
||||||
commands::reset::reset();
|
|
||||||
} else if let Some(matches) = matches.subcommand_matches("clone") {
|
|
||||||
if let Some(val) = matches.values_of("directory") {
|
|
||||||
global::global::set_dir_path(String::from(val.clone().next().unwrap()));
|
|
||||||
}
|
|
||||||
if let Some(remote) = matches.values_of("remote") {
|
|
||||||
commands::clone::clone(remote);
|
|
||||||
}
|
|
||||||
} else if let Some(_matches) = matches.subcommand_matches("push") {
|
|
||||||
commands::push::push();
|
|
||||||
} else if let Some(matches) = matches.subcommand_matches("config") {
|
|
||||||
if let Some(mut var) = matches.values_of("variable") {
|
|
||||||
if let Some(mut val) = matches.values_of("value") {
|
|
||||||
if commands::config::set(var.next().unwrap(), val.next().unwrap()).is_err() {
|
|
||||||
eprintln!("fatal: cannot save the value");
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -1,5 +1,14 @@
|
|||||||
pub mod api;
|
pub mod api;
|
||||||
|
pub mod create_folder;
|
||||||
pub mod download_files;
|
pub mod download_files;
|
||||||
pub mod req_props;
|
pub mod req_props;
|
||||||
pub mod upload_file;
|
pub mod upload_file;
|
||||||
pub mod delete_path;
|
pub mod delete_path;
|
||||||
|
pub mod downloader;
|
||||||
|
pub mod r#move;
|
||||||
|
pub mod r#copy;
|
||||||
|
pub mod login;
|
||||||
|
pub mod request_manager;
|
||||||
|
pub mod api_call;
|
||||||
|
//pub mod auth;
|
||||||
|
//pub mod bulk_upload;
|
||||||
|
|||||||
@@ -1,9 +1,19 @@
|
|||||||
use std::env;
|
use std::error::Error;
|
||||||
use dotenv::dotenv;
|
use lazy_static::lazy_static;
|
||||||
|
use std::sync::Mutex;
|
||||||
use reqwest::Client;
|
use reqwest::Client;
|
||||||
use reqwest::RequestBuilder;
|
use reqwest::RequestBuilder;
|
||||||
use reqwest::{Response, Error, IntoUrl, Method};
|
use reqwest::multipart::Form;
|
||||||
|
use reqwest::{Response, Method};
|
||||||
|
use reqwest::header::{HeaderValue, CONTENT_TYPE, HeaderMap, IntoHeaderName};
|
||||||
use crate::utils::api::ApiProps;
|
use crate::utils::api::ApiProps;
|
||||||
|
use crate::commands::config;
|
||||||
|
use crate::commands::clone::get_url_props;
|
||||||
|
use crate::services::request_manager::get_request_manager;
|
||||||
|
|
||||||
|
lazy_static! {
|
||||||
|
static ref HTTP_TOKEN: Mutex<String> = Mutex::new(String::new());
|
||||||
|
}
|
||||||
|
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
pub enum ApiError {
|
pub enum ApiError {
|
||||||
@@ -16,6 +26,9 @@ pub enum ApiError {
|
|||||||
pub struct ApiBuilder {
|
pub struct ApiBuilder {
|
||||||
client: Client,
|
client: Client,
|
||||||
request: Option<RequestBuilder>,
|
request: Option<RequestBuilder>,
|
||||||
|
headers: Option<HeaderMap>,
|
||||||
|
auth_set: bool,
|
||||||
|
host: Option<String>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl ApiBuilder {
|
impl ApiBuilder {
|
||||||
@@ -23,61 +36,88 @@ impl ApiBuilder {
|
|||||||
ApiBuilder {
|
ApiBuilder {
|
||||||
client: Client::new(),
|
client: Client::new(),
|
||||||
request: None,
|
request: None,
|
||||||
|
headers: None,
|
||||||
|
auth_set: false,
|
||||||
|
host: None,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn set_request<U: IntoUrl>(&mut self, method: Method, url: U) -> &mut ApiBuilder {
|
pub fn set_url(&mut self, method: Method, url: &str) -> &mut ApiBuilder {
|
||||||
self.request = Some(self.client.request(method, url));
|
let mut new_url = url.to_owned();
|
||||||
|
if let Some(active) = config::get_core("force_insecure") {
|
||||||
|
if active == "true" {
|
||||||
|
new_url = url.replace("https", "http");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
self.request = Some(self.client.request(method, new_url));
|
||||||
self
|
self
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn build_request(&mut self, method: Method, path: &str) -> &mut ApiBuilder {
|
pub fn build_request(&mut self, method: Method, path: &str) -> &mut ApiBuilder {
|
||||||
dotenv().ok();
|
let remote = match config::get_remote("origin") {
|
||||||
// todo remove env
|
Some(r) => r,
|
||||||
let host = env::var("HOST").unwrap();
|
None => {
|
||||||
let username = env::var("USERNAME").unwrap();
|
eprintln!("fatal: unable to find a remote");
|
||||||
let root = env::var("ROOT").unwrap();
|
std::process::exit(1);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
let (host, username, root) = get_url_props(&remote);
|
||||||
|
self.host = Some(host.clone());
|
||||||
let mut url = String::from(host);
|
let mut url = String::from(host);
|
||||||
url.push_str("/remote.php/dav/files/");
|
url.push_str("/remote.php/dav/files/");
|
||||||
url.push_str(&username);
|
url.push_str(username.unwrap());
|
||||||
url.push_str("/");
|
|
||||||
url.push_str(&root);
|
url.push_str(&root);
|
||||||
url.push_str("/");
|
url.push_str("/");
|
||||||
|
if path != "/" {
|
||||||
url.push_str(path);
|
url.push_str(path);
|
||||||
dbg!(url.clone());
|
}
|
||||||
self.request = Some(self.client.request(method, url));
|
|
||||||
self
|
self.set_url(method, &url)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn set_req(&mut self, meth: Method, p: &str, api_props: &ApiProps) -> &mut ApiBuilder {
|
pub fn set_req(&mut self, meth: Method, p: &str, api_props: &ApiProps) -> &mut ApiBuilder {
|
||||||
|
self.host = Some(api_props.clone().host.clone());
|
||||||
let mut url = String::from(&api_props.host);
|
let mut url = String::from(&api_props.host);
|
||||||
url.push_str("/remote.php/dav/files/");
|
url.push_str("/remote.php/dav/files/");
|
||||||
url.push_str("/");
|
url.push_str("/");
|
||||||
url.push_str(&api_props.username);
|
url.push_str(&api_props.username);
|
||||||
url.push_str(&api_props.root);
|
url.push_str(&api_props.root);
|
||||||
url.push_str("/");
|
url.push_str("/");
|
||||||
|
if p != "/" {
|
||||||
url.push_str(p);
|
url.push_str(p);
|
||||||
self.request = Some(self.client.request(meth, url));
|
|
||||||
self
|
|
||||||
}
|
}
|
||||||
|
|
||||||
fn set_auth(&mut self) -> &mut ApiBuilder {
|
self.set_url(meth, &url)
|
||||||
// todo if not exist
|
}
|
||||||
dotenv().ok();
|
|
||||||
let password = env::var("PASSWORD").unwrap();
|
pub fn set_basic_auth(&mut self, login: String, pwd: String) -> &mut ApiBuilder {
|
||||||
let username = env::var("USERNAME").unwrap();
|
|
||||||
match self.request.take() {
|
match self.request.take() {
|
||||||
None => {
|
None => {
|
||||||
eprintln!("fatal: incorrect request");
|
eprintln!("fatal: incorrect request");
|
||||||
std::process::exit(1);
|
std::process::exit(1);
|
||||||
},
|
},
|
||||||
Some(req) => {
|
Some(req) => {
|
||||||
self.request = Some(req.basic_auth(username, Some(password)));
|
self.request = Some(req.basic_auth(login, Some(pwd)));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
self.auth_set = true;
|
||||||
self
|
self
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn set_token(&mut self, token: String) {
|
||||||
|
match self.request.take() {
|
||||||
|
None => {
|
||||||
|
eprintln!("fatal: incorrect request");
|
||||||
|
std::process::exit(1);
|
||||||
|
},
|
||||||
|
Some(req) => {
|
||||||
|
self.request = Some(req.bearer_auth(token));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
self.auth_set = true;
|
||||||
|
}
|
||||||
|
|
||||||
pub fn set_xml(&mut self, xml_payload: String) -> &mut ApiBuilder {
|
pub fn set_xml(&mut self, xml_payload: String) -> &mut ApiBuilder {
|
||||||
match self.request.take() {
|
match self.request.take() {
|
||||||
None => {
|
None => {
|
||||||
@@ -86,11 +126,32 @@ impl ApiBuilder {
|
|||||||
},
|
},
|
||||||
Some(req) => {
|
Some(req) => {
|
||||||
self.request = Some(req.body(xml_payload));
|
self.request = Some(req.body(xml_payload));
|
||||||
|
self.set_header(CONTENT_TYPE, HeaderValue::from_static("application/xml"));
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
self
|
self
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn set_multipart(&mut self, form: Form) -> &mut ApiBuilder {
|
||||||
|
match self.request.take() {
|
||||||
|
None => {
|
||||||
|
eprintln!("fatal: incorrect request");
|
||||||
|
std::process::exit(1);
|
||||||
|
},
|
||||||
|
Some(req) => {
|
||||||
|
self.request = Some(req.multipart(form));
|
||||||
|
self.set_header(CONTENT_TYPE, HeaderValue::from_static("multipart/related"));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn set_header<K: IntoHeaderName>(&mut self, key: K, val: HeaderValue) -> &mut ApiBuilder {
|
||||||
|
let map = self.headers.get_or_insert(HeaderMap::new());
|
||||||
|
map.insert(key, val);
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
pub fn set_body(&mut self, body: Vec<u8>) -> &mut ApiBuilder {
|
pub fn set_body(&mut self, body: Vec<u8>) -> &mut ApiBuilder {
|
||||||
match self.request.take() {
|
match self.request.take() {
|
||||||
None => {
|
None => {
|
||||||
@@ -102,17 +163,95 @@ impl ApiBuilder {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
self
|
self
|
||||||
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn send(&mut self) -> Result<Response, Error> {
|
fn set_request_manager(&mut self) {
|
||||||
self.set_auth();
|
let mut request_manager = get_request_manager().lock().unwrap();
|
||||||
|
let request_manager = request_manager.as_mut().unwrap();
|
||||||
|
|
||||||
|
if !self.host.is_none()
|
||||||
|
{
|
||||||
|
request_manager.set_host(self.host.clone().unwrap().replace("https://", ""));
|
||||||
|
}
|
||||||
|
|
||||||
|
if !self.auth_set {
|
||||||
|
self.set_token(request_manager.get_token());
|
||||||
|
//self.set_auth();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn send(&mut self, need_text: bool) -> Result<Option<String>, ApiError> {
|
||||||
|
if !self.host.is_none() || !self.auth_set {
|
||||||
|
self.set_request_manager();
|
||||||
|
}
|
||||||
|
|
||||||
|
let res_req = tokio::runtime::Runtime::new().unwrap().block_on(async {
|
||||||
match self.request.take() {
|
match self.request.take() {
|
||||||
None => {
|
None => {
|
||||||
eprintln!("fatal: incorrect request");
|
eprintln!("fatal: incorrect request");
|
||||||
std::process::exit(1);
|
std::process::exit(1);
|
||||||
},
|
},
|
||||||
Some(req) => req.send().await.map_err(Error::from),
|
Some(req) => {
|
||||||
|
if let Some(headers) = &self.headers {
|
||||||
|
req.headers(headers.clone())
|
||||||
|
.send().await
|
||||||
|
} else {
|
||||||
|
req.send().await
|
||||||
|
}
|
||||||
|
},
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
// handle request error
|
||||||
|
let res = match res_req {
|
||||||
|
Err(err) => {
|
||||||
|
eprintln!("fatal: {}", err.source().unwrap());
|
||||||
|
std::process::exit(1);
|
||||||
|
},
|
||||||
|
Ok(res) => res,
|
||||||
|
};
|
||||||
|
|
||||||
|
if res.status().is_success() {
|
||||||
|
if need_text {
|
||||||
|
let body = tokio::runtime::Runtime::new().unwrap().block_on(async {
|
||||||
|
res.text().await
|
||||||
|
}).map_err(|err| ApiError::EmptyError(err))?;
|
||||||
|
Ok(Some(body))
|
||||||
|
} else {
|
||||||
|
Ok(None)
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
Err(ApiError::IncorrectRequest(res))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub async fn old_send(&mut self) -> Result<Response, reqwest::Error> {
|
||||||
|
let mut request_manager = get_request_manager().lock().unwrap();
|
||||||
|
let request_manager = request_manager.as_mut().unwrap();
|
||||||
|
if !self.host.is_none()
|
||||||
|
{
|
||||||
|
request_manager.set_host(self.host.clone().unwrap());
|
||||||
|
}
|
||||||
|
|
||||||
|
if !self.auth_set {
|
||||||
|
//self.set_auth();
|
||||||
|
self.set_token(request_manager.get_token());
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
match self.request.take() {
|
||||||
|
None => {
|
||||||
|
eprintln!("fatal: incorrect request");
|
||||||
|
std::process::exit(1);
|
||||||
|
},
|
||||||
|
Some(req) => {
|
||||||
|
if let Some(headers) = &self.headers {
|
||||||
|
req.headers(headers.clone())
|
||||||
|
.send().await.map_err(reqwest::Error::from)
|
||||||
|
} else {
|
||||||
|
req.send().await.map_err(reqwest::Error::from)
|
||||||
|
}
|
||||||
|
},
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
13
src/services/api_call.rs
Normal file
13
src/services/api_call.rs
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
use crate::services::api::ApiError;
|
||||||
|
|
||||||
|
pub trait ApiCall {
|
||||||
|
fn new() -> Self where Self: Sized {
|
||||||
|
unimplemented!()
|
||||||
|
}
|
||||||
|
fn set_url(&mut self, _url: &str) -> &mut Self {
|
||||||
|
self
|
||||||
|
}
|
||||||
|
fn send(&mut self) -> Result<Option<String>, ApiError> {
|
||||||
|
unimplemented!()
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,2 +0,0 @@
|
|||||||
|
|
||||||
|
|
||||||
53
src/services/copy.rs
Normal file
53
src/services/copy.rs
Normal file
@@ -0,0 +1,53 @@
|
|||||||
|
use reqwest::{Method, header::HeaderValue};
|
||||||
|
use crate::services::api::{ApiBuilder, ApiError};
|
||||||
|
use crate::commands::clone::get_url_props;
|
||||||
|
use crate::commands::config;
|
||||||
|
use crate::services::api_call::ApiCall;
|
||||||
|
|
||||||
|
pub struct Copy {
|
||||||
|
api_builder: ApiBuilder,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ApiCall for Copy {
|
||||||
|
fn new() -> Self {
|
||||||
|
Copy {
|
||||||
|
api_builder: ApiBuilder::new(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn send(&mut self) -> Result<Option<String>, ApiError> {
|
||||||
|
self.api_builder.send(true)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Copy {
|
||||||
|
pub fn set_url_copy(&mut self, url: &str, destination: &str) -> &mut Copy {
|
||||||
|
self.api_builder.build_request(Method::from_bytes(b"COPY").unwrap(), url);
|
||||||
|
|
||||||
|
let remote = match config::get_remote("origin") {
|
||||||
|
Some(r) => r,
|
||||||
|
None => {
|
||||||
|
eprintln!("fatal: unable to find a remote");
|
||||||
|
std::process::exit(1);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
let (host, username, root) = get_url_props(&remote);
|
||||||
|
let mut url = String::from(host);
|
||||||
|
url.push_str("/remote.php/dav/files/");
|
||||||
|
url.push_str(username.unwrap());
|
||||||
|
url.push_str(&root);
|
||||||
|
url.push_str("/");
|
||||||
|
if destination != "/" {
|
||||||
|
url.push_str(destination);
|
||||||
|
}
|
||||||
|
self.api_builder.set_header("Destination", HeaderValue::from_str(&url).unwrap());
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _overwrite(&mut self, overwrite: bool) -> &mut Copy {
|
||||||
|
self.api_builder.set_header("Overwrite", HeaderValue::from_str({
|
||||||
|
if overwrite { "T" } else { "F" }
|
||||||
|
}).unwrap());
|
||||||
|
self
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,28 +1,24 @@
|
|||||||
use reqwest::{Method, IntoUrl, Response, Error};
|
use reqwest::Method;
|
||||||
use crate::services::api::{ApiBuilder, ApiError};
|
use crate::services::api::{ApiBuilder, ApiError};
|
||||||
|
use crate::services::api_call::ApiCall;
|
||||||
|
|
||||||
pub struct CreateFolder {
|
pub struct CreateFolder {
|
||||||
api_builder: ApiBuilder,
|
api_builder: ApiBuilder,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl CreateFolder {
|
impl ApiCall for CreateFolder {
|
||||||
pub fn new<U: IntoUrl>(url: U) -> Self {
|
fn new() -> Self {
|
||||||
ListFolders {
|
CreateFolder {
|
||||||
api_builder: ApiBuilder::new()
|
api_builder: ApiBuilder::new(),
|
||||||
.set_request(Method::from_bytes(b"MKCOL").unwrap(), url),
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn send(&mut self) -> Result<Response, Error> {
|
fn set_url(&mut self, url: &str) -> &mut CreateFolder {
|
||||||
self.api_builder.send().await
|
self.api_builder.build_request(Method::from_bytes(b"MKCOL").unwrap(), url);
|
||||||
|
self
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn send_with_err(mut self) -> Result<(), ApiError> {
|
fn send(&mut self) -> Result<Option<String>, ApiError> {
|
||||||
let res = self.send().await.map_err(ApiError::RequestError)?;
|
self.api_builder.send(false)
|
||||||
if res.status().is_success() {
|
|
||||||
Ok()
|
|
||||||
} else {
|
|
||||||
Err(ApiError::IncorrectRequest(res))
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,38 +1,24 @@
|
|||||||
use reqwest::{Method, Response, Error};
|
use reqwest::Method;
|
||||||
use crate::services::api::{ApiBuilder, ApiError};
|
use crate::services::api::{ApiBuilder, ApiError};
|
||||||
|
use crate::services::api_call::ApiCall;
|
||||||
|
|
||||||
pub struct DeletePath {
|
pub struct DeletePath {
|
||||||
api_builder: ApiBuilder,
|
api_builder: ApiBuilder,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl DeletePath {
|
impl ApiCall for DeletePath {
|
||||||
pub fn new() -> Self {
|
fn new() -> Self {
|
||||||
DeletePath {
|
DeletePath {
|
||||||
api_builder: ApiBuilder::new(),
|
api_builder: ApiBuilder::new(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn set_url(&mut self, url: &str) -> &mut DeletePath {
|
fn set_url(&mut self, url: &str) -> &mut DeletePath {
|
||||||
self.api_builder.build_request(Method::DELETE, url);
|
self.api_builder.build_request(Method::DELETE, url);
|
||||||
self
|
self
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn send(&mut self) -> Result<Response, Error> {
|
fn send(&mut self) -> Result<Option<String>, ApiError> {
|
||||||
self.api_builder.send().await
|
self.api_builder.send(true)
|
||||||
}
|
|
||||||
|
|
||||||
pub fn send_with_err(&mut self) -> Result<String, ApiError> {
|
|
||||||
let res = tokio::runtime::Runtime::new().unwrap().block_on(async {
|
|
||||||
self.send().await
|
|
||||||
}).map_err(ApiError::RequestError)?;
|
|
||||||
|
|
||||||
if res.status().is_success() {
|
|
||||||
let body = tokio::runtime::Runtime::new().unwrap().block_on(async {
|
|
||||||
res.text().await
|
|
||||||
}).map_err(ApiError::EmptyError)?;
|
|
||||||
Ok(body)
|
|
||||||
} else {
|
|
||||||
Err(ApiError::IncorrectRequest(res))
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,51 +1,74 @@
|
|||||||
use std::path::PathBuf;
|
use std::path::PathBuf;
|
||||||
|
use futures_util::StreamExt;
|
||||||
|
use std::fs::File;
|
||||||
use std::fs::OpenOptions;
|
use std::fs::OpenOptions;
|
||||||
use std::io::{self, Write};
|
use std::io::{self, Write};
|
||||||
use reqwest::{Method, Response, Error};
|
use reqwest::{Method, Response, Error};
|
||||||
use crate::utils::api::ApiProps;
|
use crate::utils::api::ApiProps;
|
||||||
use crate::services::api::{ApiBuilder, ApiError};
|
use crate::services::api::{ApiBuilder, ApiError};
|
||||||
|
use crate::services::api_call::ApiCall;
|
||||||
|
|
||||||
pub struct DownloadFiles {
|
pub struct DownloadFiles {
|
||||||
api_builder: ApiBuilder,
|
api_builder: ApiBuilder,
|
||||||
relative_ps: String,
|
relative_ps: String,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl DownloadFiles {
|
impl ApiCall for DownloadFiles {
|
||||||
pub fn new() -> Self {
|
fn new() -> Self {
|
||||||
DownloadFiles {
|
DownloadFiles {
|
||||||
api_builder: ApiBuilder::new(),
|
api_builder: ApiBuilder::new(),
|
||||||
relative_ps: String::from(""),
|
relative_ps: String::new(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
pub fn set_url(&mut self, relative_ps: &str, api_props: &ApiProps) -> &mut DownloadFiles {
|
impl DownloadFiles {
|
||||||
|
// todo make it beautiful
|
||||||
|
pub fn set_url_download(&mut self, relative_ps: &str, api_props: &ApiProps) -> &mut DownloadFiles {
|
||||||
self.relative_ps = relative_ps.to_string();
|
self.relative_ps = relative_ps.to_string();
|
||||||
self.api_builder.set_req(Method::from_bytes(b"PROPFIND").unwrap(), relative_ps, api_props);
|
self.api_builder.set_req(Method::GET, relative_ps, api_props);
|
||||||
self
|
self
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn send(&mut self) -> Result<Response, Error> {
|
pub async fn send_download(&mut self) -> Result<Response, Error> {
|
||||||
self.api_builder.send().await
|
self.api_builder.old_send().await
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn _send_with_err(mut self) -> Result<Vec<u8>, ApiError> {
|
pub fn save_stream(&mut self, ref_p: PathBuf, callback: Option<impl Fn(u64)>) -> Result<(), ApiError> {
|
||||||
let res = self.send().await.map_err(ApiError::RequestError)?;
|
let abs_p = ref_p.join(PathBuf::from(self.relative_ps.clone()));
|
||||||
|
let mut file = File::create(abs_p).unwrap();
|
||||||
|
|
||||||
|
tokio::runtime::Runtime::new().unwrap().block_on(async {
|
||||||
|
let res = self.send_download().await.map_err(ApiError::RequestError)?;
|
||||||
if res.status().is_success() {
|
if res.status().is_success() {
|
||||||
let body = res.bytes().await.map_err(ApiError::EmptyError)?;
|
let mut stream = res.bytes_stream();
|
||||||
Ok(body.to_vec())
|
|
||||||
|
while let Some(chunk) = stream.next().await {
|
||||||
|
let unwrap_chunk = chunk.unwrap();
|
||||||
|
// save chunk inside file
|
||||||
|
if let Err(err) = file.write_all(&unwrap_chunk) {
|
||||||
|
return Err(ApiError::Unexpected(err.to_string()));
|
||||||
|
} else if let Some(fct) = &callback {
|
||||||
|
// call callback with size of this chunk
|
||||||
|
fct(unwrap_chunk.len().try_into().unwrap());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
} else {
|
} else {
|
||||||
Err(ApiError::IncorrectRequest(res))
|
Err(ApiError::IncorrectRequest(res))
|
||||||
}
|
}
|
||||||
|
})
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn save(&mut self, ref_p: PathBuf) -> Result<(), ApiError> {
|
pub fn save(&mut self, ref_p: PathBuf) -> Result<(), ApiError> {
|
||||||
tokio::runtime::Runtime::new().unwrap().block_on(async {
|
tokio::runtime::Runtime::new().unwrap().block_on(async {
|
||||||
let p = ref_p.join(PathBuf::from(self.relative_ps.clone()));
|
let p = ref_p.join(PathBuf::from(self.relative_ps.clone()));
|
||||||
let res = self.send().await.map_err(ApiError::RequestError)?;
|
let res = self.send_download().await.map_err(ApiError::RequestError)?;
|
||||||
if res.status().is_success() {
|
if res.status().is_success() {
|
||||||
let body = res.bytes().await.map_err(ApiError::EmptyError)?;
|
let body = res.bytes().await.map_err(ApiError::EmptyError)?;
|
||||||
match DownloadFiles::write_file(p, &body.to_vec()) {
|
match Self::write_file(p, &body.to_vec()) {
|
||||||
Err(_) => Err(ApiError::Unexpected(String::from(""))),
|
Err(_) => Err(ApiError::Unexpected(String::new())),
|
||||||
Ok(_) => Ok(()),
|
Ok(_) => Ok(()),
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
|
|||||||
168
src/services/downloader.rs
Normal file
168
src/services/downloader.rs
Normal file
@@ -0,0 +1,168 @@
|
|||||||
|
use std::path::PathBuf;
|
||||||
|
use indicatif::{ProgressBar, MultiProgress, ProgressStyle, HumanBytes};
|
||||||
|
|
||||||
|
use crate::utils::api::ApiProps;
|
||||||
|
use crate::services::api_call::ApiCall;
|
||||||
|
use crate::services::api::ApiError;
|
||||||
|
use crate::services::download_files::DownloadFiles;
|
||||||
|
use crate::services::req_props::ObjProps;
|
||||||
|
|
||||||
|
const SIZE_TO_STREAM: u64 = 2 * 1024 * 1024;
|
||||||
|
|
||||||
|
pub struct Downloader {
|
||||||
|
files: Vec<ObjProps>,
|
||||||
|
should_log: bool,
|
||||||
|
api_props: Option<ApiProps>,
|
||||||
|
progress_bars: Vec<ProgressBar>,
|
||||||
|
multi_progress: Option<MultiProgress>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Downloader {
|
||||||
|
pub fn new() -> Self {
|
||||||
|
Downloader {
|
||||||
|
files: vec![],
|
||||||
|
should_log: false,
|
||||||
|
api_props: None,
|
||||||
|
progress_bars: vec![],
|
||||||
|
multi_progress: None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn should_log(&mut self) -> &mut Downloader {
|
||||||
|
self.should_log = true;
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn set_api_props(&mut self, api_props: ApiProps) -> &mut Downloader {
|
||||||
|
self.api_props = Some(api_props);
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn set_files(&mut self, files: Vec<ObjProps>) -> &mut Downloader {
|
||||||
|
self.files = files;
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _add_file(&mut self, file: ObjProps) -> &mut Downloader {
|
||||||
|
self.files.push(file);
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
fn init_log(&mut self, nb_objs: u64, total_size: u64) {
|
||||||
|
self.multi_progress = Some(MultiProgress::new());
|
||||||
|
|
||||||
|
self.progress_bars.push(
|
||||||
|
self.multi_progress
|
||||||
|
.clone()
|
||||||
|
.unwrap()
|
||||||
|
.add(ProgressBar::new(nb_objs).with_message("Objects")));
|
||||||
|
|
||||||
|
let msg = format!("0B/{}", HumanBytes(total_size).to_string());
|
||||||
|
self.progress_bars.push(
|
||||||
|
self.multi_progress
|
||||||
|
.clone()
|
||||||
|
.unwrap()
|
||||||
|
.add(ProgressBar::new(total_size).with_message(msg)));
|
||||||
|
|
||||||
|
self.progress_bars[0].set_style(
|
||||||
|
ProgressStyle::with_template("{_:>10} [{bar:40}] {pos}/{len} {msg}")
|
||||||
|
.unwrap()
|
||||||
|
.progress_chars("=> "));
|
||||||
|
|
||||||
|
self.progress_bars[1].set_style(
|
||||||
|
ProgressStyle::with_template("[{elapsed_precise}] [{bar:40}] {msg}")
|
||||||
|
.unwrap()
|
||||||
|
.progress_chars("=> "));
|
||||||
|
|
||||||
|
self.progress_bars[0].tick();
|
||||||
|
self.progress_bars[1].tick();
|
||||||
|
}
|
||||||
|
|
||||||
|
fn update_bytes_bar(&self, size: u64) {
|
||||||
|
let bytes_bar = &self.progress_bars[1];
|
||||||
|
bytes_bar.inc(size);
|
||||||
|
let msg = format!(
|
||||||
|
"{}/{}",
|
||||||
|
HumanBytes(bytes_bar.position()).to_string(),
|
||||||
|
HumanBytes(bytes_bar.length().unwrap()).to_string());
|
||||||
|
bytes_bar.set_message(msg);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn download(&mut self, ref_p: PathBuf, callback: Option<&dyn Fn(ObjProps)>) {
|
||||||
|
if self.should_log {
|
||||||
|
let mut total_size = 0;
|
||||||
|
let nb_objs = self.files.len();
|
||||||
|
|
||||||
|
// set the full size of the download
|
||||||
|
self.files
|
||||||
|
.iter()
|
||||||
|
.for_each(|f|
|
||||||
|
if let Some(size) = f.contentlength {
|
||||||
|
total_size += size
|
||||||
|
}
|
||||||
|
);
|
||||||
|
|
||||||
|
self.init_log(nb_objs.try_into().unwrap(), total_size);
|
||||||
|
}
|
||||||
|
|
||||||
|
for file in self.files.clone() {
|
||||||
|
let relative_s = &file.clone().relative_s.unwrap();
|
||||||
|
let mut download = DownloadFiles::new();
|
||||||
|
download.set_url_download(&relative_s, &self.api_props.clone().unwrap());
|
||||||
|
|
||||||
|
let should_use_stream = {
|
||||||
|
if let Some(size) = file.contentlength {
|
||||||
|
size > SIZE_TO_STREAM
|
||||||
|
} else {
|
||||||
|
false
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
// download
|
||||||
|
let res = {
|
||||||
|
if should_use_stream {
|
||||||
|
download.save_stream(ref_p.clone(), if self.should_log { Some(|a| self.update_bytes_bar(a)) } else { None })
|
||||||
|
} else {
|
||||||
|
download.save(ref_p.clone())
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
// deal with error
|
||||||
|
match res {
|
||||||
|
Ok(()) => {
|
||||||
|
if let Some(fct) = callback {
|
||||||
|
fct(file.clone());
|
||||||
|
}
|
||||||
|
},
|
||||||
|
Err(ApiError::Unexpected(_)) => {
|
||||||
|
eprintln!("err: writing {}", relative_s);
|
||||||
|
},
|
||||||
|
Err(ApiError::IncorrectRequest(err)) => {
|
||||||
|
eprintln!("fatal: {}", err.status());
|
||||||
|
std::process::exit(1);
|
||||||
|
},
|
||||||
|
Err(ApiError::EmptyError(_)) => eprintln!("Failed to get body"),
|
||||||
|
Err(ApiError::RequestError(err)) => {
|
||||||
|
eprintln!("fatal: {}", err);
|
||||||
|
std::process::exit(1);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// increment loading bars
|
||||||
|
if self.should_log {
|
||||||
|
self.progress_bars[0].inc(1); // increment object
|
||||||
|
|
||||||
|
// increment bytes only if
|
||||||
|
// not incremented continuously by stream
|
||||||
|
if !should_use_stream {
|
||||||
|
self.update_bytes_bar(file.contentlength.unwrap());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// finish all bars
|
||||||
|
for bar in &self.progress_bars {
|
||||||
|
bar.finish();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
112
src/services/login.rs
Normal file
112
src/services/login.rs
Normal file
@@ -0,0 +1,112 @@
|
|||||||
|
use std::io;
|
||||||
|
use std::io::Cursor;
|
||||||
|
use std::io::prelude::*;
|
||||||
|
use xml::reader::{EventReader, XmlEvent};
|
||||||
|
use reqwest::{header::HeaderValue, Method};
|
||||||
|
use rpassword;
|
||||||
|
use crate::services::api_call::ApiCall;
|
||||||
|
use crate::services::api::{ApiBuilder, ApiError};
|
||||||
|
|
||||||
|
pub struct Login {
|
||||||
|
api_builder: ApiBuilder,
|
||||||
|
login: String,
|
||||||
|
password: String,
|
||||||
|
host: Option<String>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ApiCall for Login {
|
||||||
|
fn new() -> Self {
|
||||||
|
Login {
|
||||||
|
api_builder: ApiBuilder::new(),
|
||||||
|
login: String::new(),
|
||||||
|
password: String::new(),
|
||||||
|
host: None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn send(&mut self) -> Result<Option<String>, ApiError> {
|
||||||
|
|
||||||
|
let url = match self.host.clone() {
|
||||||
|
Some(h) => {
|
||||||
|
let mut u = if &h[0..8] == "https://" || &h[0..7] == "http://" {
|
||||||
|
String::new()
|
||||||
|
} else {
|
||||||
|
String::from("https://")
|
||||||
|
};
|
||||||
|
u.push_str(&h);
|
||||||
|
u.push_str("/ocs/v2.php/core/getapppassword");
|
||||||
|
u
|
||||||
|
},
|
||||||
|
None => "/ocs/v2.php/core/getapppassword".to_owned(),
|
||||||
|
};
|
||||||
|
self.api_builder.set_url(Method::GET, &url);
|
||||||
|
self.api_builder.set_header("OCS-APIRequest", HeaderValue::from_str("true").unwrap());
|
||||||
|
self.api_builder.set_header("USER-AGENT", HeaderValue::from_str("nextsync").unwrap());
|
||||||
|
self.api_builder.set_basic_auth(self.login.clone(), self.password.clone());
|
||||||
|
self.api_builder.send(true)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Login {
|
||||||
|
pub fn ask_auth(&mut self) -> &mut Login {
|
||||||
|
println!("Please enter your username/email: ");
|
||||||
|
let stdin = io::stdin();
|
||||||
|
self.login = stdin.lock().lines().next().unwrap().unwrap();
|
||||||
|
println!("Please enter your password: ");
|
||||||
|
self.password = rpassword::read_password().unwrap();
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn set_auth(&mut self, username: &str, password: &str) -> &mut Login {
|
||||||
|
self.login = username.to_owned();
|
||||||
|
self.password = password.to_owned();
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn set_host(&mut self, host: Option<String>) -> &mut Login {
|
||||||
|
self.host = host;
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn send_login(&mut self) -> Result<String, ApiError> {
|
||||||
|
match self.send() {
|
||||||
|
Ok(Some(body)) => Ok(self.parse(body)),
|
||||||
|
Ok(None) => Err(ApiError::Unexpected(String::from("Empty after tested"))),
|
||||||
|
Err(err) => Err(err),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn parse(&self, xml: String) -> String {
|
||||||
|
let cursor = Cursor::new(xml);
|
||||||
|
let parser = EventReader::new(cursor);
|
||||||
|
|
||||||
|
let mut should_get = false;
|
||||||
|
|
||||||
|
for event in parser {
|
||||||
|
match event {
|
||||||
|
Ok(XmlEvent::StartElement { name, .. }) => {
|
||||||
|
should_get = {
|
||||||
|
if &name.local_name == "apppassword" {
|
||||||
|
true
|
||||||
|
} else {
|
||||||
|
false
|
||||||
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
Ok(XmlEvent::Characters(text)) => {
|
||||||
|
if !text.trim().is_empty() && should_get {
|
||||||
|
return text.clone();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
//Ok(XmlEvent::EndElement { name, .. }) => {
|
||||||
|
//}
|
||||||
|
Err(e) => {
|
||||||
|
eprintln!("err: parsing xml: {}", e);
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
_ => {}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
String::new()
|
||||||
|
}
|
||||||
|
}
|
||||||
54
src/services/move.rs
Normal file
54
src/services/move.rs
Normal file
@@ -0,0 +1,54 @@
|
|||||||
|
use reqwest::{Method, header::HeaderValue};
|
||||||
|
use crate::services::api::{ApiBuilder, ApiError};
|
||||||
|
use crate::commands::clone::get_url_props;
|
||||||
|
use crate::commands::config;
|
||||||
|
use crate::services::api_call::ApiCall;
|
||||||
|
|
||||||
|
pub struct Move {
|
||||||
|
api_builder: ApiBuilder,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ApiCall for Move {
|
||||||
|
fn new() -> Self {
|
||||||
|
Move {
|
||||||
|
api_builder: ApiBuilder::new(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
fn send(&mut self) -> Result<Option<String>, ApiError> {
|
||||||
|
self.api_builder.send(false)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Move {
|
||||||
|
pub fn set_url_move(&mut self, url: &str, destination: &str) -> &mut Move {
|
||||||
|
self.api_builder.build_request(Method::from_bytes(b"MOVE").unwrap(), url);
|
||||||
|
|
||||||
|
let remote = match config::get_remote("origin") {
|
||||||
|
Some(r) => r,
|
||||||
|
None => {
|
||||||
|
eprintln!("fatal: unable to find a remote");
|
||||||
|
std::process::exit(1);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
let (host, username, root) = get_url_props(&remote);
|
||||||
|
let mut url = String::from(host);
|
||||||
|
url.push_str("/remote.php/dav/files/");
|
||||||
|
url.push_str(username.unwrap());
|
||||||
|
url.push_str(&root);
|
||||||
|
url.push_str("/");
|
||||||
|
if destination != "/" {
|
||||||
|
url.push_str(destination);
|
||||||
|
}
|
||||||
|
self.api_builder.set_header("Destination", HeaderValue::from_str(&url).unwrap());
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn _overwrite(&mut self, overwrite: bool) -> &mut Move {
|
||||||
|
self.api_builder.set_header("Overwrite", HeaderValue::from_str({
|
||||||
|
if overwrite { "T" } else { "F" }
|
||||||
|
}).unwrap());
|
||||||
|
self
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,16 +1,21 @@
|
|||||||
use std::io::Cursor;
|
use std::io::Cursor;
|
||||||
use chrono::{Utc, DateTime};
|
use chrono::{Utc, DateTime};
|
||||||
use reqwest::{Method, Response, Error};
|
use reqwest::Method;
|
||||||
use xml::reader::{EventReader, XmlEvent};
|
use xml::reader::{EventReader, XmlEvent};
|
||||||
|
use reqwest::header::HeaderValue;
|
||||||
|
use crate::commands::clone::get_url_props;
|
||||||
|
use crate::commands::config;
|
||||||
use crate::utils::time::parse_timestamp;
|
use crate::utils::time::parse_timestamp;
|
||||||
use crate::utils::api::{get_relative_s, ApiProps};
|
use crate::utils::api::{get_relative_s, ApiProps};
|
||||||
use crate::services::api::{ApiBuilder, ApiError};
|
use crate::services::api::{ApiBuilder, ApiError};
|
||||||
|
use crate::services::api_call::ApiCall;
|
||||||
|
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
pub struct ObjProps {
|
pub struct ObjProps {
|
||||||
pub href: Option<String>,
|
pub href: Option<String>,
|
||||||
pub relative_s: Option<String>,
|
pub relative_s: Option<String>,
|
||||||
pub lastmodified: Option<DateTime<Utc>>,
|
pub lastmodified: Option<DateTime<Utc>>,
|
||||||
|
pub contentlength: Option<u64>,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl Clone for ObjProps {
|
impl Clone for ObjProps {
|
||||||
@@ -19,6 +24,7 @@ impl Clone for ObjProps {
|
|||||||
href: self.href.clone(),
|
href: self.href.clone(),
|
||||||
relative_s: self.relative_s.clone(),
|
relative_s: self.relative_s.clone(),
|
||||||
lastmodified: self.lastmodified.clone(),
|
lastmodified: self.lastmodified.clone(),
|
||||||
|
contentlength: self.contentlength.clone(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -29,6 +35,16 @@ impl ObjProps {
|
|||||||
href: None,
|
href: None,
|
||||||
relative_s: None,
|
relative_s: None,
|
||||||
lastmodified: None,
|
lastmodified: None,
|
||||||
|
contentlength: None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn is_dir(&self) -> bool {
|
||||||
|
if let Some(href) = &self.href {
|
||||||
|
href.chars().last().unwrap() == '/'
|
||||||
|
} else {
|
||||||
|
eprintln!("err: cannot determine object type wihout href");
|
||||||
|
false
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -40,8 +56,8 @@ pub struct ReqProps {
|
|||||||
api_props: Option<ApiProps>
|
api_props: Option<ApiProps>
|
||||||
}
|
}
|
||||||
|
|
||||||
impl ReqProps {
|
impl ApiCall for ReqProps {
|
||||||
pub fn new() -> Self {
|
fn new() -> Self {
|
||||||
ReqProps {
|
ReqProps {
|
||||||
api_builder: ApiBuilder::new(),
|
api_builder: ApiBuilder::new(),
|
||||||
xml_balises: vec![],
|
xml_balises: vec![],
|
||||||
@@ -50,11 +66,31 @@ impl ReqProps {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn set_url(&mut self, url: &str) -> &mut ReqProps {
|
fn set_url(&mut self, url: &str) -> &mut ReqProps {
|
||||||
|
let remote = match config::get_remote("origin") {
|
||||||
|
Some(r) => r,
|
||||||
|
None => {
|
||||||
|
eprintln!("fatal: unable to find a remote");
|
||||||
|
std::process::exit(1);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
let (host, username, root) = get_url_props(&remote);
|
||||||
|
self.api_props = Some(ApiProps {
|
||||||
|
host,
|
||||||
|
username: username.unwrap().to_owned(),
|
||||||
|
root: root.to_owned(),
|
||||||
|
});
|
||||||
self.api_builder.build_request(Method::from_bytes(b"PROPFIND").unwrap(), url);
|
self.api_builder.build_request(Method::from_bytes(b"PROPFIND").unwrap(), url);
|
||||||
self
|
self
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn send(&mut self) -> Result<Option<String>, ApiError> {
|
||||||
|
self.validate_xml();
|
||||||
|
self.api_builder.send(true)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ReqProps {
|
||||||
pub fn set_request(&mut self, p: &str, api_props: &ApiProps) -> &mut ReqProps {
|
pub fn set_request(&mut self, p: &str, api_props: &ApiProps) -> &mut ReqProps {
|
||||||
self.api_props = Some(api_props.clone());
|
self.api_props = Some(api_props.clone());
|
||||||
self.api_builder.set_req(Method::from_bytes(b"PROPFIND").unwrap(), p, api_props);
|
self.api_builder.set_req(Method::from_bytes(b"PROPFIND").unwrap(), p, api_props);
|
||||||
@@ -62,6 +98,7 @@ impl ReqProps {
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub fn gethref(&mut self) -> &mut ReqProps {
|
pub fn gethref(&mut self) -> &mut ReqProps {
|
||||||
|
// not an actual property but used to prevent getting anything else
|
||||||
self.xml_balises.push(String::from("href"));
|
self.xml_balises.push(String::from("href"));
|
||||||
self
|
self
|
||||||
}
|
}
|
||||||
@@ -72,7 +109,7 @@ impl ReqProps {
|
|||||||
self
|
self
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn getcontentlenght(&mut self) -> &mut ReqProps {
|
pub fn getcontentlength(&mut self) -> &mut ReqProps {
|
||||||
self.xml_balises.push(String::from("getcontentlength"));
|
self.xml_balises.push(String::from("getcontentlength"));
|
||||||
self.xml_payload.push_str(r#"<d:getcontentlength/>"#);
|
self.xml_payload.push_str(r#"<d:getcontentlength/>"#);
|
||||||
self
|
self
|
||||||
@@ -102,7 +139,13 @@ impl ReqProps {
|
|||||||
self
|
self
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn set_depth(&mut self, depth: &str) -> &mut ReqProps {
|
||||||
|
self.api_builder.set_header("Depth", HeaderValue::from_str(depth).unwrap());
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
fn validate_xml(&mut self) -> &mut ReqProps {
|
fn validate_xml(&mut self) -> &mut ReqProps {
|
||||||
|
self.gethref();
|
||||||
let mut xml = String::from(r#"<?xml version="1.0" encoding="UTF-8"?><d:propfind xmlns:d="DAV:" xmlns:oc="http://owncloud.org/ns" xmlns:nc="http://nextcloud.org/ns"><d:prop>"#);
|
let mut xml = String::from(r#"<?xml version="1.0" encoding="UTF-8"?><d:propfind xmlns:d="DAV:" xmlns:oc="http://owncloud.org/ns" xmlns:nc="http://nextcloud.org/ns"><d:prop>"#);
|
||||||
xml.push_str(&self.xml_payload.clone());
|
xml.push_str(&self.xml_payload.clone());
|
||||||
xml.push_str(r#"</d:prop></d:propfind>"#);
|
xml.push_str(r#"</d:prop></d:propfind>"#);
|
||||||
@@ -110,40 +153,24 @@ impl ReqProps {
|
|||||||
self
|
self
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn send(&mut self) -> Result<Response, Error> {
|
|
||||||
self.validate_xml();
|
|
||||||
self.api_builder.send().await
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn send_with_err(&mut self) -> Result<String, ApiError> {
|
|
||||||
let res = tokio::runtime::Runtime::new().unwrap().block_on(async {
|
|
||||||
self.send().await
|
|
||||||
}).map_err(ApiError::RequestError)?;
|
|
||||||
|
|
||||||
if res.status().is_success() {
|
|
||||||
let body = tokio::runtime::Runtime::new().unwrap().block_on(async {
|
|
||||||
res.text().await
|
|
||||||
}).map_err(ApiError::EmptyError)?;
|
|
||||||
Ok(body)
|
|
||||||
} else {
|
|
||||||
Err(ApiError::IncorrectRequest(res))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn send_req_multiple(&mut self) -> Result<Vec<ObjProps>, ApiError> {
|
pub fn send_req_multiple(&mut self) -> Result<Vec<ObjProps>, ApiError> {
|
||||||
match self.send_with_err() {
|
match self.send() {
|
||||||
Ok(body) => Ok(self.parse(body, true)),
|
Ok(Some(body)) => Ok(self.parse(body, true)),
|
||||||
|
Ok(None) => Err(ApiError::Unexpected(String::from("Empty after tested"))),
|
||||||
Err(err) => Err(err),
|
Err(err) => Err(err),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn send_req_single(&mut self) -> Result<ObjProps, ApiError> {
|
pub fn send_req_single(&mut self) -> Result<ObjProps, ApiError> {
|
||||||
match self.send_with_err() {
|
// set depth to 0 as we only need one element
|
||||||
Ok(body) => {
|
self.set_depth("0");
|
||||||
|
match self.send() {
|
||||||
|
Ok(Some(body)) => {
|
||||||
let objs = self.parse(body, false);
|
let objs = self.parse(body, false);
|
||||||
let obj = objs[0].clone();
|
let obj = objs[0].clone();
|
||||||
Ok(obj)
|
Ok(obj)
|
||||||
},
|
},
|
||||||
|
Ok(None) => Err(ApiError::Unexpected(String::from("Empty after tested"))),
|
||||||
Err(err) => Err(err),
|
Err(err) => Err(err),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@@ -152,54 +179,60 @@ impl ReqProps {
|
|||||||
let cursor = Cursor::new(xml);
|
let cursor = Cursor::new(xml);
|
||||||
let parser = EventReader::new(cursor);
|
let parser = EventReader::new(cursor);
|
||||||
|
|
||||||
let mut should_get = false;
|
|
||||||
let mut values: Vec<ObjProps> = vec![];
|
let mut values: Vec<ObjProps> = vec![];
|
||||||
|
|
||||||
let mut iter = self.xml_balises.iter();
|
let mut should_get = false;
|
||||||
let mut val = iter.next();
|
let mut val: String = String::new();
|
||||||
let mut content = ObjProps::new();
|
let mut content = ObjProps::new();
|
||||||
|
|
||||||
for event in parser {
|
for event in parser {
|
||||||
match event {
|
match event {
|
||||||
Ok(XmlEvent::StartElement { name, .. }) => {
|
Ok(XmlEvent::StartElement { name, .. }) => {
|
||||||
if let Some(v) = val.clone() {
|
should_get = {
|
||||||
should_get = &name.local_name == v;
|
if self.xml_balises.clone().contains(&name.local_name) {
|
||||||
|
val = name.local_name.clone();
|
||||||
|
true
|
||||||
} else {
|
} else {
|
||||||
// end of balises to get then start over for
|
false
|
||||||
// next object if want multiple
|
|
||||||
if multiple {
|
|
||||||
values.push(content.clone());
|
|
||||||
iter = self.xml_balises.iter();
|
|
||||||
val = iter.next();
|
|
||||||
content = ObjProps::new();
|
|
||||||
if let Some(v) = val.clone() {
|
|
||||||
should_get = &name.local_name == v;
|
|
||||||
}
|
}
|
||||||
|
};
|
||||||
|
}
|
||||||
|
Ok(XmlEvent::Characters(text)) => {
|
||||||
|
if !text.trim().is_empty() && should_get {
|
||||||
|
match val.as_str() {
|
||||||
|
"href" => {
|
||||||
|
content.href = Some(text.clone());
|
||||||
|
content.relative_s = Some(
|
||||||
|
get_relative_s(text, &(self.api_props
|
||||||
|
.clone()
|
||||||
|
.unwrap())));
|
||||||
|
},
|
||||||
|
"getlastmodified" => {
|
||||||
|
content.lastmodified = Some(
|
||||||
|
parse_timestamp(&text).unwrap());
|
||||||
|
},
|
||||||
|
"getcontentlength" => {
|
||||||
|
content.contentlength = Some(
|
||||||
|
text.clone().parse().unwrap());
|
||||||
|
},
|
||||||
|
_ => (),
|
||||||
|
}
|
||||||
|
should_get = false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
Ok(XmlEvent::EndElement { name, .. }) => {
|
||||||
|
if name.local_name == "response" {
|
||||||
|
values.push(content.clone());
|
||||||
|
if multiple {
|
||||||
|
content = ObjProps::new();
|
||||||
} else {
|
} else {
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
|
||||||
Ok(XmlEvent::Characters(text)) => {
|
|
||||||
if !text.trim().is_empty() && should_get {
|
|
||||||
match val.unwrap().as_str() {
|
|
||||||
"href" => {
|
|
||||||
content.href = Some(text.clone());
|
|
||||||
content.relative_s = Some(get_relative_s(text, &(self.api_props.clone().unwrap())));
|
|
||||||
},
|
|
||||||
"getlastmodified" => {
|
|
||||||
content.lastmodified = Some(parse_timestamp(&text).unwrap());
|
|
||||||
},
|
|
||||||
_ => (),
|
|
||||||
}
|
|
||||||
val = iter.next()
|
|
||||||
}
|
|
||||||
}
|
|
||||||
Ok(XmlEvent::EndElement { .. }) => {
|
|
||||||
should_get = false;
|
should_get = false;
|
||||||
}
|
}
|
||||||
Err(e) => {
|
Err(e) => {
|
||||||
eprintln!("Error: {}", e);
|
eprintln!("err: parsing xml: {}", e);
|
||||||
break;
|
break;
|
||||||
}
|
}
|
||||||
_ => {}
|
_ => {}
|
||||||
|
|||||||
91
src/services/request_manager.rs
Normal file
91
src/services/request_manager.rs
Normal file
@@ -0,0 +1,91 @@
|
|||||||
|
use lazy_static::lazy_static;
|
||||||
|
use std::sync::Mutex;
|
||||||
|
|
||||||
|
use crate::services::login::Login;
|
||||||
|
use crate::commands::config;
|
||||||
|
use crate::store::gconfig;
|
||||||
|
use crate::commands::clone::get_url_props;
|
||||||
|
use crate::services::api_call::ApiCall;
|
||||||
|
|
||||||
|
lazy_static! {
|
||||||
|
static ref REQUEST_MANAGER: Mutex<Option<RequestManager>> = Mutex::new(None);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_request_manager() -> &'static Mutex<Option<RequestManager>> {
|
||||||
|
if REQUEST_MANAGER.lock().unwrap().is_none() {
|
||||||
|
*REQUEST_MANAGER.lock().unwrap() = Some(RequestManager::new());
|
||||||
|
}
|
||||||
|
&REQUEST_MANAGER
|
||||||
|
}
|
||||||
|
|
||||||
|
pub struct RequestManager {
|
||||||
|
token: Option<String>,
|
||||||
|
host: Option<String>,
|
||||||
|
}
|
||||||
|
|
||||||
|
impl RequestManager {
|
||||||
|
pub fn new() -> Self {
|
||||||
|
RequestManager {
|
||||||
|
token: None,
|
||||||
|
host: None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn set_host(&mut self, host: String) {
|
||||||
|
self.host = Some(host);
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_host(&mut self) -> String
|
||||||
|
{
|
||||||
|
if self.host.is_none()
|
||||||
|
{
|
||||||
|
let remote = match config::get_remote("origin") {
|
||||||
|
Some(r) => r,
|
||||||
|
None => {
|
||||||
|
// todo ask user instead
|
||||||
|
eprintln!("fatal: unable to find a remote");
|
||||||
|
std::process::exit(1);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
let (host, _, _) = get_url_props(&remote);
|
||||||
|
self.host = Some(host.clone());
|
||||||
|
// todo ask user
|
||||||
|
}
|
||||||
|
self.host.clone().unwrap()
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_token(&mut self) -> String {
|
||||||
|
if self.token.is_none() {
|
||||||
|
// look in global config
|
||||||
|
if let Some(token) = gconfig::read_token() {
|
||||||
|
if !token.is_empty() {
|
||||||
|
self.token = Some(token);
|
||||||
|
return self.token.clone().unwrap();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// look in local config
|
||||||
|
if let Some(token) = config::find_option_in_cat("core", "token")
|
||||||
|
{
|
||||||
|
if !token.is_empty() {
|
||||||
|
self.token = Some(token);
|
||||||
|
return self.token.clone().unwrap();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// ask for a token
|
||||||
|
let get_token = Login::new()
|
||||||
|
.ask_auth()
|
||||||
|
.set_host(Some(self.get_host()))
|
||||||
|
.send_login();
|
||||||
|
|
||||||
|
// todo deal with error cases
|
||||||
|
self.token = Some(get_token.unwrap());
|
||||||
|
if let Err(err) = gconfig::write_token(&self.token.clone().unwrap()) {
|
||||||
|
eprintln!("err: failed to write token ({})", err);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
self.token.clone().unwrap()
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,25 +1,32 @@
|
|||||||
use std::fs::File;
|
use std::fs::File;
|
||||||
use std::io::{Read};
|
use std::io::Read;
|
||||||
use std::path::PathBuf;
|
use std::path::PathBuf;
|
||||||
use reqwest::{Method, Response, Error};
|
use reqwest::Method;
|
||||||
use crate::services::api::{ApiBuilder, ApiError};
|
use crate::services::api::{ApiBuilder, ApiError};
|
||||||
|
use crate::services::api_call::ApiCall;
|
||||||
|
|
||||||
pub struct UploadFile {
|
pub struct UploadFile {
|
||||||
api_builder: ApiBuilder,
|
api_builder: ApiBuilder,
|
||||||
}
|
}
|
||||||
|
|
||||||
impl UploadFile {
|
impl ApiCall for UploadFile {
|
||||||
pub fn new() -> Self {
|
fn new() -> Self {
|
||||||
UploadFile {
|
UploadFile {
|
||||||
api_builder: ApiBuilder::new(),
|
api_builder: ApiBuilder::new(),
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn set_url(&mut self, url: &str) -> &mut UploadFile {
|
fn set_url(&mut self, url: &str) -> &mut UploadFile {
|
||||||
self.api_builder.build_request(Method::PUT, url);
|
self.api_builder.build_request(Method::PUT, url);
|
||||||
self
|
self
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn send(&mut self) -> Result<Option<String>, ApiError> {
|
||||||
|
self.api_builder.send(true)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl UploadFile {
|
||||||
pub fn set_file(&mut self, path: PathBuf) -> &mut UploadFile {
|
pub fn set_file(&mut self, path: PathBuf) -> &mut UploadFile {
|
||||||
// todo large file
|
// todo large file
|
||||||
// todo small files
|
// todo small files
|
||||||
@@ -29,23 +36,4 @@ impl UploadFile {
|
|||||||
self.api_builder.set_body(buffer);
|
self.api_builder.set_body(buffer);
|
||||||
self
|
self
|
||||||
}
|
}
|
||||||
|
|
||||||
pub async fn send(&mut self) -> Result<Response, Error> {
|
|
||||||
self.api_builder.send().await
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn send_with_err(&mut self) -> Result<String, ApiError> {
|
|
||||||
let res = tokio::runtime::Runtime::new().unwrap().block_on(async {
|
|
||||||
self.send().await
|
|
||||||
}).map_err(ApiError::RequestError)?;
|
|
||||||
|
|
||||||
if res.status().is_success() {
|
|
||||||
let body = tokio::runtime::Runtime::new().unwrap().block_on(async {
|
|
||||||
res.text().await
|
|
||||||
}).map_err(ApiError::EmptyError)?;
|
|
||||||
Ok(body)
|
|
||||||
} else {
|
|
||||||
Err(ApiError::IncorrectRequest(res))
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,3 +1,4 @@
|
|||||||
pub mod index;
|
pub mod index;
|
||||||
pub mod head;
|
pub mod head;
|
||||||
pub mod object;
|
pub mod object;
|
||||||
|
pub mod gconfig;
|
||||||
|
|||||||
54
src/store/gconfig.rs
Normal file
54
src/store/gconfig.rs
Normal file
@@ -0,0 +1,54 @@
|
|||||||
|
use std::env;
|
||||||
|
use std::path::PathBuf;
|
||||||
|
use std::fs::{self, OpenOptions};
|
||||||
|
use std::io::{self, Write};
|
||||||
|
use crate::utils::read;
|
||||||
|
|
||||||
|
fn global_path() -> Option<PathBuf> {
|
||||||
|
if let Some(home_dir) = env::var_os("HOME") {
|
||||||
|
let mut path = PathBuf::new();
|
||||||
|
path.push(home_dir);
|
||||||
|
path.push(".nextsync");
|
||||||
|
Some(path)
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
None
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn write_token(token: &str) -> io::Result<()> {
|
||||||
|
if let Some(mut path_token) = global_path() {
|
||||||
|
if !path_token.exists() {
|
||||||
|
fs::create_dir_all(path_token.clone())?;
|
||||||
|
}
|
||||||
|
path_token.push("token");
|
||||||
|
let mut file = OpenOptions::new()
|
||||||
|
.read(true)
|
||||||
|
.write(true)
|
||||||
|
.create(true)
|
||||||
|
.open(path_token)?;
|
||||||
|
|
||||||
|
writeln!(file, "{}", token)?;
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn read_token() -> Option<String> {
|
||||||
|
if let Some(mut path_token) = global_path() {
|
||||||
|
if !path_token.exists() {
|
||||||
|
return None;
|
||||||
|
}
|
||||||
|
path_token.push("token");
|
||||||
|
if let Ok(lines) = read::read_lines(path_token) {
|
||||||
|
for line in lines {
|
||||||
|
if let Ok(l) = line {
|
||||||
|
return Some(l);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
None
|
||||||
|
}
|
||||||
@@ -1,38 +1,16 @@
|
|||||||
|
use std::fs::OpenOptions;
|
||||||
use std::path::PathBuf;
|
use std::path::PathBuf;
|
||||||
use std::fs::{File, OpenOptions};
|
|
||||||
use std::io::{self, Write};
|
use std::io::{self, Write};
|
||||||
use crate::utils::{read, path};
|
use crate::utils::{read, path};
|
||||||
|
|
||||||
pub fn _read_only(mut path: PathBuf) -> File {
|
pub fn path() -> PathBuf {
|
||||||
path.push("HEAD");
|
let mut root = path::nextsync();
|
||||||
OpenOptions::new()
|
root.push("HEAD");
|
||||||
.read(true)
|
root
|
||||||
.open(path).expect("Cannot open HEAD file")
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn _open(mut path: PathBuf) -> File {
|
|
||||||
path.push("HEAD");
|
|
||||||
OpenOptions::new()
|
|
||||||
.read(true)
|
|
||||||
.write(true)
|
|
||||||
.append(true)
|
|
||||||
.create(true)
|
|
||||||
.open(path).expect("Cannot open HEAD file")
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn _read_line(mut path: PathBuf) -> io::Result<io::Lines<io::BufReader<File>>> {
|
|
||||||
path.push("HEAD");
|
|
||||||
read::read_lines(path)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn add_line(line: String) -> io::Result<()> {
|
pub fn add_line(line: String) -> io::Result<()> {
|
||||||
let mut root = match path::nextsync_root() {
|
let root = path();
|
||||||
Some(path) => path,
|
|
||||||
None => todo!(),
|
|
||||||
};
|
|
||||||
|
|
||||||
root.push(".nextsync");
|
|
||||||
root.push("HEAD");
|
|
||||||
|
|
||||||
let mut file = OpenOptions::new()
|
let mut file = OpenOptions::new()
|
||||||
.read(true)
|
.read(true)
|
||||||
@@ -45,13 +23,7 @@ pub fn add_line(line: String) -> io::Result<()> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub fn rm_line(line: &str) -> io::Result<()> {
|
pub fn rm_line(line: &str) -> io::Result<()> {
|
||||||
let mut root = match path::nextsync_root() {
|
let root = path();
|
||||||
Some(path) => path,
|
|
||||||
None => todo!(),
|
|
||||||
};
|
|
||||||
|
|
||||||
root.push(".nextsync");
|
|
||||||
root.push("HEAD");
|
|
||||||
read::rm_line(root, line)?;
|
read::rm_line(root, line)?;
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,23 +1,17 @@
|
|||||||
use std::io;
|
use std::io;
|
||||||
|
use std::path::PathBuf;
|
||||||
use std::fs::File;
|
use std::fs::File;
|
||||||
use std::fs::OpenOptions;
|
use std::fs::OpenOptions;
|
||||||
use std::path::PathBuf;
|
|
||||||
use crate::utils::{read, path};
|
use crate::utils::{read, path};
|
||||||
|
|
||||||
pub fn _read_only(mut path: PathBuf) -> File {
|
pub fn path() -> PathBuf {
|
||||||
|
let mut path = path::nextsync();
|
||||||
path.push("index");
|
path.push("index");
|
||||||
OpenOptions::new()
|
path
|
||||||
.read(true)
|
|
||||||
.open(path).expect("Cannot open index file")
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn open() -> File {
|
pub fn open() -> File {
|
||||||
let mut path = match path::nextsync() {
|
let path = path();
|
||||||
Some(p) => p,
|
|
||||||
None => todo!(),
|
|
||||||
};
|
|
||||||
|
|
||||||
path.push("index");
|
|
||||||
OpenOptions::new()
|
OpenOptions::new()
|
||||||
.read(true)
|
.read(true)
|
||||||
.write(true)
|
.write(true)
|
||||||
@@ -27,21 +21,27 @@ pub fn open() -> File {
|
|||||||
}
|
}
|
||||||
|
|
||||||
pub fn read_line() -> io::Result<io::Lines<io::BufReader<File>>> {
|
pub fn read_line() -> io::Result<io::Lines<io::BufReader<File>>> {
|
||||||
let mut path = match path::nextsync() {
|
let mut path = path::nextsync();
|
||||||
Some(p) => p,
|
|
||||||
None => todo!(),
|
|
||||||
};
|
|
||||||
path.push("index");
|
path.push("index");
|
||||||
read::read_lines(path)
|
read::read_lines(path)
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn rm_line(line: &str) -> io::Result<()> {
|
pub fn rm_line(line: &str) -> io::Result<()> {
|
||||||
let mut root = match path::nextsync() {
|
let mut root = path::nextsync();
|
||||||
Some(path) => path,
|
|
||||||
None => todo!(),
|
|
||||||
};
|
|
||||||
|
|
||||||
root.push("index");
|
root.push("index");
|
||||||
read::rm_line(root, line)?;
|
read::rm_line(root, line)?;
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn alread_added(file: String) -> bool {
|
||||||
|
if let Ok(lines) = read_line() {
|
||||||
|
for line in lines {
|
||||||
|
if let Ok(l) = line {
|
||||||
|
if l == file {
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|||||||
@@ -1,19 +1,95 @@
|
|||||||
use std::io::{self, Write};
|
use std::io::{self, Write};
|
||||||
use std::path::{Path, PathBuf};
|
use std::path::{Path, PathBuf};
|
||||||
use std::fs::{OpenOptions, self};
|
use std::fs::{self, OpenOptions};
|
||||||
use crypto::sha1::Sha1;
|
use crypto::sha1::Sha1;
|
||||||
use crypto::digest::Digest;
|
use crypto::digest::Digest;
|
||||||
|
use std::io::{Seek, SeekFrom, Read};
|
||||||
|
use crate::store::head;
|
||||||
use crate::utils::{read, path};
|
use crate::utils::{read, path};
|
||||||
|
|
||||||
pub mod tree;
|
pub mod tree;
|
||||||
pub mod blob;
|
pub mod blob;
|
||||||
|
pub mod object;
|
||||||
|
|
||||||
|
pub struct Object {
|
||||||
|
path: PathBuf,
|
||||||
|
hash: String,
|
||||||
|
obj_p: PathBuf,
|
||||||
|
ts: Option<i64>
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Object {
|
||||||
|
pub fn new(path: &str) -> Object {
|
||||||
|
let path = match path.chars().next_back() == "/".chars().next() {
|
||||||
|
true => {
|
||||||
|
let mut new = path.chars();
|
||||||
|
new.next_back();
|
||||||
|
new.as_str()
|
||||||
|
},
|
||||||
|
false => path,
|
||||||
|
};
|
||||||
|
if path == "" {
|
||||||
|
return Object {
|
||||||
|
path: PathBuf::from("/"),
|
||||||
|
hash: String::new(),
|
||||||
|
obj_p: head::path(),
|
||||||
|
ts: None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
let mut hasher = Sha1::new();
|
||||||
|
hasher.input_str(path);
|
||||||
|
let hash = hasher.result_str();
|
||||||
|
|
||||||
|
let (dir, res) = hash.split_at(2);
|
||||||
|
|
||||||
|
let mut obj_p = path::objects();
|
||||||
|
obj_p.push(dir);
|
||||||
|
obj_p.push(res);
|
||||||
|
|
||||||
|
Object {
|
||||||
|
path: PathBuf::from(path),
|
||||||
|
hash,
|
||||||
|
obj_p,
|
||||||
|
ts: None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn read(&mut self) -> &mut Object {
|
||||||
|
match read::read_lines(&self.obj_p) {
|
||||||
|
Ok(mut reader) => {
|
||||||
|
if let Some(Ok(line)) = reader.next() {
|
||||||
|
let mut data = line.rsplit(' ').collect::<Vec<_>>();
|
||||||
|
data.reverse();
|
||||||
|
if data.clone().len() >= 2 {
|
||||||
|
self.ts = Some(data[1].parse::<i64>().unwrap())
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
Err(err) => {
|
||||||
|
eprintln!("error reading object {}: {}", self.obj_p.display(), err);
|
||||||
|
},
|
||||||
|
};
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn exists(&mut self) -> bool {
|
||||||
|
self.obj_p.exists()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// return true if the local file is older than the remote one
|
||||||
|
pub fn is_older(&mut self, ts: i64) -> bool {
|
||||||
|
// todo be aware of the diff of ts format
|
||||||
|
ts > self.ts.expect("Should be read before used") / 1000
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
/// Returns (line, hash, name)
|
/// Returns (line, hash, name)
|
||||||
///
|
///
|
||||||
/// # Examples
|
/// # Examples
|
||||||
/// Input: /foo/bar
|
/// Input: /foo/bar
|
||||||
/// Result: ("tree hash(/foo/bar) bar", hash(/foo/bar), bar)
|
/// Result: ("tree hash(/foo/bar) bar", hash(/foo/bar), bar)
|
||||||
pub fn parse_path(path: &Path, is_blob: bool) -> (String, String, String) {
|
pub fn parse_path(path: PathBuf, is_blob: bool) -> (String, String, String) {
|
||||||
let file_name = path.file_name().unwrap().to_str().unwrap();
|
let file_name = path.file_name().unwrap().to_str().unwrap();
|
||||||
|
|
||||||
let mut hasher = Sha1::new();
|
let mut hasher = Sha1::new();
|
||||||
@@ -37,24 +113,26 @@ fn hash_obj(obj: &str) -> (String, String) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn _object_path(obj: &str) -> PathBuf {
|
fn _object_path(obj: &str) -> PathBuf {
|
||||||
let mut root = match path::objects() {
|
let mut root = path::objects();
|
||||||
Some(path) => path,
|
|
||||||
None => todo!(),
|
|
||||||
};
|
|
||||||
|
|
||||||
let (dir, res) = hash_obj(&obj);
|
let (dir, res) = hash_obj(&obj);
|
||||||
|
|
||||||
root.push(dir);
|
root.push(dir);
|
||||||
root.push(res);
|
root.push(res);
|
||||||
root
|
root
|
||||||
}
|
}
|
||||||
|
|
||||||
fn rm_node(path: &Path, node: &str) -> io::Result<()> {
|
fn rm(hash: &str) -> io::Result<()> {
|
||||||
let mut root = match path::objects() {
|
let mut root = path::objects();
|
||||||
Some(path) => path,
|
let (dir, rest) = hash.split_at(2);
|
||||||
None => todo!(),
|
root.push(dir);
|
||||||
};
|
root.push(rest);
|
||||||
|
fs::remove_file(root)?;
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
let (dir, rest) = hash_obj(path.clone().to_str().unwrap());
|
fn rm_node(path: &Path, node: &str) -> io::Result<()> {
|
||||||
|
let mut root = path::objects();
|
||||||
|
let (dir, rest) = hash_obj(path.to_str().unwrap());
|
||||||
|
|
||||||
root.push(dir);
|
root.push(dir);
|
||||||
root.push(rest);
|
root.push(rest);
|
||||||
@@ -64,16 +142,13 @@ fn rm_node(path: &Path, node: &str) -> io::Result<()> {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn add_node(path: &Path, node: &str) -> io::Result<()> {
|
fn add_node(path: &Path, node: &str) -> io::Result<()> {
|
||||||
let mut root = match path::objects() {
|
let mut root = path::objects();
|
||||||
Some(path) => path,
|
|
||||||
None => todo!(),
|
|
||||||
};
|
|
||||||
|
|
||||||
let (dir, rest) = hash_obj(path.clone().to_str().unwrap());
|
let (dir, rest) = hash_obj(path.to_str().unwrap());
|
||||||
|
|
||||||
root.push(dir);
|
root.push(dir);
|
||||||
if !root.exists() {
|
if !root.exists() {
|
||||||
todo!();
|
//todo!();
|
||||||
}
|
}
|
||||||
root.push(rest);
|
root.push(rest);
|
||||||
|
|
||||||
@@ -87,11 +162,50 @@ fn add_node(path: &Path, node: &str) -> io::Result<()> {
|
|||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
fn update_dates(mut path: PathBuf, date: &str) -> io::Result<()> {
|
||||||
|
let mut obj_p = path::objects();
|
||||||
|
|
||||||
|
while path.pop() {
|
||||||
|
let (dir, res) = hash_obj(path.to_str().unwrap());
|
||||||
|
obj_p.push(dir);
|
||||||
|
obj_p.push(res);
|
||||||
|
update_date(obj_p.clone(), date)?;
|
||||||
|
obj_p.pop();
|
||||||
|
obj_p.pop();
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn update_date(path: PathBuf, date: &str) -> io::Result<()> {
|
||||||
|
let mut file = OpenOptions::new()
|
||||||
|
.read(true)
|
||||||
|
.write(true)
|
||||||
|
.open(path.clone())?;
|
||||||
|
|
||||||
|
let mut buffer = [0; 1];
|
||||||
|
file.seek(SeekFrom::Start(0))?;
|
||||||
|
|
||||||
|
// Seek and read until a space is found
|
||||||
|
loop {
|
||||||
|
let bytes_read = file.read(&mut buffer)?;
|
||||||
|
if bytes_read == 0 {
|
||||||
|
// Reached the end of the file without finding a space
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
|
||||||
|
if buffer[0] == b' ' {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
file.write_all(&date.as_bytes())?;
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
fn create_obj(name: String, content: &str) -> io::Result<()> {
|
fn create_obj(name: String, content: &str) -> io::Result<()> {
|
||||||
let mut root = match path::objects() {
|
let mut root = path::objects();
|
||||||
Some(path) => path,
|
|
||||||
None => todo!(),
|
|
||||||
};
|
|
||||||
|
|
||||||
let c = name.clone();
|
let c = name.clone();
|
||||||
let (dir, rest) = c.split_at(2);
|
let (dir, rest) = c.split_at(2);
|
||||||
@@ -110,34 +224,3 @@ fn create_obj(name: String, content: &str) -> io::Result<()> {
|
|||||||
Ok(())
|
Ok(())
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn get_timestamp(path_s: String) -> Option<i64> {
|
|
||||||
let mut obj_p = match path::objects() {
|
|
||||||
Some(path) => path,
|
|
||||||
None => todo!(),
|
|
||||||
};
|
|
||||||
|
|
||||||
let (dir, res) = hash_obj(&path_s);
|
|
||||||
obj_p.push(dir);
|
|
||||||
obj_p.push(res);
|
|
||||||
|
|
||||||
match read::read_lines(obj_p) {
|
|
||||||
Ok(mut reader) => {
|
|
||||||
match reader.next() {
|
|
||||||
Some(Ok(line)) => {
|
|
||||||
let mut data = line.rsplit(' ');
|
|
||||||
if data.clone().count() >= 2 {
|
|
||||||
Some(data.next().unwrap().parse::<i64>().unwrap())
|
|
||||||
} else {
|
|
||||||
None
|
|
||||||
}
|
|
||||||
},
|
|
||||||
_ => None,
|
|
||||||
}
|
|
||||||
},
|
|
||||||
Err(err) => {
|
|
||||||
eprintln!("error reading object: {}", err);
|
|
||||||
None
|
|
||||||
},
|
|
||||||
}
|
|
||||||
|
|
||||||
}
|
|
||||||
|
|||||||
@@ -1,50 +1,330 @@
|
|||||||
use std::io::{self};
|
use std::io::{self, Read};
|
||||||
use std::path::Path;
|
use std::fs::{self, File};
|
||||||
use std::fs::{self};
|
use std::io::Write;
|
||||||
use crate::utils::path;
|
use std::fs::OpenOptions;
|
||||||
use crate::store::head;
|
use std::path::PathBuf;
|
||||||
use crate::store::object::{parse_path, add_node, create_obj, rm_node};
|
use std::time::SystemTime;
|
||||||
|
use crate::commands::status::State;
|
||||||
|
use crate::utils::into::IntoPathBuf;
|
||||||
|
use crate::utils::{path, read};
|
||||||
|
use crate::store::object::update_dates;
|
||||||
|
|
||||||
pub fn add(path: &Path, date: &str) -> io::Result<()> {
|
use crate::store::object::object::ObjMethods;
|
||||||
let (line, hash, name) = parse_path(path.clone(), true);
|
use crate::store::object::object::Obj;
|
||||||
// add blob reference to parent
|
|
||||||
if path.iter().count() == 1 {
|
|
||||||
head::add_line(line)?;
|
|
||||||
} else {
|
|
||||||
add_node(path.parent().unwrap(), &line)?;
|
|
||||||
}
|
|
||||||
|
|
||||||
let mut content = name.clone().to_owned();
|
const HASH_EMPTY: &str = "d41d8cd98f00b204e9800998ecf8427e";
|
||||||
content.push_str(" ");
|
|
||||||
content.push_str(date);
|
|
||||||
|
|
||||||
// create blob object
|
pub struct Blob {
|
||||||
create_obj(hash, &content)?;
|
pub obj: Obj,
|
||||||
|
data: Vec<String>, // content of the ref file
|
||||||
Ok(())
|
file_hash: Option<String>, // hash of the file's content
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn rm(path: &Path) -> io::Result<()> {
|
//pub struct Blob {
|
||||||
let (line, hash, _) = parse_path(path.clone(), true);
|
// r_path: PathBuf, // relative path
|
||||||
|
// a_path: PathBuf, // absolute path
|
||||||
|
// hash: String, // hash of relative path
|
||||||
|
// file_hash: Option<String>,
|
||||||
|
// obj_p: PathBuf, // path of the object file
|
||||||
|
// data: Vec<String>, // content of the blob
|
||||||
|
//}
|
||||||
|
|
||||||
// remove blob reference to parent
|
|
||||||
if path.iter().count() == 1 {
|
|
||||||
head::rm_line(&line)?;
|
impl Blob {
|
||||||
} else {
|
pub fn new(obj: Obj) -> Self {
|
||||||
rm_node(path.parent().unwrap(), &line)?;
|
Self {
|
||||||
|
obj,
|
||||||
|
data: vec![],
|
||||||
|
file_hash: None,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
pub fn from_path<S>(r_path: S) -> Blob where S: IntoPathBuf {
|
||||||
|
let r_path = r_path.into();
|
||||||
|
Self {
|
||||||
|
obj: Obj::from_path(r_path),
|
||||||
|
data: vec![],
|
||||||
|
file_hash: None,
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
// remove blob object
|
fn get_file_hash(&mut self) -> String {
|
||||||
let mut root = match path::objects() {
|
if self.file_hash.is_none() {
|
||||||
Some(path) => path,
|
let bytes = std::fs::read(self.get_file_path()).unwrap();
|
||||||
None => todo!(),
|
let hash = md5::compute(&bytes);
|
||||||
|
self.file_hash = Some(format!("{:x}", hash))
|
||||||
|
}
|
||||||
|
self.file_hash.clone().unwrap()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// read line of blob to get all informations and store them in self.data
|
||||||
|
pub fn read_data(&mut self) {
|
||||||
|
if self.data.len() == 0 {
|
||||||
|
if let Ok(mut file) = File::open(self.get_obj_path()) {
|
||||||
|
let mut buffer = String::new();
|
||||||
|
let _ = file.read_to_string(&mut buffer);
|
||||||
|
let data = buffer.rsplit(' ').collect::<Vec<_>>();
|
||||||
|
for e in data {
|
||||||
|
self.data.push(String::from(e));
|
||||||
|
}
|
||||||
|
self.data.reverse();
|
||||||
|
|
||||||
|
// remove \n of last element
|
||||||
|
if let Some(last) = self.data.last_mut() {
|
||||||
|
if last.ends_with("\n") {
|
||||||
|
last.pop();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_data_index(&mut self, index: usize) -> String {
|
||||||
|
self.read_data();
|
||||||
|
if self.data.len() >= index + 1 {
|
||||||
|
self.data[index].clone()
|
||||||
|
} else {
|
||||||
|
String::new()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn saved_filename(&mut self) -> String {
|
||||||
|
self.get_data_index(0)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn saved_remote_ts(&mut self) -> String {
|
||||||
|
self.get_data_index(1)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn saved_local_size(&mut self) -> String {
|
||||||
|
self.get_data_index(2)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn saved_local_ts(&mut self) -> u64 {
|
||||||
|
match self.get_data_index(3).as_str() {
|
||||||
|
"" => 0,
|
||||||
|
str => str.parse::<u64>().unwrap()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn saved_hash(&mut self) -> String {
|
||||||
|
self.get_data_index(4)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn has_same_size(&mut self) -> bool {
|
||||||
|
let metadata = match fs::metadata(self.get_file_path()) {
|
||||||
|
Ok(m) => m,
|
||||||
|
Err(_) => return true,
|
||||||
};
|
};
|
||||||
|
|
||||||
let c = hash.clone();
|
if self.saved_local_size() == String::new() { return true; }
|
||||||
let (dir, rest) = c.split_at(2);
|
metadata.len().to_string() == self.saved_local_size()
|
||||||
root.push(dir);
|
}
|
||||||
root.push(rest);
|
|
||||||
fs::remove_file(root)?;
|
fn is_newer(&mut self) -> bool {
|
||||||
|
let metadata = match fs::metadata(self.get_file_path()) {
|
||||||
|
Ok(m) => m,
|
||||||
|
Err(_) => return true,
|
||||||
|
};
|
||||||
|
|
||||||
|
let secs = metadata
|
||||||
|
.modified()
|
||||||
|
.unwrap()
|
||||||
|
.duration_since(SystemTime::UNIX_EPOCH)
|
||||||
|
.unwrap()
|
||||||
|
.as_secs();
|
||||||
|
if self.saved_local_ts() == 0 { return true; }
|
||||||
|
secs > self.saved_local_ts()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn has_same_hash(&mut self) -> bool {
|
||||||
|
if self.saved_hash() == String::new() { return false; }
|
||||||
|
let file_hash = self.get_file_hash().clone();
|
||||||
|
self.saved_hash() == file_hash
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn has_changes(&mut self) -> bool {
|
||||||
|
!self.has_same_size() || (self.is_newer() && !self.has_same_hash())
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn get_all_identical_blobs(&mut self) -> Vec<String> {
|
||||||
|
// an empty file is a new file not the copy of another empty file
|
||||||
|
if self.get_file_hash() == HASH_EMPTY {
|
||||||
|
return vec![];
|
||||||
|
}
|
||||||
|
|
||||||
|
let refs_p = self.get_obj_path();
|
||||||
|
let mut blobs: Vec<String> = vec![];
|
||||||
|
if let Ok(lines) = read::read_lines(refs_p) {
|
||||||
|
for line in lines {
|
||||||
|
if let Ok(l) = line {
|
||||||
|
blobs.push(l.clone());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
blobs
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn status(&mut self, path_from: &mut Option<PathBuf>) -> State {
|
||||||
|
let has_obj_ref = self.get_obj_path().exists();
|
||||||
|
let blob_exists = self.get_file_path().exists();
|
||||||
|
|
||||||
|
if has_obj_ref && !blob_exists {
|
||||||
|
State::Deleted
|
||||||
|
} else if !has_obj_ref && blob_exists {
|
||||||
|
let identical_blobs = self.get_all_identical_blobs();
|
||||||
|
if identical_blobs.len() != 0 {
|
||||||
|
let identical_blob = Blob::from_path(identical_blobs[0].clone()).get_local_obj();
|
||||||
|
|
||||||
|
if identical_blob.state == State::Deleted {
|
||||||
|
*path_from = Some(identical_blob.path);
|
||||||
|
State::Moved
|
||||||
|
} else if identical_blob.state == State::Default {
|
||||||
|
*path_from = Some(identical_blob.path);
|
||||||
|
State::Copied
|
||||||
|
} else {
|
||||||
|
State::New
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
State::New
|
||||||
|
}
|
||||||
|
} else if !has_obj_ref && !blob_exists {
|
||||||
|
State::Default
|
||||||
|
} else if self.has_changes() {
|
||||||
|
State::Modified
|
||||||
|
} else {
|
||||||
|
State::Default
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn create_blob_ref(&mut self, ts_remote: &str) -> io::Result<()> {
|
||||||
|
let metadata = fs::metadata(self.get_file_path())?;
|
||||||
|
let secs = metadata
|
||||||
|
.modified()
|
||||||
|
.unwrap()
|
||||||
|
.duration_since(SystemTime::UNIX_EPOCH)
|
||||||
|
.unwrap()
|
||||||
|
.as_secs();
|
||||||
|
|
||||||
|
// build line with all needed properties
|
||||||
|
let content = format!("{} {} {} {} {}",
|
||||||
|
self.get_name(),
|
||||||
|
ts_remote,
|
||||||
|
metadata.len().to_string(),
|
||||||
|
secs.to_string(),
|
||||||
|
self.get_file_hash());
|
||||||
|
|
||||||
|
// create parent dir if needed
|
||||||
|
let mut obj_path = self.get_obj_path();
|
||||||
|
obj_path.pop();
|
||||||
|
if !obj_path.exists() {
|
||||||
|
fs::create_dir_all(obj_path)?;
|
||||||
|
}
|
||||||
|
|
||||||
|
// open ref file
|
||||||
|
let mut file = OpenOptions::new()
|
||||||
|
.create_new(true)
|
||||||
|
.write(true)
|
||||||
|
.open(self.get_obj_path())?;
|
||||||
|
|
||||||
|
writeln!(file, "{}", content)?;
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_file_ref(&mut self) -> PathBuf {
|
||||||
|
let mut refs_p = path::refs();
|
||||||
|
let file_hash = self.get_file_hash().clone();
|
||||||
|
let (dir, res) = file_hash.split_at(2);
|
||||||
|
|
||||||
|
refs_p.push(dir);
|
||||||
|
if !refs_p.exists() {
|
||||||
|
let _ = fs::create_dir_all(refs_p.clone());
|
||||||
|
}
|
||||||
|
refs_p.push(res);
|
||||||
|
refs_p
|
||||||
|
}
|
||||||
|
|
||||||
|
// create a file in .nextsync/refs with the hash of this blob that
|
||||||
|
// redirect to the relative path
|
||||||
|
fn create_hash_ref(&mut self) -> io::Result<()> {
|
||||||
|
// todo check if the file has been modified for moved and copy
|
||||||
|
let refs_p = self.get_file_ref();
|
||||||
|
|
||||||
|
let mut file = OpenOptions::new()
|
||||||
|
.create(true)
|
||||||
|
.write(true)
|
||||||
|
.open(refs_p)?;
|
||||||
|
|
||||||
|
// todo deal with duplicate content
|
||||||
|
writeln!(file, "{}", self.get_relative_file_path().to_str().unwrap())?;
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn create(&mut self, ts_remote: &str, up_parent: bool) -> io::Result<()> {
|
||||||
|
|
||||||
|
// add blob reference to parent
|
||||||
|
let _ = self.add_ref_to_parent();
|
||||||
|
|
||||||
|
if let Err(err) = self.create_blob_ref(ts_remote.clone()) {
|
||||||
|
eprintln!("err: saving blob ref of {}: {}", self.get_relative_file_path().display(), err);
|
||||||
|
}
|
||||||
|
|
||||||
|
if let Err(err) = self.create_hash_ref() {
|
||||||
|
eprintln!("err: saving hash ref of {}: {}", self.get_relative_file_path().display(), err);
|
||||||
|
}
|
||||||
|
|
||||||
|
// update date for all parent
|
||||||
|
if up_parent {
|
||||||
|
if let Err(err) = update_dates(self.get_relative_file_path(), ts_remote) {
|
||||||
|
eprintln!("err: updating parent date of {}: {}", self.get_relative_file_path().display(), err);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn update(&mut self, ts_remote: &str) -> io::Result<()> {
|
||||||
|
|
||||||
|
// // remove old hash ref
|
||||||
|
// let mut refs_p = path::refs();
|
||||||
|
// let binding = self.saved_hash();
|
||||||
|
// let (dir, res) = binding.split_at(2);
|
||||||
|
// refs_p.push(dir);
|
||||||
|
// refs_p.push(res);
|
||||||
|
// if let Err(err) = fs::remove_file(refs_p) {
|
||||||
|
// eprintln!("err: removing hash ref of {}: {}", self.r_path.clone().display(), err);
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// // creating new hash ref
|
||||||
|
// if let Err(err) = self.create_hash_ref() {
|
||||||
|
// eprintln!("err: saving hash ref of {}: {}", self.r_path.clone().display(), err);
|
||||||
|
// }
|
||||||
|
//
|
||||||
|
// // updating content of blob's ref
|
||||||
|
// let metadata = fs::metadata(self.a_path.clone())?;
|
||||||
|
// let secs = metadata
|
||||||
|
// .modified()
|
||||||
|
// .unwrap()
|
||||||
|
// .duration_since(SystemTime::UNIX_EPOCH)
|
||||||
|
// .unwrap()
|
||||||
|
// .as_secs();
|
||||||
|
//
|
||||||
|
// let mut content = self.saved_filename();
|
||||||
|
// content.push_str(" ");
|
||||||
|
// content.push_str(ts_remote);
|
||||||
|
// content.push_str(" ");
|
||||||
|
// content.push_str(&metadata.len().to_string());
|
||||||
|
// content.push_str(" ");
|
||||||
|
// content.push_str(&secs.to_string());
|
||||||
|
// content.push_str(" ");
|
||||||
|
// content.push_str(&self.get_file_hash());
|
||||||
|
//
|
||||||
|
// let mut file = OpenOptions::new()
|
||||||
|
// .write(true)
|
||||||
|
// .open(self.obj_p.clone())?;
|
||||||
|
//
|
||||||
|
// writeln!(file, "{}", &content)?;
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
380
src/store/object/object.rs
Normal file
380
src/store/object/object.rs
Normal file
@@ -0,0 +1,380 @@
|
|||||||
|
use std::io;
|
||||||
|
use std::fs;
|
||||||
|
use std::path::PathBuf;
|
||||||
|
use crate::utils::path;
|
||||||
|
use crate::store::head;
|
||||||
|
use crate::store::object::{add_node, rm_node};
|
||||||
|
use crypto::sha1::Sha1;
|
||||||
|
use crypto::digest::Digest;
|
||||||
|
use crate::utils::into::IntoPathBuf;
|
||||||
|
use crate::store::object::{blob::Blob, tree::Tree};
|
||||||
|
use crate::commands::status::{State, LocalObj};
|
||||||
|
|
||||||
|
#[derive(Clone, Copy)]
|
||||||
|
pub enum ObjType {
|
||||||
|
TREE,
|
||||||
|
BLOB,
|
||||||
|
DEFAULT
|
||||||
|
}
|
||||||
|
|
||||||
|
pub trait ObjMethods {
|
||||||
|
fn get_type(&self) -> ObjType;
|
||||||
|
fn get_obj_path(&self) -> PathBuf;
|
||||||
|
fn get_file_path(&self) -> PathBuf;
|
||||||
|
fn get_relative_file_path(&self) -> PathBuf;
|
||||||
|
fn get_repo_file_path(&self) -> PathBuf;
|
||||||
|
fn get_name(&self) -> String;
|
||||||
|
fn get_hash_path(&self) -> String;
|
||||||
|
fn get_local_obj(&self) -> LocalObj;
|
||||||
|
fn get_line(&self) -> String;
|
||||||
|
fn add_ref_to_parent(&self) -> io::Result<()>;
|
||||||
|
fn rm(&mut self) -> io::Result<()>;
|
||||||
|
fn rm_node(&mut self) -> io::Result<()>;
|
||||||
|
fn rm_node_down(&mut self) -> io::Result<()>;
|
||||||
|
fn exists_on_remote(&mut self) -> bool;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub struct Obj {
|
||||||
|
name: String,
|
||||||
|
obj_path: PathBuf,
|
||||||
|
obj_type: ObjType,
|
||||||
|
file_path: PathBuf, // file here is used as both file and directory
|
||||||
|
relative_file_path: PathBuf,
|
||||||
|
repo_file_path: PathBuf,
|
||||||
|
hash_path: String, // hash of the relative path of the file
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
impl ObjMethods for Obj {
|
||||||
|
fn get_type(&self) -> ObjType {
|
||||||
|
self.obj_type
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_obj_path(&self) -> PathBuf {
|
||||||
|
self.obj_path.clone()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_file_path(&self) -> PathBuf {
|
||||||
|
self.file_path.clone()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_relative_file_path(&self) -> PathBuf {
|
||||||
|
self.relative_file_path.clone()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_repo_file_path(&self) -> PathBuf {
|
||||||
|
self.repo_file_path.clone()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_local_obj(&self) -> LocalObj {
|
||||||
|
LocalObj {
|
||||||
|
otype: match self.obj_type {
|
||||||
|
ObjType::BLOB => String::from("blob"),
|
||||||
|
ObjType::TREE => String::from("tree"),
|
||||||
|
ObjType::DEFAULT => String::from("default"),
|
||||||
|
},
|
||||||
|
name: self.get_name(),
|
||||||
|
path: self.get_repo_file_path(),
|
||||||
|
path_from: None,
|
||||||
|
state: State::New
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_name(&self) -> String {
|
||||||
|
self.name.clone()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_hash_path(&self) -> String {
|
||||||
|
self.hash_path.clone()
|
||||||
|
}
|
||||||
|
|
||||||
|
// build line for parent reference
|
||||||
|
fn get_line(&self) -> String {
|
||||||
|
format!("tree {} {}", self.get_hash_path(), self.get_name())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn add_ref_to_parent(&self) -> io::Result<()> {
|
||||||
|
let line = self.get_line();
|
||||||
|
if self.get_relative_file_path().iter().count() == 1 {
|
||||||
|
head::add_line(line)?;
|
||||||
|
} else {
|
||||||
|
add_node(self.get_relative_file_path().parent().unwrap(), &line)?;
|
||||||
|
}
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn rm_node(&mut self) -> io::Result<()> {
|
||||||
|
// remove self object and children object
|
||||||
|
self.rm_node_down();
|
||||||
|
|
||||||
|
// remove parent reference to self
|
||||||
|
let line = self.get_line();
|
||||||
|
if self.get_relative_file_path().iter().count() == 1 {
|
||||||
|
head::rm_line(&line)?;
|
||||||
|
} else {
|
||||||
|
rm_node(self.get_relative_file_path().parent().unwrap(), &line)?;
|
||||||
|
}
|
||||||
|
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn rm_node_down(&mut self) -> io::Result<()> {
|
||||||
|
eprintln!("rm_node_down: tried to do this on Obj");
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn rm(&mut self) -> io::Result<()> {
|
||||||
|
eprintln!("rm: tried to do this on Obj");
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn exists_on_remote(&mut self) -> bool {
|
||||||
|
PathBuf::from(self.get_hash_path()).exists()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ObjMethods for Blob {
|
||||||
|
fn get_type(&self) -> ObjType {
|
||||||
|
self.obj.get_type()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_obj_path(&self) -> PathBuf {
|
||||||
|
self.obj.get_obj_path()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_file_path(&self) -> PathBuf {
|
||||||
|
self.obj.get_file_path()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_relative_file_path(&self) -> PathBuf {
|
||||||
|
self.obj.get_relative_file_path()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_repo_file_path(&self) -> PathBuf {
|
||||||
|
self.obj.get_repo_file_path()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_local_obj(&self) -> LocalObj {
|
||||||
|
self.obj.get_local_obj()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_name(&self) -> String {
|
||||||
|
self.obj.get_name()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_hash_path(&self) -> String {
|
||||||
|
self.obj.get_hash_path()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_line(&self) -> String {
|
||||||
|
self.obj.get_line()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn add_ref_to_parent(&self) -> io::Result<()> {
|
||||||
|
self.obj.add_ref_to_parent()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn rm_node(&mut self) -> io::Result<()> {
|
||||||
|
self.obj.rm_node()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn rm_node_down(&mut self) -> io::Result<()> {
|
||||||
|
// remove reference to self
|
||||||
|
fs::remove_file(self.get_obj_path())?;
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn rm(&mut self) -> io::Result<()> {
|
||||||
|
// remove all references, including children's one
|
||||||
|
self.rm_node()?;
|
||||||
|
|
||||||
|
// remove file
|
||||||
|
fs::remove_file(self.get_file_path())?;
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn exists_on_remote(&mut self) -> bool {
|
||||||
|
self.obj.exists_on_remote()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl ObjMethods for Tree {
|
||||||
|
fn get_type(&self) -> ObjType {
|
||||||
|
self.obj.get_type()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_obj_path(&self) -> PathBuf {
|
||||||
|
self.obj.get_obj_path()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_file_path(&self) -> PathBuf {
|
||||||
|
self.obj.get_file_path()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_relative_file_path(&self) -> PathBuf {
|
||||||
|
self.obj.get_relative_file_path()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_repo_file_path(&self) -> PathBuf {
|
||||||
|
self.obj.get_repo_file_path()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_local_obj(&self) -> LocalObj {
|
||||||
|
self.obj.get_local_obj()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_name(&self) -> String {
|
||||||
|
self.obj.get_name()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_hash_path(&self) -> String {
|
||||||
|
self.obj.get_hash_path()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_line(&self) -> String {
|
||||||
|
self.obj.get_line()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn add_ref_to_parent(&self) -> io::Result<()> {
|
||||||
|
self.obj.add_ref_to_parent()
|
||||||
|
}
|
||||||
|
|
||||||
|
fn rm_node(&mut self) -> io::Result<()> {
|
||||||
|
self.obj.rm_node()
|
||||||
|
}
|
||||||
|
|
||||||
|
/// remove objects and children but not parent reference to self
|
||||||
|
fn rm_node_down(&mut self) -> io::Result<()> {
|
||||||
|
// remove children
|
||||||
|
while let Some(mut child) = self.next() {
|
||||||
|
match child.get_type() {
|
||||||
|
ObjType::TREE => child.rm_node_down(),
|
||||||
|
ObjType::BLOB => child.rm_node_down(),
|
||||||
|
_ => Ok(())
|
||||||
|
}?;
|
||||||
|
};
|
||||||
|
|
||||||
|
// remove reference to self
|
||||||
|
fs::remove_file(self.get_obj_path())?;
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn rm(&mut self) -> io::Result<()> {
|
||||||
|
// remove all references, including children's one
|
||||||
|
self.rm_node()?;
|
||||||
|
|
||||||
|
// remove directory and all subfiles
|
||||||
|
fs::remove_dir_all(self.get_file_path())?;
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
fn exists_on_remote(&mut self) -> bool {
|
||||||
|
self.obj.exists_on_remote()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl Obj {
|
||||||
|
fn new() -> Self {
|
||||||
|
Obj {
|
||||||
|
name: String::new(),
|
||||||
|
obj_path: PathBuf::new(),
|
||||||
|
file_path: PathBuf::new(),
|
||||||
|
obj_type: ObjType::DEFAULT,
|
||||||
|
hash_path: String::new(),
|
||||||
|
relative_file_path: PathBuf::new(),
|
||||||
|
repo_file_path: PathBuf::new()
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn from_path<S>(path: S) -> Obj where S: IntoPathBuf {
|
||||||
|
let path = path.into();
|
||||||
|
let mut hasher = Sha1::new();
|
||||||
|
hasher.input_str(path.to_str().unwrap());
|
||||||
|
let hash = hasher.result_str();
|
||||||
|
|
||||||
|
let (dir, res) = hash.split_at(2);
|
||||||
|
let mut obj_path = path::objects();
|
||||||
|
obj_path.push(dir);
|
||||||
|
obj_path.push(res);
|
||||||
|
|
||||||
|
let root = path::repo_root();
|
||||||
|
let abs_path = root.join(path.clone());
|
||||||
|
Obj {
|
||||||
|
name: match abs_path.file_name() {
|
||||||
|
None => String::new(),
|
||||||
|
Some(name) => name.to_str().unwrap().to_owned()
|
||||||
|
},
|
||||||
|
obj_path,
|
||||||
|
obj_type: match path.exists() {
|
||||||
|
true => match path.is_dir() {
|
||||||
|
true => ObjType::TREE,
|
||||||
|
false => ObjType::BLOB
|
||||||
|
},
|
||||||
|
false => ObjType::DEFAULT
|
||||||
|
},
|
||||||
|
file_path: abs_path,
|
||||||
|
relative_file_path: path.clone(),
|
||||||
|
repo_file_path: path,
|
||||||
|
hash_path: hash,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
/// load from the information line stored in the object
|
||||||
|
pub fn from_line(line: String, base_dir: Option<PathBuf>) -> Box<dyn ObjMethods> {
|
||||||
|
let mut split = line.rsplit(' ');
|
||||||
|
if split.clone().count() != 3 {
|
||||||
|
eprintln!("fatal: invalid object(s)");
|
||||||
|
std::process::exit(1);
|
||||||
|
}
|
||||||
|
|
||||||
|
let name = split.next().unwrap();
|
||||||
|
let hash_path = split.next().unwrap();
|
||||||
|
let obj_type = split.next().unwrap();
|
||||||
|
|
||||||
|
let (dir, res) = hash_path.split_at(2);
|
||||||
|
let mut obj_path = path::objects();
|
||||||
|
obj_path.push(dir);
|
||||||
|
obj_path.push(res);
|
||||||
|
|
||||||
|
let path = match base_dir {
|
||||||
|
Some(dir) => dir.join(name),
|
||||||
|
None => PathBuf::from(name),
|
||||||
|
};
|
||||||
|
|
||||||
|
let root = path::repo_root();
|
||||||
|
let abs_path = root.join(path.clone());
|
||||||
|
|
||||||
|
let obj = Obj {
|
||||||
|
name: String::from(name),
|
||||||
|
obj_path,
|
||||||
|
obj_type: match obj_type {
|
||||||
|
"tree" => ObjType::TREE,
|
||||||
|
"blob" => ObjType::BLOB,
|
||||||
|
_ => ObjType::DEFAULT
|
||||||
|
},
|
||||||
|
file_path: abs_path,
|
||||||
|
relative_file_path: path.clone(),
|
||||||
|
repo_file_path: path,
|
||||||
|
hash_path: String::from(hash_path),
|
||||||
|
};
|
||||||
|
|
||||||
|
match obj.obj_type {
|
||||||
|
ObjType::TREE => Box::new(Tree::new(obj)),
|
||||||
|
ObjType::BLOB => Box::new(Blob::new(obj)),
|
||||||
|
ObjType::DEFAULT => Box::new(Tree::new(obj))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn from_head() -> Self {
|
||||||
|
Obj {
|
||||||
|
name: String::new(),
|
||||||
|
obj_path: head::path(),
|
||||||
|
obj_type: ObjType::TREE,
|
||||||
|
file_path: PathBuf::new(),
|
||||||
|
relative_file_path: PathBuf::new(),
|
||||||
|
repo_file_path: PathBuf::new(),
|
||||||
|
hash_path: String::new(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
@@ -1,63 +1,113 @@
|
|||||||
use std::fs::File;
|
use crate::utils::into::IntoPathBuf;
|
||||||
use std::io::{self};
|
use crate::store::object::object::Obj;
|
||||||
use std::path::Path;
|
use crate::store::object::update_dates;
|
||||||
use crate::utils::{read, path};
|
use crate::store::object::object::ObjMethods;
|
||||||
use crate::store::head;
|
use std::fs::{self, File, OpenOptions};
|
||||||
use crate::store::object::{parse_path, hash_obj, add_node, create_obj};
|
use std::io::{self, BufRead, BufReader, Write};
|
||||||
|
|
||||||
pub fn add(path: &Path, date: &str) -> io::Result<()> {
|
pub struct Tree {
|
||||||
let (line, hash, name) = parse_path(path.clone(), false);
|
pub obj: Obj,
|
||||||
|
pub buf_reader: Option<BufReader<File>>,
|
||||||
|
is_head: bool,
|
||||||
|
}
|
||||||
|
|
||||||
// add tree reference to parent
|
|
||||||
if path.iter().count() == 1 {
|
impl Tree {
|
||||||
head::add_line(line)?;
|
pub fn new(obj: Obj) -> Self {
|
||||||
} else {
|
Tree {
|
||||||
add_node(path.parent().unwrap(), &line)?;
|
obj,
|
||||||
|
buf_reader: None,
|
||||||
|
is_head: false,
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn from_head() -> Self {
|
||||||
|
Tree {
|
||||||
|
obj: Obj::from_head(),
|
||||||
|
buf_reader: None,
|
||||||
|
is_head: true,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn from_path<S>(r_path: S) -> Tree where S: IntoPathBuf {
|
||||||
|
Tree {
|
||||||
|
obj: Obj::from_path(r_path.into()),
|
||||||
|
buf_reader: None,
|
||||||
|
is_head: false,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn read(&mut self) {
|
||||||
|
if self.buf_reader.is_none() {
|
||||||
|
if let Ok(file) = File::open(self.get_obj_path()) {
|
||||||
|
self.buf_reader = Some(BufReader::new(file));
|
||||||
|
|
||||||
|
// skip first line if is head
|
||||||
|
if !self.is_head {
|
||||||
|
self.next();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn next(&mut self) -> Option<Box<dyn ObjMethods>> {
|
||||||
|
self.read();
|
||||||
|
//if let Some(ref mut file) = self.buf_reader {
|
||||||
|
// let mut line = String::new();
|
||||||
|
// match file.read_line(&mut line) {
|
||||||
|
// Ok(0) => Ok(None), // End of file
|
||||||
|
// Ok(_) => Ok(Some(line.trim_end().len())), // Return length of line
|
||||||
|
// Err(e) => Err(e),
|
||||||
|
// }
|
||||||
|
//} else {
|
||||||
|
// Ok(None) // If file is None, return None
|
||||||
|
//}
|
||||||
|
match self.buf_reader {
|
||||||
|
Some(ref mut file) => {
|
||||||
|
let mut line = String::new();
|
||||||
|
match file.read_line(&mut line) {
|
||||||
|
Ok(0) => None,
|
||||||
|
Ok(_) => Some(Obj::from_line(line, Some(self.get_relative_file_path()))),
|
||||||
|
Err(e) => {
|
||||||
|
eprintln!("tree::next: failed to read next line: {}", e);
|
||||||
|
None
|
||||||
|
}
|
||||||
|
}
|
||||||
|
},
|
||||||
|
None => None
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn create(&self, date: &str, up_parent: bool) -> io::Result<()> {
|
||||||
|
// add tree reference to parent
|
||||||
|
let _ = self.add_ref_to_parent();
|
||||||
|
|
||||||
// create tree object
|
// create tree object
|
||||||
let mut content = name;
|
let content = format!("{} {}", self.get_name(), date);
|
||||||
content.push_str(" ");
|
|
||||||
content.push_str(date);
|
// create parent dir if needed
|
||||||
create_obj(hash, &content)?;
|
let mut obj_path = self.get_obj_path();
|
||||||
|
obj_path.pop();
|
||||||
|
if !obj_path.exists() {
|
||||||
|
fs::create_dir_all(obj_path)?;
|
||||||
|
}
|
||||||
|
|
||||||
|
// open ref file
|
||||||
|
let mut file = OpenOptions::new()
|
||||||
|
.create_new(true)
|
||||||
|
.write(true)
|
||||||
|
.open(self.get_obj_path())?;
|
||||||
|
|
||||||
|
// update date for all parent
|
||||||
|
if up_parent {
|
||||||
|
if let Err(err) = update_dates(self.get_relative_file_path(), date) {
|
||||||
|
eprintln!("err: updating parent date of {}: {}", self.get_relative_file_path().display(), err);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
writeln!(file, "{}", content)?;
|
||||||
|
|
||||||
Ok(())
|
Ok(())
|
||||||
}
|
|
||||||
|
|
||||||
pub fn read(tree: String) -> Option<(String, io::Lines<io::BufReader<File>>)> {
|
|
||||||
let mut obj_p = match path::objects() {
|
|
||||||
Some(path) => path,
|
|
||||||
None => todo!(),
|
|
||||||
};
|
|
||||||
|
|
||||||
let (dir, res) = hash_obj(&tree);
|
|
||||||
obj_p.push(dir);
|
|
||||||
obj_p.push(res);
|
|
||||||
|
|
||||||
match read::read_lines(obj_p) {
|
|
||||||
Ok(mut reader) => {
|
|
||||||
let name = match reader.next() {
|
|
||||||
Some(Ok(line)) => line,
|
|
||||||
_ => String::from(""),
|
|
||||||
};
|
|
||||||
Some((name, reader))
|
|
||||||
},
|
|
||||||
Err(err) => {
|
|
||||||
eprintln!("error reading tree: {}", err);
|
|
||||||
None
|
|
||||||
},
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
pub fn parse_line(line: String) -> (String, String, String) {
|
|
||||||
let mut split = line.rsplit(' ');
|
|
||||||
if split.clone().count() != 3 {
|
|
||||||
eprintln!("fatal: invalid object(s)");
|
|
||||||
std::process::exit(1);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
let name = split.next().unwrap();
|
|
||||||
let hash = split.next().unwrap();
|
|
||||||
let ftype = split.next().unwrap();
|
|
||||||
(String::from(ftype), String::from(hash), String::from(name))
|
|
||||||
}
|
}
|
||||||
11
src/subcommands.rs
Normal file
11
src/subcommands.rs
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
pub mod init;
|
||||||
|
pub mod status;
|
||||||
|
pub mod add;
|
||||||
|
pub mod reset;
|
||||||
|
pub mod clone;
|
||||||
|
pub mod push;
|
||||||
|
pub mod config;
|
||||||
|
pub mod remote_diff;
|
||||||
|
pub mod pull;
|
||||||
|
pub mod remote;
|
||||||
|
pub mod credential;
|
||||||
40
src/subcommands/add.rs
Normal file
40
src/subcommands/add.rs
Normal file
@@ -0,0 +1,40 @@
|
|||||||
|
use clap::{Arg, ArgMatches, Command};
|
||||||
|
|
||||||
|
use crate::commands;
|
||||||
|
use crate::commands::add::AddArgs;
|
||||||
|
|
||||||
|
pub fn create() -> Command {
|
||||||
|
Command::new("add")
|
||||||
|
.arg(
|
||||||
|
Arg::new("files")
|
||||||
|
.required(true)
|
||||||
|
.conflicts_with("all")
|
||||||
|
.num_args(1..)
|
||||||
|
.value_name("FILE")
|
||||||
|
.help("Files to add"),
|
||||||
|
)
|
||||||
|
.arg(
|
||||||
|
Arg::new("force")
|
||||||
|
.short('f')
|
||||||
|
.long("force")
|
||||||
|
.help("Allow adding otherwise ignored files."),
|
||||||
|
)
|
||||||
|
.arg(
|
||||||
|
Arg::new("all")
|
||||||
|
.short('A')
|
||||||
|
.long("all")
|
||||||
|
.help("This adds, modifies, and removes index entries to match the working tree"),
|
||||||
|
)
|
||||||
|
.about("Add changes to the index")
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn handler(args: &ArgMatches) {
|
||||||
|
commands::add::add(AddArgs {
|
||||||
|
files: match args.get_many::<String>("files") {
|
||||||
|
None => vec![],
|
||||||
|
Some(vals) => vals.map(|s| s.to_string()).collect(),
|
||||||
|
},
|
||||||
|
force: args.contains_id("force"),
|
||||||
|
all: args.contains_id("all"),
|
||||||
|
});
|
||||||
|
}
|
||||||
52
src/subcommands/clone.rs
Normal file
52
src/subcommands/clone.rs
Normal file
@@ -0,0 +1,52 @@
|
|||||||
|
use clap::{Arg, Command, ArgMatches};
|
||||||
|
// use textwrap::{fill, Options};
|
||||||
|
|
||||||
|
use crate::commands::clone::CloneArgs;
|
||||||
|
use crate::global;
|
||||||
|
use crate::commands;
|
||||||
|
|
||||||
|
// fn sized_str<'a>(content: &'a str) -> &'a str {
|
||||||
|
// fill(content, Options::new(70).width).as_str();
|
||||||
|
// "ok"
|
||||||
|
// }
|
||||||
|
|
||||||
|
pub fn create() -> Command {
|
||||||
|
// let remote_desc = sized_str(&format!("The repository to clone from. See the NEXTSYNC URLS section below for more information on specifying repositories."));
|
||||||
|
// let depth_desc = sized_str(&format!("Depth of the recursive fetch of object properties. This value should be lower when there are a lot of files per directory and higher when there are a lot of subdirectories with fewer files. (Default: {})", clone::DEPTH));
|
||||||
|
Command::new("clone")
|
||||||
|
.arg(
|
||||||
|
Arg::new("remote")
|
||||||
|
.required(true)
|
||||||
|
.num_args(1)
|
||||||
|
.value_name("REMOTE")
|
||||||
|
//.help(_desc)
|
||||||
|
)
|
||||||
|
.arg(
|
||||||
|
Arg::new("depth")
|
||||||
|
.short('d')
|
||||||
|
.long("depth")
|
||||||
|
.required(false)
|
||||||
|
.num_args(1)
|
||||||
|
//.help(&depth_desc)
|
||||||
|
)
|
||||||
|
.arg(
|
||||||
|
Arg::new("directory")
|
||||||
|
.required(false)
|
||||||
|
.num_args(1)
|
||||||
|
.value_name("DIRECTORY")
|
||||||
|
)
|
||||||
|
.about("Clone a repository into a new directory")
|
||||||
|
.after_help("NEXTSYNC URLS\nThe following syntaxes may be used:\n\t- user@host.xz/path/to/repo\n\t- http[s]://host.xz/apps/files/?dir=/path/to/repo&fileid=111111\n\t- [http[s]://]host.xz/remote.php/dav/files/user/path/to/repo\n")
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn handler(args: &ArgMatches) {
|
||||||
|
if let Some(val) = args.get_one::<String>("directory") {
|
||||||
|
global::global::set_dir_path(String::from(val.to_string()));
|
||||||
|
}
|
||||||
|
if let Some(remote) = args.get_one::<String>("remote") {
|
||||||
|
commands::clone::clone(CloneArgs {
|
||||||
|
remote: remote.to_string(),
|
||||||
|
depth: args.get_one::<String>("depth").cloned(),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
}
|
||||||
48
src/subcommands/config.rs
Normal file
48
src/subcommands/config.rs
Normal file
@@ -0,0 +1,48 @@
|
|||||||
|
use clap::{Arg, Command, ArgMatches};
|
||||||
|
use crate::commands::config::ConfigSetArgs;
|
||||||
|
|
||||||
|
use crate::commands;
|
||||||
|
|
||||||
|
pub fn create() -> Command {
|
||||||
|
Command::new("config")
|
||||||
|
.about("Get and set repository or global options")
|
||||||
|
.subcommand(
|
||||||
|
Command::new("get")
|
||||||
|
.about("Get the value of a configuration variable")
|
||||||
|
.arg(
|
||||||
|
Arg::new("name")
|
||||||
|
.help("The name of the configuration variable")
|
||||||
|
.required(true)
|
||||||
|
.index(1)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
.subcommand(
|
||||||
|
Command::new("set")
|
||||||
|
.about("Set a configuration variable")
|
||||||
|
.arg(
|
||||||
|
Arg::new("name")
|
||||||
|
.help("The name of the configuration variable")
|
||||||
|
.required(true)
|
||||||
|
.index(1)
|
||||||
|
)
|
||||||
|
.arg(
|
||||||
|
Arg::new("value")
|
||||||
|
.help("The value to set")
|
||||||
|
.required(true)
|
||||||
|
.index(2)
|
||||||
|
)
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn handler(args: &ArgMatches) {
|
||||||
|
|
||||||
|
match args.subcommand() {
|
||||||
|
Some(("set", set_matches)) => {
|
||||||
|
commands::config::config_set(ConfigSetArgs {
|
||||||
|
name: set_matches.get_one::<String>("name").unwrap().to_string(),
|
||||||
|
value: set_matches.get_one::<String>("value").unwrap().to_string(),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
_ => println!("Invalid or missing subcommand for 'config'"),
|
||||||
|
}
|
||||||
|
}
|
||||||
39
src/subcommands/credential.rs
Normal file
39
src/subcommands/credential.rs
Normal file
@@ -0,0 +1,39 @@
|
|||||||
|
use clap::{Arg, Command, ArgMatches};
|
||||||
|
|
||||||
|
use crate::commands;
|
||||||
|
use crate::commands::credential::CredentialArgs;
|
||||||
|
|
||||||
|
pub fn create() -> Command {
|
||||||
|
Command::new("credential")
|
||||||
|
.about("Manage set of credentials")
|
||||||
|
.subcommand(
|
||||||
|
Command::new("add")
|
||||||
|
.arg(
|
||||||
|
Arg::new("username")
|
||||||
|
.required(true)
|
||||||
|
.num_args(1)
|
||||||
|
.value_name("NAME")
|
||||||
|
.help("The username used to connect to nextcloud"),
|
||||||
|
)
|
||||||
|
.arg(
|
||||||
|
Arg::new("password")
|
||||||
|
.required(false)
|
||||||
|
.num_args(1)
|
||||||
|
.value_name("PASSWORD")
|
||||||
|
.help("The passowd used to connect to nextcloud (optional)"),
|
||||||
|
)
|
||||||
|
.about("Add a new set of credential")
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn handler(args: &ArgMatches) {
|
||||||
|
match args.subcommand() {
|
||||||
|
Some(("add", add_matches)) => {
|
||||||
|
commands::credential::credential_add(CredentialArgs {
|
||||||
|
username: add_matches.get_one::<String>("username").unwrap().to_string(),
|
||||||
|
password: add_matches.get_one::<String>("password").cloned(),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
_ => println!("Invalid or missing subcommand for 'credential'"),
|
||||||
|
}
|
||||||
|
}
|
||||||
23
src/subcommands/init.rs
Normal file
23
src/subcommands/init.rs
Normal file
@@ -0,0 +1,23 @@
|
|||||||
|
use clap::{Arg, Command, ArgMatches};
|
||||||
|
|
||||||
|
use crate::global;
|
||||||
|
use crate::commands;
|
||||||
|
|
||||||
|
pub fn create() -> Command {
|
||||||
|
Command::new("init")
|
||||||
|
.arg(
|
||||||
|
Arg::new("directory")
|
||||||
|
.required(false)
|
||||||
|
.num_args(1)
|
||||||
|
.value_name("DIRECTORY")
|
||||||
|
)
|
||||||
|
.about("Create an empty Nextsync repository")
|
||||||
|
// Create an empty nextsync repository or reinitialize an existing one
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn handler(args: &ArgMatches) {
|
||||||
|
if let Some(val) = args.get_one::<String>("directory") {
|
||||||
|
global::global::set_dir_path(val.to_string());
|
||||||
|
}
|
||||||
|
commands::init::init();
|
||||||
|
}
|
||||||
23
src/subcommands/pull.rs
Normal file
23
src/subcommands/pull.rs
Normal file
@@ -0,0 +1,23 @@
|
|||||||
|
use clap::{Arg, Command, ArgMatches};
|
||||||
|
|
||||||
|
use crate::global;
|
||||||
|
use crate::commands;
|
||||||
|
|
||||||
|
pub fn create() -> Command {
|
||||||
|
Command::new("pull")
|
||||||
|
.arg(
|
||||||
|
Arg::new("path")
|
||||||
|
.required(false)
|
||||||
|
.num_args(1)
|
||||||
|
.value_name("PATH")
|
||||||
|
.help("The path to pull."),
|
||||||
|
)
|
||||||
|
.about("Fetch and integrate changes from the nextcloud server.")
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn handler(args: &ArgMatches) {
|
||||||
|
if let Some(val) = args.get_one::<String>("path") {
|
||||||
|
global::global::set_dir_path(val.to_string());
|
||||||
|
}
|
||||||
|
commands::pull::pull();
|
||||||
|
}
|
||||||
6
src/subcommands/push.rs
Normal file
6
src/subcommands/push.rs
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
use clap::Command;
|
||||||
|
|
||||||
|
pub fn create() -> Command {
|
||||||
|
Command::new("push")
|
||||||
|
.about("Push changes on nextcloud")
|
||||||
|
}
|
||||||
45
src/subcommands/remote.rs
Normal file
45
src/subcommands/remote.rs
Normal file
@@ -0,0 +1,45 @@
|
|||||||
|
use clap::{Arg, Command, ArgMatches};
|
||||||
|
|
||||||
|
use crate::commands;
|
||||||
|
use crate::commands::remote::RemoteArgs;
|
||||||
|
|
||||||
|
pub fn create() -> Command {
|
||||||
|
Command::new("remote")
|
||||||
|
.about("Manage set of tracked repositories")
|
||||||
|
.subcommand(
|
||||||
|
Command::new("add")
|
||||||
|
.arg(
|
||||||
|
Arg::new("name")
|
||||||
|
.required(true)
|
||||||
|
.index(1)
|
||||||
|
.help("The name of the remote"),
|
||||||
|
)
|
||||||
|
.arg(
|
||||||
|
Arg::new("url")
|
||||||
|
.required(true)
|
||||||
|
.index(2)
|
||||||
|
.help("The url of the remote"),
|
||||||
|
)
|
||||||
|
.about("Add a new remote to this repository")
|
||||||
|
)
|
||||||
|
.arg(
|
||||||
|
Arg::new("verbose")
|
||||||
|
.short('v')
|
||||||
|
.long("verbose")
|
||||||
|
.help("Be a little more verbose and show remote url after name.")
|
||||||
|
)
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn handler(args: &ArgMatches) {
|
||||||
|
match args.subcommand() {
|
||||||
|
Some(("add", add_matches)) => {
|
||||||
|
commands::remote::remote_add(RemoteArgs {
|
||||||
|
name: add_matches.get_one::<String>("name").unwrap().to_string(),
|
||||||
|
url: add_matches.get_one::<String>("url").unwrap().to_string(),
|
||||||
|
});
|
||||||
|
}
|
||||||
|
_ => {
|
||||||
|
commands::remote::remote_list(args.contains_id("verbose"));
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
24
src/subcommands/remote_diff.rs
Normal file
24
src/subcommands/remote_diff.rs
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
use clap::{Arg, Command, ArgMatches};
|
||||||
|
|
||||||
|
use crate::global;
|
||||||
|
use crate::commands;
|
||||||
|
|
||||||
|
pub fn create() -> Command {
|
||||||
|
Command::new("remote-diff")
|
||||||
|
.arg(
|
||||||
|
Arg::new("path")
|
||||||
|
.required(false)
|
||||||
|
.num_args(1)
|
||||||
|
.value_name("PATH")
|
||||||
|
.help("The path to pull."),
|
||||||
|
)
|
||||||
|
.about("Fetch changes from the nextcloud server.")
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
pub fn handler(args: &ArgMatches) {
|
||||||
|
if let Some(val) = args.get_one::<String>("path") {
|
||||||
|
global::global::set_dir_path(val.to_string());
|
||||||
|
}
|
||||||
|
commands::remote_diff::remote_diff();
|
||||||
|
}
|
||||||
6
src/subcommands/reset.rs
Normal file
6
src/subcommands/reset.rs
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
use clap::Command;
|
||||||
|
|
||||||
|
pub fn create() -> Command {
|
||||||
|
Command::new("reset")
|
||||||
|
.about("Clear the index")
|
||||||
|
}
|
||||||
30
src/subcommands/status.rs
Normal file
30
src/subcommands/status.rs
Normal file
@@ -0,0 +1,30 @@
|
|||||||
|
use clap::{Arg, Command, ArgMatches};
|
||||||
|
|
||||||
|
use crate::global;
|
||||||
|
use crate::commands;
|
||||||
|
use crate::commands::status::StatusArgs;
|
||||||
|
|
||||||
|
pub fn create() -> Command {
|
||||||
|
Command::new("status")
|
||||||
|
.arg(
|
||||||
|
Arg::new("directory")
|
||||||
|
.num_args(1)
|
||||||
|
.value_name("DIRECTORY")
|
||||||
|
)
|
||||||
|
.arg(
|
||||||
|
Arg::new("nostyle")
|
||||||
|
.long("nostyle")
|
||||||
|
.help("Status with minium information and style"),
|
||||||
|
)
|
||||||
|
.about("Show the working tree status")
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn handler(args: &ArgMatches) {
|
||||||
|
if let Some(val) = args.get_one::<String>("directory") {
|
||||||
|
global::global::set_dir_path(val.to_string());
|
||||||
|
}
|
||||||
|
|
||||||
|
commands::status::status(StatusArgs {
|
||||||
|
nostyle: args.contains_id("nostyle"),
|
||||||
|
});
|
||||||
|
}
|
||||||
@@ -3,3 +3,5 @@ pub mod read;
|
|||||||
pub mod nextsyncignore;
|
pub mod nextsyncignore;
|
||||||
pub mod api;
|
pub mod api;
|
||||||
pub mod time;
|
pub mod time;
|
||||||
|
pub mod remote;
|
||||||
|
pub mod into;
|
||||||
|
|||||||
@@ -1,3 +1,5 @@
|
|||||||
|
use crate::commands::{clone::get_url_props, config};
|
||||||
|
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
pub struct ApiProps {
|
pub struct ApiProps {
|
||||||
pub host: String, // nextcloud.example.com
|
pub host: String, // nextcloud.example.com
|
||||||
@@ -15,11 +17,30 @@ impl Clone for ApiProps {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn get_api_props() -> ApiProps {
|
||||||
|
let remote = match config::get_remote("origin") {
|
||||||
|
Some(r) => r,
|
||||||
|
None => {
|
||||||
|
eprintln!("fatal: unable to find a remote");
|
||||||
|
std::process::exit(1);
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
let (host, username, root) = get_url_props(&remote);
|
||||||
|
ApiProps {
|
||||||
|
host,
|
||||||
|
username: username.unwrap().to_owned(),
|
||||||
|
root: root.to_owned(),
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
pub fn get_relative_s(p: String, api_props: &ApiProps) -> String {
|
pub fn get_relative_s(p: String, api_props: &ApiProps) -> String {
|
||||||
let mut final_p = p.clone();
|
let mut final_p = p.clone();
|
||||||
final_p = final_p.strip_prefix("/remote.php/dav/files/").unwrap().to_string();
|
final_p = final_p.strip_prefix("/remote.php/dav/files/").unwrap().to_string();
|
||||||
final_p = final_p.strip_prefix(&api_props.username).unwrap().to_string();
|
final_p = final_p.strip_prefix(&api_props.username).unwrap().to_string();
|
||||||
final_p = final_p.strip_prefix(&api_props.root).unwrap().to_string();
|
final_p = final_p.strip_prefix(&api_props.root).unwrap().to_string();
|
||||||
|
if final_p.starts_with("/") {
|
||||||
final_p = final_p.strip_prefix("/").unwrap().to_string();
|
final_p = final_p.strip_prefix("/").unwrap().to_string();
|
||||||
|
}
|
||||||
final_p
|
final_p
|
||||||
}
|
}
|
||||||
|
|||||||
24
src/utils/into.rs
Normal file
24
src/utils/into.rs
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
use std::path::{PathBuf, Path};
|
||||||
|
|
||||||
|
pub trait IntoPathBuf {
|
||||||
|
fn into(self) -> PathBuf;
|
||||||
|
}
|
||||||
|
|
||||||
|
impl IntoPathBuf for PathBuf {
|
||||||
|
fn into(self) -> PathBuf {
|
||||||
|
self
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl IntoPathBuf for &Path {
|
||||||
|
fn into(self) -> PathBuf {
|
||||||
|
PathBuf::from(self)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
impl IntoPathBuf for String {
|
||||||
|
fn into(self) -> PathBuf {
|
||||||
|
PathBuf::from(self)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
@@ -23,6 +23,13 @@ pub fn read_lines() -> Result<Vec<String>, ()> {
|
|||||||
Ok(vec![])
|
Ok(vec![])
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn get_rules() -> Vec<String> {
|
||||||
|
match read_lines() {
|
||||||
|
Ok(r) => r,
|
||||||
|
Err(_) => vec![],
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
pub fn _ignore_files(files: &mut Vec<String>) -> (bool, Vec<String>) {
|
pub fn _ignore_files(files: &mut Vec<String>) -> (bool, Vec<String>) {
|
||||||
let mut ignored_f = vec![];
|
let mut ignored_f = vec![];
|
||||||
if let Ok(lines) = read_lines() {
|
if let Ok(lines) = read_lines() {
|
||||||
@@ -80,6 +87,7 @@ pub fn ignore_file(path: &String, lines: Vec<String>, ignored_f: &mut Vec<String
|
|||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
mod tests {
|
mod tests {
|
||||||
use super::*;
|
use super::*;
|
||||||
|
use std::io::Cursor;
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn test_ignore_files() {
|
fn test_ignore_files() {
|
||||||
|
|||||||
@@ -1,8 +1,61 @@
|
|||||||
use std::env;
|
use std::env;
|
||||||
use std::fs::canonicalize;
|
use std::fs::canonicalize;
|
||||||
use std::path::{PathBuf, Path};
|
use std::path::{PathBuf, Path, Component};
|
||||||
|
|
||||||
use crate::global::global::DIR_PATH;
|
use crate::global::global::DIR_PATH;
|
||||||
|
|
||||||
|
/// Improve the path to try remove and solve .. token.
|
||||||
|
/// Taken from https://stackoverflow.com/questions/68231306/stdfscanonicalize-for-files-that-dont-exist
|
||||||
|
///
|
||||||
|
/// This assumes that `a/b/../c` is `a/c` which might be different from
|
||||||
|
/// what the OS would have chosen when b is a link. This is OK
|
||||||
|
/// for broot verb arguments but can't be generally used elsewhere
|
||||||
|
///
|
||||||
|
/// This function ensures a given path ending with '/' still
|
||||||
|
/// ends with '/' after normalization.
|
||||||
|
pub fn normalize_path<P: AsRef<Path>>(path: P) -> PathBuf {
|
||||||
|
let ends_with_slash = path.as_ref()
|
||||||
|
.to_str()
|
||||||
|
.map_or(false, |s| s.ends_with('/'));
|
||||||
|
let mut normalized = PathBuf::new();
|
||||||
|
for component in path.as_ref().components() {
|
||||||
|
match &component {
|
||||||
|
Component::ParentDir => {
|
||||||
|
if !normalized.pop() {
|
||||||
|
normalized.push(component);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
_ => {
|
||||||
|
normalized.push(component);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
if ends_with_slash {
|
||||||
|
normalized.push("");
|
||||||
|
}
|
||||||
|
normalized
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn normalize_relative(file: &str) -> Result<String, String> {
|
||||||
|
let current = match current() {
|
||||||
|
Some(p) => p,
|
||||||
|
None => {
|
||||||
|
return Err("cannot find current location".to_owned());
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
let p = {
|
||||||
|
let tmp_p = current.join(PathBuf::from(file));
|
||||||
|
normalize_path(tmp_p)
|
||||||
|
};
|
||||||
|
|
||||||
|
let relative_p = match p.strip_prefix(repo_root()) {
|
||||||
|
Ok(p) => p,
|
||||||
|
Err(_) => return Err("is not in a nextsync repo or doesn't exist".to_owned()),
|
||||||
|
};
|
||||||
|
Ok(relative_p.to_str().unwrap().to_owned())
|
||||||
|
}
|
||||||
|
|
||||||
pub fn current() -> Option<PathBuf> {
|
pub fn current() -> Option<PathBuf> {
|
||||||
let d = DIR_PATH.lock().unwrap();
|
let d = DIR_PATH.lock().unwrap();
|
||||||
|
|
||||||
@@ -23,7 +76,7 @@ pub fn current() -> Option<PathBuf> {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn nextsync_root() -> Option<PathBuf> {
|
pub fn repo_root_without_err() -> Option<PathBuf> {
|
||||||
let mut path = current()?;
|
let mut path = current()?;
|
||||||
|
|
||||||
let root = loop {
|
let root = loop {
|
||||||
@@ -41,32 +94,61 @@ pub fn nextsync_root() -> Option<PathBuf> {
|
|||||||
root
|
root
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn repo_root() -> PathBuf {
|
||||||
pub fn nextsync() -> Option<PathBuf> {
|
match repo_root_without_err() {
|
||||||
if let Some(mut path) = nextsync_root() {
|
Some(p) => p,
|
||||||
path.push(".nextsync");
|
None => {
|
||||||
return Some(path);
|
eprintln!("fatal: not a nextsync repository (or any of the parent directories): .nextsync");
|
||||||
|
std::process::exit(1);
|
||||||
|
}
|
||||||
}
|
}
|
||||||
None
|
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn objects() -> Option<PathBuf> {
|
pub fn is_nextsync_config(path: PathBuf) -> bool {
|
||||||
if let Some(mut path) = nextsync_root() {
|
path.ends_with(".nextsync") || path.starts_with(".nextsync")
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn nextsync() -> PathBuf {
|
||||||
|
let mut path = repo_root();
|
||||||
|
path.push(".nextsync");
|
||||||
|
path
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn config() -> PathBuf {
|
||||||
|
let mut path = repo_root();
|
||||||
|
path.push(".nextsync");
|
||||||
|
path.push("config");
|
||||||
|
path
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn objects() -> PathBuf {
|
||||||
|
let mut path = repo_root();
|
||||||
path.push(".nextsync");
|
path.push(".nextsync");
|
||||||
path.push("objects");
|
path.push("objects");
|
||||||
return Some(path);
|
path
|
||||||
}
|
}
|
||||||
None
|
|
||||||
|
pub fn refs() -> PathBuf {
|
||||||
|
let mut path = repo_root();
|
||||||
|
path.push(".nextsync");
|
||||||
|
path.push("refs");
|
||||||
|
path
|
||||||
}
|
}
|
||||||
|
|
||||||
pub fn nextsyncignore() -> Option<PathBuf> {
|
pub fn nextsyncignore() -> Option<PathBuf> {
|
||||||
if let Some(mut path) = nextsync_root() {
|
let mut path = repo_root();
|
||||||
path.push(".nextsyncignore");
|
path.push(".nextsyncignore");
|
||||||
if path.exists() {
|
if path.exists() {
|
||||||
return Some(path);
|
Some(path)
|
||||||
} else {
|
} else {
|
||||||
return None;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
None
|
None
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn path_buf_to_string(p: PathBuf) -> String {
|
||||||
|
if let Some(str) = p.to_str() {
|
||||||
|
str.to_string()
|
||||||
|
} else {
|
||||||
|
String::new()
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
258
src/utils/remote.rs
Normal file
258
src/utils/remote.rs
Normal file
@@ -0,0 +1,258 @@
|
|||||||
|
use std::path::PathBuf;
|
||||||
|
use crate::{services::{req_props::ObjProps, api::ApiError}, store::object::{blob::Blob, Object}, commands::status::State};
|
||||||
|
use std::collections::HashMap;
|
||||||
|
|
||||||
|
use super::{path::{path_buf_to_string, self}, read};
|
||||||
|
|
||||||
|
pub struct EnumerateOptions {
|
||||||
|
pub depth: Option<String>,
|
||||||
|
pub relative_s: Option<String>,
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn enumerate_remote(
|
||||||
|
req: impl Fn(&str) -> Result<Vec<ObjProps>, ApiError>,
|
||||||
|
should_skip: Option<&dyn Fn(ObjProps) -> bool>,
|
||||||
|
options: EnumerateOptions
|
||||||
|
) -> (Vec<ObjProps>, Vec<ObjProps>) {
|
||||||
|
|
||||||
|
let mut folders: Vec<ObjProps> = vec![ObjProps::new()];
|
||||||
|
let mut all_folders: Vec<ObjProps> = vec![];
|
||||||
|
let mut deleted: Vec<PathBuf> = vec![];
|
||||||
|
let mut files: Vec<ObjProps> = vec![];
|
||||||
|
let mut objs_hashmap: HashMap<String, Vec<String>> = HashMap::new();
|
||||||
|
|
||||||
|
objs_hashmap.insert(
|
||||||
|
options.relative_s.clone().unwrap_or(String::new()),
|
||||||
|
Vec::new());
|
||||||
|
|
||||||
|
while folders.len() > 0 {
|
||||||
|
let folder = folders.pop().unwrap();
|
||||||
|
|
||||||
|
let relative_s = match folder.relative_s {
|
||||||
|
Some(relative_s) => relative_s,
|
||||||
|
None => options.relative_s.clone().unwrap_or(String::new())
|
||||||
|
};
|
||||||
|
|
||||||
|
// request folder content
|
||||||
|
let res = req(relative_s.as_str());
|
||||||
|
|
||||||
|
let objs = match res {
|
||||||
|
Ok(o) => o,
|
||||||
|
Err(ApiError::IncorrectRequest(err)) => {
|
||||||
|
eprintln!("fatal: {}", err.status());
|
||||||
|
std::process::exit(1);
|
||||||
|
},
|
||||||
|
Err(ApiError::EmptyError(_)) => {
|
||||||
|
eprintln!("Failed to get body");
|
||||||
|
vec![]
|
||||||
|
}
|
||||||
|
Err(ApiError::RequestError(err)) => {
|
||||||
|
eprintln!("fatal: {}", err);
|
||||||
|
std::process::exit(1);
|
||||||
|
},
|
||||||
|
Err(ApiError::Unexpected(_)) => todo!()
|
||||||
|
};
|
||||||
|
|
||||||
|
// separate folders and files in response
|
||||||
|
let d = options.depth.clone()
|
||||||
|
.unwrap_or("0".to_owned())
|
||||||
|
.parse::<u16>()
|
||||||
|
.unwrap();
|
||||||
|
|
||||||
|
// first element is not used as it is the fetched folder
|
||||||
|
if let Some(should_skip_fct) = should_skip.clone() {
|
||||||
|
iter_with_skip_fct(
|
||||||
|
objs,
|
||||||
|
d,
|
||||||
|
&mut files,
|
||||||
|
&mut folders,
|
||||||
|
should_skip_fct,
|
||||||
|
&mut objs_hashmap,
|
||||||
|
&mut all_folders);
|
||||||
|
|
||||||
|
// check for deletion only when folder are not empty
|
||||||
|
// as the folder's content may not have been fetched yet
|
||||||
|
for (key, children) in objs_hashmap.clone() {
|
||||||
|
if children.len() != 0 {
|
||||||
|
get_deleted(key.clone(), children, &mut deleted);
|
||||||
|
objs_hashmap.remove(&key);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
iter_without_skip_fct(
|
||||||
|
objs,
|
||||||
|
d,
|
||||||
|
&mut files,
|
||||||
|
&mut folders,
|
||||||
|
&mut all_folders);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// go through all folders not checked for deletion before
|
||||||
|
// as they were empty
|
||||||
|
if let Some(_) = should_skip.clone() {
|
||||||
|
for (key, children) in objs_hashmap.clone() {
|
||||||
|
get_deleted(key.clone(), children, &mut deleted);
|
||||||
|
objs_hashmap.remove(&key);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
(all_folders, files)
|
||||||
|
}
|
||||||
|
|
||||||
|
fn calc_depth(obj: &ObjProps) -> u16 {
|
||||||
|
let path = obj.relative_s.clone().unwrap_or(String::new());
|
||||||
|
path.split("/").count() as u16
|
||||||
|
}
|
||||||
|
|
||||||
|
fn iter_with_skip_fct(
|
||||||
|
objs: Vec<ObjProps>,
|
||||||
|
d: u16,
|
||||||
|
files: &mut Vec<ObjProps>,
|
||||||
|
folders: &mut Vec<ObjProps>,
|
||||||
|
should_skip: &dyn Fn(ObjProps) -> bool,
|
||||||
|
objs_hashmap: &mut HashMap<String, Vec<String>>,
|
||||||
|
all_folders: &mut Vec<ObjProps>) {
|
||||||
|
|
||||||
|
let mut iter = objs.iter();
|
||||||
|
let default_depth = calc_depth(iter.next().unwrap());
|
||||||
|
let mut skip_depth = 0;
|
||||||
|
|
||||||
|
for object in iter {
|
||||||
|
let current_depth = calc_depth(object);
|
||||||
|
|
||||||
|
if object.is_dir() {
|
||||||
|
// add folder to parent folder only if exists
|
||||||
|
let mut r_path = PathBuf::from(object.relative_s.clone().unwrap());
|
||||||
|
r_path.pop();
|
||||||
|
let r_ps = path_buf_to_string(r_path);
|
||||||
|
if let Some(values) = objs_hashmap.get_mut(&r_ps.clone()) {
|
||||||
|
values.push(object.relative_s.clone().unwrap());
|
||||||
|
}
|
||||||
|
|
||||||
|
// skip children of skiped folder
|
||||||
|
if skip_depth != 0 && skip_depth < current_depth {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
let should_skip = should_skip(object.clone());
|
||||||
|
if should_skip {
|
||||||
|
skip_depth = current_depth;
|
||||||
|
} else {
|
||||||
|
// if this folder is not skipped then we initialised its vector
|
||||||
|
let r_ps_dir = object.relative_s.clone().unwrap();
|
||||||
|
let mut r_ps_key = r_ps_dir.chars();
|
||||||
|
r_ps_key.next_back();
|
||||||
|
objs_hashmap.insert(r_ps_key.as_str().to_owned(), Vec::new());
|
||||||
|
|
||||||
|
skip_depth = 0;
|
||||||
|
all_folders.push(object.clone());
|
||||||
|
}
|
||||||
|
|
||||||
|
// should get content of this folder if it is not already in this reponse
|
||||||
|
if current_depth - default_depth == d && !should_skip {
|
||||||
|
folders.push(object.clone());
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// add file to parent folder only if exists
|
||||||
|
let mut r_path = PathBuf::from(object.relative_s.clone().unwrap());
|
||||||
|
r_path.pop();
|
||||||
|
let r_ps = path_buf_to_string(r_path);
|
||||||
|
if let Some(values) = objs_hashmap.get_mut(&r_ps.clone()) {
|
||||||
|
values.push(object.relative_s.clone().unwrap());
|
||||||
|
}
|
||||||
|
|
||||||
|
// skip children of skiped folder
|
||||||
|
if skip_depth != 0 && skip_depth < current_depth {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
if !should_skip(object.clone()) {
|
||||||
|
skip_depth = 0;
|
||||||
|
files.push(object.clone());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn iter_without_skip_fct(
|
||||||
|
objs: Vec<ObjProps>,
|
||||||
|
d: u16,
|
||||||
|
files: &mut Vec<ObjProps>,
|
||||||
|
folders: &mut Vec<ObjProps>,
|
||||||
|
all_folders: &mut Vec<ObjProps>) {
|
||||||
|
|
||||||
|
let mut iter = objs.iter();
|
||||||
|
let default_depth = calc_depth(iter.next().unwrap());
|
||||||
|
|
||||||
|
for object in iter {
|
||||||
|
if object.is_dir() {
|
||||||
|
// should get content of this folder if it is not already in this reponse
|
||||||
|
let current_depth = calc_depth(object);
|
||||||
|
if current_depth - default_depth == d {
|
||||||
|
folders.push(object.clone());
|
||||||
|
}
|
||||||
|
all_folders.push(object.clone());
|
||||||
|
} else {
|
||||||
|
files.push(object.clone());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_non_new_local_element(iter: &mut dyn Iterator<Item = &PathBuf>) -> Option<PathBuf> {
|
||||||
|
let mut el = iter.next();
|
||||||
|
while !el.is_none() && {
|
||||||
|
if el.unwrap().is_dir() {
|
||||||
|
// ignore newly created directory (not sync)
|
||||||
|
!Object::new(el.unwrap().clone().to_str().unwrap()).exists()
|
||||||
|
} else {
|
||||||
|
// ignore newly created file (not sync)
|
||||||
|
Blob::from_path(el.unwrap().clone()).status(&mut None) == State::New
|
||||||
|
}
|
||||||
|
} {
|
||||||
|
el = iter.next();
|
||||||
|
}
|
||||||
|
match el {
|
||||||
|
Some(e) => Some(e.to_owned()),
|
||||||
|
None => None
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn get_deleted(source: String, children: Vec<String>, deleted: &mut Vec<PathBuf>) {
|
||||||
|
let root = path::repo_root();
|
||||||
|
let abs_p = root.join(PathBuf::from(source.clone()));
|
||||||
|
|
||||||
|
let folder_read = read::read_folder(abs_p.clone());
|
||||||
|
if let Ok(mut local_objs) = folder_read {
|
||||||
|
// set path to be ref one not abs
|
||||||
|
local_objs.iter_mut().for_each(|e| {
|
||||||
|
*e = e.strip_prefix(path_buf_to_string(root.clone())).unwrap().to_path_buf();
|
||||||
|
});
|
||||||
|
|
||||||
|
let mut iter = local_objs.iter();
|
||||||
|
let mut local_element = get_non_new_local_element(&mut iter);
|
||||||
|
|
||||||
|
while let Some(local) = local_element {
|
||||||
|
if let None = children.iter().position(|child| {
|
||||||
|
let child_compared = {
|
||||||
|
// remove traling / of directory
|
||||||
|
if child.ends_with("/") {
|
||||||
|
let t = child.clone();
|
||||||
|
let mut ts = t.chars();
|
||||||
|
ts.next_back();
|
||||||
|
ts.as_str().to_owned()
|
||||||
|
} else {
|
||||||
|
child.clone()
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
|
child_compared == path_buf_to_string(local.clone())
|
||||||
|
}) {
|
||||||
|
deleted.push(local.clone());
|
||||||
|
}
|
||||||
|
local_element = get_non_new_local_element(&mut iter);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
97
tests/add.rs
Normal file
97
tests/add.rs
Normal file
@@ -0,0 +1,97 @@
|
|||||||
|
use std::str;
|
||||||
|
|
||||||
|
mod utils;
|
||||||
|
use utils::{utils::*, client::ClientTest};
|
||||||
|
|
||||||
|
fn line_should_contains(lines: &Vec<String>, nb: usize, str: &str) {
|
||||||
|
|
||||||
|
if lines[nb].find(str).is_none()
|
||||||
|
{
|
||||||
|
eprintln!("'{}' not found in '{}'", str, lines[nb]);
|
||||||
|
dbg!(lines);
|
||||||
|
}
|
||||||
|
|
||||||
|
assert!(lines[nb].find(str).is_some());
|
||||||
|
}
|
||||||
|
|
||||||
|
fn lines_should_not_contains(lines: Vec<String>, str: &str) {
|
||||||
|
|
||||||
|
for line in lines {
|
||||||
|
if line.find("Changes not staged for push").is_some() {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
if line.find(str).is_some() {
|
||||||
|
eprintln!("'{}' found in '{}'", str, line);
|
||||||
|
}
|
||||||
|
assert!(line.find(str).is_none());
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
fn collect_status_lines(client: &mut ClientTest) -> Vec<String> {
|
||||||
|
let out = client.run_cmd("status");
|
||||||
|
|
||||||
|
str::from_utf8(&out.stdout)
|
||||||
|
.unwrap()
|
||||||
|
.split("\n")
|
||||||
|
.map(|s| s.to_owned())
|
||||||
|
.collect()
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod add_tests {
|
||||||
|
use super::*;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn simple_add() {
|
||||||
|
let id = get_random_test_id();
|
||||||
|
let mut client = ClientTest::new(id).init();
|
||||||
|
|
||||||
|
let _ = client.add_file("file1", "foo");
|
||||||
|
client.run_cmd_ok("add file1");
|
||||||
|
|
||||||
|
let lines = collect_status_lines(&mut client);
|
||||||
|
|
||||||
|
// test
|
||||||
|
line_should_contains(&lines, 2, "file1");
|
||||||
|
|
||||||
|
client.clean();
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn add_config_file() {
|
||||||
|
let id = get_random_test_id();
|
||||||
|
let mut client = ClientTest::new(id).init();
|
||||||
|
|
||||||
|
let _ = client.add_file("file1", "foo");
|
||||||
|
client.run_cmd_ok("add .nextsync -f");
|
||||||
|
|
||||||
|
let lines = collect_status_lines(&mut client);
|
||||||
|
|
||||||
|
// test
|
||||||
|
lines_should_not_contains(lines, ".nextsync");
|
||||||
|
|
||||||
|
client.clean();
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn add_dir_implicit() {
|
||||||
|
let id = get_random_test_id();
|
||||||
|
let mut client = ClientTest::new(id).init();
|
||||||
|
|
||||||
|
let _ = client.add_dir("dir");
|
||||||
|
let _ = client.add_file("dir/file1", "foo");
|
||||||
|
|
||||||
|
// adding the file should add the dir
|
||||||
|
client.run_cmd_ok("add dir/file1");
|
||||||
|
|
||||||
|
let lines = collect_status_lines(&mut client);
|
||||||
|
|
||||||
|
// tests
|
||||||
|
line_should_contains(&lines, 2, "dir");
|
||||||
|
line_should_contains(&lines, 3, "dir/file1");
|
||||||
|
|
||||||
|
client.clean();
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
44
tests/pull.rs
Normal file
44
tests/pull.rs
Normal file
@@ -0,0 +1,44 @@
|
|||||||
|
mod utils;
|
||||||
|
use utils::{utils::*, server::ServerTest, client::ClientTest};
|
||||||
|
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod pull_tests {
|
||||||
|
use super::*;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn simple_pull() {
|
||||||
|
let id = get_random_test_id();
|
||||||
|
let mut server = ServerTest::new(id.clone());
|
||||||
|
server.init();
|
||||||
|
let mut client = ClientTest::new(id).init();
|
||||||
|
|
||||||
|
let _ = server.add_file("file1", "foo");
|
||||||
|
client.run_cmd_ok("pull");
|
||||||
|
|
||||||
|
// tests
|
||||||
|
assert!(client.has_file("file1", "foo"));
|
||||||
|
|
||||||
|
client.clean();
|
||||||
|
server.clean();
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn simple_pull_directory() {
|
||||||
|
let id = get_random_test_id();
|
||||||
|
let mut server = ServerTest::new(id.clone());
|
||||||
|
server.init();
|
||||||
|
let mut client = ClientTest::new(id).init();
|
||||||
|
|
||||||
|
let _ = server.add_dir("dir");
|
||||||
|
let _ = server.add_file("dir/file1", "foo");
|
||||||
|
|
||||||
|
client.run_cmd_ok("pull");
|
||||||
|
|
||||||
|
// tests
|
||||||
|
assert!(client.has_file("dir/file1", "foo"));
|
||||||
|
|
||||||
|
client.clean();
|
||||||
|
server.clean();
|
||||||
|
}
|
||||||
|
}
|
||||||
164
tests/push.rs
Normal file
164
tests/push.rs
Normal file
@@ -0,0 +1,164 @@
|
|||||||
|
|
||||||
|
mod utils;
|
||||||
|
use utils::{utils::*, status_utils::*, server::ServerTest, client::ClientTest};
|
||||||
|
|
||||||
|
fn init_test() -> (ClientTest, ServerTest) {
|
||||||
|
let id = get_random_test_id();
|
||||||
|
let mut server = ServerTest::new(id.clone());
|
||||||
|
server.init();
|
||||||
|
let client = ClientTest::new(id).init();
|
||||||
|
(client, server)
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
mod push_tests {
|
||||||
|
use super::*;
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn simple_push() {
|
||||||
|
let (mut client, mut server) = init_test();
|
||||||
|
|
||||||
|
let _ = client.add_file("file1", "foo");
|
||||||
|
client.run_cmd_ok("add file1");
|
||||||
|
client.run_cmd_ok("push");
|
||||||
|
|
||||||
|
// tests
|
||||||
|
assert!(server.has_file("file1", "foo"));
|
||||||
|
let (staged, not_staged) = client.get_status();
|
||||||
|
lines_should_not_contains(staged, "file1");
|
||||||
|
lines_should_not_contains(not_staged, "file1");
|
||||||
|
|
||||||
|
client.clean();
|
||||||
|
server.clean();
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn push_update() {
|
||||||
|
let (mut client, mut server) = init_test();
|
||||||
|
|
||||||
|
// init content of file1
|
||||||
|
let _ = client.add_file("file1", "foo");
|
||||||
|
client.run_cmd_ok("add file1");
|
||||||
|
client.run_cmd_ok("push");
|
||||||
|
|
||||||
|
// tests
|
||||||
|
assert!(server.has_file("file1", "foo"));
|
||||||
|
|
||||||
|
let (staged, not_staged) = client.get_status();
|
||||||
|
lines_should_not_contains(staged, "file1");
|
||||||
|
lines_should_not_contains(not_staged, "file1");
|
||||||
|
|
||||||
|
// change content of file1
|
||||||
|
let _ = client.add_file("file1", "bar");
|
||||||
|
client.run_cmd_ok("add file1");
|
||||||
|
client.run_cmd_ok("push");
|
||||||
|
|
||||||
|
// tests
|
||||||
|
assert!(server.has_file("file1", "bar"));
|
||||||
|
let (staged, not_staged) = client.get_status();
|
||||||
|
lines_should_not_contains(staged, "file1");
|
||||||
|
lines_should_not_contains(not_staged, "file1");
|
||||||
|
|
||||||
|
|
||||||
|
client.clean();
|
||||||
|
server.clean();
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn push_dir_explicit() {
|
||||||
|
let (mut client, mut server) = init_test();
|
||||||
|
|
||||||
|
let _ = client.add_dir("dir");
|
||||||
|
let _ = client.add_file("dir/file2", "bar");
|
||||||
|
|
||||||
|
// push dir and file2
|
||||||
|
client.run_cmd_ok("add dir");
|
||||||
|
client.run_cmd_ok("push");
|
||||||
|
|
||||||
|
// tests
|
||||||
|
assert!(server.has_file("dir/file2", "bar"));
|
||||||
|
let (staged, not_staged) = client.get_status();
|
||||||
|
lines_should_not_contains(staged.clone(), "file2");
|
||||||
|
lines_should_not_contains(staged, "foo");
|
||||||
|
lines_should_not_contains(not_staged.clone(), "file2");
|
||||||
|
lines_should_not_contains(not_staged, "foo");
|
||||||
|
|
||||||
|
client.clean();
|
||||||
|
server.clean();
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn push_dir_implicit() {
|
||||||
|
let (mut client, mut server) = init_test();
|
||||||
|
|
||||||
|
let _ = client.add_dir("dir");
|
||||||
|
let _ = client.add_file("dir/file2", "bar");
|
||||||
|
|
||||||
|
// push dir and file2
|
||||||
|
client.run_cmd_ok("add dir/file2");
|
||||||
|
client.run_cmd_ok("push");
|
||||||
|
|
||||||
|
// tests
|
||||||
|
assert!(server.has_file("dir/file2", "bar"));
|
||||||
|
let (staged, not_staged) = client.get_status();
|
||||||
|
lines_should_not_contains(staged.clone(), "file2");
|
||||||
|
lines_should_not_contains(staged, "foo");
|
||||||
|
lines_should_not_contains(not_staged.clone(), "file2");
|
||||||
|
lines_should_not_contains(not_staged, "foo");
|
||||||
|
|
||||||
|
client.clean();
|
||||||
|
server.clean();
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn push_all() {
|
||||||
|
let (mut client, mut server) = init_test();
|
||||||
|
|
||||||
|
let _ = client.add_file("file1", "foo");
|
||||||
|
let _ = client.add_dir("dir");
|
||||||
|
let _ = client.add_file("dir/file2", "bar");
|
||||||
|
|
||||||
|
// push dir and file2
|
||||||
|
client.run_cmd_ok("add *");
|
||||||
|
client.run_cmd_ok("push");
|
||||||
|
|
||||||
|
// tests
|
||||||
|
assert!(server.has_file("file1", "foo"));
|
||||||
|
assert!(server.has_file("dir/file2", "bar"));
|
||||||
|
let (staged, not_staged) = client.get_status();
|
||||||
|
assert!(staged.len() == 0);
|
||||||
|
assert!(not_staged.len() == 0);
|
||||||
|
|
||||||
|
client.clean();
|
||||||
|
server.clean();
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn push_file_deletion() {
|
||||||
|
let (mut client, mut server) = init_test();
|
||||||
|
|
||||||
|
let _ = client.add_file("file1", "foo");
|
||||||
|
|
||||||
|
// push file1
|
||||||
|
client.run_cmd_ok("add file1");
|
||||||
|
client.run_cmd_ok("push");
|
||||||
|
|
||||||
|
// tests
|
||||||
|
assert!(server.has_file("file1", "foo"));
|
||||||
|
status_should_be_empty(&mut client);
|
||||||
|
|
||||||
|
// remove it
|
||||||
|
let _ = client.remove_file("file1");
|
||||||
|
client.run_cmd_ok("add file1");
|
||||||
|
dbg!(client.get_status());
|
||||||
|
client.run_cmd_ok("push");
|
||||||
|
|
||||||
|
// tests
|
||||||
|
assert!(server.has_not_file("file1"));
|
||||||
|
status_should_be_empty(&mut client);
|
||||||
|
|
||||||
|
client.clean();
|
||||||
|
server.clean();
|
||||||
|
}
|
||||||
|
}
|
||||||
14
tests/utils.rs
Normal file
14
tests/utils.rs
Normal file
@@ -0,0 +1,14 @@
|
|||||||
|
#[path = "utils/server.rs"]
|
||||||
|
pub mod server;
|
||||||
|
|
||||||
|
#[path = "utils/client.rs"]
|
||||||
|
pub mod client;
|
||||||
|
|
||||||
|
#[path = "utils/utils.rs"]
|
||||||
|
pub mod utils;
|
||||||
|
|
||||||
|
#[path = "utils/status_utils.rs"]
|
||||||
|
pub mod status_utils;
|
||||||
|
|
||||||
|
#[path = "utils/files_utils.rs"]
|
||||||
|
pub mod files_utils;
|
||||||
155
tests/utils/client.rs
Normal file
155
tests/utils/client.rs
Normal file
@@ -0,0 +1,155 @@
|
|||||||
|
use std::str;
|
||||||
|
use std::process::{Command, Output};
|
||||||
|
use std::fs::{self, File};
|
||||||
|
use std::io::Write;
|
||||||
|
use std::env;
|
||||||
|
use std::path::PathBuf;
|
||||||
|
|
||||||
|
use super::files_utils::has_files;
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
pub struct ClientTest {
|
||||||
|
user: String, // the nextcloud user
|
||||||
|
volume: String, // temp dir for the test
|
||||||
|
pub test_id: String, // name of the test (e.g nextsync_rand)
|
||||||
|
exe_path: PathBuf, // absolute path of nextsync executable
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
impl ClientTest {
|
||||||
|
pub fn new(id: String) -> Self {
|
||||||
|
// create a directory in /tmp with the given id
|
||||||
|
let mut vol = String::from("/tmp/");
|
||||||
|
vol.push_str(&id);
|
||||||
|
let _ = fs::create_dir(vol.clone());
|
||||||
|
|
||||||
|
// get nextsync path
|
||||||
|
let mut exe_path = env::current_dir().unwrap();
|
||||||
|
exe_path = exe_path.join("target/debug/nextsync");
|
||||||
|
|
||||||
|
// build the client
|
||||||
|
ClientTest {
|
||||||
|
user: String::from("admin"),
|
||||||
|
volume: vol,
|
||||||
|
test_id: id,
|
||||||
|
exe_path
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn init(mut self) -> Self {
|
||||||
|
self.run_cmd_ok("init");
|
||||||
|
|
||||||
|
// set remote url
|
||||||
|
let url = String::from(format!("{}@nextcloud.local/{}", self.user, self.test_id));
|
||||||
|
self.run_cmd_ok(&format!("remote add origin {}", url));
|
||||||
|
|
||||||
|
// set force_unsecure as debug server has not certificate
|
||||||
|
self.run_cmd_ok("config set force_insecure true");
|
||||||
|
|
||||||
|
// set token for request
|
||||||
|
self.run_cmd_ok(&format!("credential add {} {}", self.user, self.user));
|
||||||
|
self
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn clean(self) -> Self {
|
||||||
|
let _ = fs::remove_dir_all(&self.volume);
|
||||||
|
self
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn run_cmd_ok(&mut self, args: &str) -> Output {
|
||||||
|
let output = self.run_cmd(args);
|
||||||
|
if !output.status.success() {
|
||||||
|
println!("id: {}", self.test_id.clone());
|
||||||
|
println!("Failed to execute: '{}'", args);
|
||||||
|
println!("stderr: {}", String::from_utf8_lossy(&output.stderr));
|
||||||
|
println!("stdout: {}", String::from_utf8_lossy(&output.stdout));
|
||||||
|
}
|
||||||
|
assert!(output.status.success());
|
||||||
|
output
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn run_cmd(&mut self, args: &str) -> Output {
|
||||||
|
let output = Command::new(self.exe_path.to_str().unwrap())
|
||||||
|
.current_dir(self.volume.clone())
|
||||||
|
.args(args.split(" "))
|
||||||
|
.output()
|
||||||
|
.expect("Could not execute nextsync command");
|
||||||
|
return output;
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn add_dir(&mut self, name: &str) -> std::io::Result<()> {
|
||||||
|
let mut path = self.volume.clone();
|
||||||
|
path.push_str("/");
|
||||||
|
path.push_str(name);
|
||||||
|
let _ = fs::create_dir_all(path)?;
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn add_file(&mut self, name: &str, content: &str) -> std::io::Result<()> {
|
||||||
|
let mut path = self.volume.clone();
|
||||||
|
path.push_str("/");
|
||||||
|
path.push_str(name);
|
||||||
|
|
||||||
|
let mut file = File::create(path)?;
|
||||||
|
file.write_all(content.as_bytes())?;
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn remove_file(&mut self, name: &str) -> std::io::Result<()> {
|
||||||
|
let mut path = self.volume.clone();
|
||||||
|
path.push_str("/");
|
||||||
|
path.push_str(name);
|
||||||
|
|
||||||
|
fs::remove_file(name)?;
|
||||||
|
Ok(())
|
||||||
|
}
|
||||||
|
|
||||||
|
pub fn has_file(&mut self, file: &str, content: &str) -> bool {
|
||||||
|
let full_path = PathBuf::from(self.volume.clone()).join(file);
|
||||||
|
|
||||||
|
has_files(full_path, file, content, self.test_id.clone())
|
||||||
|
}
|
||||||
|
|
||||||
|
/// get the files given by the status command in two vector (staged and not staged)
|
||||||
|
pub fn get_status(&mut self) -> (Vec<String>, Vec<String>) {
|
||||||
|
let out = self.run_cmd("status");
|
||||||
|
|
||||||
|
let lines: Vec<String> = str::from_utf8(&out.stdout)
|
||||||
|
.unwrap()
|
||||||
|
.split("\n")
|
||||||
|
.map(|s| s.to_owned())
|
||||||
|
.collect();
|
||||||
|
|
||||||
|
let mut staged = vec![];
|
||||||
|
let mut not_staged = vec![];
|
||||||
|
let mut in_staged = true;
|
||||||
|
let mut counter = 0;
|
||||||
|
for line in lines {
|
||||||
|
if line.find("not staged").is_some() {
|
||||||
|
in_staged = false;
|
||||||
|
counter = 1;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
// skip two first line as there are not files
|
||||||
|
if counter < 2 {
|
||||||
|
counter += 1;
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
if line == String::from("") {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
if in_staged {
|
||||||
|
staged.push(line);
|
||||||
|
} else {
|
||||||
|
not_staged.push(line);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return (staged, not_staged);
|
||||||
|
}
|
||||||
|
}
|
||||||
38
tests/utils/files_utils.rs
Normal file
38
tests/utils/files_utils.rs
Normal file
@@ -0,0 +1,38 @@
|
|||||||
|
use std::io::{BufReader, BufRead};
|
||||||
|
use std::fs::File;
|
||||||
|
use std::path::PathBuf;
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
pub fn has_files(full_path: PathBuf, file: &str, content: &str, test_id: String) -> bool
|
||||||
|
{
|
||||||
|
if !full_path.exists() {
|
||||||
|
println!("id: {}", test_id.clone());
|
||||||
|
eprintln!("File '{}' doesn't exists", file);
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
let f = File::open(full_path).unwrap();
|
||||||
|
for line in BufReader::new(f).lines(){
|
||||||
|
if let Ok(line) = line {
|
||||||
|
if line != content {
|
||||||
|
println!("id: {}", test_id);
|
||||||
|
eprintln!("File '{}' is not equal, {} != {}", file, line, content);
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
return line == content;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
pub fn has_not_file(full_path: PathBuf, file: &str, test_id: String) -> bool
|
||||||
|
{
|
||||||
|
if full_path.exists() {
|
||||||
|
println!("id: {}", test_id.clone());
|
||||||
|
eprintln!("File '{}' exists but it shouldn't", file);
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
return true;
|
||||||
|
}
|
||||||
117
tests/utils/server.rs
Normal file
117
tests/utils/server.rs
Normal file
@@ -0,0 +1,117 @@
|
|||||||
|
use std::process::Command;
|
||||||
|
use std::os::unix::fs::PermissionsExt;
|
||||||
|
use std::fs::{self, File, Permissions};
|
||||||
|
use std::io::Write;
|
||||||
|
use std::env;
|
||||||
|
use std::path::PathBuf;
|
||||||
|
|
||||||
|
use super::files_utils::{self, has_files};
|
||||||
|
|
||||||
|
#[cfg(test)]
|
||||||
|
pub struct ServerTest {
|
||||||
|
user: String,
|
||||||
|
volume: PathBuf,
|
||||||
|
pub test_id: String
|
||||||
|
}
|
||||||
|
|
||||||
|
#[cfg(test)]
impl ServerTest {
    /// Builds a handle for the "admin" user whose volume points at the
    /// nextcloud data directory `tests/data/admin/files`, resolved relative
    /// to the current working directory.
    pub fn new(id: String) -> Self {
        let volume = env::current_dir().unwrap().join("tests/data/admin/files");

        ServerTest {
            user: String::from("admin"),
            volume,
            test_id: id
        }
    }

    /// Creates the per-test directory, re-roots `volume` inside it and asks
    /// nextcloud to rescan the user root so the new directory is registered.
    pub fn init(&mut self) -> &mut ServerTest {
        // Clone needed here: `add_dir` takes `&mut self`.
        let test_id = self.test_id.clone();
        self.add_dir(&test_id);
        self.volume = self.volume.join(&self.test_id);
        self.sync_root();
        self
    }

    /// Removes the per-test directory and rescans the user root.
    pub fn clean(&mut self) -> &mut ServerTest {
        // NOTE(review): if `init()` ran on this instance, `volume` already
        // ends in `test_id`, so this targets volume/<id>/<id> — confirm that
        // callers only `clean()` freshly-constructed handles.
        self.remove_dir(self.test_id.clone());
        self.sync_root();
        self
    }

    /// Creates `path` under the volume and rescans the test directory
    /// (unless `path` is the test directory itself, which is synced by
    /// `init` via a root scan instead).
    pub fn add_dir(&mut self, path: &str) -> &mut ServerTest {
        let full_path = self.volume.join(path);

        match fs::create_dir(&full_path) {
            Ok(_) => {
                // Set permissions to 777 to allow nextcloud to access it
                // (workaround avoiding to set group and owner to www-data)
                if let Err(e) = fs::set_permissions(&full_path, Permissions::from_mode(0o777)) {
                    eprintln!("Error setting permissions: {}", e);
                }
            },
            Err(e) => eprintln!("Error creating directory: {}", e),
        }

        // do not sync test directory when creating it
        if !path.ends_with("_nextsync") {
            self.sync_test();
        }
        self
    }

    /// Writes `content` to file `name` under the volume, then rescans the
    /// test directory so nextcloud notices the new file.
    pub fn add_file(&mut self, name: &str, content: &str) -> std::io::Result<()> {
        let full_path = self.volume.join(name);

        let mut file = File::create(full_path)?;
        file.write_all(content.as_bytes())?;
        self.sync_test();
        Ok(())
    }

    /// Recursively deletes `path` under the volume (removal errors are
    /// deliberately ignored: the directory may already be gone), then
    /// rescans the test directory.
    pub fn remove_dir(&mut self, path: String) -> &mut ServerTest {
        let full_path = self.volume.join(path);

        let _ = fs::remove_dir_all(&full_path);
        self.sync_test();
        self
    }

    /// Rescans the whole user root.
    fn sync_root(&self) -> &Self {
        self.sync("")
    }

    /// Rescans only this test's directory.
    fn sync_test(&self) -> &Self {
        // Both borrows are immutable, so no clone is required.
        self.sync(&self.test_id)
    }

    /// Performs the `occ files:scan` command inside the nextcloud docker
    /// container so the server registers files written straight into the
    /// volume. Output is intentionally discarded; a failure to spawn docker
    /// itself panics.
    fn sync(&self, path: &str) -> &Self {
        let nextcloud_docker = "master-nextcloud-1";
        // Pass arguments individually instead of splitting a formatted
        // string on spaces, so each argument stays intact.
        let scan_path = format!("--path=/{}/files/{}", self.user, path);

        let _output = Command::new("docker")
            .args(["exec", "-t", "--user", "www-data", nextcloud_docker,
                   "/var/www/html/occ", "files:scan", scan_path.as_str()])
            .output()
            .expect("Could not execute docker exec command");
        self
    }

    /// True when `file` exists under the volume with exactly `content`
    /// (delegates to `files_utils::has_files`).
    pub fn has_file(&mut self, file: &str, content: &str) -> bool {
        // `join` already returns a fresh PathBuf; cloning `volume` first
        // would be a redundant allocation.
        let full_path = self.volume.join(file);
        has_files(full_path, file, content, self.test_id.clone())
    }

    /// True when `file` does NOT exist under the volume.
    pub fn has_not_file(&mut self, file: &str) -> bool {
        let full_path = self.volume.join(file);
        files_utils::has_not_file(full_path, file, self.test_id.clone())
    }
}
|
||||||
|
|
||||||
27
tests/utils/status_utils.rs
Normal file
27
tests/utils/status_utils.rs
Normal file
@@ -0,0 +1,27 @@
|
|||||||
|
use super::client::ClientTest;
|
||||||
|
|
||||||
|
/// Asserts that no line in `lines` contains the substring `needle`.
/// The offending line is printed to stderr before the panic so the
/// failing test is easy to diagnose.
#[cfg(test)]
pub fn lines_should_not_contains(lines: Vec<String>, needle: &str) {
    for line in lines {
        // `contains` is the idiomatic form of `find(..).is_some()`.
        if line.contains(needle) {
            eprintln!("'{}' found in '{}'", needle, line);
        }
        assert!(!line.contains(needle));
    }
}
|
||||||
|
|
||||||
|
/// Asserts that the client reports no staged and no not-staged files.
/// Diagnostics are printed before each assertion so a failing run shows
/// which test id and which list was non-empty.
#[cfg(test)]
pub fn status_should_be_empty(client: &mut ClientTest) {
    let (staged, not_staged) = client.get_status();
    if !staged.is_empty() {
        eprintln!("id: {}", client.test_id);
        eprintln!("Staged should be empty but has '{}'", staged.len());
        assert!(staged.is_empty());
    }

    // BUGFIX: the original re-checked `staged` here, so a non-empty
    // not-staged list was silently accepted. Check `not_staged` instead.
    if !not_staged.is_empty() {
        eprintln!("id: {}", client.test_id);
        eprintln!("Not Staged should be empty but has '{}'", not_staged.len());
        assert!(not_staged.is_empty());
    }
}
|
||||||
12
tests/utils/utils.rs
Normal file
12
tests/utils/utils.rs
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
use rand::{distributions::Alphanumeric, Rng};
|
||||||
|
|
||||||
|
/// Generates a unique test id: 7 random alphanumeric characters with a
/// "_nextsync" suffix so test directories are recognizable (and skipped by
/// `ServerTest::add_dir`'s sync logic).
#[cfg(test)]
pub fn get_random_test_id() -> String {
    let mut id: String = rand::thread_rng()
        .sample_iter(&Alphanumeric)
        .take(7)
        .map(char::from)
        .collect();
    id.push_str("_nextsync");
    // `id` is already an owned String; `to_owned()` here would be a
    // pointless extra allocation.
    id
}
|
||||||
Reference in New Issue
Block a user