Commit: clang format 120 length
Repository: mirror of https://github.com/usatiuk/backup.git
@@ -34,7 +34,7 @@ BreakBeforeBinaryOperators: None
BreakBeforeTernaryOperators: true
BreakConstructorInitializers: BeforeColon
BreakInheritanceList: BeforeColon
ColumnLimit: 0
ColumnLimit: 120
CompactNamespaces: true
ContinuationIndentWidth: 8
IndentCaseLabels: true

@@ -34,9 +34,10 @@ struct ComparableFile {

const std::string path; ///< Relative path to the file
const File::Type type; ///< File type
const unsigned long long bytes; ///< Number of bytes in the file
const unsigned long long mtime; ///< Timestamp of last file modification
const std::function<std::unique_ptr<std::streambuf>()> contents;///< Function that returns a unique pointer to a std::streambuf instance linked to the contents of the file
const unsigned long long bytes;///< Number of bytes in the file
const unsigned long long mtime;///< Timestamp of last file modification
const std::function<std::unique_ptr<std::streambuf>()>
contents;///< Function that returns a unique pointer to a std::streambuf instance linked to the contents of the file
};

@@ -8,9 +8,8 @@

bool ChangeDetectorContainer::check(const ComparableFile &f1, const ComparableFile &f2) const {
return std::any_of(changeDetectors.begin(), changeDetectors.end(),
[&](const auto &changeDetector) {
return changeDetector->check(f1, f2);
});
[&](const auto &changeDetector) { return changeDetector->check(f1, f2); });
}

ChangeDetectorContainer::ChangeDetectorContainer(std::vector<std::unique_ptr<ChangeDetector>> &&changeDetectors) : changeDetectors(std::move(changeDetectors)) {}
ChangeDetectorContainer::ChangeDetectorContainer(std::vector<std::unique_ptr<ChangeDetector>> &&changeDetectors)
: changeDetectors(std::move(changeDetectors)) {}

@@ -12,22 +12,13 @@
#include "objects/FileBuffer.h"

ComparableFile::ComparableFile(const File &file, const Repository *repo)
: path(file.name),
type(file.fileType),
bytes(file.bytes),
mtime(file.mtime),
contents(
[file, repo]() {
return std::make_unique<FileBuffer>(repo, file.id);
}) {}
: path(file.name), type(file.fileType), bytes(file.bytes), mtime(file.mtime),
contents([file, repo]() { return std::make_unique<FileBuffer>(repo, file.id); }) {}

ComparableFile::ComparableFile(const std::filesystem::path &p, const std::filesystem::path &base)
: path(p.lexically_relative(base).u8string()),
type(File::getFileType(p)),
bytes(File::getFileSize(p)),
: path(p.lexically_relative(base).u8string()), type(File::getFileType(p)), bytes(File::getFileSize(p)),
mtime(File::getFileMtime(p)),
contents(
[p, path = this->path, type = this->type]() -> std::unique_ptr<std::streambuf> {
contents([p, path = this->path, type = this->type]() -> std::unique_ptr<std::streambuf> {
if (type == File::Type::Normal) {
auto fb = std::make_unique<std::filebuf>();
fb->open(p, std::ios::in | std::ios::binary);

@@ -12,8 +12,6 @@ bool ContentsChangeDetector::check(const ComparableFile &f1, const ComparableFil
auto b1 = f1.contents();
auto b2 = f2.contents();

return !std::equal(std::istreambuf_iterator<char>(b1.get()),
std::istreambuf_iterator<char>(),
std::istreambuf_iterator<char>(b2.get()),
std::istreambuf_iterator<char>());
return !std::equal(std::istreambuf_iterator<char>(b1.get()), std::istreambuf_iterator<char>(),
std::istreambuf_iterator<char>(b2.get()), std::istreambuf_iterator<char>());
}

@@ -4,6 +4,4 @@

#include "TypeChangeDetector.h"

bool TypeChangeDetector::check(const ComparableFile &f1, const ComparableFile &f2) const {
return f1.type != f2.type;
}
bool TypeChangeDetector::check(const ComparableFile &f1, const ComparableFile &f2) const { return f1.type != f2.type; }

@@ -31,8 +31,9 @@ public:

private:
uint32_t cur = 0; ///< Current hash value
const uint32_t blockSize; ///< Hashing window size
std::deque<uint32_t> history;///< Bytes used to calculate current hash, used to compute the hash in a rolling fashion (to remove the oldest byte from the hash when blockSize is reached)
const uint32_t blockSize;///< Hashing window size
std::deque<uint32_t>
history;///< Bytes used to calculate current hash, used to compute the hash in a rolling fashion (to remove the oldest byte from the hash when blockSize is reached)

// Circular shift taken from: https://en.wikipedia.org/wiki/Circular_shift
/// Shift \p value \p count bits to the right circularly

@@ -18,7 +18,8 @@ public:
/// \param minBytes Minimum amount of bytes in returned chunks
/// \param mask Amount of trailing zeroes in the rolling hash at which the file is cut (results in average chunk size of 2^mask bytes)
/// \param window Rolling hash window (how many of chunks last bytes are included in the hash, the default is recommended)
BuzhashChunker(std::streambuf *buf, unsigned long long minBytes, unsigned long long maxBytes, unsigned long long mask, uint32_t window = 4095);
BuzhashChunker(std::streambuf *buf, unsigned long long minBytes, unsigned long long maxBytes,
unsigned long long mask, uint32_t window = 4095);

/// \copydoc Chunker::getNext
std::pair<std::string, std::vector<char>> getNext() override;

@@ -6,9 +6,7 @@

Buzhash::Buzhash(uint32_t blockSize) : blockSize(blockSize), history() {}

uint32_t Buzhash::get() const {
return cur;
}
uint32_t Buzhash::get() const { return cur; }

uint32_t Buzhash::feed(uint8_t in) {
cur = rotr32(cur, 1);

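Aside (not part of the commit): rotr32, used in Buzhash::feed above, is the circular right shift documented in the Buzhash.h hunk, which cites the Wikipedia "Circular shift" page. The project's own implementation is not shown in this diff; a minimal standalone sketch of such a helper, equivalent to C++20's std::rotr, would be:

    #include <cstdint>

    // Circular right shift of a 32-bit value; equivalent to std::rotr(value, count) from <bit> in C++20.
    // Buzhash::feed rotates the running hash by one bit per input byte (cur = rotr32(cur, 1)).
    uint32_t rotr32(uint32_t value, unsigned count) {
        count &= 31u; // keep the shift amount in [0, 31]
        return (value >> count) | (value << ((32u - count) & 31u));
    }
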
@@ -7,7 +7,9 @@
#include "Exception.h"
#include "SHA.h"

BuzhashChunker::BuzhashChunker(std::streambuf *buf, unsigned long long minBytes, unsigned long long maxBytes, unsigned long long mask, uint32_t window) : Chunker(buf, maxBytes), window(window), minBytes(minBytes), mask(mask), buzhash(window) {}
BuzhashChunker::BuzhashChunker(std::streambuf *buf, unsigned long long minBytes, unsigned long long maxBytes,
unsigned long long mask, uint32_t window)
: Chunker(buf, maxBytes), window(window), minBytes(minBytes), mask(mask), buzhash(window) {}

std::pair<std::string, std::vector<char>> BuzhashChunker::getNext() {
if (eof) throw Exception("Trying to read from a file that is finished!");
@@ -21,9 +23,7 @@ std::pair<std::string, std::vector<char>> BuzhashChunker::getNext() {
return {SHA::calculate(rbuf), rbuf};
}

for (auto c: rbuf) {
buzhash.feed(static_cast<uint8_t>(c));
}
for (auto c: rbuf) { buzhash.feed(static_cast<uint8_t>(c)); }

// Continue reading the file until either the last mask bits are zero of we exceed the maxSize
while (((buzhash.get() & (~0UL >> (sizeof(unsigned long long) * 8 - mask))) != 0) && rbuf.size() < maxBytes) {

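Aside (illustrative only, not code from the repository): the while-condition rewrapped above keeps reading until the low `mask` bits of the rolling hash are all zero, which for a well-mixed hash happens on average once every 2^mask bytes, matching the \param mask note in the BuzhashChunker.h hunk. A standalone sketch of that boundary test:

    #include <cstdint>

    // True when the lowest `mask` bits of `hash` are all zero, i.e. the chunker may cut here.
    // Mirrors the expression in BuzhashChunker::getNext:
    //   (buzhash.get() & (~0UL >> (sizeof(unsigned long long) * 8 - mask))) == 0
    // Assumes 0 < mask < 64.
    bool chunkBoundary(uint32_t hash, unsigned mask) {
        const uint64_t lowBits = ~0ULL >> (64u - mask);
        return (hash & lowBits) == 0;
    }
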
@@ -8,44 +8,31 @@

Chunker::Chunker(std::streambuf *buf, unsigned long long maxBytes) : buf(buf), maxBytes(maxBytes) {}

bool Chunker::getEof() const {
return eof;
}
bool Chunker::getEof() const { return eof; }

Chunker::~Chunker() = default;

Chunker::ChunkerIterator Chunker::begin() {
return {this};
}
Chunker::ChunkerIterator Chunker::begin() { return {this}; }

Chunker::ChunkerIterator Chunker::end() {
return {nullptr};
}
Chunker::ChunkerIterator Chunker::end() { return {nullptr}; }

Chunker::ChunkerIterator &Chunker::ChunkerIterator::operator++() {
if (pastEOF) throw Exception("Trying to increment pastEOF ChunkerIterator!");
if (source->getEof())
pastEOF = true;
if (source->getEof()) pastEOF = true;
else
buf = source->getNext();
return *this;
}

bool Chunker::ChunkerIterator::operator!=(const Chunker::ChunkerIterator &rhs) const {
return pastEOF != rhs.pastEOF;
}
bool Chunker::ChunkerIterator::operator!=(const Chunker::ChunkerIterator &rhs) const { return pastEOF != rhs.pastEOF; }

Chunker::ChunkerIterator::value_type Chunker::ChunkerIterator::operator*() const {
if (pastEOF) throw Exception("Trying to dereference pastEOF ChunkerIterator!");
return buf.value();
}

bool Chunker::ChunkerIterator::operator==(const Chunker::ChunkerIterator &rhs) const {
return pastEOF == rhs.pastEOF;
}
bool Chunker::ChunkerIterator::operator==(const Chunker::ChunkerIterator &rhs) const { return pastEOF == rhs.pastEOF; }

Chunker::ChunkerIterator::ChunkerIterator(Chunker *source)
: source(source), pastEOF(source == nullptr) {
if (source)
operator++();
Chunker::ChunkerIterator::ChunkerIterator(Chunker *source) : source(source), pastEOF(source == nullptr) {
if (source) operator++();
}

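Aside (usage sketch, not from the repository): the begin()/end()/operator++ members compacted above make a Chunker usable in a range-for loop that yields one (SHA, bytes) pair per chunk until EOF. The "ConstChunker.h" header name is assumed, and the ConstChunker(buf, maxBytes) constructor is inferred from the ChunkerFactory hunk below:

    #include <fstream>
    #include <iostream>

    #include "ConstChunker.h" // assumed header name

    int main() {
        std::ifstream in("example.bin", std::ios::in | std::ios::binary);
        ConstChunker chunker(in.rdbuf(), 4 * 1024 * 1024); // streambuf* and max chunk size, as used in ChunkerFactory
        for (const auto &chunk : chunker)                  // each chunk pairs the SHA string with the chunk bytes
            std::cout << chunk.second.size() << " bytes in this chunk\n";
    }
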
@@ -12,7 +12,8 @@ std::unique_ptr<Chunker> ChunkerFactory::getChunker(const Config &config, std::s
if (config.getStr("chunker") == "const") {
return std::make_unique<ConstChunker>(buf, config.getInt("chunker-max") * 1024);
} else if (config.getStr("chunker") == "buzhash") {
return std::make_unique<BuzhashChunker>(buf, config.getInt("chunker-min") * 1024, config.getInt("chunker-max") * 1024, config.getInt("chunker-mask"));
return std::make_unique<BuzhashChunker>(buf, config.getInt("chunker-min") * 1024,
config.getInt("chunker-max") * 1024, config.getInt("chunker-mask"));
} else {
throw Exception("Unknown chunker type!");
}

@@ -24,7 +24,8 @@ private:
/// \param base Base directory to restore to
/// \param callback Stats callback
/// \return Name of the restored file
std::string backupRestoreFile(const File &file, const std::filesystem::path &base, CommandsCommon::workerStatsFunction &callback, Context ctx);
std::string backupRestoreFile(const File &file, const std::filesystem::path &base,
CommandsCommon::workerStatsFunction &callback, Context ctx);
};

@@ -22,7 +22,8 @@ private:
/// \param saveAs UTF-8 encoded file name to save as
/// \param callback Stats callback
/// \return ID of the saved file
Object::idType backupChunkFile(const std::filesystem::path &orig, const std::string &saveAs, CommandsCommon::workerStatsFunction &callback, Context ctx);
Object::idType backupChunkFile(const std::filesystem::path &orig, const std::string &saveAs,
CommandsCommon::workerStatsFunction &callback, Context ctx);
};

@@ -18,7 +18,9 @@ namespace CommandsCommon {
/// \param ignore List of files to ignore
/// \param spawner Function to spawn other tasks
/// \param processFile Task to spawn on found files
void processDirWithIgnore(const std::filesystem::path &dir, std::vector<std::string> ignore, const std::function<void(std::function<void()>)> &spawner, std::function<void(std::filesystem::directory_entry)> processFile);
void processDirWithIgnore(const std::filesystem::path &dir, std::vector<std::string> ignore,
const std::function<void(std::function<void()>)> &spawner,
std::function<void(std::filesystem::directory_entry)> processFile);

struct WorkerStats {
public:
@@ -40,7 +42,8 @@ namespace CommandsCommon {
/// \return True if \p p contains \p prefix at its prefix, False otherwise
bool isSubpath(const std::filesystem::path &prefix, const std::filesystem::path &p);

void workerCallback(unsigned long long bytesWritten, unsigned long long bytesSkipped, unsigned long long filesWritten, WorkerStats &to);
void workerCallback(unsigned long long bytesWritten, unsigned long long bytesSkipped,
unsigned long long filesWritten, WorkerStats &to);

};// namespace CommandsCommon

@@ -27,15 +27,16 @@ void CommandDiff::run(Context ctx) {
Object::idType archive1;
if (!ctx.repo->getConfig().exists("aid")) {
auto archives = ctx.repo->getObjects(Object::ObjectType::Archive);
archive1 = std::max_element(archives.begin(), archives.end(), [](const auto &a1, const auto &a2) { return a1.second < a2.second; })->second;
archive1 = std::max_element(archives.begin(), archives.end(), [](const auto &a1, const auto &a2) {
return a1.second < a2.second;
})->second;
} else {
archive1 = ctx.repo->getConfig().getInt("aid");
}

ThreadPool threadPool([&](const std::string &error) {
ctx.logger->write("Error: " + error, 0);
},
ctx.repo->getConfig().exists("threads") ? ctx.repo->getConfig().getInt("threads") : std::thread::hardware_concurrency());
ThreadPool threadPool([&](const std::string &error) { ctx.logger->write("Error: " + error, 0); },
ctx.repo->getConfig().exists("threads") ? ctx.repo->getConfig().getInt("threads")
: std::thread::hardware_concurrency());

auto archiveO1 = Serialize::deserialize<Archive>(ctx.repo->getObject(archive1));
std::mutex filesLock;
@@ -43,8 +44,7 @@ void CommandDiff::run(Context ctx) {
for (auto id: archiveO1.files) {
auto file = Serialize::deserialize<File>(ctx.repo->getObject(id));
auto path = std::filesystem::u8path(file.name);
if (isSubpath(ctx.repo->getConfig().getStr("prefix"), path))
files.emplace(file.getKey(), std::move(file));
if (isSubpath(ctx.repo->getConfig().getStr("prefix"), path)) files.emplace(file.getKey(), std::move(file));
}

/// Container of ChangeDetectors built using the config of the repository
@@ -63,8 +63,7 @@ void CommandDiff::run(Context ctx) {
if (changeDetector.check({repoFile, ctx.repo}, p)) {
ctx.logger->write(relPath + " is different " + Diff::diff({repoFile, ctx.repo}, p) + "\n", 1);
} else {
if (diffMode == "file")
ctx.logger->write(relPath + " are same ", 0);
if (diffMode == "file") ctx.logger->write(relPath + " are same ", 0);
}
}

@@ -76,7 +75,8 @@ void CommandDiff::run(Context ctx) {
if (diffMode == "normal") {
/// If a second archive is given, run the task for each of its files, otherwise use the "from" config option
if (ctx.repo->getConfig().exists("aid2")) {
archiveO2.emplace(Serialize::deserialize<Archive>(ctx.repo->getObject(ctx.repo->getConfig().getInt("aid2"))));
archiveO2.emplace(
Serialize::deserialize<Archive>(ctx.repo->getObject(ctx.repo->getConfig().getInt("aid2"))));

threadPool.push([&]() {
for (auto id: archiveO2.value().files) {
@@ -84,9 +84,7 @@ void CommandDiff::run(Context ctx) {
if (Signals::shouldQuit) throw Exception("Quitting");
auto file = Serialize::deserialize<File>(ctx.repo->getObject(id));
if (isSubpath(ctx.repo->getConfig().getStr("prefix"), std::filesystem::u8path(file.name)))
threadPool.push([&, file]() {
processFile(ComparableFile{file, ctx.repo});
});
threadPool.push([&, file]() { processFile(ComparableFile{file, ctx.repo}); });
if (Signals::shouldQuit) break;
}

@@ -97,10 +95,9 @@ void CommandDiff::run(Context ctx) {
/// Start the diff with the root directory and empty ignore list
threadPool.push([&, from]() {
processDirWithIgnore(
from,
{},
[&](std::function<void()> f) { threadPool.push(std::move(f)); },
[processFile, from, prefix = ctx.repo->getConfig().getStr("prefix")](const std::filesystem::directory_entry &dirEntry) {
from, {}, [&](std::function<void()> f) { threadPool.push(std::move(f)); },
[processFile, from, prefix = ctx.repo->getConfig().getStr("prefix")](
const std::filesystem::directory_entry &dirEntry) {
if (isSubpath(prefix, dirEntry.path().lexically_relative(from)))
processFile(ComparableFile{dirEntry, from});
});
@@ -113,7 +110,8 @@ void CommandDiff::run(Context ctx) {
}

if (ctx.repo->getConfig().exists("aid2")) {
archiveO2.emplace(Serialize::deserialize<Archive>(ctx.repo->getObject(ctx.repo->getConfig().getInt("aid2"))));
archiveO2.emplace(
Serialize::deserialize<Archive>(ctx.repo->getObject(ctx.repo->getConfig().getInt("aid2"))));
std::map<std::filesystem::path, File> files2;///< Files in the first archive
for (auto id: archiveO2->files) {
auto file = Serialize::deserialize<File>(ctx.repo->getObject(id));
@@ -146,7 +144,5 @@ void CommandDiff::run(Context ctx) {
std::unique_lock finishedLock(threadPool.finishedLock);
threadPool.finished.wait(finishedLock, [&threadPool] { return threadPool.empty(); });
if (diffMode == "normal")
for (auto const &s: files) {
ctx.logger->write(s.first.u8string() + " is removed\n", 0);
}
for (auto const &s: files) { ctx.logger->write(s.first.u8string() + " is removed\n", 0); }
}

@@ -4,13 +4,10 @@

#include "CommandList.h"

CommandList::CommandList() : Command() {
}
CommandList::CommandList() : Command() {}

void CommandList::run(Context ctx) {
auto list = ctx.repo->getObjects(Object::ObjectType::Archive);
std::sort(list.begin(), list.end(), [](const auto &l, const auto &r) { return l.second < r.second; });
for (auto const &aid: list) {
std::cout << "Name: " << aid.first << " Id: " << aid.second << std::endl;
}
for (auto const &aid: list) { std::cout << "Name: " << aid.first << " Id: " << aid.second << std::endl; }
}

@@ -10,13 +10,13 @@
#include "objects/Chunk.h"
#include "objects/File.h"

CommandListFiles::CommandListFiles() : Command() {
}
CommandListFiles::CommandListFiles() : Command() {}

void CommandListFiles::run(Context ctx) {
auto archive = Serialize::deserialize<Archive>(ctx.repo->getObject(ctx.repo->getConfig().getInt("aid")));
for (auto const &fid: archive.files) {
auto file = Serialize::deserialize<File>(ctx.repo->getObject(fid));
std::cout << "Name: " << file.name << " type: " << File::TypeToStr.at(file.fileType) << " size: " << BytesFormatter::formatStr(file.bytes) << std::endl;
std::cout << "Name: " << file.name << " type: " << File::TypeToStr.at(file.fileType)
<< " size: " << BytesFormatter::formatStr(file.bytes) << std::endl;
}
}

@@ -5,9 +5,6 @@
#include "CommandMount.h"
#include "RepoFS.h"

CommandMount::CommandMount() : Command() {
}
CommandMount::CommandMount() : Command() {}

void CommandMount::run(Context ctx) {
RepoFS::start(ctx.repo, ctx.repo->getConfig().getStr("to"));
}
void CommandMount::run(Context ctx) { RepoFS::start(ctx.repo, ctx.repo->getConfig().getStr("to")); }

@@ -20,8 +20,7 @@
|
||||
|
||||
using namespace CommandsCommon;
|
||||
|
||||
CommandRestore::CommandRestore() : Command() {
|
||||
}
|
||||
CommandRestore::CommandRestore() : Command() {}
|
||||
|
||||
void CommandRestore::run(Context ctx) {
|
||||
Object::idType archive = ctx.repo->getConfig().getInt("aid");
|
||||
@@ -33,14 +32,14 @@ void CommandRestore::run(Context ctx) {
|
||||
WorkerStats workerStats;///< Backup statistics of the worker threads
|
||||
|
||||
/// Worker callback, bound to the local workerStats variable
|
||||
workerStatsFunction workerCallback = [&workerStats](unsigned long long bytesWritten, unsigned long long bytesSkipped, unsigned long long filesWritten) {
|
||||
workerStatsFunction workerCallback = [&workerStats](unsigned long long bytesWritten,
|
||||
unsigned long long bytesSkipped,
|
||||
unsigned long long filesWritten) {
|
||||
CommandsCommon::workerCallback(bytesWritten, bytesSkipped, filesWritten, workerStats);
|
||||
};
|
||||
{
|
||||
/// Calculate the average speed of backup
|
||||
RunningDiffAverage avg(
|
||||
[&]() { return workerStats.bytesWritten.load(); },
|
||||
100, 100);
|
||||
RunningDiffAverage avg([&]() { return workerStats.bytesWritten.load(); }, 100, 100);
|
||||
|
||||
/// Show restore progress
|
||||
Progress progress([this, ctx](const std::string &s, int l) { ctx.logger->write(s, l); },
|
||||
@@ -49,7 +48,10 @@ void CommandRestore::run(Context ctx) {
|
||||
"/",
|
||||
[&filesToRestoreCount]() { return std::to_string(filesToRestoreCount); },
|
||||
" files saved, ",
|
||||
[&workerStats]() { return BytesFormatter::formatStr(workerStats.bytesWritten.load() + workerStats.bytesSkipped.load()); },
|
||||
[&workerStats]() {
|
||||
return BytesFormatter::formatStr(workerStats.bytesWritten.load() +
|
||||
workerStats.bytesSkipped.load());
|
||||
},
|
||||
" / ",
|
||||
[&bytesToRestore]() { return BytesFormatter::formatStr(bytesToRestore); },
|
||||
" saved @ ",
|
||||
@@ -59,10 +61,9 @@ void CommandRestore::run(Context ctx) {
|
||||
ctx.repo->getConfig());
|
||||
|
||||
/// Thread pool for restore tasks
|
||||
ThreadPool threadPool([&](const std::string &error) {
|
||||
progress.print("Error: " + error, 0);
|
||||
},
|
||||
ctx.repo->getConfig().exists("threads") ? ctx.repo->getConfig().getInt("threads") : std::thread::hardware_concurrency());
|
||||
ThreadPool threadPool([&](const std::string &error) { progress.print("Error: " + error, 0); },
|
||||
ctx.repo->getConfig().exists("threads") ? ctx.repo->getConfig().getInt("threads")
|
||||
: std::thread::hardware_concurrency());
|
||||
|
||||
/// Add the main restore task
|
||||
threadPool.push([&, this]() {
|
||||
@@ -92,7 +93,8 @@ void CommandRestore::run(Context ctx) {
|
||||
ctx.logger->write("\n", 1);
|
||||
}
|
||||
|
||||
std::string CommandRestore::backupRestoreFile(const File &file, const std::filesystem::path &baseDir, workerStatsFunction &callback, Context ctx) {
|
||||
std::string CommandRestore::backupRestoreFile(const File &file, const std::filesystem::path &baseDir,
|
||||
workerStatsFunction &callback, Context ctx) {
|
||||
auto fullpath = baseDir / std::filesystem::u8path(file.name);
|
||||
|
||||
std::filesystem::create_directories(fullpath.parent_path());
|
||||
@@ -104,7 +106,8 @@ std::string CommandRestore::backupRestoreFile(const File &file, const std::files
|
||||
}
|
||||
if (file.fileType == File::Type::Symlink) {
|
||||
auto dest = Serialize::deserialize<Chunk>(ctx.repo->getObject(file.chunks.at(0)));
|
||||
std::filesystem::create_symlink(std::filesystem::u8path(std::string{dest.data.begin(), dest.data.end()}), fullpath);
|
||||
std::filesystem::create_symlink(std::filesystem::u8path(std::string{dest.data.begin(), dest.data.end()}),
|
||||
fullpath);
|
||||
callback(0, 0, 1);
|
||||
return fullpath.u8string();
|
||||
}
|
||||
|
||||
@@ -26,8 +26,7 @@
|
||||
|
||||
using namespace CommandsCommon;
|
||||
|
||||
CommandRun::CommandRun() : Command() {
|
||||
}
|
||||
CommandRun::CommandRun() : Command() {}
|
||||
|
||||
void CommandRun::run(Context ctx) {
|
||||
WorkerStats workerStats;///< Backup statistics of the worker threads
|
||||
@@ -36,49 +35,48 @@ void CommandRun::run(Context ctx) {
|
||||
std::filesystem::path from = ctx.repo->getConfig().getStr("from");///< Directory to back up from
|
||||
|
||||
/// Worker callback, bound to the local workerStats variable
|
||||
workerStatsFunction workerCallback = [&](unsigned long long bytesWritten, unsigned long long bytesSkipped, unsigned long long filesWritten) {
|
||||
workerStatsFunction workerCallback = [&](unsigned long long bytesWritten, unsigned long long bytesSkipped,
|
||||
unsigned long long filesWritten) {
|
||||
CommandsCommon::workerCallback(bytesWritten, bytesSkipped, filesWritten, workerStats);
|
||||
};
|
||||
|
||||
std::vector<Object::idType> files;///< File ids so far added to the archive
|
||||
std::mutex filesLock; ///< Files vector lock
|
||||
/// Function to safely add new file ids to `files`
|
||||
std::function addFile = [&](Object::idType id) {std::lock_guard lock(filesLock); files.emplace_back(id); };
|
||||
std::function addFile = [&](Object::idType id) {
|
||||
std::lock_guard lock(filesLock);
|
||||
files.emplace_back(id);
|
||||
};
|
||||
|
||||
{
|
||||
/// Calculate the average speed of backup
|
||||
RunningDiffAverage avg(
|
||||
[&]() { return workerStats.bytesWritten.load(); },
|
||||
100, 100);
|
||||
RunningDiffAverage avg([&]() { return workerStats.bytesWritten.load(); }, 100, 100);
|
||||
|
||||
/// Show the progress of backup
|
||||
Progress progress([this, ctx](const std::string &s, int l) { ctx.logger->write(s, l); },
|
||||
{[&]() { return std::to_string(workerStats.filesWritten.load()); },
|
||||
"/",
|
||||
[&]() { return std::to_string(runnerStats.filesToSaveCount); },
|
||||
" files saved, ",
|
||||
[&]() { return std::to_string(runnerStats.filesSkipped); },
|
||||
" files skipped, ",
|
||||
[&]() { return BytesFormatter::formatStr((workerStats.bytesWritten.load() + workerStats.bytesSkipped.load())); },
|
||||
" / ",
|
||||
[&]() { return BytesFormatter::formatStr(runnerStats.bytesToSave); },
|
||||
" read @ ",
|
||||
[&]() { return BytesFormatter::formatStr(avg.get() * 10); },
|
||||
"/s"},
|
||||
{[&]() { return std::to_string(workerStats.filesWritten.load()); }, "/",
|
||||
[&]() { return std::to_string(runnerStats.filesToSaveCount); }, " files saved, ",
|
||||
[&]() { return std::to_string(runnerStats.filesSkipped); }, " files skipped, ",
|
||||
[&]() {
|
||||
return BytesFormatter::formatStr(
|
||||
(workerStats.bytesWritten.load() + workerStats.bytesSkipped.load()));
|
||||
},
|
||||
" / ", [&]() { return BytesFormatter::formatStr(runnerStats.bytesToSave); }, " read @ ",
|
||||
[&]() { return BytesFormatter::formatStr(avg.get() * 10); }, "/s"},
|
||||
ctx.repo->getConfig());
|
||||
|
||||
/// Thread pool for backup tasks, prints to progress on any errors
|
||||
ThreadPool threadPool([&](const std::string &error) {
|
||||
progress.print("Error: " + error, 0);
|
||||
},
|
||||
ctx.repo->getConfig().exists("threads") ? ctx.repo->getConfig().getInt("threads") : std::thread::hardware_concurrency());
|
||||
ThreadPool threadPool([&](const std::string &error) { progress.print("Error: " + error, 0); },
|
||||
ctx.repo->getConfig().exists("threads") ? ctx.repo->getConfig().getInt("threads")
|
||||
: std::thread::hardware_concurrency());
|
||||
|
||||
/// Container of ChangeDetectors built using the config of the repository
|
||||
ChangeDetectorContainer changeDetector = ChangeDetectorFactory::getChangeDetectors(ctx.repo->getConfig());
|
||||
|
||||
/// Function to spawn a rechunking task
|
||||
auto saveFile = [&, this](const std::filesystem::path &absPath, const std::filesystem::path &relPath) {
|
||||
runnerStats.bytesToSave += File::getFileType(absPath) == File::Type::Normal ? std::filesystem::file_size(absPath) : 0;
|
||||
runnerStats.bytesToSave +=
|
||||
File::getFileType(absPath) == File::Type::Normal ? std::filesystem::file_size(absPath) : 0;
|
||||
runnerStats.filesToSaveCount++;
|
||||
threadPool.push([&, relPath, absPath]() {
|
||||
addFile(backupChunkFile(absPath, relPath.u8string(), workerCallback, ctx));
|
||||
@@ -87,8 +85,7 @@ void CommandRun::run(Context ctx) {
|
||||
};
|
||||
|
||||
/// Task to process an individual file in the backup
|
||||
std::function<void(std::filesystem::path)> processFile =
|
||||
[&, this](const std::filesystem::path &p) {
|
||||
std::function<void(std::filesystem::path)> processFile = [&, this](const std::filesystem::path &p) {
|
||||
auto relPath = p.lexically_relative(from).u8string();
|
||||
|
||||
if (ctx.repo->exists(Object::ObjectType::File, relPath) != 0) {
|
||||
@@ -108,10 +105,7 @@ void CommandRun::run(Context ctx) {
|
||||
/// Start the backup with the root directory and empty ignore list
|
||||
threadPool.push([&]() {
|
||||
processDirWithIgnore(
|
||||
from,
|
||||
{},
|
||||
[&](std::function<void()> f) { threadPool.push(std::move(f)); },
|
||||
processFile);
|
||||
from, {}, [&](std::function<void()> f) { threadPool.push(std::move(f)); }, processFile);
|
||||
});
|
||||
|
||||
/// Wait for all the tasks to finish
|
||||
@@ -137,19 +131,20 @@ void CommandRun::run(Context ctx) {
|
||||
ctx.repo->putObject(a);
|
||||
}
|
||||
|
||||
Object::idType CommandRun::backupChunkFile(const std::filesystem::path &orig, const std::string &saveAs, workerStatsFunction &callback, Context ctx) {
|
||||
Object::idType CommandRun::backupChunkFile(const std::filesystem::path &orig, const std::string &saveAs,
|
||||
workerStatsFunction &callback, Context ctx) {
|
||||
/// If it's a symlink or directory, treat it specially
|
||||
/// The order of checks is important, because is_directory follows the symlink
|
||||
if (std::filesystem::is_symlink(orig) || std::filesystem::is_directory(orig)) {
|
||||
auto contents = File::getFileContents(orig);
|
||||
Chunk c(ctx.repo->getId(), SHA::calculate(contents), contents);
|
||||
File f(ctx.repo->getId(), saveAs, c.length, File::getFileMtime(orig), c.SHA, {{0, c.id}}, File::getFileType(orig));
|
||||
File f(ctx.repo->getId(), saveAs, c.length, File::getFileMtime(orig), c.SHA, {{0, c.id}},
|
||||
File::getFileType(orig));
|
||||
ctx.repo->putObject(c);
|
||||
ctx.repo->putObject(f);
|
||||
return f.id;
|
||||
}
|
||||
if (!std::filesystem::is_regular_file(orig))
|
||||
throw Exception(orig.u8string() + "is a special file, not saving");
|
||||
if (!std::filesystem::is_regular_file(orig)) throw Exception(orig.u8string() + "is a special file, not saving");
|
||||
|
||||
std::ifstream ifstream(orig, std::ios::in | std::ios::binary);
|
||||
if (!ifstream) throw Exception("Couldn't open " + orig.u8string() + " for reading");
|
||||
@@ -186,7 +181,8 @@ Object::idType CommandRun::backupChunkFile(const std::filesystem::path &orig, co
|
||||
if (size != File::getFileSize(orig)) {
|
||||
throw Exception("Something really bad happened or file " + orig.u8string() + " changed during backup");
|
||||
}
|
||||
File f(ctx.repo->getId(), saveAs, size, File::getFileMtime(orig), fileHash.getHash(), fileChunks, File::getFileType(orig));
|
||||
File f(ctx.repo->getId(), saveAs, size, File::getFileMtime(orig), fileHash.getHash(), fileChunks,
|
||||
File::getFileType(orig));
|
||||
ctx.repo->putObject(f);
|
||||
callback(0, 0, 1);
|
||||
|
||||
|
||||
@@ -10,7 +10,8 @@
|
||||
#include "Exception.h"
|
||||
#include "Signals.h"
|
||||
|
||||
void CommandsCommon::workerCallback(unsigned long long int bytesWritten, unsigned long long int bytesSkipped, unsigned long long int filesWritten, WorkerStats &to) {
|
||||
void CommandsCommon::workerCallback(unsigned long long int bytesWritten, unsigned long long int bytesSkipped,
|
||||
unsigned long long int filesWritten, WorkerStats &to) {
|
||||
to.bytesWritten += bytesWritten;
|
||||
to.bytesSkipped += bytesSkipped;
|
||||
to.filesWritten += filesWritten;
|
||||
@@ -23,7 +24,9 @@ bool CommandsCommon::isSubpath(const std::filesystem::path &prefix, const std::f
|
||||
return true;
|
||||
}
|
||||
|
||||
void CommandsCommon::processDirWithIgnore(const std::filesystem::path &dir, std::vector<std::string> ignore, const std::function<void(std::function<void()>)> &spawner, std::function<void(std::filesystem::directory_entry)> processFile) {
|
||||
void CommandsCommon::processDirWithIgnore(const std::filesystem::path &dir, std::vector<std::string> ignore,
|
||||
const std::function<void(std::function<void()>)> &spawner,
|
||||
std::function<void(std::filesystem::directory_entry)> processFile) {
|
||||
if (!std::filesystem::is_directory(dir)) throw Exception(dir.u8string() + " is not a directory!");
|
||||
|
||||
/// Don't process the directory if it has a ".nobackup" file
|
||||
@@ -33,9 +36,7 @@ void CommandsCommon::processDirWithIgnore(const std::filesystem::path &dir, std:
|
||||
if (std::filesystem::exists(dir / ".ignore")) {
|
||||
std::ifstream ignorefile(dir / ".ignore", std::ios::in);
|
||||
std::string line;
|
||||
while (std::getline(ignorefile, line)) {
|
||||
ignore.emplace_back(line);
|
||||
}
|
||||
while (std::getline(ignorefile, line)) { ignore.emplace_back(line); }
|
||||
}
|
||||
|
||||
/// For each directory entry...
|
||||
@@ -48,7 +49,8 @@ void CommandsCommon::processDirWithIgnore(const std::filesystem::path &dir, std:
|
||||
std::smatch m;
|
||||
auto s = dirEntry.path().filename().u8string();
|
||||
return std::regex_match(s, m, std::regex(pred));
|
||||
})) continue;
|
||||
}))
|
||||
continue;
|
||||
|
||||
/// If it's a directory, spawn a task to process the entries in it
|
||||
if (!dirEntry.is_symlink() && dirEntry.is_directory()) {
|
||||
@@ -60,8 +62,6 @@ void CommandsCommon::processDirWithIgnore(const std::filesystem::path &dir, std:
|
||||
}
|
||||
|
||||
/// Spawn a task to process each individual file
|
||||
spawner([processFile, dirEntry]() {
|
||||
processFile(dirEntry);
|
||||
});
|
||||
spawner([processFile, dirEntry]() { processFile(dirEntry); });
|
||||
}
|
||||
}
|
||||
@@ -51,13 +51,9 @@ std::string Diff::diff(const ComparableFile &c1, const ComparableFile &c2) {
|
||||
}
|
||||
|
||||
out << "\nLines only in first file: " << std::endl;
|
||||
for (const auto &s: f1lines) {
|
||||
out << s.second << "<" << s.first << std::endl;
|
||||
}
|
||||
for (const auto &s: f1lines) { out << s.second << "<" << s.first << std::endl; }
|
||||
out << "Lines only in second file: " << std::endl;
|
||||
for (const auto &s: f2diff) {
|
||||
out << s.second << ">" << s.first << std::endl;
|
||||
}
|
||||
for (const auto &s: f2diff) { out << s.second << ">" << s.first << std::endl; }
|
||||
out << "^^^\n";
|
||||
return out.str();
|
||||
}
|
||||
@@ -92,12 +88,8 @@ std::string Diff::diffPercent(const ComparableFile &c1, const ComparableFile &c2
|
||||
|
||||
unsigned long long diff = 0;
|
||||
|
||||
for (const auto &c: ch1hashes) {
|
||||
diff += hashsize[c];
|
||||
}
|
||||
for (const auto &c: ch2diff) {
|
||||
diff += hashsize[c];
|
||||
}
|
||||
for (const auto &c: ch1hashes) { diff += hashsize[c]; }
|
||||
for (const auto &c: ch2diff) { diff += hashsize[c]; }
|
||||
|
||||
return "at most " + BytesFormatter::formatStr(diff);
|
||||
}
|
||||
|
||||
@@ -41,7 +41,8 @@ public:
|
||||
std::string getHash();
|
||||
|
||||
private:
|
||||
const std::unique_ptr<EVP_MD_CTX, decltype(&EVP_MD_CTX_free)> mdctx{EVP_MD_CTX_new(), &EVP_MD_CTX_free};///< Current hashing context
|
||||
const std::unique_ptr<EVP_MD_CTX, decltype(&EVP_MD_CTX_free)> mdctx{EVP_MD_CTX_new(),
|
||||
&EVP_MD_CTX_free};///< Current hashing context
|
||||
};
|
||||
|
||||
|
||||
|
||||
@@ -23,15 +23,16 @@ std::vector<char> AES::encrypt(const std::vector<char> &in, const std::array<uin
|
||||
if (!ctx) throw Exception("Error initializing encryption context!");
|
||||
|
||||
std::vector<char> out(in.size() + AES_BLOCK_SIZE + 32);
|
||||
if (!RAND_bytes(reinterpret_cast<unsigned char *>(out.data()), 32))
|
||||
throw Exception("Error generating IV!");
|
||||
if (!RAND_bytes(reinterpret_cast<unsigned char *>(out.data()), 32)) throw Exception("Error generating IV!");
|
||||
|
||||
if (!EVP_EncryptInit_ex(ctx.get(), EVP_aes_256_cbc(), nullptr, key.data(), reinterpret_cast<const unsigned char *>(out.data())))
|
||||
if (!EVP_EncryptInit_ex(ctx.get(), EVP_aes_256_cbc(), nullptr, key.data(),
|
||||
reinterpret_cast<const unsigned char *>(out.data())))
|
||||
throw Exception("Error encrypting!");
|
||||
|
||||
int outlen = static_cast<int>(out.size()) - 32;
|
||||
|
||||
if (!EVP_EncryptUpdate(ctx.get(), reinterpret_cast<unsigned char *>(out.data() + 32), &outlen, reinterpret_cast<const unsigned char *>(in.data()), static_cast<int>(in.size())))
|
||||
if (!EVP_EncryptUpdate(ctx.get(), reinterpret_cast<unsigned char *>(out.data() + 32), &outlen,
|
||||
reinterpret_cast<const unsigned char *>(in.data()), static_cast<int>(in.size())))
|
||||
throw Exception("Error encrypting!");
|
||||
|
||||
int finlen = 0;
|
||||
@@ -52,11 +53,13 @@ std::vector<char> AES::decrypt(const std::vector<char> &in, const std::array<uin
|
||||
std::vector<char> out(in.size() - 32);
|
||||
int outlen = static_cast<int>(out.size());
|
||||
|
||||
if (!EVP_DecryptInit_ex(ctx.get(), EVP_aes_256_cbc(), nullptr, key.data(), reinterpret_cast<const unsigned char *>(in.data())))
|
||||
if (!EVP_DecryptInit_ex(ctx.get(), EVP_aes_256_cbc(), nullptr, key.data(),
|
||||
reinterpret_cast<const unsigned char *>(in.data())))
|
||||
throw Exception("Error decrypting!");
|
||||
|
||||
|
||||
if (!EVP_DecryptUpdate(ctx.get(), reinterpret_cast<unsigned char *>(out.data()), &outlen, reinterpret_cast<const unsigned char *>(in.data() + 32), static_cast<int>(in.size() - 32)))
|
||||
if (!EVP_DecryptUpdate(ctx.get(), reinterpret_cast<unsigned char *>(out.data()), &outlen,
|
||||
reinterpret_cast<const unsigned char *>(in.data() + 32), static_cast<int>(in.size() - 32)))
|
||||
throw Exception("Error decrypting!");
|
||||
|
||||
int finlen = 0;
|
||||
@@ -70,13 +73,9 @@ std::vector<char> AES::decrypt(const std::vector<char> &in, const std::array<uin
|
||||
|
||||
std::array<uint8_t, 32> AES::deriveKey(const std::string &password, const std::string &salt) {
|
||||
std::array<uint8_t, 32> key;//NOLINT
|
||||
if (!PKCS5_PBKDF2_HMAC_SHA1(password.data(),
|
||||
static_cast<int>(password.length()),
|
||||
reinterpret_cast<const unsigned char *>(salt.data()),
|
||||
static_cast<int>(salt.length()),
|
||||
10000,
|
||||
32,
|
||||
key.data()))
|
||||
if (!PKCS5_PBKDF2_HMAC_SHA1(password.data(), static_cast<int>(password.length()),
|
||||
reinterpret_cast<const unsigned char *>(salt.data()), static_cast<int>(salt.length()),
|
||||
10000, 32, key.data()))
|
||||
throw Exception("Error deriving key!");
|
||||
return key;
|
||||
}
|
||||
|
||||
@@ -13,17 +13,14 @@ std::string SHA::calculate(const std::vector<char> &in) {
|
||||
}
|
||||
|
||||
SHA::SHA() {
|
||||
if (!mdctx)
|
||||
throw Exception("Can't create hashing context!");
|
||||
if (!mdctx) throw Exception("Can't create hashing context!");
|
||||
|
||||
if (!EVP_DigestInit_ex(mdctx.get(), EVP_sha256(), nullptr))
|
||||
throw Exception("Can't create hashing context!");
|
||||
if (!EVP_DigestInit_ex(mdctx.get(), EVP_sha256(), nullptr)) throw Exception("Can't create hashing context!");
|
||||
}
|
||||
|
||||
void SHA::feedData(const std::vector<char> &in) {
|
||||
if (in.empty()) return;
|
||||
if (!EVP_DigestUpdate(mdctx.get(), in.data(), in.size()))
|
||||
throw Exception("Error hashing!");
|
||||
if (!EVP_DigestUpdate(mdctx.get(), in.data(), in.size())) throw Exception("Error hashing!");
|
||||
}
|
||||
|
||||
std::string SHA::getHash() {
|
||||
@@ -33,11 +30,9 @@ std::string SHA::getHash() {
|
||||
if (!EVP_DigestFinal_ex(mdctx.get(), reinterpret_cast<unsigned char *>(out.data()), &s))
|
||||
throw Exception("Error hashing!");
|
||||
|
||||
if (s != out.size())
|
||||
throw Exception("Error hashing!");
|
||||
if (s != out.size()) throw Exception("Error hashing!");
|
||||
|
||||
if (!EVP_MD_CTX_reset(mdctx.get()))
|
||||
throw Exception("Error hashing!");
|
||||
if (!EVP_MD_CTX_reset(mdctx.get())) throw Exception("Error hashing!");
|
||||
|
||||
return {out.begin(), out.end()};
|
||||
}
|
||||
|
||||
@@ -6,13 +6,9 @@
|
||||
#include "CRC32.h"
|
||||
#include "Serialize.h"
|
||||
|
||||
std::vector<char> CheckFilter::filterWrite(std::vector<char> from) const {
|
||||
return filterWriteStatic(std::move(from));
|
||||
}
|
||||
std::vector<char> CheckFilter::filterWrite(std::vector<char> from) const { return filterWriteStatic(std::move(from)); }
|
||||
|
||||
std::vector<char> CheckFilter::filterRead(std::vector<char> from) const {
|
||||
return filterReadStatic(std::move(from));
|
||||
}
|
||||
std::vector<char> CheckFilter::filterRead(std::vector<char> from) const { return filterReadStatic(std::move(from)); }
|
||||
|
||||
std::vector<char> CheckFilter::filterWriteStatic(std::vector<char> from) {
|
||||
auto out = magic;
|
||||
|
||||
@@ -6,12 +6,8 @@
|
||||
|
||||
#include "AES.h"
|
||||
|
||||
std::vector<char> FilterAES::filterWrite(std::vector<char> from) const {
|
||||
return AES::encrypt(from, key);
|
||||
}
|
||||
std::vector<char> FilterAES::filterWrite(std::vector<char> from) const { return AES::encrypt(from, key); }
|
||||
|
||||
std::vector<char> FilterAES::filterRead(std::vector<char> from) const {
|
||||
return AES::decrypt(from, key);
|
||||
}
|
||||
std::vector<char> FilterAES::filterRead(std::vector<char> from) const { return AES::decrypt(from, key); }
|
||||
|
||||
FilterAES::FilterAES(const std::string &password, const std::string &salt) : key(AES::deriveKey(password, salt)) {}
|
||||
|
||||
@@ -17,7 +17,6 @@ std::vector<char> FilterContainer::filterWrite(std::vector<char> from) const {
|
||||
}
|
||||
|
||||
std::vector<char> FilterContainer::filterRead(std::vector<char> from) const {
|
||||
for (auto f = filters.crbegin(); f != filters.crend(); f++)
|
||||
from = (*f)->filterRead(std::move(from));
|
||||
for (auto f = filters.crbegin(); f != filters.crend(); f++) from = (*f)->filterRead(std::move(from));
|
||||
return from;
|
||||
}
|
||||
|
||||
@@ -19,8 +19,8 @@ std::vector<char> FilterZlib::filterWrite(std::vector<char> from) const {
|
||||
|
||||
out.resize(sizeSize + outSize);
|
||||
|
||||
if (compress2(reinterpret_cast<Bytef *>(out.data() + sizeSize), &outSize, reinterpret_cast<const Bytef *>(from.data()), from.size(), level) !=
|
||||
Z_OK)
|
||||
if (compress2(reinterpret_cast<Bytef *>(out.data() + sizeSize), &outSize,
|
||||
reinterpret_cast<const Bytef *>(from.data()), from.size(), level) != Z_OK)
|
||||
throw Exception("Error compressing!");
|
||||
|
||||
out.resize(outSize + sizeSize);
|
||||
@@ -40,8 +40,8 @@ std::vector<char> FilterZlib::filterRead(std::vector<char> from) const {
|
||||
|
||||
if (desI >= from.cend()) throw Exception("Unexpected end of archive!");
|
||||
|
||||
if (uncompress(reinterpret_cast<Bytef *>(out.data()), &size, reinterpret_cast<const Bytef *>(&(*desI)), std::distance(desI, from.cend())) !=
|
||||
Z_OK)
|
||||
if (uncompress(reinterpret_cast<Bytef *>(out.data()), &size, reinterpret_cast<const Bytef *>(&(*desI)),
|
||||
std::distance(desI, from.cend())) != Z_OK)
|
||||
throw Exception("Error decompressing!");
|
||||
|
||||
return out;
|
||||
|
||||
@@ -18,9 +18,7 @@ DirEntry *getf(std::string path) {
|
||||
auto p = std::filesystem::relative(std::filesystem::u8path(path), "/");
|
||||
DirEntry *entry = RepoFS::root.get();
|
||||
if (p != ".")
|
||||
for (auto const &subp: p) {
|
||||
entry = entry->children.at(subp).get();
|
||||
}
|
||||
for (auto const &subp: p) { entry = entry->children.at(subp).get(); }
|
||||
return entry;
|
||||
}
|
||||
|
||||
@@ -53,14 +51,12 @@ static int rfsGetattr(const char *path, struct stat *stbuf) {
|
||||
return res;
|
||||
}
|
||||
|
||||
static int rfsReaddir(const char *path, void *buf, fuse_fill_dir_t filler,
|
||||
off_t offset, struct fuse_file_info *fi) {
|
||||
static int rfsReaddir(const char *path, void *buf, fuse_fill_dir_t filler, off_t offset, struct fuse_file_info *fi) {
|
||||
(void) offset;
|
||||
(void) fi;
|
||||
|
||||
DirEntry *entry = RepoFS::root.get();
|
||||
if (std::string(path) != "/")
|
||||
try {
|
||||
if (std::string(path) != "/") try {
|
||||
entry = getf(path);
|
||||
} catch (...) { return -ENOENT; }
|
||||
|
||||
@@ -77,37 +73,30 @@ static int rfsReaddir(const char *path, void *buf, fuse_fill_dir_t filler,
|
||||
|
||||
static int rfsOpen(const char *path, struct fuse_file_info *fi) {
|
||||
DirEntry *entry = RepoFS::root.get();
|
||||
if (std::string(path) != "/")
|
||||
try {
|
||||
if (std::string(path) != "/") try {
|
||||
entry = getf(path);
|
||||
} catch (...) { return -ENOENT; }
|
||||
|
||||
if ((fi->flags & 3) != O_RDONLY)
|
||||
return -EACCES;
|
||||
if ((fi->flags & 3) != O_RDONLY) return -EACCES;
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
static int rfsRead(const char *path, char *buf, size_t size, off_t offset,
|
||||
struct fuse_file_info *fi) {
|
||||
static int rfsRead(const char *path, char *buf, size_t size, off_t offset, struct fuse_file_info *fi) {
|
||||
size_t len;
|
||||
(void) fi;
|
||||
DirEntry *entry = RepoFS::root.get();
|
||||
if (std::string(path) != "/")
|
||||
try {
|
||||
if (std::string(path) != "/") try {
|
||||
entry = getf(path);
|
||||
} catch (...) { return -ENOENT; }
|
||||
|
||||
|
||||
len = entry->file->bytes;
|
||||
if (offset < len) {
|
||||
if (offset + size > len)
|
||||
size = len - offset;
|
||||
if (offset + size > len) size = len - offset;
|
||||
|
||||
auto curchunk = entry->file->chunks.upper_bound(offset);
|
||||
if (curchunk == entry->file->chunks.begin()) {
|
||||
std::abort();
|
||||
}
|
||||
if (curchunk == entry->file->chunks.begin()) { std::abort(); }
|
||||
--curchunk;
|
||||
size_t curInBuf = 0;
|
||||
size_t curInChunk = offset - curchunk->first;
|
||||
@@ -127,8 +116,7 @@ static int rfsRead(const char *path, char *buf, size_t size, off_t offset,
|
||||
|
||||
static int rfsReadlink(const char *path, char *buf, size_t size) {
|
||||
DirEntry *entry = RepoFS::root.get();
|
||||
if (std::string(path) != "/")
|
||||
try {
|
||||
if (std::string(path) != "/") try {
|
||||
entry = getf(path);
|
||||
} catch (...) { return -ENOENT; }
|
||||
|
||||
@@ -161,8 +149,7 @@ void RepoFS::start(Repository *repo, std::string path) {
|
||||
entry->isFakeDir = true;
|
||||
entry->name = std::to_string(a.id);
|
||||
for (auto const &subp: path) {
|
||||
entry = entry->children[subp].get()
|
||||
? entry->children[subp].get()
|
||||
entry = entry->children[subp].get() ? entry->children[subp].get()
|
||||
: (entry->children[subp] = std::make_unique<DirEntry>()).get();
|
||||
}
|
||||
entry->file.emplace(file);
|
||||
|
||||
src/main.cpp
@@ -36,8 +36,7 @@ Config getConf(int argc, char *argv[]) {
|
||||
int help() {
|
||||
for (auto const &o: Config::keys) {
|
||||
std::cout << "--" << o.first << " <" << Config::KeyTypeToStr.at(o.second.type) << ">" << std::endl;
|
||||
if (o.second.defaultval.has_value())
|
||||
std::cout << " Default: " << o.second.defaultval.value() << std::endl;
|
||||
if (o.second.defaultval.has_value()) std::cout << " Default: " << o.second.defaultval.value() << std::endl;
|
||||
std::cout << " Is saved in repository: " << (o.second.remember ? "yes" : "no") << std::endl;
|
||||
std::cout << " Info: " << o.second.info << std::endl;
|
||||
}
|
||||
@@ -80,23 +79,18 @@ int main(int argc, char *argv[]) {
|
||||
}
|
||||
|
||||
std::string opt = argv[1];
|
||||
if (opt == "help") {
|
||||
return help();
|
||||
}
|
||||
if (opt == "help") { return help(); }
|
||||
|
||||
Config conf;
|
||||
|
||||
try {
|
||||
conf = getConf(argc - 2, argv + 2);
|
||||
} catch (std::exception &e) {
|
||||
std::cerr << "Error reading config!" << std::endl
|
||||
<< e.what() << std::endl;
|
||||
std::cerr << "Error reading config!" << std::endl << e.what() << std::endl;
|
||||
return -1;
|
||||
}
|
||||
|
||||
if (opt == "init") {
|
||||
return init(conf);
|
||||
}
|
||||
if (opt == "init") { return init(conf); }
|
||||
|
||||
auto repo = openRepo(conf);
|
||||
|
||||
@@ -122,10 +116,7 @@ int main(int argc, char *argv[]) {
|
||||
} else {
|
||||
commands.at(opt)->run(ctx);
|
||||
}
|
||||
} catch (std::exception &e) {
|
||||
std::cerr << "Error!" << std::endl
|
||||
<< e.what() << std::endl;
|
||||
} catch (...) {
|
||||
} catch (std::exception &e) { std::cerr << "Error!" << std::endl << e.what() << std::endl; } catch (...) {
|
||||
std::cerr << "Something very bad happened!" << std::endl;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -72,7 +72,8 @@ private:
|
||||
/// \param size Amount of bytes to read (no more than absoluteMaxFileLimit)
|
||||
/// \return Vector of bytes of the file
|
||||
/// \throws Exception on any error, or when absoluteMaxFileLimit is reached
|
||||
std::vector<char> readFile(const std::filesystem::path &file, unsigned long long offset, unsigned long long size) const;
|
||||
std::vector<char> readFile(const std::filesystem::path &file, unsigned long long offset,
|
||||
unsigned long long size) const;
|
||||
static constexpr unsigned long long absoluteMaxFileLimit{4ULL * 1024 * 1024 * 1024};///<Max file read size (4GB)
|
||||
|
||||
/// Writes \p data to \p file
|
||||
@@ -107,15 +108,17 @@ private:
|
||||
std::mutex writeCacheLock; ///< Write cache lock
|
||||
std::map<Object::idType, std::vector<char>> writeCache;///< Write cache, map of Object ids and their serialized data
|
||||
unsigned long long writeCacheSize = 0; ///< Current byte size of the write cache
|
||||
const unsigned long long writeCacheMax; ///< Target size of the write cache, it is automatically flushed after this is reached
|
||||
const unsigned long long
|
||||
writeCacheMax;///< Target size of the write cache, it is automatically flushed after this is reached
|
||||
|
||||
/// Flushes the write cache
|
||||
/// Takes the cache lock, swaps the cache with an empty one and unlocks it
|
||||
/// \param lockW Write cache lock
|
||||
void flushWriteCache(std::unique_lock<std::mutex> &&lockW);
|
||||
|
||||
Object::idType largestUnusedId = 1; ///< Largest available objectID
|
||||
std::unordered_map<Object::ObjectType, std::unordered_map<std::string, Object::idType>> keyIndex;///< Maps Object%'s keys to their ID's
|
||||
Object::idType largestUnusedId = 1;///< Largest available objectID
|
||||
std::unordered_map<Object::ObjectType, std::unordered_map<std::string, Object::idType>>
|
||||
keyIndex;///< Maps Object%'s keys to their ID's
|
||||
};
|
||||
|
||||
|
||||
|
||||
@@ -17,12 +17,7 @@ class Object {
|
||||
public:
|
||||
using idType = uint64_t;///< Type alias for Object%'s ID
|
||||
|
||||
enum class ObjectType {
|
||||
Archive,
|
||||
File,
|
||||
Chunk,
|
||||
END
|
||||
};
|
||||
enum class ObjectType { Archive, File, Chunk, END };
|
||||
|
||||
/// Serializes the object to \p out
|
||||
virtual void serialize(std::vector<char> &out) const;
|
||||
|
||||
@@ -17,16 +17,14 @@
|
||||
/// Object representing a saved file
|
||||
class File : public Object {
|
||||
public:
|
||||
enum class Type {
|
||||
Normal,
|
||||
Symlink,
|
||||
Directory,
|
||||
END
|
||||
};
|
||||
enum class Type { Normal, Symlink, Directory, END };
|
||||
|
||||
static inline const std::unordered_map<Type, std::string> TypeToStr{{Type::Normal, "normal"}, {Type::Symlink, "symlink"}, {Type::Directory, "directory"}};
|
||||
static inline const std::unordered_map<Type, std::string> TypeToStr{{Type::Normal, "normal"},
|
||||
{Type::Symlink, "symlink"},
|
||||
{Type::Directory, "directory"}};
|
||||
|
||||
File(Object::idType id, std::string name, unsigned long long bytes, unsigned long long mtime, std::string SHA, std::map<size_t, idType> chunks, Type fileType);
|
||||
File(Object::idType id, std::string name, unsigned long long bytes, unsigned long long mtime, std::string SHA,
|
||||
std::map<size_t, idType> chunks, Type fileType);
|
||||
|
||||
/// Deserialization constructor
|
||||
File(std::vector<char>::const_iterator &in, const std::vector<char>::const_iterator &end);
|
||||
|
||||
@@ -13,11 +13,11 @@
|
||||
#include "Object.h"
|
||||
#include "Serialize.h"
|
||||
|
||||
FileRepository::FileRepository(Config config) : Repository(std::move(config)), root(std::filesystem::path(this->config.getStr("repo"))), writeCacheMax(config.getInt("repo-target") * 1024 * 1024) {}
|
||||
FileRepository::FileRepository(Config config)
|
||||
: Repository(std::move(config)), root(std::filesystem::path(this->config.getStr("repo"))),
|
||||
writeCacheMax(config.getInt("repo-target") * 1024 * 1024) {}
|
||||
|
||||
bool FileRepository::exists() {
|
||||
return std::filesystem::is_directory(root) && std::filesystem::exists(root / "info");
|
||||
}
|
||||
bool FileRepository::exists() { return std::filesystem::is_directory(root) && std::filesystem::exists(root / "info"); }
|
||||
|
||||
bool FileRepository::flush() {
|
||||
flushWriteCache(std::unique_lock(writeCacheLock));
|
||||
@@ -31,14 +31,20 @@ bool FileRepository::open() {
|
||||
std::swap(config, readConf);
|
||||
config.merge(readConf);
|
||||
|
||||
if (config.getStr("compression") != "none") filters.addFilter(FilterFactory::makeFilter(config.getStr("compression"), config));
|
||||
if (config.getStr("encryption") != "none") filters.addFilter(FilterFactory::makeFilter(config.getStr("encryption"), config));
|
||||
if (config.getStr("compression") != "none")
|
||||
filters.addFilter(FilterFactory::makeFilter(config.getStr("compression"), config));
|
||||
if (config.getStr("encryption") != "none")
|
||||
filters.addFilter(FilterFactory::makeFilter(config.getStr("encryption"), config));
filters.addFilter(FilterFactory::makeFilter("crc", config));

ready = true;
try {
std::tie(maxFileId, offsetIndex) = Serialize::deserialize<std::pair<decltype(maxFileId), decltype(offsetIndex)>>(filters.filterRead(readFile(root / "offsets")));
std::tie(keyIndex, largestUnusedId) = Serialize::deserialize<std::pair<decltype(keyIndex), decltype(largestUnusedId)>>(filters.filterRead(readFile(root / "index")));
std::tie(maxFileId, offsetIndex) =
Serialize::deserialize<std::pair<decltype(maxFileId), decltype(offsetIndex)>>(
filters.filterRead(readFile(root / "offsets")));
std::tie(keyIndex, largestUnusedId) =
Serialize::deserialize<std::pair<decltype(keyIndex), decltype(largestUnusedId)>>(
filters.filterRead(readFile(root / "index")));
} catch (const std::exception &e) {
ready = false;
throw;
@@ -56,8 +62,10 @@ bool FileRepository::init() {

writeFile(root / "info", CheckFilter::filterWriteStatic(Serialize::serialize(config)));

if (config.getStr("compression") != "none") filters.addFilter(FilterFactory::makeFilter(config.getStr("compression"), config));
if (config.getStr("encryption") != "none") filters.addFilter(FilterFactory::makeFilter(config.getStr("encryption"), config));
if (config.getStr("compression") != "none")
filters.addFilter(FilterFactory::makeFilter(config.getStr("compression"), config));
if (config.getStr("encryption") != "none")
filters.addFilter(FilterFactory::makeFilter(config.getStr("encryption"), config));
filters.addFilter(FilterFactory::makeFilter("crc", config));

ready = true;
@@ -78,8 +86,7 @@ std::vector<char> FileRepository::getObject(Object::idType id) const {
if (!ready) throw Exception("Tried working with uninitialized repo!");

std::unique_lock lock(repoLock);
if (offsetIndex.count(id) == 0)
throw Exception("Object with id " + std::to_string(id) + " doesn't exist!");
if (offsetIndex.count(id) == 0) throw Exception("Object with id " + std::to_string(id) + " doesn't exist!");
auto entry = offsetIndex.at(id);
lock.unlock();

@@ -95,9 +102,7 @@ bool FileRepository::writeObject(const Object &obj) {
writeCache[obj.id] = std::move(tmp);

// If we have reached the target file size, flush the cache
if (writeCacheSize >= writeCacheMax) {
flushWriteCache(std::move(lockW));
}
if (writeCacheSize >= writeCacheMax) { flushWriteCache(std::move(lockW)); }
}
return true;
}
@@ -149,9 +154,10 @@ bool FileRepository::deleteObject(const Object &obj) {
throw Exception("Deletion not implemented!");
}

std::vector<char> FileRepository::readFile(const std::filesystem::path &file, unsigned long long offset, unsigned long long size) const {
if (size > absoluteMaxFileLimit) throw Exception("Tried to read " + std::to_string(size) +
" bytes from " + file.u8string() +
std::vector<char> FileRepository::readFile(const std::filesystem::path &file, unsigned long long offset,
unsigned long long size) const {
if (size > absoluteMaxFileLimit)
throw Exception("Tried to read " + std::to_string(size) + " bytes from " + file.u8string() +
" which is more than absoluteMaxFileLimit");

std::ifstream ifstream(file, std::ios::binary | std::ios::in);
@@ -159,7 +165,8 @@ std::vector<char> FileRepository::readFile(const std::filesystem::path &file, un

std::vector<char> buf(size);

if (ifstream.rdbuf()->pubseekpos(offset) == std::streampos(std::streamoff(-1))) throw Exception("Unexpected end of file " + file.u8string());
if (ifstream.rdbuf()->pubseekpos(offset) == std::streampos(std::streamoff(-1)))
throw Exception("Unexpected end of file " + file.u8string());
if (ifstream.rdbuf()->sgetn(buf.data(), size) != size) throw Exception("Unexpected end of file " + file.u8string());

return buf;
@@ -195,8 +202,7 @@ std::vector<std::pair<std::string, Object::idType>> FileRepository::getObjects(O
std::lock_guard lock(repoLock);
std::vector<std::pair<std::string, Object::idType>> out;
if (keyIndex.count(type) == 0) return {};
for (auto const &i: keyIndex.at(type))
out.emplace_back(i);
for (auto const &i: keyIndex.at(type)) out.emplace_back(i);
return out;
}

@@ -211,11 +217,11 @@ Object::idType FileRepository::getId() {
return largestUnusedId++;
}

FileRepository::OffsetEntry::OffsetEntry(std::vector<char, std::allocator<char>>::const_iterator &in, const std::vector<char, std::allocator<char>>::const_iterator &end)
FileRepository::OffsetEntry::OffsetEntry(std::vector<char, std::allocator<char>>::const_iterator &in,
const std::vector<char, std::allocator<char>>::const_iterator &end)
: fileId(Serialize::deserialize<decltype(fileId)>(in, end)),
offset(Serialize::deserialize<decltype(offset)>(in, end)),
length(Serialize::deserialize<decltype(length)>(in, end)) {
}
length(Serialize::deserialize<decltype(length)>(in, end)) {}

void FileRepository::OffsetEntry::serialize(std::vector<char> &out) const {
Serialize::serialize(fileId, out);
@@ -223,5 +229,6 @@ void FileRepository::OffsetEntry::serialize(std::vector<char> &out) const {
Serialize::serialize(length, out);
}

FileRepository::OffsetEntry::OffsetEntry(unsigned long long int fileId, unsigned long long int offset, unsigned long long int length)
FileRepository::OffsetEntry::OffsetEntry(unsigned long long int fileId, unsigned long long int offset,
unsigned long long int length)
: fileId(fileId), offset(offset), length(length) {}

@@ -9,9 +9,7 @@
Object::Object(idType id, ObjectType type) : id(id), type(type) {}

Object::Object(std::vector<char>::const_iterator &in, const std::vector<char>::const_iterator &end)
: id(Serialize::deserialize<idType>(in, end)),
type(Serialize::deserialize<ObjectType>(in, end)) {
}
: id(Serialize::deserialize<idType>(in, end)), type(Serialize::deserialize<ObjectType>(in, end)) {}

void Object::serialize(std::vector<char> &out) const {
Serialize::serialize(id, out);

@@ -7,6 +7,4 @@ Repository::~Repository() = default;

Repository::Repository(Config config) : config(std::move(config)) {}

const Config &Repository::getConfig() const {
return config;
}
const Config &Repository::getConfig() const { return config; }

@@ -11,8 +11,7 @@ Archive::Archive(Object::idType id, std::string name, unsigned long long mtime,
: Object(id, ObjectType::Archive), name(name), mtime(mtime), files(files) {}

Archive::Archive(std::vector<char>::const_iterator &in, const std::vector<char>::const_iterator &end)
: Object(in, end),
name(Serialize::deserialize<std::string>(in, end)),
: Object(in, end), name(Serialize::deserialize<std::string>(in, end)),
mtime(Serialize::deserialize<unsigned long long>(in, end)),
files(Serialize::deserialize<std::remove_const<decltype(files)>::type>(in, end)) {
if (type != ObjectType::Archive) throw Exception("Type mismatch for Archive!");
@@ -28,6 +27,4 @@ void Archive::serialize(std::vector<char> &out) const {
Serialize::serialize(files.size(), out);
}

std::string Archive::getKey() const {
return name;
}
std::string Archive::getKey() const { return name; }

@@ -7,11 +7,11 @@
#include "Exception.h"
#include "Serialize.h"

Chunk::Chunk(idType id, std::string SHA, std::vector<char> data) : Object(id, ObjectType::Chunk), data(std::move(data)), SHA(std::move(SHA)), length(this->data.size()) {}
Chunk::Chunk(idType id, std::string SHA, std::vector<char> data)
: Object(id, ObjectType::Chunk), data(std::move(data)), SHA(std::move(SHA)), length(this->data.size()) {}

Chunk::Chunk(std::vector<char>::const_iterator &in, const std::vector<char>::const_iterator &end)
: Object(in, end),
SHA(Serialize::deserialize<std::remove_const<decltype(SHA)>::type>(in, end)),
: Object(in, end), SHA(Serialize::deserialize<std::remove_const<decltype(SHA)>::type>(in, end)),
data(Serialize::deserialize<std::remove_const<decltype(data)>::type>(in, end)),
length(Serialize::deserialize<std::remove_const<decltype(length)>::type>(in, end)) {
if (type != ObjectType::Chunk) throw Exception("Type mismatch for Chunk!");
@@ -25,6 +25,4 @@ void Chunk::serialize(std::vector<char> &out) const {
Serialize::serialize(length, out);
}

std::string Chunk::getKey() const {
return SHA;
}
std::string Chunk::getKey() const { return SHA; }

@@ -11,12 +11,13 @@
#include "Exception.h"
#include "Serialize.h"

File::File(Object::idType id, std::string name, unsigned long long bytes, unsigned long long mtime, std::string SHA, std::map<size_t, idType> chunks, Type fileType)
: Object(id, ObjectType::File), name(name), bytes(bytes), mtime(mtime), SHA(SHA), fileType(fileType), chunks(std::move(chunks)) {}
File::File(Object::idType id, std::string name, unsigned long long bytes, unsigned long long mtime, std::string SHA,
std::map<size_t, idType> chunks, Type fileType)
: Object(id, ObjectType::File), name(name), bytes(bytes), mtime(mtime), SHA(SHA), fileType(fileType),
chunks(std::move(chunks)) {}

File::File(std::vector<char>::const_iterator &in, const std::vector<char>::const_iterator &end)
: Object(in, end),
name(Serialize::deserialize<std::string>(in, end)),
: Object(in, end), name(Serialize::deserialize<std::string>(in, end)),
bytes(Serialize::deserialize<unsigned long long>(in, end)),
mtime(Serialize::deserialize<unsigned long long>(in, end)),
SHA(Serialize::deserialize<std::remove_const<decltype(SHA)>::type>(in, end)),
@@ -35,9 +36,7 @@ void File::serialize(std::vector<char> &out) const {
Serialize::serialize(chunks, out);
}

std::string File::getKey() const {
return name;
}
std::string File::getKey() const { return name; }

File::Type File::getFileType(const std::filesystem::path &p) {
if (std::filesystem::is_symlink(p)) return Type::Symlink;
@@ -49,9 +48,7 @@ File::Type File::getFileType(const std::filesystem::path &p) {
std::vector<char> File::getFileContents(const std::filesystem::path &p) {
auto type = getFileType(p);
if (type == Type::Normal) throw Exception(p.u8string() + " is a normal file!");
if (type == Type::Directory) {
return {};
}
if (type == Type::Directory) { return {}; }
if (type == Type::Symlink) {
auto target = std::filesystem::read_symlink(p).u8string();
std::vector<char> target_null_term = {target.begin(), target.end()};
@@ -64,7 +61,9 @@ std::vector<char> File::getFileContents(const std::filesystem::path &p) {
unsigned long long File::getFileMtime(const std::filesystem::path &p) {
auto type = getFileType(p);
if (type == Type::Normal || type == Type::Directory)
return static_cast<const unsigned long long int>(std::chrono::duration_cast<std::chrono::seconds>(std::filesystem::last_write_time(p).time_since_epoch()).count());
return static_cast<const unsigned long long int>(
std::chrono::duration_cast<std::chrono::seconds>(std::filesystem::last_write_time(p).time_since_epoch())
.count());
else if (type == Type::Symlink) {
auto path = p.u8string();
struct stat sb;

@@ -6,13 +6,12 @@

#include "Serialize.h"

FileBuffer::FileBuffer(const Repository *repo, Object::idType fileId) : repo(repo), file(Serialize::deserialize<File>(repo->getObject(fileId))), chunksQueue() {
FileBuffer::FileBuffer(const Repository *repo, Object::idType fileId)
: repo(repo), file(Serialize::deserialize<File>(repo->getObject(fileId))), chunksQueue() {
for (auto const &id: file.chunks) chunksQueue.emplace(id.second);
};

int FileBuffer::sync() {
return 0;
}
int FileBuffer::sync() { return 0; }

std::streamsize FileBuffer::xsgetn(char *s, std::streamsize countr) {
if (underflow() == std::char_traits<char>::eof()) return 0;
@@ -28,8 +27,7 @@ std::streamsize FileBuffer::xsgetn(char *s, std::streamsize countr) {

int FileBuffer::uflow() {
auto out = underflow();
if (out != traits_type::eof())
curGetBufPos++;
if (out != traits_type::eof()) curGetBufPos++;
return out;
}

@@ -44,8 +42,7 @@ int FileBuffer::underflow() {
}
}

if (!getBuf.empty())
return traits_type::to_int_type(getBuf[curGetBufPos]);
if (!getBuf.empty()) return traits_type::to_int_type(getBuf[curGetBufPos]);
else
return traits_type::eof();
}

@@ -64,11 +64,7 @@ public:

using serializable = std::true_type;

enum class KeyType {
STRING,
INT,
LIST
};
enum class KeyType { STRING, INT, LIST };

/// Struct to record key options
struct keyopts {
@@ -79,7 +75,9 @@ public:
};

/// Used for printing help
const static inline std::unordered_map<KeyType, std::string> KeyTypeToStr{{KeyType::STRING, "string"}, {KeyType::INT, "number"}, {KeyType::LIST, "comma-separated list"}};
const static inline std::unordered_map<KeyType, std::string> KeyTypeToStr{{KeyType::STRING, "string"},
{KeyType::INT, "number"},
{KeyType::LIST, "comma-separated list"}};

/// Default values and their metadata
const static inline std::unordered_map<std::string, keyopts> keys{
@@ -98,7 +96,9 @@ public:
{"chunker", {"buzhash", KeyType::STRING, true, "Chunker to use (const, buzhash)"}},
{"chunker-min", {"256", KeyType::INT, true, "Min chunk size in KB"}},
{"chunker-max", {"4096", KeyType::INT, true, "Max chunk size in KB"}},
{"chunker-mask", {"20", KeyType::INT, true, "Chunker hash bit mask (mask of n bits results in average chunk size of 2^n bytes)"}},
{"chunker-mask",
{"20", KeyType::INT, true,
"Chunker hash bit mask (mask of n bits results in average chunk size of 2^n bytes)"}},
{"repo-target", {"128", KeyType::INT, true, "Target size of files for FileRepository"}},
{"progress", {"pretty", KeyType::STRING, false, "How to print progress (simple, pretty, none)"}},
{"verbose", {"1", KeyType::INT, false, "Message verbosity (0 - error, 1 - info, -1 - quiet)"}},

@@ -22,7 +22,9 @@ public:
/// \param out Function to call for output
/// \param format Format of the progress string, vector of strings or functions that return strings
/// \param conf Config, used to specify format (`pretty` for line rewriting, `simple` for normal line printing, or `none`)
Progress(std::function<void(std::string, int)> out, std::vector<std::variant<std::function<std::string()>, std::string>> format, const Config &conf, int level = 1);
Progress(std::function<void(std::string, int)> out,
std::vector<std::variant<std::function<std::string()>, std::string>> format, const Config &conf,
int level = 1);

Progress &operator=(Progress rhs) = delete;
Progress(const Progress &orig) = delete;

@@ -40,7 +40,8 @@ namespace Serialize {
struct is_pair : std::false_type {};

template<typename P>
struct is_pair<P, std::void_t<decltype(std::declval<P>().first)>, std::void_t<decltype(std::declval<P>().second)>> : std::true_type {};
struct is_pair<P, std::void_t<decltype(std::declval<P>().first)>, std::void_t<decltype(std::declval<P>().second)>>
: std::true_type {};

template<typename, typename, typename = void>
struct has_emplace_back : std::false_type {};
@@ -104,22 +105,19 @@ namespace Serialize {
} else if constexpr (std::is_enum<T>::value) {
// If the object is an enum, deserialize an int and cast it to the enum
auto tmp = deserialize<uint32_t>(in, end);
if (tmp >= 0 && tmp < static_cast<uint32_t>(T::END))
return static_cast<T>(tmp);
if (tmp >= 0 && tmp < static_cast<uint32_t>(T::END)) return static_cast<T>(tmp);
else
throw Exception("Enum out of range!");
} else if constexpr (sizeof(T) == 1) {
// If it's a single byte, just copy it
if (std::distance(in, end) < sizeof(T))
throw Exception("Unexpected end of object!");
if (std::distance(in, end) < sizeof(T)) throw Exception("Unexpected end of object!");
return *(in++);
} else if constexpr (std::is_integral<T>::value) {
uint64_t tmp;
static_assert(sizeof(tmp) == 8);

// If the object is a number, copy it byte-by-byte
if (std::distance(in, end) < sizeof(tmp))
throw Exception("Unexpected end of object!");
if (std::distance(in, end) < sizeof(tmp)) throw Exception("Unexpected end of object!");

std::copy(in, in + sizeof(tmp), reinterpret_cast<char *>(&tmp));
in += sizeof(tmp);
@@ -134,8 +132,7 @@ namespace Serialize {
T out;
if constexpr (sizeof(typename T::value_type) == 1) {
// Optimization for char vectors
if (std::distance(in, end) < size)
throw Exception("Unexpected end of object!");
if (std::distance(in, end) < size) throw Exception("Unexpected end of object!");
out.insert(out.end(), in, in + size);
in += size;
} else
@@ -143,8 +140,7 @@ namespace Serialize {
using V = typename T::value_type;
V v = deserialize<V>(in, end);
// Try either emplace_back or emplace if it doesn't exist
if constexpr (has_emplace_back<T, V>::value)
out.emplace_back(std::move(v));
if constexpr (has_emplace_back<T, V>::value) out.emplace_back(std::move(v));
else
out.emplace(std::move(v));
}
@@ -175,7 +171,8 @@ namespace Serialize {
// If the object is a number, copy it byte-by-byte
uint64_t tmp = htobe64(static_cast<uint64_t>(what));
static_assert(sizeof(tmp) == 8);
out.insert(out.end(), (reinterpret_cast<const char *>(&tmp)), (reinterpret_cast<const char *>(&tmp) + sizeof(tmp)));
out.insert(out.end(), (reinterpret_cast<const char *>(&tmp)),
(reinterpret_cast<const char *>(&tmp) + sizeof(tmp)));
} else {
// Otherwise we treat it as a container, in format of <number of elements>b<elements>e
serialize(what.size(), out);
@@ -184,9 +181,7 @@ namespace Serialize {
// Optimization for char vectors
out.insert(out.end(), what.begin(), what.end());
} else
for (auto const &i: what) {
serialize(i, out);
}
for (auto const &i: what) { serialize(i, out); }
serialize('e', out);
}
}

@@ -20,9 +20,7 @@ Config &Config::add(const std::string &k, const std::string &v) {
case KeyType::INT:
try {
std::stoi(v);
} catch (...) {
throw Exception("Can't convert " + k + " to integer!");
}
} catch (...) { throw Exception("Can't convert " + k + " to integer!"); }
break;
case KeyType::LIST:
break;
@@ -32,17 +30,14 @@ Config &Config::add(const std::string &k, const std::string &v) {
return *this;
}

int Config::getInt(const std::string &k) const {
return std::stoi(getStr(k));
}
int Config::getInt(const std::string &k) const { return std::stoi(getStr(k)); }

std::vector<std::string> Config::getList(const std::string &k) const {
std::vector<std::string> out;
std::string next;
std::stringstream inss(getStr(k));
while (std::getline(inss, next, ',')) {
if (next != "")
out.emplace_back(next);
if (next != "") out.emplace_back(next);
}
return out;
}
@@ -54,28 +49,23 @@ std::string Config::getStr(const std::string &k) const {
throw Exception("Option " + k + " not specified and no default value exists!");
}

bool Config::exists(const std::string &k) const {
return (data.count(k) > 0) || (keys.at(k).defaultval.has_value());
}
bool Config::exists(const std::string &k) const { return (data.count(k) > 0) || (keys.at(k).defaultval.has_value()); }

Config::Config() = default;

Config::Config(std::vector<char, std::allocator<char>>::const_iterator &in, const std::vector<char, std::allocator<char>>::const_iterator &end) {
Config::Config(std::vector<char, std::allocator<char>>::const_iterator &in,
const std::vector<char, std::allocator<char>>::const_iterator &end) {
data = Serialize::deserialize<decltype(data)>(in, end);
}

void Config::serialize(std::vector<char> &out) const {
std::vector<decltype(data)::value_type> temp;
for (const auto &d: data) {
if (keys.at(d.first).remember) {
temp.emplace_back(d);
}
if (keys.at(d.first).remember) { temp.emplace_back(d); }
}
Serialize::serialize(temp, out);
}

void Config::merge(const Config &config) {
for (const auto &d: config.data) {
add(d.first, d.second);
}
for (const auto &d: config.data) { add(d.first, d.second); }
}

@@ -23,8 +23,7 @@ std::string Exception::getStacktrace() {

if (strings != nullptr) {
out << "Stacktrace:" << std::endl;
for (int i = 0; i < n; i++)
out << strings[i] << std::endl;
for (int i = 0; i < n; i++) out << strings[i] << std::endl;
}

free(strings);

@@ -4,8 +4,7 @@

#include "Logger.h"

Logger::Logger(int level, std::ostream &out) : loglevel(level), out(out) {
}
Logger::Logger(int level, std::ostream &out) : loglevel(level), out(out) {}

void Logger::write(const std::string &what, int whatlevel) {
if (whatlevel <= loglevel) {
@@ -14,6 +13,4 @@ void Logger::write(const std::string &what, int whatlevel) {
}
}

void Logger::setLevel(int level) {
loglevel = level;
}
void Logger::setLevel(int level) { loglevel = level; }

@@ -7,7 +7,10 @@
#include <sstream>
#include <utility>

Progress::Progress(std::function<void(std::string, int)> out, std::vector<std::variant<std::function<std::string()>, std::string>> format, const Config &conf, int level) : format(std::move(format)), out(std::move(out)), type(conf.getStr("progress")), progresslevel(level) {
Progress::Progress(std::function<void(std::string, int)> out,
std::vector<std::variant<std::function<std::string()>, std::string>> format, const Config &conf,
int level)
: format(std::move(format)), out(std::move(out)), type(conf.getStr("progress")), progresslevel(level) {
if (type != "none") {
this->out("\n\n", level);
thread = std::thread(&Progress::showProgress, this);
@@ -16,16 +19,13 @@ Progress::Progress(std::function<void(std::string, int)> out, std::vector<std::v

Progress::~Progress() {
stop = true;
if (thread.joinable())
thread.join();
if (thread.joinable()) thread.join();
}

void Progress::showProgress() {
while (!stop) {
std::this_thread::sleep_for(std::chrono::milliseconds(100));
{
update(std::unique_lock(refreshM));
}
{ update(std::unique_lock(refreshM)); }
}
}

@@ -38,8 +38,7 @@ void Progress::print(const std::string &s, int level) {
void Progress::update(std::unique_lock<std::mutex> &&lock) {
std::stringstream outs;

if (type == "pretty")
outs << "\r\33[2K ";
if (type == "pretty") outs << "\r\33[2K ";

for (auto const &l: format) {
if (std::holds_alternative<std::string>(l)) outs << std::get<std::string>(l);
@@ -47,8 +46,7 @@ void Progress::update(std::unique_lock<std::mutex> &&lock) {
outs << std::get<std::function<std::string()>>(l)();
}

if (type == "pretty")
outs << "\r";
if (type == "pretty") outs << "\r";
else
outs << "\n";


@@ -5,8 +5,7 @@
#include "RunningAverage.h"

RunningAverage::RunningAverage(std::function<unsigned long long int()> getFunc, int max, int ms)
: getFunc(std::move(getFunc)), max(max), ms(ms), thread(&RunningAverage::loop, this) {
}
: getFunc(std::move(getFunc)), max(max), ms(ms), thread(&RunningAverage::loop, this) {}

void RunningAverage::loop() {
while (!stop) {

@@ -12,9 +12,6 @@ RunningDiffAverage::RunningDiffAverage(std::function<unsigned long long int()> g
prev = cur;
return calc;
},
max, ms) {
}
max, ms) {}

unsigned long long RunningDiffAverage::get() {
return runningAverage.get();
}
unsigned long long RunningDiffAverage::get() { return runningAverage.get(); }

@@ -3,10 +3,6 @@
//
#include "Signals.h"

void Signals::setup() {
signal(SIGINT, handle);
}
void Signals::setup() { signal(SIGINT, handle); }

void Signals::handle(int signum) {
shouldQuit = true;
}
void Signals::handle(int signum) { shouldQuit = true; }

@@ -10,9 +10,7 @@ ThreadPool::ThreadPool(std::function<void(std::string)> onError, std::size_t wor
ThreadPool::~ThreadPool() {
stop = true;
somethingNew.notify_all();
for (auto &t: threads) {
t.join();
}
for (auto &t: threads) { t.join(); }
}

void ThreadPool::push(std::function<void()> &&func) {
@@ -48,9 +46,7 @@ void ThreadPool::loop() {

try {
task();
} catch (std::exception &e) {
onError(std::string(e.what()));
}
} catch (std::exception &e) { onError(std::string(e.what())); }

{
std::lock_guard qLock(queueLock);

@@ -7,7 +7,12 @@

// Demonstrate some basic assertions.
TEST(BuzhashTest, SimpleTest) {
std::string loremipsum = "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.";
std::string loremipsum =
"Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et "
"dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip "
"ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu "
"fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt "
"mollit anim id est laborum.";

for (int i = 15; i < 49; i++) {
Buzhash b(i);

@@ -41,12 +41,12 @@ TEST(CryptoTests, AES) {

TEST(CryptoTests, SHA) {
std::vector<char> data{'h', 'e', 'l', 'l', 'o'};
std::array<unsigned char, 32> excepted{0x2c, 0xf2, 0x4d, 0xba, 0x5f, 0xb0, 0xa3, 0x0e, 0x26, 0xe8, 0x3b, 0x2a, 0xc5, 0xb9, 0xe2, 0x9e, 0x1b, 0x16, 0x1e, 0x5c, 0x1f, 0xa7, 0x42, 0x5e, 0x73, 0x04, 0x33, 0x62, 0x93, 0x8b, 0x98, 0x24};
std::array<unsigned char, 32> excepted{0x2c, 0xf2, 0x4d, 0xba, 0x5f, 0xb0, 0xa3, 0x0e, 0x26, 0xe8, 0x3b,
0x2a, 0xc5, 0xb9, 0xe2, 0x9e, 0x1b, 0x16, 0x1e, 0x5c, 0x1f, 0xa7,
0x42, 0x5e, 0x73, 0x04, 0x33, 0x62, 0x93, 0x8b, 0x98, 0x24};

auto out = SHA::calculate(data);

EXPECT_EQ(out.size(), 32);
for (int i = 0; i < out.size(); i++) {
EXPECT_EQ(static_cast<uint8_t>(out[i]), excepted[i]);
}
for (int i = 0; i < out.size(); i++) { EXPECT_EQ(static_cast<uint8_t>(out[i]), excepted[i]); }
}

@@ -27,7 +27,8 @@ TEST(FullTest, Simple) {
{
std::filesystem::create_directories("Simple/testfrom");
for (int i = 0; i < 257; i++) {
std::ofstream o(std::filesystem::path("Simple/testfrom") / ("f" + std::to_string(i)), std::ios::binary | std::ios::out | std::ios::trunc);
std::ofstream o(std::filesystem::path("Simple/testfrom") / ("f" + std::to_string(i)),
std::ios::binary | std::ios::out | std::ios::trunc);
for (int j = 0; j < i; j++) o.put(static_cast<char>(j % 256));
}
std::filesystem::create_directories("Simple/testfrom/testdir");
@@ -67,7 +68,8 @@ TEST(FullTest, Simple) {
}

for (int i = 0; i < 257; i++) {
std::ifstream o(std::filesystem::path("Simple/testtores") / ("f" + std::to_string(i)), std::ios::binary | std::ios::in);
std::ifstream o(std::filesystem::path("Simple/testtores") / ("f" + std::to_string(i)),
std::ios::binary | std::ios::in);
try {
EXPECT_EQ(o.is_open(), true);
for (int j = 0; j < i; j++) {
@@ -89,7 +91,8 @@ TEST(FullTest, SimpleWithIgnore) {
{
std::filesystem::create_directories("SimpleWithIgnore/testfrom");
for (int i = 0; i < 257; i++) {
std::ofstream o(std::filesystem::path("SimpleWithIgnore/testfrom") / ("f" + std::to_string(i)), std::ios::binary | std::ios::out | std::ios::trunc);
std::ofstream o(std::filesystem::path("SimpleWithIgnore/testfrom") / ("f" + std::to_string(i)),
std::ios::binary | std::ios::out | std::ios::trunc);
for (int j = 0; j < i; j++) o.put(static_cast<char>(j % 256));
}
std::filesystem::create_directories("SimpleWithIgnore/testfrom/testdir");
@@ -134,7 +137,9 @@ TEST(FullTest, SimpleWithIgnore) {
}
{
Config conf;
conf.add("repo", "SimpleWithIgnore/testto").add("aid", std::to_string(aid)).add("to", "SimpleWithIgnore/testtores");
conf.add("repo", "SimpleWithIgnore/testto")
.add("aid", std::to_string(aid))
.add("to", "SimpleWithIgnore/testtores");

auto repo = std::make_unique<FileRepository>(conf);
repo->open();
@@ -148,7 +153,8 @@ TEST(FullTest, SimpleWithIgnore) {
EXPECT_EQ(std::filesystem::is_directory("SimpleWithIgnore/testtores/testdir"), true);

for (int i = 0; i < 257; i++) {
std::ifstream o(std::filesystem::path("SimpleWithIgnore/testtores") / ("f" + std::to_string(i)), std::ios::binary | std::ios::in);
std::ifstream o(std::filesystem::path("SimpleWithIgnore/testtores") / ("f" + std::to_string(i)),
std::ios::binary | std::ios::in);
EXPECT_EQ(o.is_open(), true);
for (int j = 0; j < i; j++) {
char c;
@@ -196,13 +202,16 @@ TEST(FullTest, SimpleWithCompress) {
{
std::filesystem::create_directories("SimpleWithCompress/testfrom");
for (int i = 0; i < 257; i++) {
std::ofstream o(std::filesystem::path("SimpleWithCompress/testfrom") / ("f" + std::to_string(i)), std::ios::binary | std::ios::out | std::ios::trunc);
std::ofstream o(std::filesystem::path("SimpleWithCompress/testfrom") / ("f" + std::to_string(i)),
std::ios::binary | std::ios::out | std::ios::trunc);
for (int j = 0; j < i; j++) o.put(static_cast<char>(j % 256));
}
std::filesystem::create_directories("SimpleWithCompress/testfrom/testdir");

Config conf;
conf.add("repo", "SimpleWithCompress/testto").add("compression", "zlib").add("from", "SimpleWithCompress/testfrom");
conf.add("repo", "SimpleWithCompress/testto")
.add("compression", "zlib")
.add("from", "SimpleWithCompress/testfrom");

auto repo = std::make_unique<FileRepository>(conf);
repo->init();
@@ -215,7 +224,9 @@ TEST(FullTest, SimpleWithCompress) {
}
{
Config conf;
conf.add("repo", "SimpleWithCompress/testto").add("aid", std::to_string(aid)).add("to", "SimpleWithCompress/testtores");
conf.add("repo", "SimpleWithCompress/testto")
.add("aid", std::to_string(aid))
.add("to", "SimpleWithCompress/testtores");

auto repo = std::make_unique<FileRepository>(conf);
repo->open();
@@ -229,7 +240,8 @@ TEST(FullTest, SimpleWithCompress) {
EXPECT_EQ(std::filesystem::is_directory("SimpleWithCompress/testtores/testdir"), true);

for (int i = 0; i < 257; i++) {
std::ifstream o(std::filesystem::path("SimpleWithCompress/testtores") / ("f" + std::to_string(i)), std::ios::binary | std::ios::in);
std::ifstream o(std::filesystem::path("SimpleWithCompress/testtores") / ("f" + std::to_string(i)),
std::ios::binary | std::ios::in);
EXPECT_EQ(o.is_open(), true);
for (int j = 0; j < i; j++) {
char c;
@@ -246,13 +258,19 @@ TEST(FullTest, SimpleWithCompEnd) {
{
std::filesystem::create_directories("SimpleWithCompEnd/testfrom");
for (int i = 0; i < 257; i++) {
std::ofstream o(std::filesystem::path("SimpleWithCompEnd/testfrom") / ("f" + std::to_string(i)), std::ios::binary | std::ios::out | std::ios::trunc);
std::ofstream o(std::filesystem::path("SimpleWithCompEnd/testfrom") / ("f" + std::to_string(i)),
std::ios::binary | std::ios::out | std::ios::trunc);
for (int j = 0; j < i; j++) o.put(static_cast<char>(j % 256));
}
std::filesystem::create_directories("SimpleWithCompEnd/testfrom/testdir");

Config conf;
conf.add("repo", "SimpleWithCompEnd/testto").add("compression", "zlib").add("from", "SimpleWithCompEnd/testfrom").add("encryption", "aes").add("password", "testp").add("salt", "tests");
conf.add("repo", "SimpleWithCompEnd/testto")
.add("compression", "zlib")
.add("from", "SimpleWithCompEnd/testfrom")
.add("encryption", "aes")
.add("password", "testp")
.add("salt", "tests");

auto repo = std::make_unique<FileRepository>(conf);
repo->init();
@@ -279,7 +297,10 @@ TEST(FullTest, SimpleWithCompEnd) {
}
{
Config conf;
conf.add("repo", "SimpleWithCompEnd/testto").add("password", "testp").add("aid", std::to_string(aid)).add("to", "SimpleWithCompEnd/testtores");
conf.add("repo", "SimpleWithCompEnd/testto")
.add("password", "testp")
.add("aid", std::to_string(aid))
.add("to", "SimpleWithCompEnd/testtores");

auto repo = std::make_unique<FileRepository>(conf);
repo->open();
@@ -294,7 +315,8 @@ TEST(FullTest, SimpleWithCompEnd) {
EXPECT_EQ(std::filesystem::is_directory("SimpleWithCompEnd/testtores/testdir"), true);

for (int i = 0; i < 257; i++) {
std::ifstream o(std::filesystem::path("SimpleWithCompEnd/testtores") / ("f" + std::to_string(i)), std::ios::binary | std::ios::in);
std::ifstream o(std::filesystem::path("SimpleWithCompEnd/testtores") / ("f" + std::to_string(i)),
std::ios::binary | std::ios::in);
EXPECT_EQ(o.is_open(), true);
for (int j = 0; j < i; j++) {
char c;
@@ -310,16 +332,40 @@ TEST(FullTest, Fuzz) {
srand(time(nullptr));
std::vector<Config> confs;
Config conf;
conf.add("repo", "Fuzz/testto").add("compression", "none").add("from", "Fuzz/testfrom").add("encryption", "none").add("password", "testp").add("salt", "tests").add("progress", "none");
conf.add("repo", "Fuzz/testto")
.add("compression", "none")
.add("from", "Fuzz/testfrom")
.add("encryption", "none")
.add("password", "testp")
.add("salt", "tests")
.add("progress", "none");
confs.emplace_back(conf);
conf = Config();
conf.add("repo", "Fuzz/testto").add("compression", "zlib").add("from", "Fuzz/testfrom").add("encryption", "none").add("password", "testp").add("salt", "tests").add("progress", "none");
conf.add("repo", "Fuzz/testto")
.add("compression", "zlib")
.add("from", "Fuzz/testfrom")
.add("encryption", "none")
.add("password", "testp")
.add("salt", "tests")
.add("progress", "none");
confs.emplace_back(conf);
conf = Config();
conf.add("repo", "Fuzz/testto").add("compression", "none").add("from", "Fuzz/testfrom").add("encryption", "zlib").add("password", "testp").add("salt", "tests").add("progress", "none");
conf.add("repo", "Fuzz/testto")
.add("compression", "none")
.add("from", "Fuzz/testfrom")
.add("encryption", "zlib")
.add("password", "testp")
.add("salt", "tests")
.add("progress", "none");
confs.emplace_back(conf);
conf = Config();
conf.add("repo", "Fuzz/testto").add("compression", "zlib").add("from", "Fuzz/testfrom").add("encryption", "aes").add("password", "testp").add("salt", "tests").add("progress", "none");
conf.add("repo", "Fuzz/testto")
.add("compression", "zlib")
.add("from", "Fuzz/testfrom")
.add("encryption", "aes")
.add("password", "testp")
.add("salt", "tests")
.add("progress", "none");
confs.emplace_back(conf);

for (auto const &conf: confs) {
@@ -332,7 +378,8 @@ TEST(FullTest, Fuzz) {
{
std::filesystem::create_directories("Fuzz/testfrom");
for (int i = 0; i < 2; i++) {
std::ofstream o(std::filesystem::path("Fuzz/testfrom") / ("f" + std::to_string(i)), std::ios::binary | std::ios::out | std::ios::trunc);
std::ofstream o(std::filesystem::path("Fuzz/testfrom") / ("f" + std::to_string(i)),
std::ios::binary | std::ios::out | std::ios::trunc);
for (int j = 0; j < i; j++) o.put(j % 2);
}

@@ -350,28 +397,28 @@ TEST(FullTest, Fuzz) {

{
if (filetobreak & 0b00000001) {
for (int i = 0; i < cutoff; i++)
randomChange("Fuzz/testto/1");
for (int i = 0; i < cutoff; i++) randomChange("Fuzz/testto/1");
if (cutoff > 5)
std::filesystem::resize_file("Fuzz/testto/1", std::filesystem::file_size("Fuzz/testto/1") - cutoff);
std::filesystem::resize_file("Fuzz/testto/1",
std::filesystem::file_size("Fuzz/testto/1") - cutoff);
}
if (filetobreak & 0b00000010) {
for (int i = 0; i < cutoff; i++)
randomChange("Fuzz/testto/index");
for (int i = 0; i < cutoff; i++) randomChange("Fuzz/testto/index");
if (cutoff > 5)
std::filesystem::resize_file("Fuzz/testto/index", std::filesystem::file_size("Fuzz/testto/index") - cutoff);
std::filesystem::resize_file("Fuzz/testto/index",
std::filesystem::file_size("Fuzz/testto/index") - cutoff);
}
if (filetobreak & 0b00000100) {
for (int i = 0; i < cutoff; i++)
randomChange("Fuzz/testto/offsets");
for (int i = 0; i < cutoff; i++) randomChange("Fuzz/testto/offsets");
if (cutoff > 5)
std::filesystem::resize_file("Fuzz/testto/offsets", std::filesystem::file_size("Fuzz/testto/offsets") - cutoff);
std::filesystem::resize_file("Fuzz/testto/offsets",
std::filesystem::file_size("Fuzz/testto/offsets") - cutoff);
}
if (filetobreak & 0b00001000) {
for (int i = 0; i < cutoff; i++)
randomChange("Fuzz/testto/info");
for (int i = 0; i < cutoff; i++) randomChange("Fuzz/testto/info");
if (cutoff > 5)
std::filesystem::resize_file("Fuzz/testto/info", std::filesystem::file_size("Fuzz/testto/info") - cutoff);
std::filesystem::resize_file("Fuzz/testto/info",
std::filesystem::file_size("Fuzz/testto/info") - cutoff);
}
}

@@ -391,8 +438,7 @@ TEST(FullTest, Fuzz) {
CommandRestore cmd;
cmd.run(Context{&logger, repo.get()});
auto outstr = runnerout.str();
if (outstr.find("Error") == std::string::npos)
ok = false;
if (outstr.find("Error") == std::string::npos) ok = false;
} catch (...) {}
EXPECT_EQ(ok, true);
}

@@ -36,19 +36,11 @@ TEST(Chunk, Deserialize) {

EXPECT_EQ(o1.data.size(), o1e.data.size());
EXPECT_EQ(o2.data.size(), o2e.data.size());
for (int i = 0; i < o1.data.size(); i++) {
EXPECT_EQ(o1.data[i], o1e.data[i]);
}
for (int i = 0; i < o2.data.size(); i++) {
EXPECT_EQ(o2.data[i], o2e.data[i]);
}
for (int i = 0; i < o1.data.size(); i++) { EXPECT_EQ(o1.data[i], o1e.data[i]); }
for (int i = 0; i < o2.data.size(); i++) { EXPECT_EQ(o2.data[i], o2e.data[i]); }

for (int i = 0; i < o1.SHA.size(); i++) {
EXPECT_EQ(o1.SHA[i], o1e.SHA[i]);
}
for (int i = 0; i < o2.SHA.size(); i++) {
EXPECT_EQ(o2.SHA[i], o2e.SHA[i]);
}
for (int i = 0; i < o1.SHA.size(); i++) { EXPECT_EQ(o1.SHA[i], o1e.SHA[i]); }
for (int i = 0; i < o2.SHA.size(); i++) { EXPECT_EQ(o2.SHA[i], o2e.SHA[i]); }
}
}

@@ -57,9 +49,7 @@ TEST(Chunk, Garbage) {
auto eb = e.cbegin();
try {
Chunk o1(eb, e.cend());
} catch (...) {
return;
}
} catch (...) { return; }
FAIL() << "Object constructed with garbage data!";
}

@@ -68,9 +58,7 @@ TEST(Chunk, Garbage2) {
auto eb = e.cbegin();
try {
Chunk o1(eb, e.cend());
} catch (...) {
return;
}
} catch (...) { return; }
FAIL() << "Object constructed with garbage data!";
}

@@ -93,16 +81,13 @@ TEST(Chunk, Garbage3) {
try {
Chunk o1 = Serialize::deserialize<Chunk>(s1);
fail = true;
} catch (...) {
}
} catch (...) {}

try {
Chunk o2 = Serialize::deserialize<Chunk>(s2);
fail = true;
} catch (...) {
}
} catch (...) {}

if (fail)
FAIL() << "Object constructed with garbage data!";
if (fail) FAIL() << "Object constructed with garbage data!";
}
}
@@ -17,8 +17,7 @@ TEST(FileRepository, Deserialize) {
conf.add("repo", "Deserizlize/testrepo");
FileRepository repo(conf);
repo.init();
std::vector<char>
data1{'a', 'b', 'c', 'e'};
std::vector<char> data1{'a', 'b', 'c', 'e'};

std::string o1k(16, '\0');
std::string o2k(16, '\0');
@@ -72,12 +71,8 @@ TEST(FileRepository, Deserialize) {
auto o2ed = o2e.data;
EXPECT_EQ(o1.data.size(), o1e.data.size());
EXPECT_EQ(o2.data.size(), o2e.data.size());
for (int i = 0; i < o1.data.size(); i++) {
EXPECT_EQ(o1.data[i], o1e.data[i]);
}
for (int i = 0; i < o2.data.size(); i++) {
EXPECT_EQ(o2.data[i], o2e.data[i]);
}
for (int i = 0; i < o1.data.size(); i++) { EXPECT_EQ(o1.data[i], o1e.data[i]); }
for (int i = 0; i < o2.data.size(); i++) { EXPECT_EQ(o2.data[i], o2e.data[i]); }
}
}

@@ -99,8 +94,7 @@ TEST(FileRepository, Filters) {
std::string o2k(16, '\0');
o2k[0] = 1;
o2k[1] = 2;
std::vector<char>
data1{'a', 'b', 'c', 'e'};
std::vector<char> data1{'a', 'b', 'c', 'e'};
Chunk o1(666, o1k, data1);
std::vector<char> data2{'q', 'w', 'e', 'r', 'b'};
Chunk o2(777, o2k, data2);
@@ -147,8 +141,7 @@ TEST(FileRepository, Filters) {
err = true;
} catch (...) {}

if (err)
throw Exception("Object constructed with garbage data!");
if (err) throw Exception("Object constructed with garbage data!");
}
{
Config conf;
@@ -193,12 +186,8 @@ TEST(FileRepository, Filters) {
auto o2ed = o2e.data;
EXPECT_EQ(o1.data.size(), o1e.data.size());
EXPECT_EQ(o2.data.size(), o2e.data.size());
for (int i = 0; i < o1.data.size(); i++) {
EXPECT_EQ(o1.data[i], o1e.data[i]);
}
for (int i = 0; i < o2.data.size(); i++) {
EXPECT_EQ(o2.data[i], o2e.data[i]);
}
for (int i = 0; i < o1.data.size(); i++) { EXPECT_EQ(o1.data[i], o1e.data[i]); }
for (int i = 0; i < o2.data.size(); i++) { EXPECT_EQ(o2.data[i], o2e.data[i]); }
}
}

@@ -215,8 +204,7 @@ TEST(FileRepository, IDsDisabled) {
std::string o2k(16, '\0');
o2k[0] = 1;

std::vector<char>
data1{'a', 'b', 'c', 'e'};
std::vector<char> data1{'a', 'b', 'c', 'e'};
Chunk o1(repo.getId(), o1k, data1);
std::vector<char> data2{'q', 'w', 'e', 'r', 'b'};
Chunk o2(repo.getId(), o2k, data2);
@@ -261,12 +249,8 @@ TEST(FileRepository, IDsDisabled) {
auto o2ed = o2e.data;
EXPECT_EQ(o1.data.size(), o1e.data.size());
EXPECT_EQ(o2.data.size(), o2e.data.size());
for (int i = 0; i < o1.data.size(); i++) {
EXPECT_EQ(o1.data[i], o1e.data[i]);
}
for (int i = 0; i < o2.data.size(); i++) {
EXPECT_EQ(o2.data[i], o2e.data[i]);
}
for (int i = 0; i < o1.data.size(); i++) { EXPECT_EQ(o1.data[i], o1e.data[i]); }
for (int i = 0; i < o2.data.size(); i++) { EXPECT_EQ(o2.data[i], o2e.data[i]); }


EXPECT_EQ(repo.getObjectId(Object::ObjectType::Chunk, o1k), 1);
@@ -287,8 +271,7 @@ TEST(FileRepository, IDsDisabled) {

auto id = repo.getId();
EXPECT_EQ(id, 1);
std::vector<char>
data1{'a', 'b', 'c', 'e'};
std::vector<char> data1{'a', 'b', 'c', 'e'};
Chunk o1(id, o2k, data1);
EXPECT_EQ(repo.getId(), 3);
}

@@ -5,13 +5,9 @@
#include "Cleaner.h"

Cleaner::Cleaner(std::vector<std::filesystem::path> toClean) : toClean(std::move(toClean)) {
for (const auto &p: this->toClean) {
std::filesystem::remove_all(p);
}
for (const auto &p: this->toClean) { std::filesystem::remove_all(p); }
}

Cleaner::~Cleaner() {
for (const auto &p: toClean) {
std::filesystem::remove_all(p);
}
for (const auto &p: toClean) { std::filesystem::remove_all(p); }
}
