Compare commits

...

25 Commits

Author SHA1 Message Date
8c1e9fe4bf draft 2024-03-18 23:38:30 +01:00
1dad7d1501 move in filters 2024-03-18 23:08:42 +01:00
0b2040603e getRefs on objects 2024-03-18 22:39:48 +01:00
476904b5aa getObject => getObjectRaw 2024-03-18 22:39:42 +01:00
061e164c6a Make objects final
Might be a slight optimization
2024-03-18 22:39:19 +01:00
e996e93431 switch to c++20 2024-03-18 22:39:10 +01:00
8e9ed2b715 ASSERT_EQ instead of EXPECT_EQ in tests
because that's what we actually want, to crash right now, no point in continuing
2024-03-15 22:37:26 +01:00
0095f9ff69 Small ThreadPool improvements 2024-03-15 22:37:02 +01:00
a14eb1c501 fix for act runner
i guess its ctest is too old and doesn't have the option?
2024-01-06 09:59:56 +01:00
5ea76e566e add discovery timeout to gtests
2024-01-05 22:53:18 +01:00
275f1208c5 it was actually because sys ptrace wasn't allowed for gitea runner 2024-01-05 22:32:33 +01:00
d424adc389 fix diff
2023-12-24 14:14:54 +01:00
1f72814b99 Update cmake.yml
2023-11-26 14:13:51 +01:00
8f63891aee set cmake min for 3.18
for gitea actions runner
2023-11-26 13:49:34 +01:00
b3123bb70d Update cmake.yml
2023-11-26 13:47:54 +01:00
77cb49a32c Update cmake.yml
2023-11-26 13:44:37 +01:00
e0ea1c1ce9 Update cmake.yml 2023-11-26 13:42:37 +01:00
c838ac0238 Update cmake.yml
2023-11-26 13:36:49 +01:00
507f4d0dc0 Update cmake.yml 2023-11-26 13:36:02 +01:00
57f2f331ec Update cmake.yml 2023-11-26 13:35:20 +01:00
53f908f05f fix ignore test
2023-11-26 13:05:36 +01:00
802c2d70e0 clang format 120 length 2023-08-06 19:42:03 +02:00
3ef8c796a4 fix backup test mkdir 2023-08-06 15:42:25 +02:00
f517b0deab remove unused repo cache functions 2023-07-15 22:26:15 +02:00
dba5f06c10 install target 2023-07-15 13:43:33 +02:00
87 changed files with 855 additions and 771 deletions
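Commit 8e9ed2b715 above replaces EXPECT_EQ with ASSERT_EQ. A minimal GoogleTest sketch of the difference (a hypothetical test, not from this repository): EXPECT_EQ records a failure and keeps executing the test body, while ASSERT_EQ aborts the current test function on the first mismatch, which matches the "no point in continuing" reasoning in the commit message.

#include <gtest/gtest.h>
#include <vector>

// Hypothetical test, purely illustrative: shows why the repository's tests
// switched to ASSERT_EQ to stop at the first failing check.
TEST(RepoSmoke, ReadBackMatches) {
    std::vector<char> written{'a', 'b', 'c'};
    std::vector<char> read{'a', 'b'};        // pretend the read came back short

    // EXPECT_EQ would mark the test failed here but keep running,
    // so the comparison below would still execute against bad data.
    ASSERT_EQ(written.size(), read.size());  // aborts this test function on mismatch

    // Only reached once the sizes already agree.
    ASSERT_EQ(written, read);
}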

View File

@@ -34,7 +34,7 @@ BreakBeforeBinaryOperators: None
BreakBeforeTernaryOperators: true
BreakConstructorInitializers: BeforeColon
BreakInheritanceList: BeforeColon
ColumnLimit: 0
ColumnLimit: 120
CompactNamespaces: true
ContinuationIndentWidth: 8
IndentCaseLabels: true

View File

@@ -21,8 +21,11 @@ jobs:
steps:
- uses: actions/checkout@v3
- run: apt-get update && apt-get install -y sudo
if: env.ACT=='true'
- name: install everything
run: sudo apt-get install -y fuse libfuse-dev
run: sudo apt-get update && sudo apt-get install -y fuse libfuse-dev cmake build-essential gcc g++ libssl-dev zlib1g-dev
- name: Configure CMake
# Configure CMake in a 'build' subdirectory. `CMAKE_BUILD_TYPE` is only required if you are using a single-configuration generator such as make.
@@ -31,10 +34,9 @@ jobs:
- name: Build
# Build your program with the given configuration
run: cmake --build ${{github.workspace}}/build --config ${{env.BUILD_TYPE}} --parallel $(nproc)
run: cmake --build ${{github.workspace}}/build --config ${{env.BUILD_TYPE}} --parallel $(( $(nproc) - 2 ))
- name: Test
working-directory: ${{github.workspace}}/build
# Execute tests defined by the CMake configuration.
# See https://cmake.org/cmake/help/latest/manual/ctest.1.html for more detail
run: ctest --verbose --parallel $(nproc) -C ${{env.BUILD_TYPE}}
run: cd ${{github.workspace}}/build && ctest --test-dir ${{github.workspace}}/build --verbose --parallel $(nproc) -C ${{env.BUILD_TYPE}}

.idea/misc.xml generated
View File

@@ -1,4 +1,11 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="CMakeWorkspace" PROJECT_DIR="$PROJECT_DIR$" />
<component name="CidrRootsConfiguration">
<excludeRoots>
<file path="$PROJECT_DIR$/cmake-build-debug" />
<file path="$PROJECT_DIR$/cmake-build-relwithdebinfo" />
<file path="$PROJECT_DIR$/cmake-build-relwithsan" />
</excludeRoots>
</component>
</project>

View File

@@ -1,4 +1,6 @@
cmake_minimum_required(VERSION 3.22)
cmake_minimum_required(VERSION 3.18)
project(backup)
if (SANITIZE STREQUAL "YES")
message(WARNING "Enabling sanitizers!")
@@ -12,11 +14,14 @@ if (CMAKE_BUILD_TYPE STREQUAL "Release")
add_link_options(-flto)
endif ()
if (NOT CMAKE_BUILD_TYPE STREQUAL "Debug")
add_compile_options(-O3)
add_link_options(-O3)
endif ()
add_compile_options(-rdynamic)
add_link_options(-rdynamic)
project(backup)
enable_testing()
add_subdirectory(src)

View File

@@ -1,6 +1,6 @@
cmake_minimum_required(VERSION 3.22)
cmake_minimum_required(VERSION 3.18)
set(CMAKE_CXX_STANDARD 17)
set(CMAKE_CXX_STANDARD 20)
set(CMAKE_CXX_STANDARD_REQUIRED ON)
add_subdirectory(change_detectors)
@@ -14,3 +14,5 @@ add_subdirectory(fuse)
add_executable(backup main.cpp)
target_link_libraries(backup PRIVATE change_detectors chunkers commands crypto filters repo utils)
install(TARGETS backup DESTINATION bin)

View File

@@ -1,4 +1,4 @@
cmake_minimum_required(VERSION 3.22)
cmake_minimum_required(VERSION 3.18)
add_library(change_detectors srcs/ChangeDetector.cpp srcs/ChangeDetectorContainer.cpp srcs/ChangeDetectorFactory.cpp srcs/ComparableFile.cpp srcs/ContentsChangeDetector.cpp srcs/EditTimeChangeDetector.cpp srcs/SizeChangeDetector.cpp srcs/TypeChangeDetector.cpp)

View File

@@ -34,9 +34,10 @@ struct ComparableFile {
const std::string path; ///< Relative path to the file
const File::Type type; ///< File type
const unsigned long long bytes; ///< Number of bytes in the file
const unsigned long long mtime; ///< Timestamp of last file modification
const std::function<std::unique_ptr<std::streambuf>()> contents;///< Function that returns a unique pointer to a std::streambuf instance linked to the contents of the file
const unsigned long long bytes;///< Number of bytes in the file
const unsigned long long mtime;///< Timestamp of last file modification
const std::function<std::unique_ptr<std::streambuf>()>
contents;///< Function that returns a unique pointer to a std::streambuf instance linked to the contents of the file
};

View File

@@ -8,9 +8,8 @@
bool ChangeDetectorContainer::check(const ComparableFile &f1, const ComparableFile &f2) const {
return std::any_of(changeDetectors.begin(), changeDetectors.end(),
[&](const auto &changeDetector) {
return changeDetector->check(f1, f2);
});
[&](const auto &changeDetector) { return changeDetector->check(f1, f2); });
}
ChangeDetectorContainer::ChangeDetectorContainer(std::vector<std::unique_ptr<ChangeDetector>> &&changeDetectors) : changeDetectors(std::move(changeDetectors)) {}
ChangeDetectorContainer::ChangeDetectorContainer(std::vector<std::unique_ptr<ChangeDetector>> &&changeDetectors)
: changeDetectors(std::move(changeDetectors)) {}

View File

@@ -12,26 +12,17 @@
#include "objects/FileBuffer.h"
ComparableFile::ComparableFile(const File &file, const Repository *repo)
: path(file.name),
type(file.fileType),
bytes(file.bytes),
mtime(file.mtime),
contents(
[file, repo]() {
return std::make_unique<FileBuffer>(repo, file.id);
}) {}
: path(file.name), type(file.fileType), bytes(file.bytes), mtime(file.mtime),
contents([file, repo]() { return std::make_unique<FileBuffer>(repo, file.id); }) {}
ComparableFile::ComparableFile(const std::filesystem::path &p, const std::filesystem::path &base)
: path(p.lexically_relative(base).u8string()),
type(File::getFileType(p)),
bytes(File::getFileSize(p)),
: path(p.lexically_relative(base).string()), type(File::getFileType(p)), bytes(File::getFileSize(p)),
mtime(File::getFileMtime(p)),
contents(
[p, path = this->path, type = this->type]() -> std::unique_ptr<std::streambuf> {
contents([p, path = this->path, type = this->type]() -> std::unique_ptr<std::streambuf> {
if (type == File::Type::Normal) {
auto fb = std::make_unique<std::filebuf>();
fb->open(p, std::ios::in | std::ios::binary);
if (!fb->is_open()) throw Exception("Can't open " + p.u8string() + " for reading!");
if (!fb->is_open()) throw Exception("Can't open " + p.string() + " for reading!");
return fb;
}
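A likely reason for the u8string()/u8path() to string()/path() changes in this and the following files, tied to the "switch to c++20" commit (the commits do not state it, so treat this as an assumption): under C++20, std::filesystem::path::u8string() returns std::u8string (char8_t-based), which no longer concatenates with std::string, and std::filesystem::u8path is deprecated.

#include <filesystem>
#include <string>

// Sketch only: the C++17 vs C++20 behaviour change behind the switch to p.string().
std::string describe(const std::filesystem::path &p) {
    // return "Can't open " + p.u8string(); // C++17: fine, u8string() is std::string
    //                                      // C++20: u8string() is std::u8string, no operator+ with std::string
    return "Can't open " + p.string();      // compiles under both standards
}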

View File

@@ -12,8 +12,6 @@ bool ContentsChangeDetector::check(const ComparableFile &f1, const ComparableFil
auto b1 = f1.contents();
auto b2 = f2.contents();
return !std::equal(std::istreambuf_iterator<char>(b1.get()),
std::istreambuf_iterator<char>(),
std::istreambuf_iterator<char>(b2.get()),
std::istreambuf_iterator<char>());
return !std::equal(std::istreambuf_iterator<char>(b1.get()), std::istreambuf_iterator<char>(),
std::istreambuf_iterator<char>(b2.get()), std::istreambuf_iterator<char>());
}

View File

@@ -4,6 +4,4 @@
#include "TypeChangeDetector.h"
bool TypeChangeDetector::check(const ComparableFile &f1, const ComparableFile &f2) const {
return f1.type != f2.type;
}
bool TypeChangeDetector::check(const ComparableFile &f1, const ComparableFile &f2) const { return f1.type != f2.type; }

View File

@@ -1,4 +1,4 @@
cmake_minimum_required(VERSION 3.22)
cmake_minimum_required(VERSION 3.18)
add_library(chunkers srcs/Buzhash.cpp srcs/BuzhashChunker.cpp srcs/Chunker.cpp srcs/ChunkerFactory.cpp srcs/ConstChunker.cpp)

View File

@@ -31,8 +31,9 @@ public:
private:
uint32_t cur = 0; ///< Current hash value
const uint32_t blockSize; ///< Hashing window size
std::deque<uint32_t> history;///< Bytes used to calculate current hash, used to compute the hash in a rolling fashion (to remove the oldest byte from the hash when blockSize is reached)
const uint32_t blockSize;///< Hashing window size
std::deque<uint32_t>
history;///< Bytes used to calculate current hash, used to compute the hash in a rolling fashion (to remove the oldest byte from the hash when blockSize is reached)
// Circular shift taken from: https://en.wikipedia.org/wiki/Circular_shift
/// Shift \p value \p count bits to the right circularly
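The comment above points to the Wikipedia circular-shift article. A small, UB-free sketch of the 32-bit right rotate the rolling hash relies on (the repository's actual rotr32 is not visible in this hunk, so the exact signature is an assumption):

#include <cstdint>

// Sketch only: rotate 'value' right by 'count' bits circularly,
// avoiding undefined behaviour when count is 0 or >= 32.
static inline uint32_t rotr32(uint32_t value, unsigned count) {
    count &= 31;                                         // keep the shift in range
    return (value >> count) | (value << ((32 - count) & 31));
}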

View File

@@ -18,7 +18,8 @@ public:
/// \param minBytes Minimum amount of bytes in returned chunks
/// \param mask Amount of trailing zeroes in the rolling hash at which the file is cut (results in average chunk size of 2^mask bytes)
/// \param window Rolling hash window (how many of chunks last bytes are included in the hash, the default is recommended)
BuzhashChunker(std::streambuf *buf, unsigned long long minBytes, unsigned long long maxBytes, unsigned long long mask, uint32_t window = 4095);
BuzhashChunker(std::streambuf *buf, unsigned long long minBytes, unsigned long long maxBytes,
unsigned long long mask, uint32_t window = 4095);
/// \copydoc Chunker::getNext
std::pair<std::string, std::vector<char>> getNext() override;

View File

@@ -6,9 +6,7 @@
Buzhash::Buzhash(uint32_t blockSize) : blockSize(blockSize), history() {}
uint32_t Buzhash::get() const {
return cur;
}
uint32_t Buzhash::get() const { return cur; }
uint32_t Buzhash::feed(uint8_t in) {
cur = rotr32(cur, 1);

View File

@@ -7,7 +7,9 @@
#include "Exception.h"
#include "SHA.h"
BuzhashChunker::BuzhashChunker(std::streambuf *buf, unsigned long long minBytes, unsigned long long maxBytes, unsigned long long mask, uint32_t window) : Chunker(buf, maxBytes), window(window), minBytes(minBytes), mask(mask), buzhash(window) {}
BuzhashChunker::BuzhashChunker(std::streambuf *buf, unsigned long long minBytes, unsigned long long maxBytes,
unsigned long long mask, uint32_t window)
: Chunker(buf, maxBytes), window(window), minBytes(minBytes), mask(mask), buzhash(window) {}
std::pair<std::string, std::vector<char>> BuzhashChunker::getNext() {
if (eof) throw Exception("Trying to read from a file that is finished!");
@@ -21,9 +23,7 @@ std::pair<std::string, std::vector<char>> BuzhashChunker::getNext() {
return {SHA::calculate(rbuf), rbuf};
}
for (auto c: rbuf) {
buzhash.feed(static_cast<uint8_t>(c));
}
for (auto c: rbuf) { buzhash.feed(static_cast<uint8_t>(c)); }
// Continue reading the file until either the last mask bits are zero of we exceed the maxSize
while (((buzhash.get() & (~0UL >> (sizeof(unsigned long long) * 8 - mask))) != 0) && rbuf.size() < maxBytes) {
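The while condition above keeps reading until the low `mask` bits of the rolling hash are all zero (or maxBytes is hit); as the BuzhashChunker documentation says, that yields chunk boundaries roughly every 2^mask bytes. An equivalent, more readable form of the cut-point test (a sketch, not the repository's code):

#include <cstdint>

// Sketch only: a chunk boundary is declared when all of the low 'mask' bits
// of the rolling hash are zero; on random input this happens about once
// every 2^mask bytes, giving the average chunk size described in the header.
static inline bool isCutPoint(uint32_t rollingHash, unsigned mask) {
    const uint32_t lowBits = (mask >= 32) ? ~0U : ((1U << mask) - 1U);
    return (rollingHash & lowBits) == 0;
}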

View File

@@ -8,44 +8,31 @@
Chunker::Chunker(std::streambuf *buf, unsigned long long maxBytes) : buf(buf), maxBytes(maxBytes) {}
bool Chunker::getEof() const {
return eof;
}
bool Chunker::getEof() const { return eof; }
Chunker::~Chunker() = default;
Chunker::ChunkerIterator Chunker::begin() {
return {this};
}
Chunker::ChunkerIterator Chunker::begin() { return {this}; }
Chunker::ChunkerIterator Chunker::end() {
return {nullptr};
}
Chunker::ChunkerIterator Chunker::end() { return {nullptr}; }
Chunker::ChunkerIterator &Chunker::ChunkerIterator::operator++() {
if (pastEOF) throw Exception("Trying to increment pastEOF ChunkerIterator!");
if (source->getEof())
pastEOF = true;
if (source->getEof()) pastEOF = true;
else
buf = source->getNext();
return *this;
}
bool Chunker::ChunkerIterator::operator!=(const Chunker::ChunkerIterator &rhs) const {
return pastEOF != rhs.pastEOF;
}
bool Chunker::ChunkerIterator::operator!=(const Chunker::ChunkerIterator &rhs) const { return pastEOF != rhs.pastEOF; }
Chunker::ChunkerIterator::value_type Chunker::ChunkerIterator::operator*() const {
if (pastEOF) throw Exception("Trying to dereference pastEOF ChunkerIterator!");
return buf.value();
}
bool Chunker::ChunkerIterator::operator==(const Chunker::ChunkerIterator &rhs) const {
return pastEOF == rhs.pastEOF;
}
bool Chunker::ChunkerIterator::operator==(const Chunker::ChunkerIterator &rhs) const { return pastEOF == rhs.pastEOF; }
Chunker::ChunkerIterator::ChunkerIterator(Chunker *source)
: source(source), pastEOF(source == nullptr) {
if (source)
operator++();
Chunker::ChunkerIterator::ChunkerIterator(Chunker *source) : source(source), pastEOF(source == nullptr) {
if (source) operator++();
}
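Since Chunker exposes begin()/end() and an iterator whose value is the pair produced by getNext(), chunking loops can be written as a range-for. A hedged usage sketch (include path and the BuzhashChunker parameter values are invented for illustration; the constructor signature matches the header shown earlier):

#include <filesystem>
#include <fstream>
#include <iostream>
#include "BuzhashChunker.h"   // repository header, include path assumed

// Sketch only: iterate a file's chunks; each element is the
// {SHA string, chunk bytes} pair returned by Chunker::getNext().
void printChunks(const std::filesystem::path &p) {
    std::ifstream in(p, std::ios::in | std::ios::binary);
    BuzhashChunker chunker(in.rdbuf(), /*minBytes=*/64 * 1024,
                           /*maxBytes=*/4 * 1024 * 1024, /*mask=*/20);
    for (const auto &chunk: chunker) {      // uses Chunker::begin()/end()
        std::cout << chunk.first << " " << chunk.second.size() << " bytes\n";
    }
}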

View File

@@ -12,7 +12,8 @@ std::unique_ptr<Chunker> ChunkerFactory::getChunker(const Config &config, std::s
if (config.getStr("chunker") == "const") {
return std::make_unique<ConstChunker>(buf, config.getInt("chunker-max") * 1024);
} else if (config.getStr("chunker") == "buzhash") {
return std::make_unique<BuzhashChunker>(buf, config.getInt("chunker-min") * 1024, config.getInt("chunker-max") * 1024, config.getInt("chunker-mask"));
return std::make_unique<BuzhashChunker>(buf, config.getInt("chunker-min") * 1024,
config.getInt("chunker-max") * 1024, config.getInt("chunker-mask"));
} else {
throw Exception("Unknown chunker type!");
}

View File

@@ -1,6 +1,16 @@
cmake_minimum_required(VERSION 3.22)
cmake_minimum_required(VERSION 3.18)
add_library(commands srcs/CommandDiff.cpp srcs/CommandList.cpp srcs/CommandListFiles.cpp srcs/CommandRestore.cpp srcs/CommandRun.cpp srcs/CommandsCommon.cpp srcs/Diff.cpp srcs/CommandMount.cpp)
add_library(commands
srcs/CommandDiff.cpp
srcs/CommandList.cpp
srcs/CommandListFiles.cpp
srcs/CommandRestore.cpp
srcs/CommandRun.cpp
srcs/CommandsCommon.cpp
srcs/Diff.cpp
srcs/CommandMount.cpp
srcs/CommandDelete.cpp
)
target_include_directories(commands PUBLIC includes)

View File

@@ -0,0 +1,18 @@
//
// Created by Stepan Usatiuk on 06.08.2023.
//
#ifndef BACKUP_COMMANDDELETE_H
#define BACKUP_COMMANDDELETE_H
#include "Command.h"
class CommandDelete : public Command {
public:
CommandDelete();
void run(Context ctx) override;
static constexpr std::string_view name{"delete"};
};
#endif//BACKUP_COMMANDDELETE_H

View File

@@ -24,7 +24,8 @@ private:
/// \param base Base directory to restore to
/// \param callback Stats callback
/// \return Name of the restored file
std::string backupRestoreFile(const File &file, const std::filesystem::path &base, CommandsCommon::workerStatsFunction &callback, Context ctx);
std::string backupRestoreFile(const File &file, const std::filesystem::path &base,
CommandsCommon::workerStatsFunction &callback, Context ctx);
};

View File

@@ -22,7 +22,8 @@ private:
/// \param saveAs UTF-8 encoded file name to save as
/// \param callback Stats callback
/// \return ID of the saved file
Object::idType backupChunkFile(const std::filesystem::path &orig, const std::string &saveAs, CommandsCommon::workerStatsFunction &callback, Context ctx);
Object::idType backupChunkFile(const std::filesystem::path &orig, const std::string &saveAs,
CommandsCommon::workerStatsFunction &callback, Context ctx);
};

View File

@@ -18,7 +18,9 @@ namespace CommandsCommon {
/// \param ignore List of files to ignore
/// \param spawner Function to spawn other tasks
/// \param processFile Task to spawn on found files
void processDirWithIgnore(const std::filesystem::path &dir, std::vector<std::string> ignore, const std::function<void(std::function<void()>)> &spawner, std::function<void(std::filesystem::directory_entry)> processFile);
void processDirWithIgnore(const std::filesystem::path &dir, std::vector<std::string> ignore,
const std::function<void(std::function<void()>)> &spawner,
std::function<void(std::filesystem::directory_entry)> processFile);
struct WorkerStats {
public:
@@ -40,7 +42,8 @@ namespace CommandsCommon {
/// \return True if \p p contains \p prefix at its prefix, False otherwise
bool isSubpath(const std::filesystem::path &prefix, const std::filesystem::path &p);
void workerCallback(unsigned long long bytesWritten, unsigned long long bytesSkipped, unsigned long long filesWritten, WorkerStats &to);
void workerCallback(unsigned long long bytesWritten, unsigned long long bytesSkipped,
unsigned long long filesWritten, WorkerStats &to);
};// namespace CommandsCommon

View File

@@ -0,0 +1,15 @@
//
// Created by Stepan Usatiuk on 06.08.2023.
//
#include "CommandDelete.h"
#include "CommandsCommon.h"
using namespace CommandsCommon;
CommandDelete::CommandDelete() {}
void CommandDelete::run(Context ctx) {
ctx.repo->deleteObjects({static_cast<unsigned long long>(ctx.repo->getConfig().getInt("aid"))});
}

View File

@@ -27,24 +27,24 @@ void CommandDiff::run(Context ctx) {
Object::idType archive1;
if (!ctx.repo->getConfig().exists("aid")) {
auto archives = ctx.repo->getObjects(Object::ObjectType::Archive);
archive1 = std::max_element(archives.begin(), archives.end(), [](const auto &a1, const auto &a2) { return a1.second < a2.second; })->second;
archive1 = std::max_element(archives.begin(), archives.end(), [](const auto &a1, const auto &a2) {
return a1.second < a2.second;
})->second;
} else {
archive1 = ctx.repo->getConfig().getInt("aid");
}
ThreadPool threadPool([&](const std::string &error) {
ctx.logger->write("Error: " + error, 0);
},
ctx.repo->getConfig().exists("threads") ? ctx.repo->getConfig().getInt("threads") : std::thread::hardware_concurrency());
ThreadPool threadPool([&](const std::string &error) { ctx.logger->write("Error: " + error, 0); },
ctx.repo->getConfig().exists("threads") ? ctx.repo->getConfig().getInt("threads")
: std::thread::hardware_concurrency());
auto archiveO1 = Serialize::deserialize<Archive>(ctx.repo->getObject(archive1));
auto archiveO1 = Serialize::deserialize<Archive>(ctx.repo->getObjectRaw(archive1));
std::mutex filesLock;
std::map<std::filesystem::path, File> files;///< Files in the first archive
for (auto id: archiveO1.files) {
auto file = Serialize::deserialize<File>(ctx.repo->getObject(id));
auto path = std::filesystem::u8path(file.name);
if (isSubpath(ctx.repo->getConfig().getStr("prefix"), path))
files.emplace(file.getKey(), std::move(file));
auto file = Serialize::deserialize<File>(ctx.repo->getObjectRaw(id));
auto path = std::filesystem::path(file.name);
if (isSubpath(ctx.repo->getConfig().getStr("prefix"), path)) files.emplace(file.getKey(), std::move(file));
}
/// Container of ChangeDetectors built using the config of the repository
@@ -63,8 +63,7 @@ void CommandDiff::run(Context ctx) {
if (changeDetector.check({repoFile, ctx.repo}, p)) {
ctx.logger->write(relPath + " is different " + Diff::diff({repoFile, ctx.repo}, p) + "\n", 1);
} else {
if (diffMode == "file")
ctx.logger->write(relPath + " are same ", 0);
if (diffMode == "file") ctx.logger->write(relPath + " are same ", 0);
}
}
@@ -76,17 +75,16 @@ void CommandDiff::run(Context ctx) {
if (diffMode == "normal") {
/// If a second archive is given, run the task for each of its files, otherwise use the "from" config option
if (ctx.repo->getConfig().exists("aid2")) {
archiveO2.emplace(Serialize::deserialize<Archive>(ctx.repo->getObject(ctx.repo->getConfig().getInt("aid2"))));
archiveO2.emplace(
Serialize::deserialize<Archive>(ctx.repo->getObjectRaw(ctx.repo->getConfig().getInt("aid2"))));
threadPool.push([&]() {
for (auto id: archiveO2.value().files) {
/// Exit when asked to
if (Signals::shouldQuit) throw Exception("Quitting");
auto file = Serialize::deserialize<File>(ctx.repo->getObject(id));
if (isSubpath(ctx.repo->getConfig().getStr("prefix"), std::filesystem::u8path(file.name)))
threadPool.push([&, file]() {
processFile(ComparableFile{file, ctx.repo});
});
auto file = Serialize::deserialize<File>(ctx.repo->getObjectRaw(id));
if (isSubpath(ctx.repo->getConfig().getStr("prefix"), std::filesystem::path(file.name)))
threadPool.push([&, file]() { processFile(ComparableFile{file, ctx.repo}); });
if (Signals::shouldQuit) break;
}
@@ -97,10 +95,9 @@ void CommandDiff::run(Context ctx) {
/// Start the diff with the root directory and empty ignore list
threadPool.push([&, from]() {
processDirWithIgnore(
from,
{},
[&](std::function<void()> f) { threadPool.push(std::move(f)); },
[processFile, from, prefix = ctx.repo->getConfig().getStr("prefix")](const std::filesystem::directory_entry &dirEntry) {
from, {}, [&](std::function<void()> f) { threadPool.push(std::move(f)); },
[processFile, from, prefix = ctx.repo->getConfig().getStr("prefix")](
const std::filesystem::directory_entry &dirEntry) {
if (isSubpath(prefix, dirEntry.path().lexically_relative(from)))
processFile(ComparableFile{dirEntry, from});
});
@@ -113,11 +110,12 @@ void CommandDiff::run(Context ctx) {
}
if (ctx.repo->getConfig().exists("aid2")) {
archiveO2.emplace(Serialize::deserialize<Archive>(ctx.repo->getObject(ctx.repo->getConfig().getInt("aid2"))));
archiveO2.emplace(
Serialize::deserialize<Archive>(ctx.repo->getObjectRaw(ctx.repo->getConfig().getInt("aid2"))));
std::map<std::filesystem::path, File> files2;///< Files in the first archive
for (auto id: archiveO2->files) {
auto file = Serialize::deserialize<File>(ctx.repo->getObject(id));
auto path = std::filesystem::u8path(file.name);
auto file = Serialize::deserialize<File>(ctx.repo->getObjectRaw(id));
auto path = std::filesystem::path(file.name);
if (isSubpath(ctx.repo->getConfig().getStr("prefix"), path))
files2.emplace(file.getKey(), std::move(file));
}
@@ -146,7 +144,5 @@ void CommandDiff::run(Context ctx) {
std::unique_lock finishedLock(threadPool.finishedLock);
threadPool.finished.wait(finishedLock, [&threadPool] { return threadPool.empty(); });
if (diffMode == "normal")
for (auto const &s: files) {
ctx.logger->write(s.first.u8string() + " is removed\n", 0);
}
for (auto const &s: files) { ctx.logger->write(s.first.string() + " is removed\n", 0); }
}

View File

@@ -4,13 +4,10 @@
#include "CommandList.h"
CommandList::CommandList() : Command() {
}
CommandList::CommandList() : Command() {}
void CommandList::run(Context ctx) {
auto list = ctx.repo->getObjects(Object::ObjectType::Archive);
std::sort(list.begin(), list.end(), [](const auto &l, const auto &r) { return l.second < r.second; });
for (auto const &aid: list) {
std::cout << "Name: " << aid.first << " Id: " << aid.second << std::endl;
}
for (auto const &aid: list) { std::cout << "Name: " << aid.first << " Id: " << aid.second << std::endl; }
}

View File

@@ -10,13 +10,13 @@
#include "objects/Chunk.h"
#include "objects/File.h"
CommandListFiles::CommandListFiles() : Command() {
}
CommandListFiles::CommandListFiles() : Command() {}
void CommandListFiles::run(Context ctx) {
auto archive = Serialize::deserialize<Archive>(ctx.repo->getObject(ctx.repo->getConfig().getInt("aid")));
auto archive = Serialize::deserialize<Archive>(ctx.repo->getObjectRaw(ctx.repo->getConfig().getInt("aid")));
for (auto const &fid: archive.files) {
auto file = Serialize::deserialize<File>(ctx.repo->getObject(fid));
std::cout << "Name: " << file.name << " type: " << File::TypeToStr.at(file.fileType) << " size: " << BytesFormatter::formatStr(file.bytes) << std::endl;
auto file = Serialize::deserialize<File>(ctx.repo->getObjectRaw(fid));
std::cout << "Name: " << file.name << " type: " << File::TypeToStr.at(file.fileType)
<< " size: " << BytesFormatter::formatStr(file.bytes) << std::endl;
}
}

View File

@@ -5,9 +5,6 @@
#include "CommandMount.h"
#include "RepoFS.h"
CommandMount::CommandMount() : Command() {
}
CommandMount::CommandMount() : Command() {}
void CommandMount::run(Context ctx) {
RepoFS::start(ctx.repo, ctx.repo->getConfig().getStr("to"));
}
void CommandMount::run(Context ctx) { RepoFS::start(ctx.repo, ctx.repo->getConfig().getStr("to")); }

View File

@@ -20,12 +20,11 @@
using namespace CommandsCommon;
CommandRestore::CommandRestore() : Command() {
}
CommandRestore::CommandRestore() : Command() {}
void CommandRestore::run(Context ctx) {
Object::idType archive = ctx.repo->getConfig().getInt("aid");
std::filesystem::path to = std::filesystem::u8path(ctx.repo->getConfig().getStr("to"));
std::filesystem::path to = std::filesystem::path(ctx.repo->getConfig().getStr("to"));
std::atomic<unsigned long long> filesToRestoreCount = 0;
std::atomic<unsigned long long> bytesToRestore = 0;
@@ -33,14 +32,14 @@ void CommandRestore::run(Context ctx) {
WorkerStats workerStats;///< Backup statistics of the worker threads
/// Worker callback, bound to the local workerStats variable
workerStatsFunction workerCallback = [&workerStats](unsigned long long bytesWritten, unsigned long long bytesSkipped, unsigned long long filesWritten) {
workerStatsFunction workerCallback = [&workerStats](unsigned long long bytesWritten,
unsigned long long bytesSkipped,
unsigned long long filesWritten) {
CommandsCommon::workerCallback(bytesWritten, bytesSkipped, filesWritten, workerStats);
};
{
/// Calculate the average speed of backup
RunningDiffAverage avg(
[&]() { return workerStats.bytesWritten.load(); },
100, 100);
RunningDiffAverage avg([&]() { return workerStats.bytesWritten.load(); }, 100, 100);
/// Show restore progress
Progress progress([this, ctx](const std::string &s, int l) { ctx.logger->write(s, l); },
@@ -49,7 +48,10 @@ void CommandRestore::run(Context ctx) {
"/",
[&filesToRestoreCount]() { return std::to_string(filesToRestoreCount); },
" files saved, ",
[&workerStats]() { return BytesFormatter::formatStr(workerStats.bytesWritten.load() + workerStats.bytesSkipped.load()); },
[&workerStats]() {
return BytesFormatter::formatStr(workerStats.bytesWritten.load() +
workerStats.bytesSkipped.load());
},
" / ",
[&bytesToRestore]() { return BytesFormatter::formatStr(bytesToRestore); },
" saved @ ",
@@ -59,22 +61,21 @@ void CommandRestore::run(Context ctx) {
ctx.repo->getConfig());
/// Thread pool for restore tasks
ThreadPool threadPool([&](const std::string &error) {
progress.print("Error: " + error, 0);
},
ctx.repo->getConfig().exists("threads") ? ctx.repo->getConfig().getInt("threads") : std::thread::hardware_concurrency());
ThreadPool threadPool([&](const std::string &error) { progress.print("Error: " + error, 0); },
ctx.repo->getConfig().exists("threads") ? ctx.repo->getConfig().getInt("threads")
: std::thread::hardware_concurrency());
/// Add the main restore task
threadPool.push([&, this]() {
/// Get the archive and its file IDs
auto archiveO = Serialize::deserialize<Archive>(ctx.repo->getObject(archive));
auto archiveO = Serialize::deserialize<Archive>(ctx.repo->getObjectRaw(archive));
std::vector<Object::idType> files = archiveO.files;
/// For each file...
for (const auto fid: files) {
/// Stop when asked to
if (Signals::shouldQuit) break;
auto file = Serialize::deserialize<File>(ctx.repo->getObject(fid));
auto file = Serialize::deserialize<File>(ctx.repo->getObjectRaw(fid));
filesToRestoreCount++;
bytesToRestore += file.bytes;
/// Spawn a restore task
@@ -92,28 +93,30 @@ void CommandRestore::run(Context ctx) {
ctx.logger->write("\n", 1);
}
std::string CommandRestore::backupRestoreFile(const File &file, const std::filesystem::path &baseDir, workerStatsFunction &callback, Context ctx) {
auto fullpath = baseDir / std::filesystem::u8path(file.name);
std::string CommandRestore::backupRestoreFile(const File &file, const std::filesystem::path &baseDir,
workerStatsFunction &callback, Context ctx) {
auto fullpath = baseDir / std::filesystem::path(file.name);
std::filesystem::create_directories(fullpath.parent_path());
if (file.fileType == File::Type::Directory) {
std::filesystem::create_directory(fullpath);
callback(0, 0, 1);
return fullpath.u8string();
return fullpath.string();
}
if (file.fileType == File::Type::Symlink) {
auto dest = Serialize::deserialize<Chunk>(ctx.repo->getObject(file.chunks.at(0)));
std::filesystem::create_symlink(std::filesystem::u8path(std::string{dest.data.begin(), dest.data.end()}), fullpath);
auto dest = Serialize::deserialize<Chunk>(ctx.repo->getObjectRaw(file.chunks.at(0)));
std::filesystem::create_symlink(std::filesystem::path(std::string{dest.data.begin(), dest.data.end()}),
fullpath);
callback(0, 0, 1);
return fullpath.u8string();
return fullpath.string();
}
std::ofstream ostream(fullpath, std::ios::binary | std::ios::out | std::ios::trunc);
for (const auto cid: file.chunks) {
if (Signals::shouldQuit) throw Exception("Quitting!");
Chunk c = Serialize::deserialize<Chunk>(ctx.repo->getObject(cid.second));
Chunk c = Serialize::deserialize<Chunk>(ctx.repo->getObjectRaw(cid.second));
if (!c.data.empty()) {
ostream.rdbuf()->sputn(c.data.data(), c.data.size());
callback(c.data.size(), 0, 0);
@@ -121,5 +124,5 @@ std::string CommandRestore::backupRestoreFile(const File &file, const std::files
}
callback(0, 0, 1);
return fullpath.u8string();
return fullpath.string();
}

View File

@@ -26,8 +26,7 @@
using namespace CommandsCommon;
CommandRun::CommandRun() : Command() {
}
CommandRun::CommandRun() : Command() {}
void CommandRun::run(Context ctx) {
WorkerStats workerStats;///< Backup statistics of the worker threads
@@ -36,66 +35,63 @@ void CommandRun::run(Context ctx) {
std::filesystem::path from = ctx.repo->getConfig().getStr("from");///< Directory to back up from
/// Worker callback, bound to the local workerStats variable
workerStatsFunction workerCallback = [&](unsigned long long bytesWritten, unsigned long long bytesSkipped, unsigned long long filesWritten) {
workerStatsFunction workerCallback = [&](unsigned long long bytesWritten, unsigned long long bytesSkipped,
unsigned long long filesWritten) {
CommandsCommon::workerCallback(bytesWritten, bytesSkipped, filesWritten, workerStats);
};
std::vector<Object::idType> files;///< File ids so far added to the archive
std::mutex filesLock; ///< Files vector lock
/// Function to safely add new file ids to `files`
std::function addFile = [&](Object::idType id) {std::lock_guard lock(filesLock); files.emplace_back(id); };
std::function addFile = [&](Object::idType id) {
std::lock_guard lock(filesLock);
files.emplace_back(id);
};
{
/// Calculate the average speed of backup
RunningDiffAverage avg(
[&]() { return workerStats.bytesWritten.load(); },
100, 100);
RunningDiffAverage avg([&]() { return workerStats.bytesWritten.load(); }, 100, 100);
/// Show the progress of backup
Progress progress([this, ctx](const std::string &s, int l) { ctx.logger->write(s, l); },
{[&]() { return std::to_string(workerStats.filesWritten.load()); },
"/",
[&]() { return std::to_string(runnerStats.filesToSaveCount); },
" files saved, ",
[&]() { return std::to_string(runnerStats.filesSkipped); },
" files skipped, ",
[&]() { return BytesFormatter::formatStr((workerStats.bytesWritten.load() + workerStats.bytesSkipped.load())); },
" / ",
[&]() { return BytesFormatter::formatStr(runnerStats.bytesToSave); },
" read @ ",
[&]() { return BytesFormatter::formatStr(avg.get() * 10); },
"/s"},
{[&]() { return std::to_string(workerStats.filesWritten.load()); }, "/",
[&]() { return std::to_string(runnerStats.filesToSaveCount); }, " files saved, ",
[&]() { return std::to_string(runnerStats.filesSkipped); }, " files skipped, ",
[&]() {
return BytesFormatter::formatStr(
(workerStats.bytesWritten.load() + workerStats.bytesSkipped.load()));
},
" / ", [&]() { return BytesFormatter::formatStr(runnerStats.bytesToSave); }, " read @ ",
[&]() { return BytesFormatter::formatStr(avg.get() * 10); }, "/s"},
ctx.repo->getConfig());
/// Thread pool for backup tasks, prints to progress on any errors
ThreadPool threadPool([&](const std::string &error) {
progress.print("Error: " + error, 0);
},
ctx.repo->getConfig().exists("threads") ? ctx.repo->getConfig().getInt("threads") : std::thread::hardware_concurrency());
ThreadPool threadPool([&](const std::string &error) { progress.print("Error: " + error, 0); },
ctx.repo->getConfig().exists("threads") ? ctx.repo->getConfig().getInt("threads")
: std::thread::hardware_concurrency());
/// Container of ChangeDetectors built using the config of the repository
ChangeDetectorContainer changeDetector = ChangeDetectorFactory::getChangeDetectors(ctx.repo->getConfig());
/// Function to spawn a rechunking task
auto saveFile = [&, this](const std::filesystem::path &absPath, const std::filesystem::path &relPath) {
runnerStats.bytesToSave += File::getFileType(absPath) == File::Type::Normal ? std::filesystem::file_size(absPath) : 0;
runnerStats.bytesToSave +=
File::getFileType(absPath) == File::Type::Normal ? std::filesystem::file_size(absPath) : 0;
runnerStats.filesToSaveCount++;
threadPool.push([&, relPath, absPath]() {
addFile(backupChunkFile(absPath, relPath.u8string(), workerCallback, ctx));
progress.print("Copied: " + relPath.u8string(), 1);
addFile(backupChunkFile(absPath, relPath.string(), workerCallback, ctx));
progress.print("Copied: " + relPath.string(), 1);
});
};
/// Task to process an individual file in the backup
std::function<void(std::filesystem::path)> processFile =
[&, this](const std::filesystem::path &p) {
auto relPath = p.lexically_relative(from).u8string();
std::function<void(std::filesystem::path)> processFile = [&, this](const std::filesystem::path &p) {
auto relPath = p.lexically_relative(from).string();
if (ctx.repo->exists(Object::ObjectType::File, relPath) != 0) {
File repoFile = Serialize::deserialize<File>(ctx.repo->getObject(Object::ObjectType::File, relPath));
File repoFile = Serialize::deserialize<File>(ctx.repo->getObjectRaw(Object::ObjectType::File, relPath));
if (!changeDetector.check({repoFile, ctx.repo}, {p, from})) {
addFile(repoFile.id);
ctx.repo->addToCache(repoFile);
progress.print("Skipped: " + relPath, 1);
runnerStats.filesSkipped++;
return;
@@ -109,10 +105,7 @@ void CommandRun::run(Context ctx) {
/// Start the backup with the root directory and empty ignore list
threadPool.push([&]() {
processDirWithIgnore(
from,
{},
[&](std::function<void()> f) { threadPool.push(std::move(f)); },
processFile);
from, {}, [&](std::function<void()> f) { threadPool.push(std::move(f)); }, processFile);
});
/// Wait for all the tasks to finish
@@ -138,22 +131,23 @@ void CommandRun::run(Context ctx) {
ctx.repo->putObject(a);
}
Object::idType CommandRun::backupChunkFile(const std::filesystem::path &orig, const std::string &saveAs, workerStatsFunction &callback, Context ctx) {
Object::idType CommandRun::backupChunkFile(const std::filesystem::path &orig, const std::string &saveAs,
workerStatsFunction &callback, Context ctx) {
/// If it's a symlink or directory, treat it specially
/// The order of checks is important, because is_directory follows the symlink
if (std::filesystem::is_symlink(orig) || std::filesystem::is_directory(orig)) {
auto contents = File::getFileContents(orig);
Chunk c(ctx.repo->getId(), SHA::calculate(contents), contents);
File f(ctx.repo->getId(), saveAs, c.length, File::getFileMtime(orig), c.SHA, {{0, c.id}}, File::getFileType(orig));
File f(ctx.repo->getId(), saveAs, c.length, File::getFileMtime(orig), c.SHA, {{0, c.id}},
File::getFileType(orig));
ctx.repo->putObject(c);
ctx.repo->putObject(f);
return f.id;
}
if (!std::filesystem::is_regular_file(orig))
throw Exception(orig.u8string() + "is a special file, not saving");
if (!std::filesystem::is_regular_file(orig)) throw Exception(orig.string() + "is a special file, not saving");
std::ifstream ifstream(orig, std::ios::in | std::ios::binary);
if (!ifstream) throw Exception("Couldn't open " + orig.u8string() + " for reading");
if (!ifstream) throw Exception("Couldn't open " + orig.string() + " for reading");
std::unique_ptr<Chunker> chunker = ChunkerFactory::getChunker(ctx.repo->getConfig(), ifstream.rdbuf());
SHA fileHash;
@@ -185,9 +179,10 @@ Object::idType CommandRun::backupChunkFile(const std::filesystem::path &orig, co
/// We might have exited in the loop before, so we don't save an incomplete file
if (Signals::shouldQuit) throw Exception("Quitting!");
if (size != File::getFileSize(orig)) {
throw Exception("Something really bad happened or file " + orig.u8string() + " changed during backup");
throw Exception("Something really bad happened or file " + orig.string() + " changed during backup");
}
File f(ctx.repo->getId(), saveAs, size, File::getFileMtime(orig), fileHash.getHash(), fileChunks, File::getFileType(orig));
File f(ctx.repo->getId(), saveAs, size, File::getFileMtime(orig), fileHash.getHash(), fileChunks,
File::getFileType(orig));
ctx.repo->putObject(f);
callback(0, 0, 1);

View File

@@ -10,21 +10,24 @@
#include "Exception.h"
#include "Signals.h"
void CommandsCommon::workerCallback(unsigned long long int bytesWritten, unsigned long long int bytesSkipped, unsigned long long int filesWritten, WorkerStats &to) {
void CommandsCommon::workerCallback(unsigned long long int bytesWritten, unsigned long long int bytesSkipped,
unsigned long long int filesWritten, WorkerStats &to) {
to.bytesWritten += bytesWritten;
to.bytesSkipped += bytesSkipped;
to.filesWritten += filesWritten;
}
bool CommandsCommon::isSubpath(const std::filesystem::path &prefix, const std::filesystem::path &p) {
if (prefix.u8string().size() > p.u8string().size()) return false;
for (int i = 0; i < prefix.u8string().size(); i++)
if (p.u8string()[i] != prefix.u8string()[i]) return false;
if (prefix.string().size() > p.string().size()) return false;
for (int i = 0; i < prefix.string().size(); i++)
if (p.string()[i] != prefix.string()[i]) return false;
return true;
}
void CommandsCommon::processDirWithIgnore(const std::filesystem::path &dir, std::vector<std::string> ignore, const std::function<void(std::function<void()>)> &spawner, std::function<void(std::filesystem::directory_entry)> processFile) {
if (!std::filesystem::is_directory(dir)) throw Exception(dir.u8string() + " is not a directory!");
void CommandsCommon::processDirWithIgnore(const std::filesystem::path &dir, std::vector<std::string> ignore,
const std::function<void(std::function<void()>)> &spawner,
std::function<void(std::filesystem::directory_entry)> processFile) {
if (!std::filesystem::is_directory(dir)) throw Exception(dir.string() + " is not a directory!");
/// Don't process the directory if it has a ".nobackup" file
if (std::filesystem::exists(dir / ".nobackup")) return;
@@ -33,9 +36,7 @@ void CommandsCommon::processDirWithIgnore(const std::filesystem::path &dir, std:
if (std::filesystem::exists(dir / ".ignore")) {
std::ifstream ignorefile(dir / ".ignore", std::ios::in);
std::string line;
while (std::getline(ignorefile, line)) {
ignore.emplace_back(line);
}
while (std::getline(ignorefile, line)) { ignore.emplace_back(line); }
}
/// For each directory entry...
@@ -46,9 +47,10 @@ void CommandsCommon::processDirWithIgnore(const std::filesystem::path &dir, std:
/// Don't process the entry if it matches any of the ignore rules
if (std::any_of(ignore.begin(), ignore.end(), [dirEntry](auto pred) {
std::smatch m;
auto s = dirEntry.path().filename().u8string();
auto s = dirEntry.path().filename().string();
return std::regex_match(s, m, std::regex(pred));
})) continue;
}))
continue;
/// If it's a directory, spawn a task to process the entries in it
if (!dirEntry.is_symlink() && dirEntry.is_directory()) {
@@ -60,8 +62,6 @@ void CommandsCommon::processDirWithIgnore(const std::filesystem::path &dir, std:
}
/// Spawn a task to process each individual file
spawner([processFile, dirEntry]() {
processFile(dirEntry);
});
spawner([processFile, dirEntry]() { processFile(dirEntry); });
}
}

View File

@@ -51,13 +51,9 @@ std::string Diff::diff(const ComparableFile &c1, const ComparableFile &c2) {
}
out << "\nLines only in first file: " << std::endl;
for (const auto &s: f1lines) {
out << s.second << "<" << s.first << std::endl;
}
for (const auto &s: f1lines) { out << s.second << "<" << s.first << std::endl; }
out << "Lines only in second file: " << std::endl;
for (const auto &s: f2diff) {
out << s.second << ">" << s.first << std::endl;
}
for (const auto &s: f2diff) { out << s.second << ">" << s.first << std::endl; }
out << "^^^\n";
return out.str();
}
@@ -92,12 +88,8 @@ std::string Diff::diffPercent(const ComparableFile &c1, const ComparableFile &c2
unsigned long long diff = 0;
for (const auto &c: ch1hashes) {
diff += hashsize[c];
}
for (const auto &c: ch2diff) {
diff += hashsize[c];
}
for (const auto &c: ch1hashes) { diff += hashsize[c]; }
for (const auto &c: ch2diff) { diff += hashsize[c]; }
return "at most " + BytesFormatter::formatStr(diff);
}

View File

@@ -1,4 +1,4 @@
cmake_minimum_required(VERSION 3.22)
cmake_minimum_required(VERSION 3.18)
find_package(OpenSSL REQUIRED)

View File

@@ -41,7 +41,8 @@ public:
std::string getHash();
private:
const std::unique_ptr<EVP_MD_CTX, decltype(&EVP_MD_CTX_free)> mdctx{EVP_MD_CTX_new(), &EVP_MD_CTX_free};///< Current hashing context
const std::unique_ptr<EVP_MD_CTX, decltype(&EVP_MD_CTX_free)> mdctx{EVP_MD_CTX_new(),
&EVP_MD_CTX_free};///< Current hashing context
};

View File

@@ -23,15 +23,16 @@ std::vector<char> AES::encrypt(const std::vector<char> &in, const std::array<uin
if (!ctx) throw Exception("Error initializing encryption context!");
std::vector<char> out(in.size() + AES_BLOCK_SIZE + 32);
if (!RAND_bytes(reinterpret_cast<unsigned char *>(out.data()), 32))
throw Exception("Error generating IV!");
if (!RAND_bytes(reinterpret_cast<unsigned char *>(out.data()), 32)) throw Exception("Error generating IV!");
if (!EVP_EncryptInit_ex(ctx.get(), EVP_aes_256_cbc(), nullptr, key.data(), reinterpret_cast<const unsigned char *>(out.data())))
if (!EVP_EncryptInit_ex(ctx.get(), EVP_aes_256_cbc(), nullptr, key.data(),
reinterpret_cast<const unsigned char *>(out.data())))
throw Exception("Error encrypting!");
int outlen = static_cast<int>(out.size()) - 32;
if (!EVP_EncryptUpdate(ctx.get(), reinterpret_cast<unsigned char *>(out.data() + 32), &outlen, reinterpret_cast<const unsigned char *>(in.data()), static_cast<int>(in.size())))
if (!EVP_EncryptUpdate(ctx.get(), reinterpret_cast<unsigned char *>(out.data() + 32), &outlen,
reinterpret_cast<const unsigned char *>(in.data()), static_cast<int>(in.size())))
throw Exception("Error encrypting!");
int finlen = 0;
@@ -52,11 +53,13 @@ std::vector<char> AES::decrypt(const std::vector<char> &in, const std::array<uin
std::vector<char> out(in.size() - 32);
int outlen = static_cast<int>(out.size());
if (!EVP_DecryptInit_ex(ctx.get(), EVP_aes_256_cbc(), nullptr, key.data(), reinterpret_cast<const unsigned char *>(in.data())))
if (!EVP_DecryptInit_ex(ctx.get(), EVP_aes_256_cbc(), nullptr, key.data(),
reinterpret_cast<const unsigned char *>(in.data())))
throw Exception("Error decrypting!");
if (!EVP_DecryptUpdate(ctx.get(), reinterpret_cast<unsigned char *>(out.data()), &outlen, reinterpret_cast<const unsigned char *>(in.data() + 32), static_cast<int>(in.size() - 32)))
if (!EVP_DecryptUpdate(ctx.get(), reinterpret_cast<unsigned char *>(out.data()), &outlen,
reinterpret_cast<const unsigned char *>(in.data() + 32), static_cast<int>(in.size() - 32)))
throw Exception("Error decrypting!");
int finlen = 0;
@@ -70,13 +73,9 @@ std::vector<char> AES::decrypt(const std::vector<char> &in, const std::array<uin
std::array<uint8_t, 32> AES::deriveKey(const std::string &password, const std::string &salt) {
std::array<uint8_t, 32> key;//NOLINT
if (!PKCS5_PBKDF2_HMAC_SHA1(password.data(),
static_cast<int>(password.length()),
reinterpret_cast<const unsigned char *>(salt.data()),
static_cast<int>(salt.length()),
10000,
32,
key.data()))
if (!PKCS5_PBKDF2_HMAC_SHA1(password.data(), static_cast<int>(password.length()),
reinterpret_cast<const unsigned char *>(salt.data()), static_cast<int>(salt.length()),
10000, 32, key.data()))
throw Exception("Error deriving key!");
return key;
}
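A short usage sketch of the three helpers shown above, matching the static-style calls made in FilterAES.cpp later in this diff (the "AES.h" include path is an assumption):

#include <cassert>
#include <string>
#include <vector>
#include "AES.h"   // repository header, include path assumed

// Sketch only: derive a key and round-trip a buffer through encrypt/decrypt.
void aesRoundTrip() {
    const auto key = AES::deriveKey("password", "salt");        // PBKDF2-HMAC-SHA1, 10000 iterations, 32-byte key
    const std::vector<char> plain{'h', 'e', 'l', 'l', 'o'};
    const std::vector<char> cipher = AES::encrypt(plain, key);  // 32 random prefix bytes are used as IV material
    assert(AES::decrypt(cipher, key) == plain);
}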

View File

@@ -13,17 +13,14 @@ std::string SHA::calculate(const std::vector<char> &in) {
}
SHA::SHA() {
if (!mdctx)
throw Exception("Can't create hashing context!");
if (!mdctx) throw Exception("Can't create hashing context!");
if (!EVP_DigestInit_ex(mdctx.get(), EVP_sha256(), nullptr))
throw Exception("Can't create hashing context!");
if (!EVP_DigestInit_ex(mdctx.get(), EVP_sha256(), nullptr)) throw Exception("Can't create hashing context!");
}
void SHA::feedData(const std::vector<char> &in) {
if (in.empty()) return;
if (!EVP_DigestUpdate(mdctx.get(), in.data(), in.size()))
throw Exception("Error hashing!");
if (!EVP_DigestUpdate(mdctx.get(), in.data(), in.size())) throw Exception("Error hashing!");
}
std::string SHA::getHash() {
@@ -33,11 +30,9 @@ std::string SHA::getHash() {
if (!EVP_DigestFinal_ex(mdctx.get(), reinterpret_cast<unsigned char *>(out.data()), &s))
throw Exception("Error hashing!");
if (s != out.size())
throw Exception("Error hashing!");
if (s != out.size()) throw Exception("Error hashing!");
if (!EVP_MD_CTX_reset(mdctx.get()))
throw Exception("Error hashing!");
if (!EVP_MD_CTX_reset(mdctx.get())) throw Exception("Error hashing!");
return {out.begin(), out.end()};
}

View File

@@ -1,4 +1,4 @@
cmake_minimum_required(VERSION 3.22)
cmake_minimum_required(VERSION 3.18)
find_package(ZLIB REQUIRED)

View File

@@ -6,13 +6,9 @@
#include "CRC32.h"
#include "Serialize.h"
std::vector<char> CheckFilter::filterWrite(std::vector<char> from) const {
return filterWriteStatic(std::move(from));
}
std::vector<char> CheckFilter::filterWrite(std::vector<char> from) const { return filterWriteStatic(std::move(from)); }
std::vector<char> CheckFilter::filterRead(std::vector<char> from) const {
return filterReadStatic(std::move(from));
}
std::vector<char> CheckFilter::filterRead(std::vector<char> from) const { return filterReadStatic(std::move(from)); }
std::vector<char> CheckFilter::filterWriteStatic(std::vector<char> from) {
auto out = magic;

View File

@@ -6,12 +6,8 @@
#include "AES.h"
std::vector<char> FilterAES::filterWrite(std::vector<char> from) const {
return AES::encrypt(from, key);
}
std::vector<char> FilterAES::filterWrite(std::vector<char> from) const { return AES::encrypt(from, key); }
std::vector<char> FilterAES::filterRead(std::vector<char> from) const {
return AES::decrypt(from, key);
}
std::vector<char> FilterAES::filterRead(std::vector<char> from) const { return AES::decrypt(from, key); }
FilterAES::FilterAES(const std::string &password, const std::string &salt) : key(AES::deriveKey(password, salt)) {}

View File

@@ -12,12 +12,11 @@ FilterContainer &FilterContainer::addFilter(std::unique_ptr<Filter> &&f) {
}
std::vector<char> FilterContainer::filterWrite(std::vector<char> from) const {
for (auto const &f: filters) from = f->filterWrite(std::move(from));
for (auto const &f: filters) from = std::move(f->filterWrite(std::move(from)));
return from;
}
std::vector<char> FilterContainer::filterRead(std::vector<char> from) const {
for (auto f = filters.crbegin(); f != filters.crend(); f++)
from = (*f)->filterRead(std::move(from));
for (auto f = filters.crbegin(); f != filters.crend(); f++) from = std::move((*f)->filterRead(std::move(from)));
return from;
}
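filterWrite walks the filters front-to-back while filterRead walks them back-to-front, so a container built as compress-then-encrypt decrypts before it decompresses. A hedged usage sketch (the FilterContainer default constructor and the FilterZlib constructor argument are assumptions; the FilterAES constructor is shown in this diff):

#include <memory>
#include <vector>
#include "FilterContainer.h"   // repository headers, include paths assumed
#include "FilterAES.h"
#include "FilterZlib.h"

// Sketch only: write applies filters in insertion order, read in reverse,
// so the data comes back exactly the way it went in.
std::vector<char> roundTrip(std::vector<char> data) {
    FilterContainer filters;
    filters.addFilter(std::make_unique<FilterZlib>(6));                 // compression level: assumed argument
    filters.addFilter(std::make_unique<FilterAES>("password", "salt")); // ctor as in FilterAES.cpp above
    auto stored = filters.filterWrite(std::move(data));   // compress, then encrypt
    return filters.filterRead(std::move(stored));         // decrypt, then decompress
}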

View File

@@ -19,8 +19,8 @@ std::vector<char> FilterZlib::filterWrite(std::vector<char> from) const {
out.resize(sizeSize + outSize);
if (compress2(reinterpret_cast<Bytef *>(out.data() + sizeSize), &outSize, reinterpret_cast<const Bytef *>(from.data()), from.size(), level) !=
Z_OK)
if (compress2(reinterpret_cast<Bytef *>(out.data() + sizeSize), &outSize,
reinterpret_cast<const Bytef *>(from.data()), from.size(), level) != Z_OK)
throw Exception("Error compressing!");
out.resize(outSize + sizeSize);
@@ -40,8 +40,8 @@ std::vector<char> FilterZlib::filterRead(std::vector<char> from) const {
if (desI >= from.cend()) throw Exception("Unexpected end of archive!");
if (uncompress(reinterpret_cast<Bytef *>(out.data()), &size, reinterpret_cast<const Bytef *>(&(*desI)), std::distance(desI, from.cend())) !=
Z_OK)
if (uncompress(reinterpret_cast<Bytef *>(out.data()), &size, reinterpret_cast<const Bytef *>(&(*desI)),
std::distance(desI, from.cend())) != Z_OK)
throw Exception("Error decompressing!");
return out;

View File

@@ -1,4 +1,4 @@
cmake_minimum_required(VERSION 3.22)
cmake_minimum_required(VERSION 3.18)
find_package(PkgConfig REQUIRED)

View File

@@ -15,12 +15,10 @@
#include "objects/Chunk.h"
DirEntry *getf(std::string path) {
auto p = std::filesystem::relative(std::filesystem::u8path(path), "/");
auto p = std::filesystem::relative(std::filesystem::path(path), "/");
DirEntry *entry = RepoFS::root.get();
if (p != ".")
for (auto const &subp: p) {
entry = entry->children.at(subp).get();
}
for (auto const &subp: p) { entry = entry->children.at(subp).get(); }
return entry;
}
@@ -53,14 +51,12 @@ static int rfsGetattr(const char *path, struct stat *stbuf) {
return res;
}
static int rfsReaddir(const char *path, void *buf, fuse_fill_dir_t filler,
off_t offset, struct fuse_file_info *fi) {
static int rfsReaddir(const char *path, void *buf, fuse_fill_dir_t filler, off_t offset, struct fuse_file_info *fi) {
(void) offset;
(void) fi;
DirEntry *entry = RepoFS::root.get();
if (std::string(path) != "/")
try {
if (std::string(path) != "/") try {
entry = getf(path);
} catch (...) { return -ENOENT; }
@@ -77,42 +73,35 @@ static int rfsReaddir(const char *path, void *buf, fuse_fill_dir_t filler,
static int rfsOpen(const char *path, struct fuse_file_info *fi) {
DirEntry *entry = RepoFS::root.get();
if (std::string(path) != "/")
try {
if (std::string(path) != "/") try {
entry = getf(path);
} catch (...) { return -ENOENT; }
if ((fi->flags & 3) != O_RDONLY)
return -EACCES;
if ((fi->flags & 3) != O_RDONLY) return -EACCES;
return 0;
}
static int rfsRead(const char *path, char *buf, size_t size, off_t offset,
struct fuse_file_info *fi) {
static int rfsRead(const char *path, char *buf, size_t size, off_t offset, struct fuse_file_info *fi) {
size_t len;
(void) fi;
DirEntry *entry = RepoFS::root.get();
if (std::string(path) != "/")
try {
if (std::string(path) != "/") try {
entry = getf(path);
} catch (...) { return -ENOENT; }
len = entry->file->bytes;
if (offset < len) {
if (offset + size > len)
size = len - offset;
if (offset + size > len) size = len - offset;
auto curchunk = entry->file->chunks.upper_bound(offset);
if (curchunk == entry->file->chunks.begin()) {
std::abort();
}
if (curchunk == entry->file->chunks.begin()) { std::abort(); }
--curchunk;
size_t curInBuf = 0;
size_t curInChunk = offset - curchunk->first;
while (curInBuf < size) {
auto chunk = Serialize::deserialize<Chunk>(RepoFS::repo->getObject(curchunk->second));
auto chunk = Serialize::deserialize<Chunk>(RepoFS::repo->getObjectRaw(curchunk->second));
size_t read = std::min((size_t) chunk.length - curInChunk, size - curInBuf);
memcpy(buf + curInBuf, chunk.data.data() + curInChunk, read);
curInBuf += read;
@@ -127,13 +116,12 @@ static int rfsRead(const char *path, char *buf, size_t size, off_t offset,
static int rfsReadlink(const char *path, char *buf, size_t size) {
DirEntry *entry = RepoFS::root.get();
if (std::string(path) != "/")
try {
if (std::string(path) != "/") try {
entry = getf(path);
} catch (...) { return -ENOENT; }
if (entry->file->fileType != File::Type::Symlink) return -ENOENT;
auto dst = Serialize::deserialize<Chunk>(RepoFS::repo->getObject(entry->file->chunks.at(0)));
auto dst = Serialize::deserialize<Chunk>(RepoFS::repo->getObjectRaw(entry->file->chunks.at(0)));
strncpy(buf, dst.data.data(), std::min(dst.data.size(), size));
return 0;
@@ -151,22 +139,21 @@ void RepoFS::start(Repository *repo, std::string path) {
RepoFS::repo = repo;
auto ars = repo->getObjects(Object::ObjectType::Archive);
for (auto const &r: ars) {
auto a = Serialize::deserialize<Archive>(repo->getObject(r.second));
auto a = Serialize::deserialize<Archive>(repo->getObjectRaw(r.second));
for (auto const &f: a.files) {
auto file = Serialize::deserialize<File>(repo->getObject(f));
auto path = std::filesystem::u8path(file.name);
auto file = Serialize::deserialize<File>(repo->getObjectRaw(f));
auto path = std::filesystem::path(file.name);
DirEntry *entry = root->children[std::to_string(a.id)].get()
? root->children[std::to_string(a.id)].get()
: (root->children[std::to_string(a.id)] = std::make_unique<DirEntry>()).get();
entry->isFakeDir = true;
entry->name = std::to_string(a.id);
for (auto const &subp: path) {
entry = entry->children[subp].get()
? entry->children[subp].get()
entry = entry->children[subp].get() ? entry->children[subp].get()
: (entry->children[subp] = std::make_unique<DirEntry>()).get();
}
entry->file.emplace(file);
entry->name = std::filesystem::u8path(file.name).filename().u8string();
entry->name = std::filesystem::path(file.name).filename().string();
}
}
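rfsRead above locates the chunk that covers a byte offset by calling upper_bound on the offset-keyed chunk map and stepping back one entry. A small standalone sketch of that lookup (map contents invented for illustration):

#include <cassert>
#include <cstdint>
#include <map>

// Sketch only: chunks keyed by their starting offset within the file,
// values are object ids. upper_bound(offset) returns the first chunk that
// starts *after* offset, so the previous entry is the one containing it.
uint64_t chunkFor(const std::map<uint64_t, uint64_t> &chunks, uint64_t offset) {
    auto it = chunks.upper_bound(offset);
    assert(it != chunks.begin());   // mirrors the std::abort() guard in rfsRead
    --it;
    return it->second;
}

// Example: with chunks starting at offsets 0, 4096 and 8192,
// chunkFor(..., 5000) returns the id stored at key 4096.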

View File

@@ -3,6 +3,7 @@
#include "BytesFormatter.h"
#include "Command.h"
#include "CommandDelete.h"
#include "CommandDiff.h"
#include "CommandList.h"
#include "CommandListFiles.h"
@@ -36,8 +37,7 @@ Config getConf(int argc, char *argv[]) {
int help() {
for (auto const &o: Config::keys) {
std::cout << "--" << o.first << " <" << Config::KeyTypeToStr.at(o.second.type) << ">" << std::endl;
if (o.second.defaultval.has_value())
std::cout << " Default: " << o.second.defaultval.value() << std::endl;
if (o.second.defaultval.has_value()) std::cout << " Default: " << o.second.defaultval.value() << std::endl;
std::cout << " Is saved in repository: " << (o.second.remember ? "yes" : "no") << std::endl;
std::cout << " Info: " << o.second.info << std::endl;
}
@@ -80,23 +80,18 @@ int main(int argc, char *argv[]) {
}
std::string opt = argv[1];
if (opt == "help") {
return help();
}
if (opt == "help") { return help(); }
Config conf;
try {
conf = getConf(argc - 2, argv + 2);
} catch (std::exception &e) {
std::cerr << "Error reading config!" << std::endl
<< e.what() << std::endl;
std::cerr << "Error reading config!" << std::endl << e.what() << std::endl;
return -1;
}
if (opt == "init") {
return init(conf);
}
if (opt == "init") { return init(conf); }
auto repo = openRepo(conf);
@@ -115,6 +110,7 @@ int main(int argc, char *argv[]) {
commands.emplace(CommandListFiles::name, std::make_unique<CommandListFiles>());
commands.emplace(CommandList::name, std::make_unique<CommandList>());
commands.emplace(CommandMount::name, std::make_unique<CommandMount>());
commands.emplace(CommandDelete::name, std::make_unique<CommandDelete>());
if (commands.count(opt) == 0) {
std::cerr << "Unknown argument" << std::endl;
@@ -122,10 +118,7 @@ int main(int argc, char *argv[]) {
} else {
commands.at(opt)->run(ctx);
}
} catch (std::exception &e) {
std::cerr << "Error!" << std::endl
<< e.what() << std::endl;
} catch (...) {
} catch (std::exception &e) { std::cerr << "Error!" << std::endl << e.what() << std::endl; } catch (...) {
std::cerr << "Something very bad happened!" << std::endl;
}
}

View File

@@ -1,4 +1,4 @@
cmake_minimum_required(VERSION 3.22)
cmake_minimum_required(VERSION 3.18)
add_library(repo srcs/FileRepository.cpp srcs/Object.cpp srcs/Repository.cpp srcs/objects/Archive.cpp srcs/objects/Chunk.cpp srcs/objects/File.cpp srcs/objects/FileBuffer.cpp)

View File

@@ -35,17 +35,14 @@ public:
bool init() override;
bool flush() override;
std::vector<char> getObject(Object::idType id) const override;
std::vector<char> getObjectRaw(Object::idType id) const override;
bool putObject(const Object &obj) override;
bool deleteObject(const Object &obj) override;
bool deleteObjects(const std::vector<Object::idType> &objs) override;
std::vector<char> getObject(Object::ObjectType type, const std::string &key) const override;
std::vector<char> getObjectRaw(Object::ObjectType type, const std::string &key) const override;
Object::idType getObjectId(Object::ObjectType type, const std::string &key) const override;
std::vector<std::pair<std::string, Object::idType>> getObjects(Object::ObjectType type) const override;
bool clearCache(Object::ObjectType type) override;
bool addToCache(const Object &obj) override;
bool exists(Object::ObjectType type, const std::string &key) const override;
Object::idType getId() override;
@@ -75,7 +72,8 @@ private:
/// \param size Amount of bytes to read (no more than absoluteMaxFileLimit)
/// \return Vector of bytes of the file
/// \throws Exception on any error, or when absoluteMaxFileLimit is reached
std::vector<char> readFile(const std::filesystem::path &file, unsigned long long offset, unsigned long long size) const;
std::vector<char> readFile(const std::filesystem::path &file, unsigned long long offset,
unsigned long long size) const;
static constexpr unsigned long long absoluteMaxFileLimit{4ULL * 1024 * 1024 * 1024};///<Max file read size (4GB)
/// Writes \p data to \p file
@@ -106,11 +104,14 @@ private:
unsigned long long maxFileId = 1; ///< Largest ID of object storage file
std::unordered_map<Object::idType, OffsetEntry> offsetIndex;///< Used to locate Object%s in the filesystem
std::unordered_map<Object::idType, std::set<Object::idType>>
fileToObjs;///< Maps each object-storage file to the ids of the Object%s stored in it
std::mutex writeCacheLock; ///< Write cache lock
std::map<Object::idType, std::vector<char>> writeCache;///< Write cache, map of Object ids and their serialized data
unsigned long long writeCacheSize = 0; ///< Current byte size of the write cache
const unsigned long long writeCacheMax; ///< Target size of the write cache, it is automatically flushed after this is reached
const unsigned long long
writeCacheMax;///< Target size of the write cache; it is flushed automatically once this size is reached
/// Flushes the write cache
/// Takes the cache lock, swaps the cache with an empty one and unlocks it
@@ -118,7 +119,11 @@ private:
void flushWriteCache(std::unique_lock<std::mutex> &&lockW);
Object::idType largestUnusedId = 1; ///< Largest available objectID
std::unordered_map<Object::ObjectType, std::unordered_map<std::string, Object::idType>> keyIndex;///< Maps Object%'s keys to their ID's
std::vector<Object::idType> unusedIds;///< Vector of unused IDs
std::unordered_map<Object::ObjectType, std::unordered_map<std::string, Object::idType>>
keyIndex;///< Maps Object%'s keys to their ID's
std::unordered_map<Object::idType, uint64_t> refCounts;///< Count of references to an object per its id
};
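
The comments above describe the write cache and its flush only in words; here is a minimal sketch of that swap-under-lock pattern with simplified, placeholder types. The real flushWriteCache additionally records each object's offset in offsetIndex and fileToObjs and picks the storage file to append to, which is omitted here.

#include <cstdint>
#include <map>
#include <mutex>
#include <vector>

struct WriteCacheSketch {
    std::mutex cacheLock;
    std::map<uint64_t, std::vector<char>> cache;// id -> already serialized object data
    unsigned long long cacheSize = 0;

    // Mirrors the documented flushWriteCache: take the lock, swap the cache with an empty one,
    // unlock, then do the slow disk writes on the local copy while writers keep appending.
    void flush(std::unique_lock<std::mutex> &&lock) {
        std::map<uint64_t, std::vector<char>> local;
        std::swap(local, cache);
        cacheSize = 0;
        lock.unlock();
        for (auto const &entry: local) {
            // append entry.second to the current object-storage file here
            (void) entry;
        }
    }
};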

View File

@@ -17,12 +17,7 @@ class Object {
public:
using idType = uint64_t;///< Type alias for Object%'s ID
enum class ObjectType {
Archive,
File,
Chunk,
END
};
enum class ObjectType { Archive, File, Chunk, END };
/// Serializes the object to \p out
virtual void serialize(std::vector<char> &out) const;
@@ -36,9 +31,16 @@ public:
/// All derived objects should implement this method
virtual std::string getKey() const = 0;
/// Returns the ids of the objects that this object refers to
virtual const std::vector<idType> &getRefs() const;
const idType id; ///< Unique numerical ID of the object
const ObjectType type;///< Type of the object
static std::unique_ptr<Object> deserialize(const std::vector<char> &src);
static std::unique_ptr<Object> deserialize(std::vector<char>::const_iterator &in,
const std::vector<char>::const_iterator &end);
protected:
/// Default constructor
/// \param id Object ID

View File

@@ -45,7 +45,13 @@ public:
/// \param id ID of object to return
/// \return Serialized object
/// \throws Exception on any error or if object doesn't exist
virtual std::vector<char> getObject(Object::idType id) const = 0;
virtual std::vector<char> getObjectRaw(Object::idType id) const = 0;
/// Returns the Object with id \p id
/// \param id ID of object to return
/// \return Serialized object
/// \throws Exception on any error or if object doesn't exist
std::unique_ptr<Object> getObject(Object::idType id) const;
/// Adds the Object \p obj to the Repository
/// \param obj Constant reference to the object
@@ -54,17 +60,17 @@ public:
virtual bool putObject(const Object &obj) = 0;
/// Deletes Object \p obj from the Repository
/// \param obj Constant reference to the object
/// \param objs Constant reference to the vector with ids of objects to delete
/// \return True if successful, False if it didn't exist
/// \throws Exception on any error
virtual bool deleteObject(const Object &obj) = 0;
virtual bool deleteObjects(const std::vector<Object::idType> &objs) = 0;
/// Returns the Object of type \p type and with key \p key
/// \param type Type of the object
/// \param key Constant reference to the key of the object
/// \return Serialized object
/// \throws Exception on any error or if object doesn't exist
virtual std::vector<char> getObject(Object::ObjectType type, const std::string &key) const = 0;
virtual std::vector<char> getObjectRaw(Object::ObjectType type, const std::string &key) const = 0;
/// Returns the id of an Object of type \p type and with key \p key
/// \param type Type of the object
@@ -86,17 +92,6 @@ public:
/// \throws Exception on any error
virtual bool exists(Object::ObjectType type, const std::string &key) const = 0;
/// Erases all the cache entries of object type \p type
/// \param type Type of the objects
/// \return True
virtual bool clearCache(Object::ObjectType type) = 0;
/// Adds the object to the cache, but doesn't change it on disk otherwise
/// \param obj Constant reference to the object
/// \return True
/// \throws Exception on any error, or if the object doesn't exist
virtual bool addToCache(const Object &obj) = 0;
/// Returns the next available object id
virtual Object::idType getId() = 0;

View File

@@ -10,7 +10,7 @@
#include "../Object.h"
/// Object representing a backup
class Archive : public Object {
class Archive final : public Object {
public:
Archive(Object::idType id, std::string name, unsigned long long mtime, std::vector<idType> files);
@@ -22,6 +22,9 @@ public:
/// Returns the name of the archive
std::string getKey() const override;
/// Returns the files in this archive
const std::vector<Object::idType> &getRefs() const override;
const std::string name; ///< Archive name
const unsigned long long mtime; ///< Time of creation
const std::vector<idType> files;///< List of ids of File objects in the Archive

View File

@@ -11,7 +11,7 @@
#include "../Object.h"
/// Object representing a part of a File
class Chunk : public Object {
class Chunk final : public Object {
public:
Chunk(idType id, std::string, std::vector<char> data);

View File

@@ -15,18 +15,16 @@
#include "../Object.h"
/// Object representing a saved file
class File : public Object {
class File final : public Object {
public:
enum class Type {
Normal,
Symlink,
Directory,
END
};
enum class Type { Normal, Symlink, Directory, END };
static inline const std::unordered_map<Type, std::string> TypeToStr{{Type::Normal, "normal"}, {Type::Symlink, "symlink"}, {Type::Directory, "directory"}};
static inline const std::unordered_map<Type, std::string> TypeToStr{{Type::Normal, "normal"},
{Type::Symlink, "symlink"},
{Type::Directory, "directory"}};
File(Object::idType id, std::string name, unsigned long long bytes, unsigned long long mtime, std::string SHA, std::map<size_t, idType> chunks, Type fileType);
File(Object::idType id, std::string name, unsigned long long bytes, unsigned long long mtime, std::string SHA,
std::map<size_t, idType> chunks, Type fileType);
/// Deserialization constructor
File(std::vector<char>::const_iterator &in, const std::vector<char>::const_iterator &end);
@@ -70,6 +68,12 @@ public:
/// List of the chunks in the file
/// For a Normal file the chunks hold its contents; for a Directory the map is empty; a Symlink's single chunk holds its target path
const std::map<size_t, idType> chunks;
const std::vector<idType> &getRefs() const override;
private:
void makeChunksList() const;
mutable std::optional<std::vector<idType>> chunksList{std::nullopt};
};

View File

@@ -5,19 +5,21 @@
#include "FileRepository.h"
#include <exception>
#include <iostream>
#include <iterator>
#include <mutex>
#include <queue>
#include "CheckFilter.h"
#include "FilterFactory.h"
#include "Object.h"
#include "Serialize.h"
FileRepository::FileRepository(Config config) : Repository(std::move(config)), root(std::filesystem::path(this->config.getStr("repo"))), writeCacheMax(config.getInt("repo-target") * 1024 * 1024) {}
FileRepository::FileRepository(Config config)
: Repository(std::move(config)), root(std::filesystem::path(this->config.getStr("repo"))),
writeCacheMax(this->config.getInt("repo-target") * 1024 * 1024) {}
bool FileRepository::exists() {
return std::filesystem::is_directory(root) && std::filesystem::exists(root / "info");
}
bool FileRepository::exists() { return std::filesystem::is_directory(root) && std::filesystem::exists(root / "info"); }
bool FileRepository::flush() {
flushWriteCache(std::unique_lock(writeCacheLock));
@@ -31,14 +33,23 @@ bool FileRepository::open() {
std::swap(config, readConf);
config.merge(readConf);
if (config.getStr("compression") != "none") filters.addFilter(FilterFactory::makeFilter(config.getStr("compression"), config));
if (config.getStr("encryption") != "none") filters.addFilter(FilterFactory::makeFilter(config.getStr("encryption"), config));
if (config.getStr("compression") != "none")
filters.addFilter(FilterFactory::makeFilter(config.getStr("compression"), config));
if (config.getStr("encryption") != "none")
filters.addFilter(FilterFactory::makeFilter(config.getStr("encryption"), config));
filters.addFilter(FilterFactory::makeFilter("crc", config));
ready = true;
try {
std::tie(maxFileId, offsetIndex) = Serialize::deserialize<std::pair<decltype(maxFileId), decltype(offsetIndex)>>(filters.filterRead(readFile(root / "offsets")));
std::tie(keyIndex, largestUnusedId) = Serialize::deserialize<std::pair<decltype(keyIndex), decltype(largestUnusedId)>>(filters.filterRead(readFile(root / "index")));
std::tie(maxFileId, offsetIndex) =
Serialize::deserialize<std::pair<decltype(maxFileId), decltype(offsetIndex)>>(
filters.filterRead(readFile(root / "offsets")));
std::tie(keyIndex, largestUnusedId) =
Serialize::deserialize<std::pair<decltype(keyIndex), decltype(largestUnusedId)>>(
filters.filterRead(readFile(root / "index")));
refCounts = Serialize::deserialize<decltype(refCounts)>(filters.filterRead(readFile(root / "refcounts")));
unusedIds = Serialize::deserialize<decltype(unusedIds)>(filters.filterRead(readFile(root / "unusedIds")));
fileToObjs = Serialize::deserialize<decltype(fileToObjs)>(filters.filterRead(readFile(root / "fileToObjs")));
} catch (const std::exception &e) {
ready = false;
throw;
@@ -52,12 +63,14 @@ bool FileRepository::init() {
if (exists()) throw Exception("Trying to initialize already existing repository!");
if (!std::filesystem::is_directory(root) && !std::filesystem::create_directories(root))
throw Exception("Can't create directory " + root.u8string());
throw Exception("Can't create directory " + root.string());
writeFile(root / "info", CheckFilter::filterWriteStatic(Serialize::serialize(config)));
if (config.getStr("compression") != "none") filters.addFilter(FilterFactory::makeFilter(config.getStr("compression"), config));
if (config.getStr("encryption") != "none") filters.addFilter(FilterFactory::makeFilter(config.getStr("encryption"), config));
if (config.getStr("compression") != "none")
filters.addFilter(FilterFactory::makeFilter(config.getStr("compression"), config));
if (config.getStr("encryption") != "none")
filters.addFilter(FilterFactory::makeFilter(config.getStr("encryption"), config));
filters.addFilter(FilterFactory::makeFilter("crc", config));
ready = true;
@@ -71,15 +84,17 @@ FileRepository::~FileRepository() {
writeFile(root / "offsets", filters.filterWrite(Serialize::serialize(std::make_pair(maxFileId, offsetIndex))));
writeFile(root / "index", filters.filterWrite(Serialize::serialize(std::make_pair(keyIndex, largestUnusedId))));
writeFile(root / "unusedIds", filters.filterWrite(Serialize::serialize(unusedIds)));
writeFile(root / "refcounts", filters.filterWrite(Serialize::serialize(refCounts)));
writeFile(root / "fileToObjs", filters.filterWrite(Serialize::serialize(fileToObjs)));
}
}
std::vector<char> FileRepository::getObject(Object::idType id) const {
std::vector<char> FileRepository::getObjectRaw(Object::idType id) const {
if (!ready) throw Exception("Tried working with uninitialized repo!");
std::unique_lock lock(repoLock);
if (offsetIndex.count(id) == 0)
throw Exception("Object with id " + std::to_string(id) + " doesn't exist!");
if (offsetIndex.count(id) == 0) throw Exception("Object with id " + std::to_string(id) + " doesn't exist!");
auto entry = offsetIndex.at(id);
lock.unlock();
@@ -95,9 +110,7 @@ bool FileRepository::writeObject(const Object &obj) {
writeCache[obj.id] = std::move(tmp);
// If we have reached the target file size, flush the cache
if (writeCacheSize >= writeCacheMax) {
flushWriteCache(std::move(lockW));
}
if (writeCacheSize >= writeCacheMax) { flushWriteCache(std::move(lockW)); }
}
return true;
}
@@ -128,6 +141,7 @@ void FileRepository::flushWriteCache(std::unique_lock<std::mutex> &&lockW) {
{
std::lock_guard lockI(repoLock);
offsetIndex.emplace(i.first, OffsetEntry(currentFileId, offset, i.second.size()));
fileToObjs[currentFileId].emplace(i.first);
}
offset += i.second.size();
ofstream.rdbuf()->sputn(i.second.data(), i.second.size());
@@ -139,34 +153,103 @@ bool FileRepository::putObject(const Object &obj) {
{
std::lock_guard lock(repoLock);
keyIndex[obj.type][obj.getKey()] = obj.id;
for (auto const &i: obj.getRefs()) refCounts[i]++;
}
writeObject(obj);
return true;
}
bool FileRepository::deleteObject(const Object &obj) {
bool FileRepository::deleteObjects(const std::vector<Object::idType> &objs) {
if (!ready) throw Exception("Tried working with uninitialized repo!");
throw Exception("Deletion not implemented!");
std::queue<Object::idType> toVisit;
std::set<Object::idType> toDelete;
for (auto const &o: objs) {
toVisit.emplace(o);
toDelete.emplace(o);
}
std::cout << "Scanning for objects" << std::endl;
while (!toVisit.empty()) {
auto o = toVisit.front();
toVisit.pop();
auto obj = getObject(o);
for (const auto &id: obj->getRefs()) {
std::unique_lock lock(repoLock);
refCounts[id]--;
if (refCounts.at(id) == 0) {
toDelete.emplace(id);
toVisit.emplace(id);
refCounts.erase(id);
}
}
}
std::cout << "Found " << toDelete.size() << " to delete " << std::endl;
std::set<uint64_t> touchedFiles;
for (auto const &id: toDelete) touchedFiles.emplace(offsetIndex.at(id).fileId);
std::cout << "Will rewrite " << touchedFiles.size() << " files" << std::endl;
for (auto const &f: touchedFiles) {
std::cout << "Rewriting file " << f << std::endl;
const auto &objs = fileToObjs.at(f);
std::vector<std::unique_ptr<Object>> objects;
for (auto const &o: objs) {
auto obj = getObject(o);
{
std::unique_lock lock(repoLock);
offsetIndex.erase(o);
}
if (toDelete.find(o) == toDelete.end()) writeObject(*obj);// keep the object: rewrite it without re-counting its refs
}
{
std::unique_lock lock(repoLock);
fileToObjs.erase(f);
}
std::filesystem::remove(root / std::to_string(f));
}
{
std::unique_lock lock(repoLock);
for (auto const &id: toDelete) {
unusedIds.emplace_back(id);
// FIXME: this is a bit inefficient
for (auto &m: keyIndex) erase_if(m.second, [&](const auto &t) { return toDelete.contains(t.second); });
}
}
return true;
}
std::vector<char> FileRepository::readFile(const std::filesystem::path &file, unsigned long long offset, unsigned long long size) const {
if (size > absoluteMaxFileLimit) throw Exception("Tried to read " + std::to_string(size) +
" bytes from " + file.u8string() +
std::vector<char> FileRepository::readFile(const std::filesystem::path &file, unsigned long long offset,
unsigned long long size) const {
if (size > absoluteMaxFileLimit)
throw Exception("Tried to read " + std::to_string(size) + " bytes from " + file.string() +
" which is more than absoluteMaxFileLimit");
std::ifstream ifstream(file, std::ios::binary | std::ios::in);
if (!ifstream.is_open()) throw Exception("Can't open file " + file.u8string() + " for reading!");
if (!ifstream.is_open()) throw Exception("Can't open file " + file.string() + " for reading!");
std::vector<char> buf(size);
if (ifstream.rdbuf()->pubseekpos(offset) == std::streampos(std::streamoff(-1))) throw Exception("Unexpected end of file " + file.u8string());
if (ifstream.rdbuf()->sgetn(buf.data(), size) != size) throw Exception("Unexpected end of file " + file.u8string());
if (ifstream.rdbuf()->pubseekpos(offset) == std::streampos(std::streamoff(-1)))
throw Exception("Unexpected end of file " + file.string());
if (ifstream.rdbuf()->sgetn(buf.data(), size) != size) throw Exception("Unexpected end of file " + file.string());
return buf;
}
std::vector<char> FileRepository::readFile(const std::filesystem::path &file) const {
if (!std::filesystem::is_regular_file(file)) throw Exception("File " + file.u8string() + " is not a regular file!");
if (!std::filesystem::is_regular_file(file)) throw Exception("File " + file.string() + " is not a regular file!");
auto fileSize = std::filesystem::file_size(file);
if (fileSize == 0) return {};
return readFile(file, 0, fileSize);
@@ -174,15 +257,15 @@ std::vector<char> FileRepository::readFile(const std::filesystem::path &file) co
bool FileRepository::writeFile(const std::filesystem::path &file, const std::vector<char> &data) {
std::ofstream ofstream(file, std::ios::binary | std::ios::trunc | std::ios::out);
if (!ofstream.is_open()) throw Exception("Can't open file " + file.u8string() + " for writing!");
if (!ofstream.is_open()) throw Exception("Can't open file " + file.string() + " for writing!");
if (ofstream.rdbuf()->sputn(data.data(), data.size()) != data.size())
throw Exception("Couldn't write all the data for " + file.u8string());
throw Exception("Couldn't write all the data for " + file.string());
return true;
}
std::vector<char> FileRepository::getObject(Object::ObjectType type, const std::string &key) const {
return getObject(getObjectId(type, key));
std::vector<char> FileRepository::getObjectRaw(Object::ObjectType type, const std::string &key) const {
return getObjectRaw(getObjectId(type, key));
}
Object::idType FileRepository::getObjectId(Object::ObjectType type, const std::string &key) const {
@@ -195,8 +278,7 @@ std::vector<std::pair<std::string, Object::idType>> FileRepository::getObjects(O
std::lock_guard lock(repoLock);
std::vector<std::pair<std::string, Object::idType>> out;
if (keyIndex.count(type) == 0) return {};
for (auto const &i: keyIndex.at(type))
out.emplace_back(i);
for (auto const &i: keyIndex.at(type)) out.emplace_back(i);
return out;
}
@@ -208,14 +290,19 @@ bool FileRepository::exists(Object::ObjectType type, const std::string &key) con
Object::idType FileRepository::getId() {
std::lock_guard lock(repoLock);
if (!unusedIds.empty()) {
auto ret = unusedIds.back();
unusedIds.pop_back();
return ret;
}
return largestUnusedId++;
}
FileRepository::OffsetEntry::OffsetEntry(std::vector<char, std::allocator<char>>::const_iterator &in, const std::vector<char, std::allocator<char>>::const_iterator &end)
FileRepository::OffsetEntry::OffsetEntry(std::vector<char, std::allocator<char>>::const_iterator &in,
const std::vector<char, std::allocator<char>>::const_iterator &end)
: fileId(Serialize::deserialize<decltype(fileId)>(in, end)),
offset(Serialize::deserialize<decltype(offset)>(in, end)),
length(Serialize::deserialize<decltype(length)>(in, end)) {
}
length(Serialize::deserialize<decltype(length)>(in, end)) {}
void FileRepository::OffsetEntry::serialize(std::vector<char> &out) const {
Serialize::serialize(fileId, out);
@@ -223,24 +310,6 @@ void FileRepository::OffsetEntry::serialize(std::vector<char> &out) const {
Serialize::serialize(length, out);
}
FileRepository::OffsetEntry::OffsetEntry(unsigned long long int fileId, unsigned long long int offset, unsigned long long int length)
FileRepository::OffsetEntry::OffsetEntry(unsigned long long int fileId, unsigned long long int offset,
unsigned long long int length)
: fileId(fileId), offset(offset), length(length) {}
bool FileRepository::clearCache(Object::ObjectType type) {
keyIndex[type] = {};
return true;
}
bool FileRepository::addToCache(const Object &obj) {
{
std::unique_lock lock(repoLock);
if (offsetIndex.count(obj.id) == 0)
throw Exception("Object with id " + std::to_string(obj.id) + " doesn't exist!");
}
{
std::lock_guard lock(repoLock);
keyIndex[obj.type][obj.getKey()] = obj.id;
}
return true;
}
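
The new deleteObjects above replaces the old "Deletion not implemented" stub with a reference-counting sweep: decrement the refcounts of everything a deleted object points at, cascade the deletion to anything that drops to zero, then rewrite only the storage files that held deleted objects. Below is a minimal, single-threaded sketch of just the sweep step, with a plain refs map standing in for getObject(id)->getRefs() and the locking and file rewriting left out.

#include <cstdint>
#include <queue>
#include <set>
#include <unordered_map>
#include <vector>

using Id = uint64_t;

std::set<Id> sweep(const std::vector<Id> &roots, std::unordered_map<Id, uint64_t> &refCounts,
                   const std::unordered_map<Id, std::vector<Id>> &refs) {
    std::queue<Id> toVisit;
    std::set<Id> toDelete;
    for (auto id: roots) {
        toVisit.push(id);
        toDelete.insert(id);
    }
    while (!toVisit.empty()) {
        Id current = toVisit.front();
        toVisit.pop();
        auto it = refs.find(current);
        if (it == refs.end()) continue;
        for (auto child: it->second) {
            if (--refCounts[child] == 0) {// last reference gone, cascade the deletion
                toDelete.insert(child);
                toVisit.push(child);
                refCounts.erase(child);
            }
        }
    }
    return toDelete;
}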

View File

@@ -6,12 +6,14 @@
#include "Serialize.h"
#include "objects/Archive.h"
#include "objects/Chunk.h"
#include "objects/File.h"
Object::Object(idType id, ObjectType type) : id(id), type(type) {}
Object::Object(std::vector<char>::const_iterator &in, const std::vector<char>::const_iterator &end)
: id(Serialize::deserialize<idType>(in, end)),
type(Serialize::deserialize<ObjectType>(in, end)) {
}
: id(Serialize::deserialize<idType>(in, end)), type(Serialize::deserialize<ObjectType>(in, end)) {}
void Object::serialize(std::vector<char> &out) const {
Serialize::serialize(id, out);
@@ -19,3 +21,32 @@ void Object::serialize(std::vector<char> &out) const {
}
Object::~Object() = default;
static std::vector<Object::idType> emptyRef{};
const std::vector<Object::idType> &Object::getRefs() const { return emptyRef; }
std::unique_ptr<Object> Object::deserialize(std::vector<char>::const_iterator &in,
const std::vector<char>::const_iterator &end) {
auto inCpy = in;
auto id = Serialize::deserialize<idType>(in, end);
auto type = Serialize::deserialize<ObjectType>(in, end);
switch (type) {
case ObjectType::Archive:
return std::make_unique<Archive>(Serialize::deserialize<Archive>(inCpy, end));
case ObjectType::File:
return std::make_unique<File>(Serialize::deserialize<File>(inCpy, end));
case ObjectType::Chunk:
return std::make_unique<Chunk>(Serialize::deserialize<Chunk>(inCpy, end));
case ObjectType::END:
default:
throw Exception("Bad object!");
}
}
std::unique_ptr<Object> Object::deserialize(const std::vector<char> &src) {
auto srcIterator = src.cbegin();
return deserialize(srcIterator, src.end());
}

View File

@@ -7,6 +7,8 @@ Repository::~Repository() = default;
Repository::Repository(Config config) : config(std::move(config)) {}
const Config &Repository::getConfig() const {
return config;
const Config &Repository::getConfig() const { return config; }
std::unique_ptr<Object> Repository::getObject(Object::idType id) const {
return Object::deserialize(this->getObjectRaw(id));
}
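
A hedged usage sketch of the getObjectRaw/getObject split introduced here: the virtual getObjectRaw returns the serialized bytes, while the non-virtual getObject defined just above additionally deserializes them into the concrete Object subclass. It assumes the Repository.h and Object.h declarations shown earlier in this diff; repo and archiveId are placeholders.

#include <iostream>
#include <memory>
#include <vector>

void dumpRefs(const Repository &repo, Object::idType archiveId) {
    std::unique_ptr<Object> obj = repo.getObject(archiveId); // deserialized object
    for (auto ref: obj->getRefs()) std::cout << "references object " << ref << std::endl;
    std::vector<char> raw = repo.getObjectRaw(archiveId);    // same object, raw serialized bytes
    std::cout << raw.size() << " serialized bytes" << std::endl;
}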

View File

@@ -11,8 +11,7 @@ Archive::Archive(Object::idType id, std::string name, unsigned long long mtime,
: Object(id, ObjectType::Archive), name(name), mtime(mtime), files(files) {}
Archive::Archive(std::vector<char>::const_iterator &in, const std::vector<char>::const_iterator &end)
: Object(in, end),
name(Serialize::deserialize<std::string>(in, end)),
: Object(in, end), name(Serialize::deserialize<std::string>(in, end)),
mtime(Serialize::deserialize<unsigned long long>(in, end)),
files(Serialize::deserialize<std::remove_const<decltype(files)>::type>(in, end)) {
if (type != ObjectType::Archive) throw Exception("Type mismatch for Archive!");
@@ -28,6 +27,6 @@ void Archive::serialize(std::vector<char> &out) const {
Serialize::serialize(files.size(), out);
}
std::string Archive::getKey() const {
return name;
}
std::string Archive::getKey() const { return name; }
const std::vector<Object::idType> &Archive::getRefs() const { return files; }

View File

@@ -7,11 +7,11 @@
#include "Exception.h"
#include "Serialize.h"
Chunk::Chunk(idType id, std::string SHA, std::vector<char> data) : Object(id, ObjectType::Chunk), data(std::move(data)), SHA(std::move(SHA)), length(this->data.size()) {}
Chunk::Chunk(idType id, std::string SHA, std::vector<char> data)
: Object(id, ObjectType::Chunk), data(std::move(data)), SHA(std::move(SHA)), length(this->data.size()) {}
Chunk::Chunk(std::vector<char>::const_iterator &in, const std::vector<char>::const_iterator &end)
: Object(in, end),
SHA(Serialize::deserialize<std::remove_const<decltype(SHA)>::type>(in, end)),
: Object(in, end), SHA(Serialize::deserialize<std::remove_const<decltype(SHA)>::type>(in, end)),
data(Serialize::deserialize<std::remove_const<decltype(data)>::type>(in, end)),
length(Serialize::deserialize<std::remove_const<decltype(length)>::type>(in, end)) {
if (type != ObjectType::Chunk) throw Exception("Type mismatch for Chunk!");
@@ -25,6 +25,4 @@ void Chunk::serialize(std::vector<char> &out) const {
Serialize::serialize(length, out);
}
std::string Chunk::getKey() const {
return SHA;
}
std::string Chunk::getKey() const { return SHA; }

View File

@@ -11,12 +11,13 @@
#include "Exception.h"
#include "Serialize.h"
File::File(Object::idType id, std::string name, unsigned long long bytes, unsigned long long mtime, std::string SHA, std::map<size_t, idType> chunks, Type fileType)
: Object(id, ObjectType::File), name(name), bytes(bytes), mtime(mtime), SHA(SHA), fileType(fileType), chunks(std::move(chunks)) {}
File::File(Object::idType id, std::string name, unsigned long long bytes, unsigned long long mtime, std::string SHA,
std::map<size_t, idType> chunks, Type fileType)
: Object(id, ObjectType::File), name(name), bytes(bytes), mtime(mtime), SHA(SHA), fileType(fileType),
chunks(std::move(chunks)) {}
File::File(std::vector<char>::const_iterator &in, const std::vector<char>::const_iterator &end)
: Object(in, end),
name(Serialize::deserialize<std::string>(in, end)),
: Object(in, end), name(Serialize::deserialize<std::string>(in, end)),
bytes(Serialize::deserialize<unsigned long long>(in, end)),
mtime(Serialize::deserialize<unsigned long long>(in, end)),
SHA(Serialize::deserialize<std::remove_const<decltype(SHA)>::type>(in, end)),
@@ -35,47 +36,45 @@ void File::serialize(std::vector<char> &out) const {
Serialize::serialize(chunks, out);
}
std::string File::getKey() const {
return name;
}
std::string File::getKey() const { return name; }
File::Type File::getFileType(const std::filesystem::path &p) {
if (std::filesystem::is_symlink(p)) return Type::Symlink;
if (std::filesystem::is_directory(p)) return Type::Directory;
if (std::filesystem::is_regular_file(p)) return Type::Normal;
throw Exception("Unsupported file type! " + p.u8string());
throw Exception("Unsupported file type! " + p.string());
}
std::vector<char> File::getFileContents(const std::filesystem::path &p) {
auto type = getFileType(p);
if (type == Type::Normal) throw Exception(p.u8string() + " is a normal file!");
if (type == Type::Directory) {
return {};
}
if (type == Type::Normal) throw Exception(p.string() + " is a normal file!");
if (type == Type::Directory) { return {}; }
if (type == Type::Symlink) {
auto target = std::filesystem::read_symlink(p).u8string();
auto target = std::filesystem::read_symlink(p).string();
std::vector<char> target_null_term = {target.begin(), target.end()};
target_null_term.emplace_back('\0');
return target_null_term;
}
throw Exception("Error with file " + p.u8string());
throw Exception("Error with file " + p.string());
}
unsigned long long File::getFileMtime(const std::filesystem::path &p) {
auto type = getFileType(p);
if (type == Type::Normal || type == Type::Directory)
return static_cast<const unsigned long long int>(std::chrono::duration_cast<std::chrono::seconds>(std::filesystem::last_write_time(p).time_since_epoch()).count());
return static_cast<const unsigned long long int>(
std::chrono::duration_cast<std::chrono::seconds>(std::filesystem::last_write_time(p).time_since_epoch())
.count());
else if (type == Type::Symlink) {
auto path = p.u8string();
auto path = p.string();
struct stat sb;
if (lstat(path.c_str(), &sb) != 0) throw Exception("Error reading mtime for " + p.u8string());
if (lstat(path.c_str(), &sb) != 0) throw Exception("Error reading mtime for " + p.string());
#ifdef __APPLE__
return sb.st_mtimespec.tv_sec;
#else
return sb.st_mtime;
#endif
}
throw Exception("Error with file " + p.u8string());
throw Exception("Error with file " + p.string());
}
unsigned long long File::getFileSize(const std::filesystem::path &p) {
@@ -84,3 +83,16 @@ unsigned long long File::getFileSize(const std::filesystem::path &p) {
else
return getFileContents(p).size();
}
void File::makeChunksList() const {
if (chunksList) return;
chunksList.emplace();
chunksList->reserve(chunks.size());
for (auto const &c: chunks) chunksList->emplace_back(c.second);
}
const std::vector<Object::idType> &File::getRefs() const {
if (!chunksList) makeChunksList();
return *chunksList;
}

View File

@@ -6,13 +6,12 @@
#include "Serialize.h"
FileBuffer::FileBuffer(const Repository *repo, Object::idType fileId) : repo(repo), file(Serialize::deserialize<File>(repo->getObject(fileId))), chunksQueue() {
FileBuffer::FileBuffer(const Repository *repo, Object::idType fileId)
: repo(repo), file(Serialize::deserialize<File>(repo->getObjectRaw(fileId))), chunksQueue() {
for (auto const &id: file.chunks) chunksQueue.emplace(id.second);
};
int FileBuffer::sync() {
return 0;
}
int FileBuffer::sync() { return 0; }
std::streamsize FileBuffer::xsgetn(char *s, std::streamsize countr) {
if (underflow() == std::char_traits<char>::eof()) return 0;
@@ -28,8 +27,7 @@ std::streamsize FileBuffer::xsgetn(char *s, std::streamsize countr) {
int FileBuffer::uflow() {
auto out = underflow();
if (out != traits_type::eof())
curGetBufPos++;
if (out != traits_type::eof()) curGetBufPos++;
return out;
}
@@ -37,15 +35,14 @@ int FileBuffer::underflow() {
if (getBuf.empty() || curGetBufPos == getBuf.size()) {
if (chunksQueue.empty()) return traits_type::eof();
else {
auto chunk = Serialize::deserialize<Chunk>(repo->getObject(chunksQueue.front()));
auto chunk = Serialize::deserialize<Chunk>(repo->getObjectRaw(chunksQueue.front()));
getBuf = chunk.data;
chunksQueue.pop();
curGetBufPos = 0;
}
}
if (!getBuf.empty())
return traits_type::to_int_type(getBuf[curGetBufPos]);
if (!getBuf.empty()) return traits_type::to_int_type(getBuf[curGetBufPos]);
else
return traits_type::eof();
}
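
The sync/underflow/uflow/xsgetn overrides above suggest FileBuffer is a std::streambuf over a file's chunk queue, so it can be wrapped in a std::istream to read a backed-up file front to back. A hedged usage sketch (repo and fileId are placeholders, error handling omitted):

#include <istream>
#include <iterator>
#include <vector>

std::vector<char> readWholeFile(const Repository *repo, Object::idType fileId) {
    FileBuffer buf(repo, fileId);// pulls chunks lazily via getObjectRaw
    std::istream stream(&buf);
    return {std::istreambuf_iterator<char>(stream), std::istreambuf_iterator<char>()};
}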

View File

@@ -1,4 +1,4 @@
cmake_minimum_required(VERSION 3.22)
cmake_minimum_required(VERSION 3.18)
add_library(utils srcs/BytesFormatter.cpp srcs/Config.cpp srcs/Exception.cpp srcs/Logger.cpp srcs/Progress.cpp srcs/RunningAverage.cpp srcs/RunningDiffAverage.cpp srcs/Signals.cpp srcs/ThreadPool.cpp)

View File

@@ -64,11 +64,7 @@ public:
using serializable = std::true_type;
enum class KeyType {
STRING,
INT,
LIST
};
enum class KeyType { STRING, INT, LIST };
/// Struct to record key options
struct keyopts {
@@ -79,7 +75,9 @@ public:
};
/// Used for printing help
const static inline std::unordered_map<KeyType, std::string> KeyTypeToStr{{KeyType::STRING, "string"}, {KeyType::INT, "number"}, {KeyType::LIST, "comma-separated list"}};
const static inline std::unordered_map<KeyType, std::string> KeyTypeToStr{{KeyType::STRING, "string"},
{KeyType::INT, "number"},
{KeyType::LIST, "comma-separated list"}};
/// Default values and their metadata
const static inline std::unordered_map<std::string, keyopts> keys{
@@ -98,7 +96,9 @@ public:
{"chunker", {"buzhash", KeyType::STRING, true, "Chunker to use (const, buzhash)"}},
{"chunker-min", {"256", KeyType::INT, true, "Min chunk size in KB"}},
{"chunker-max", {"4096", KeyType::INT, true, "Max chunk size in KB"}},
{"chunker-mask", {"20", KeyType::INT, true, "Chunker hash bit mask (mask of n bits results in average chunk size of 2^n bytes)"}},
{"chunker-mask",
{"20", KeyType::INT, true,
"Chunker hash bit mask (mask of n bits results in average chunk size of 2^n bytes)"}},
{"repo-target", {"128", KeyType::INT, true, "Target size of files for FileRepository"}},
{"progress", {"pretty", KeyType::STRING, false, "How to print progress (simple, pretty, none)"}},
{"verbose", {"1", KeyType::INT, false, "Message verbosity (0 - error, 1 - info, -1 - quiet)"}},

View File

@@ -22,7 +22,9 @@ public:
/// \param out Function to call for output
/// \param format Format of the progress string, vector of strings or functions that return strings
/// \param conf Config, used to specify format (`pretty` for line rewriting, `simple` for normal line printing, or `none`)
Progress(std::function<void(std::string, int)> out, std::vector<std::variant<std::function<std::string()>, std::string>> format, const Config &conf, int level = 1);
Progress(std::function<void(std::string, int)> out,
std::vector<std::variant<std::function<std::string()>, std::string>> format, const Config &conf,
int level = 1);
Progress &operator=(Progress rhs) = delete;
Progress(const Progress &orig) = delete;
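
A hedged usage sketch of the constructor above: the format vector mixes fixed strings with callbacks that are re-evaluated on every refresh, and conf selects pretty/simple/none rendering. The Config value and the byte counter are placeholders.

#include <atomic>
#include <iostream>
#include <string>

void withProgress(const Config &conf) {
    std::atomic<unsigned long long> copied{0};
    Progress progress([](std::string s, int) { std::cout << s; },
                      {std::string("Copied "), [&copied] { return std::to_string(copied.load()) + " bytes"; }},
                      conf);
    // ... do the work, incrementing `copied` as bytes are written ...
}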

View File

@@ -40,7 +40,8 @@ namespace Serialize {
struct is_pair : std::false_type {};
template<typename P>
struct is_pair<P, std::void_t<decltype(std::declval<P>().first)>, std::void_t<decltype(std::declval<P>().second)>> : std::true_type {};
struct is_pair<P, std::void_t<decltype(std::declval<P>().first)>, std::void_t<decltype(std::declval<P>().second)>>
: std::true_type {};
template<typename, typename, typename = void>
struct has_emplace_back : std::false_type {};
@@ -104,22 +105,19 @@ namespace Serialize {
} else if constexpr (std::is_enum<T>::value) {
// If the object is an enum, deserialize an int and cast it to the enum
auto tmp = deserialize<uint32_t>(in, end);
if (tmp >= 0 && tmp < static_cast<uint32_t>(T::END))
return static_cast<T>(tmp);
if (tmp >= 0 && tmp < static_cast<uint32_t>(T::END)) return static_cast<T>(tmp);
else
throw Exception("Enum out of range!");
} else if constexpr (sizeof(T) == 1) {
// If it's a single byte, just copy it
if (std::distance(in, end) < sizeof(T))
throw Exception("Unexpected end of object!");
if (std::distance(in, end) < sizeof(T)) throw Exception("Unexpected end of object!");
return *(in++);
} else if constexpr (std::is_integral<T>::value) {
uint64_t tmp;
static_assert(sizeof(tmp) == 8);
// If the object is a number, copy it byte-by-byte
if (std::distance(in, end) < sizeof(tmp))
throw Exception("Unexpected end of object!");
if (std::distance(in, end) < sizeof(tmp)) throw Exception("Unexpected end of object!");
std::copy(in, in + sizeof(tmp), reinterpret_cast<char *>(&tmp));
in += sizeof(tmp);
@@ -134,8 +132,7 @@ namespace Serialize {
T out;
if constexpr (sizeof(typename T::value_type) == 1) {
// Optimization for char vectors
if (std::distance(in, end) < size)
throw Exception("Unexpected end of object!");
if (std::distance(in, end) < size) throw Exception("Unexpected end of object!");
out.insert(out.end(), in, in + size);
in += size;
} else
@@ -143,8 +140,7 @@ namespace Serialize {
using V = typename T::value_type;
V v = deserialize<V>(in, end);
// Try either emplace_back or emplace if it doesn't exist
if constexpr (has_emplace_back<T, V>::value)
out.emplace_back(std::move(v));
if constexpr (has_emplace_back<T, V>::value) out.emplace_back(std::move(v));
else
out.emplace(std::move(v));
}
@@ -175,7 +171,8 @@ namespace Serialize {
// If the object is a number, copy it byte-by-byte
uint64_t tmp = htobe64(static_cast<uint64_t>(what));
static_assert(sizeof(tmp) == 8);
out.insert(out.end(), (reinterpret_cast<const char *>(&tmp)), (reinterpret_cast<const char *>(&tmp) + sizeof(tmp)));
out.insert(out.end(), (reinterpret_cast<const char *>(&tmp)),
(reinterpret_cast<const char *>(&tmp) + sizeof(tmp)));
} else {
// Otherwise we treat it as a container, in format of <number of elements>b<elements>e
serialize(what.size(), out);
@@ -184,9 +181,7 @@ namespace Serialize {
// Optimization for char vectors
out.insert(out.end(), what.begin(), what.end());
} else
for (auto const &i: what) {
serialize(i, out);
}
for (auto const &i: what) { serialize(i, out); }
serialize('e', out);
}
}
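
A hedged round-trip example of the container format described above (element count, then 'b', the elements, then 'e', with integers stored as 8-byte big-endian). It uses a refCounts-style map and assumes the whole-buffer deserialize overload used elsewhere in this diff:

#include <cstdint>
#include <unordered_map>
#include <vector>

void roundTrip() {
    std::unordered_map<uint64_t, uint64_t> counts{{1, 3}, {2, 1}};
    std::vector<char> buf;
    Serialize::serialize(counts, buf);
    auto restored = Serialize::deserialize<std::unordered_map<uint64_t, uint64_t>>(buf);
    // restored == counts
}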

View File

@@ -20,9 +20,7 @@ Config &Config::add(const std::string &k, const std::string &v) {
case KeyType::INT:
try {
std::stoi(v);
} catch (...) {
throw Exception("Can't convert " + k + " to integer!");
}
} catch (...) { throw Exception("Can't convert " + k + " to integer!"); }
break;
case KeyType::LIST:
break;
@@ -32,17 +30,14 @@ Config &Config::add(const std::string &k, const std::string &v) {
return *this;
}
int Config::getInt(const std::string &k) const {
return std::stoi(getStr(k));
}
int Config::getInt(const std::string &k) const { return std::stoi(getStr(k)); }
std::vector<std::string> Config::getList(const std::string &k) const {
std::vector<std::string> out;
std::string next;
std::stringstream inss(getStr(k));
while (std::getline(inss, next, ',')) {
if (next != "")
out.emplace_back(next);
if (next != "") out.emplace_back(next);
}
return out;
}
@@ -54,28 +49,23 @@ std::string Config::getStr(const std::string &k) const {
throw Exception("Option " + k + " not specified and no default value exists!");
}
bool Config::exists(const std::string &k) const {
return (data.count(k) > 0) || (keys.at(k).defaultval.has_value());
}
bool Config::exists(const std::string &k) const { return (data.count(k) > 0) || (keys.at(k).defaultval.has_value()); }
Config::Config() = default;
Config::Config(std::vector<char, std::allocator<char>>::const_iterator &in, const std::vector<char, std::allocator<char>>::const_iterator &end) {
Config::Config(std::vector<char, std::allocator<char>>::const_iterator &in,
const std::vector<char, std::allocator<char>>::const_iterator &end) {
data = Serialize::deserialize<decltype(data)>(in, end);
}
void Config::serialize(std::vector<char> &out) const {
std::vector<decltype(data)::value_type> temp;
for (const auto &d: data) {
if (keys.at(d.first).remember) {
temp.emplace_back(d);
}
if (keys.at(d.first).remember) { temp.emplace_back(d); }
}
Serialize::serialize(temp, out);
}
void Config::merge(const Config &config) {
for (const auto &d: config.data) {
add(d.first, d.second);
}
for (const auto &d: config.data) { add(d.first, d.second); }
}

View File

@@ -23,8 +23,7 @@ std::string Exception::getStacktrace() {
if (strings != nullptr) {
out << "Stacktrace:" << std::endl;
for (int i = 0; i < n; i++)
out << strings[i] << std::endl;
for (int i = 0; i < n; i++) out << strings[i] << std::endl;
}
free(strings);

View File

@@ -4,8 +4,7 @@
#include "Logger.h"
Logger::Logger(int level, std::ostream &out) : loglevel(level), out(out) {
}
Logger::Logger(int level, std::ostream &out) : loglevel(level), out(out) {}
void Logger::write(const std::string &what, int whatlevel) {
if (whatlevel <= loglevel) {
@@ -14,6 +13,4 @@ void Logger::write(const std::string &what, int whatlevel) {
}
}
void Logger::setLevel(int level) {
loglevel = level;
}
void Logger::setLevel(int level) { loglevel = level; }

View File

@@ -7,7 +7,10 @@
#include <sstream>
#include <utility>
Progress::Progress(std::function<void(std::string, int)> out, std::vector<std::variant<std::function<std::string()>, std::string>> format, const Config &conf, int level) : format(std::move(format)), out(std::move(out)), type(conf.getStr("progress")), progresslevel(level) {
Progress::Progress(std::function<void(std::string, int)> out,
std::vector<std::variant<std::function<std::string()>, std::string>> format, const Config &conf,
int level)
: format(std::move(format)), out(std::move(out)), type(conf.getStr("progress")), progresslevel(level) {
if (type != "none") {
this->out("\n\n", level);
thread = std::thread(&Progress::showProgress, this);
@@ -16,16 +19,13 @@ Progress::Progress(std::function<void(std::string, int)> out, std::vector<std::v
Progress::~Progress() {
stop = true;
if (thread.joinable())
thread.join();
if (thread.joinable()) thread.join();
}
void Progress::showProgress() {
while (!stop) {
std::this_thread::sleep_for(std::chrono::milliseconds(100));
{
update(std::unique_lock(refreshM));
}
{ update(std::unique_lock(refreshM)); }
}
}
@@ -38,8 +38,7 @@ void Progress::print(const std::string &s, int level) {
void Progress::update(std::unique_lock<std::mutex> &&lock) {
std::stringstream outs;
if (type == "pretty")
outs << "\r\33[2K ";
if (type == "pretty") outs << "\r\33[2K ";
for (auto const &l: format) {
if (std::holds_alternative<std::string>(l)) outs << std::get<std::string>(l);
@@ -47,8 +46,7 @@ void Progress::update(std::unique_lock<std::mutex> &&lock) {
outs << std::get<std::function<std::string()>>(l)();
}
if (type == "pretty")
outs << "\r";
if (type == "pretty") outs << "\r";
else
outs << "\n";

View File

@@ -5,8 +5,7 @@
#include "RunningAverage.h"
RunningAverage::RunningAverage(std::function<unsigned long long int()> getFunc, int max, int ms)
: getFunc(std::move(getFunc)), max(max), ms(ms), thread(&RunningAverage::loop, this) {
}
: getFunc(std::move(getFunc)), max(max), ms(ms), thread(&RunningAverage::loop, this) {}
void RunningAverage::loop() {
while (!stop) {

View File

@@ -12,9 +12,6 @@ RunningDiffAverage::RunningDiffAverage(std::function<unsigned long long int()> g
prev = cur;
return calc;
},
max, ms) {
}
max, ms) {}
unsigned long long RunningDiffAverage::get() {
return runningAverage.get();
}
unsigned long long RunningDiffAverage::get() { return runningAverage.get(); }

View File

@@ -3,10 +3,6 @@
//
#include "Signals.h"
void Signals::setup() {
signal(SIGINT, handle);
}
void Signals::setup() { signal(SIGINT, handle); }
void Signals::handle(int signum) {
shouldQuit = true;
}
void Signals::handle(int signum) { shouldQuit = true; }

View File

@@ -8,18 +8,17 @@ ThreadPool::ThreadPool(std::function<void(std::string)> onError, std::size_t wor
}
ThreadPool::~ThreadPool() {
{
std::lock_guard lock(queueLock);
stop = true;
somethingNew.notify_all();
for (auto &t: threads) {
t.join();
}
for (auto &t: threads) { t.join(); }
}
void ThreadPool::push(std::function<void()> &&func) {
{
std::lock_guard lock(queueLock);
queue.push(std::move(func));
}
somethingNew.notify_one();
}
@@ -45,19 +44,14 @@ void ThreadPool::loop() {
queue.pop();
qLock.unlock();
try {
task();
} catch (std::exception &e) {
onError(std::string(e.what()));
}
} catch (std::exception &e) { onError(std::string(e.what())); }
qLock.lock();
{
std::lock_guard qLock(queueLock);
running--;
if (queue.empty() && running == 0) { finished.notify_all(); }
}
}
}
bool ThreadPool::empty() {
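
A hedged usage sketch of the pool above: the constructor takes an error callback and a worker count (its signature is only partially visible in this hunk), push() queues a task and notifies one worker, and the destructor sets stop and joins the threads. The worker count and the task body are placeholders.

#include <iostream>
#include <string>

void hashEverything() {
    ThreadPool pool([](std::string err) { std::cerr << err << std::endl; }, 4);
    for (int i = 0; i < 100; i++)
        pool.push([i] { /* chunk, hash or compress item i */ (void) i; });
}// the pool destructor joins the workers here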

View File

@@ -1,7 +1,7 @@
cmake_minimum_required(VERSION 3.22)
cmake_minimum_required(VERSION 3.18)
# GoogleTest requires at least C++14
set(CMAKE_CXX_STANDARD 17)
set(CMAKE_CXX_STANDARD 20)
set(CMAKE_CXX_STANDARD_REQUIRED ON)
if (CMAKE_VERSION VERSION_GREATER_EQUAL "3.24.0")
@@ -16,6 +16,7 @@ FetchContent_Declare(
)
# For Windows: Prevent overriding the parent project's compiler/linker settings
set(gtest_force_shared_crt ON CACHE BOOL "" FORCE)
set(INSTALL_GTEST OFF)
FetchContent_MakeAvailable(googletest)
include(GoogleTest)

View File

@@ -1,4 +1,4 @@
cmake_minimum_required(VERSION 3.22)
cmake_minimum_required(VERSION 3.18)
add_executable(
BuzhashTest
@@ -8,5 +8,5 @@ target_link_libraries(
BuzhashTest PRIVATE
GTest::gtest_main chunkers
)
gtest_discover_tests(BuzhashTest)
gtest_discover_tests(BuzhashTest DISCOVERY_TIMEOUT 600)

View File

@@ -7,7 +7,12 @@
// Demonstrate some basic assertions.
TEST(BuzhashTest, SimpleTest) {
std::string loremipsum = "Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.";
std::string loremipsum =
"Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et "
"dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip "
"ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu "
"fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt "
"mollit anim id est laborum.";
for (int i = 15; i < 49; i++) {
Buzhash b(i);
@@ -21,11 +26,11 @@ TEST(BuzhashTest, SimpleTest) {
for (int i = 0; i < loremipsum.length(); i++) {
b2.feed((uint8_t) loremipsum[i]);
if (b2.get() == h1) {
EXPECT_EQ(i, loremipsum.find("e eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non p"));
ASSERT_EQ(i, loremipsum.find("e eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non p"));
h1found = true;
break;
}
}
EXPECT_EQ(h1found, true);
ASSERT_EQ(h1found, true);
}
}

View File

@@ -1,4 +1,4 @@
cmake_minimum_required(VERSION 3.22)
cmake_minimum_required(VERSION 3.18)
add_executable(
@@ -11,5 +11,5 @@ target_link_libraries(
CLITests PRIVATE
GTest::gtest_main testUtils
)
gtest_discover_tests(CLITests)
gtest_discover_tests(CLITests DISCOVERY_TIMEOUT 600)

View File

@@ -123,7 +123,7 @@ AIDS=()
OUT=$($CMD list --repo "$TESTDATADIR"/testdir/to1 --password asdff)
echo "$OUT"
mkdir "$TESTDATADIR"/testmount
mkdir -p "$TESTDATADIR"/testmount
$CMD mount --repo "$TESTDATADIR"/testdir/to1 --password asdff --to "$TESTDATADIR"/testmount &
while IFS= read -r l; do

View File

@@ -61,13 +61,13 @@ echo "testtestasdf9uuu" > "$TESTDATADIR"/testdata/4/filexd
echo "Data created"
if ! $CMD init --repo "$"$TESTDATADIR"/testdataDIR"/testdir/to1 --compression zlib --compression-level 4 --encryption aes --password asdff --salt e; then
if ! $CMD init --repo "$TESTDATADIR"/testdataDIR/testdir/to1 --compression zlib --compression-level 4 --encryption aes --password asdff --salt e; then
echo "Error creating repo!"
exit 1
fi
echo "Repo created"
OUT=$($CMD run --from "$TESTDATADIR"/testdata/1 --repo "$"$TESTDATADIR"/testdataDIR"/testdir/to1 --password asdff --progress simple --verbose 1)
OUT=$($CMD run --from "$TESTDATADIR"/testdata/1 --repo "$TESTDATADIR"/testdataDIR/testdir/to1 --password asdff --progress simple --verbose 1)
echo "$OUT"
if ! ( ( echo "$OUT" | grep -q 'Copied: notempty/testfile' )\
&& ( echo "$OUT" | grep -q 'Copied: notempty' )\
@@ -87,7 +87,7 @@ if ! ( ( echo "$OUT" | grep -q 'Copied: notempty/testfile' )\
fi
echo "Backup 1 ok"
OUT=$($CMD run --from "$TESTDATADIR"/testdata/2 --repo "$"$TESTDATADIR"/testdataDIR"/testdir/to1 --password asdff --progress simple --verbose 1)
OUT=$($CMD run --from "$TESTDATADIR"/testdata/2 --repo "$TESTDATADIR"/testdataDIR/testdir/to1 --password asdff --progress simple --verbose 1)
echo "$OUT"
if ! ( ! ( echo "$OUT" | grep -q 'notempty2/notemptyi2/test4 ' )\
&& ! ( echo "$OUT" | grep -q 'notempty2/notemptyi2/test5 ' )\
@@ -100,7 +100,7 @@ if ! ( ! ( echo "$OUT" | grep -q 'notempty2/notemptyi2/test4 ' )\
fi
echo "Backup 2 ok"
OUT=$($CMD run --from "$TESTDATADIR"/testdata/3 --repo "$"$TESTDATADIR"/testdataDIR"/testdir/to1 --password asdff --progress simple --verbose 1)
OUT=$($CMD run --from "$TESTDATADIR"/testdata/3 --repo "$TESTDATADIR"/testdataDIR/testdir/to1 --password asdff --progress simple --verbose 1)
echo "$OUT"
if ! ( ! ( echo "$OUT" | grep -q 'notempty2/notemptyi2/test4 ' )\
&& ! ( echo "$OUT" | grep -q 'notempty2/notemptyi2/test5 ' )\
@@ -116,7 +116,7 @@ echo "Backup 3 ok"
i=$((0))
AIDS=()
OUT=$($CMD list --repo "$"$TESTDATADIR"/testdataDIR"/testdir/to1 --password asdff)
OUT=$($CMD list --repo "$TESTDATADIR"/testdataDIR/testdir/to1 --password asdff)
echo "$OUT"
while IFS= read -r l; do
((i++))
@@ -124,7 +124,7 @@ while IFS= read -r l; do
AIDS+=("$aid")
done <<< "$OUT"
OUT=$($CMD diff --from "$TESTDATADIR"/testdata/4 --repo "$"$TESTDATADIR"/testdataDIR"/testdir/to1 --password asdff --progress none --verbose 1 --aid ${AIDS[0]} --aid2 ${AIDS[1]})
OUT=$($CMD diff --from "$TESTDATADIR"/testdata/4 --repo "$TESTDATADIR"/testdataDIR/testdir/to1 --password asdff --progress none --verbose 1 --aid ${AIDS[0]} --aid2 ${AIDS[1]})
echo "$OUT"
if ! ( ( echo "$OUT" | grep -q 'notempty2/notemptyi2/test6 is different' ) && ( echo "$OUT" | grep -q 'filexd is different' ) && ( echo "$OUT" | grep -q 'notempty2/notemptyi2/test7 is different' ) \
&& ! ( echo "$OUT" | grep -q 'notempty2/notemptyi2/test4' ) && ! ( echo "$OUT" | grep -q 'notempty2/notemptyi2/test5' ) && ! ( echo "$OUT" | grep -q 'notemptyi2/ignoredir/testa' ) ); then
@@ -132,7 +132,7 @@ if ! ( ( echo "$OUT" | grep -q 'notempty2/notemptyi2/test6 is different' ) && (
exit 1
fi
echo "OK comparing archive 1 and 2"
OUT=$($CMD diff --from "$TESTDATADIR"/testdata/4 --repo "$"$TESTDATADIR"/testdataDIR"/testdir/to1 --password asdff --progress none --verbose 1 --aid ${AIDS[1]} --aid2 ${AIDS[2]})
OUT=$($CMD diff --from "$TESTDATADIR"/testdata/4 --repo "$TESTDATADIR"/testdataDIR/testdir/to1 --password asdff --progress none --verbose 1 --aid ${AIDS[1]} --aid2 ${AIDS[2]})
echo "$OUT"
if ! ( ( echo "$OUT" | grep -q 'notempty2/notemptyi2/test6 is different' ) && ( echo "$OUT" | grep -q 'filexd is different' ) && ( echo "$OUT" | grep -q 'notempty2/notemptyi2/test7 is different' ) \
&& ! ( echo "$OUT" | grep -q 'notempty2/notemptyi2/test4' ) && ! ( echo "$OUT" | grep -q 'notempty2/notemptyi2/test5' ) && ! ( echo "$OUT" | grep -q 'notemptyi2/ignoredir/testa' ) ); then
@@ -141,7 +141,7 @@ if ! ( ( echo "$OUT" | grep -q 'notempty2/notemptyi2/test6 is different' ) && (
fi
echo "OK comparing archive 2 and 3"
OUT=$($CMD diff --from "$TESTDATADIR"/testdata/4 --repo "$"$TESTDATADIR"/testdataDIR"/testdir/to1 --password asdff --progress none --verbose 1 --aid ${AIDS[2]})
OUT=$($CMD diff --from "$TESTDATADIR"/testdata/4 --repo "$TESTDATADIR"/testdataDIR/testdir/to1 --password asdff --progress none --verbose 1 --aid ${AIDS[2]})
echo "$OUT"
if ! ( ( echo "$OUT" | grep -q 'notempty2/notemptyi2/test6 is different' ) && ( echo "$OUT" | grep -q 'filexd is different' ) && ( echo "$OUT" | grep -q 'notempty2/notemptyi2/test7 is different' ) \
&& ! ( echo "$OUT" | grep -q 'notempty2/notemptyi2/test4' ) && ! ( echo "$OUT" | grep -q 'notempty2/notemptyi2/test5' ) && ! ( echo "$OUT" | grep -q 'notemptyi2/ignoredir/testa' ) ); then
@@ -150,7 +150,7 @@ if ! ( ( echo "$OUT" | grep -q 'notempty2/notemptyi2/test6 is different' ) && (
fi
echo "OK comparing archive 3 and current"
OUT=$($CMD diff --from "$TESTDATADIR"/testdata/4 --repo "$"$TESTDATADIR"/testdataDIR"/testdir/to1 --password asdff --progress none --verbose 1 )
OUT=$($CMD diff --from "$TESTDATADIR"/testdata/4 --repo "$TESTDATADIR"/testdataDIR/testdir/to1 --password asdff --progress none --verbose 1 )
echo "$OUT"
if ! ( ( echo "$OUT" | grep -q 'notempty2/notemptyi2/test6 is different' ) && ( echo "$OUT" | grep -q 'filexd is different' ) && ( echo "$OUT" | grep -q 'notempty2/notemptyi2/test7 is different' ) \
&& ! ( echo "$OUT" | grep -q 'notempty2/notemptyi2/test4' ) && ! ( echo "$OUT" | grep -q 'notempty2/notemptyi2/test5' ) && ! ( echo "$OUT" | grep -q 'notemptyi2/ignoredir/testa' ) ); then
@@ -159,7 +159,7 @@ if ! ( ( echo "$OUT" | grep -q 'notempty2/notemptyi2/test6 is different' ) && (
fi
echo "OK comparing archive last (3) and current"
OUT=$($CMD diff --from "$TESTDATADIR"/testdata/4 --repo "$"$TESTDATADIR"/testdataDIR"/testdir/to1 --password asdff --progress none --verbose 1 --aid ${AIDS[2]} --prefix notempty2/notemptyi2 )
OUT=$($CMD diff --from "$TESTDATADIR"/testdata/4 --repo "$TESTDATADIR"/testdataDIR/testdir/to1 --password asdff --progress none --verbose 1 --aid ${AIDS[2]} --prefix notempty2/notemptyi2 )
echo "$OUT"
if ! ( ( echo "$OUT" | grep -q 'notempty2/notemptyi2/test6 is different' ) && ! ( echo "$OUT" | grep -q 'filexd is different' ) && ( echo "$OUT" | grep -q 'notempty2/notemptyi2/test7 is different' ) \
&& ! ( echo "$OUT" | grep -q 'notempty2/notemptyi2/test4' ) && ! ( echo "$OUT" | grep -q 'notempty2/notemptyi2/test5' ) && ! ( echo "$OUT" | grep -q 'notemptyi2/ignoredir/testa' ) ); then
@@ -168,7 +168,7 @@ if ! ( ( echo "$OUT" | grep -q 'notempty2/notemptyi2/test6 is different' ) && !
fi
echo "OK comparing archive 3 and current with prefix "
OUT=$($CMD diff --from "$TESTDATADIR"/testdata/4 --repo "$"$TESTDATADIR"/testdataDIR"/testdir/to1 --password asdff --progress none --verbose 1 --prefix notempty2/notemptyi2 )
OUT=$($CMD diff --from "$TESTDATADIR"/testdata/4 --repo "$TESTDATADIR"/testdataDIR/testdir/to1 --password asdff --progress none --verbose 1 --prefix notempty2/notemptyi2 )
echo "$OUT"
if ! ( ( echo "$OUT" | grep -q 'notempty2/notemptyi2/test6 is different' ) && ! ( echo "$OUT" | grep -q 'filexd is different' ) && ( echo "$OUT" | grep -q 'notempty2/notemptyi2/test7 is different' ) \
&& ! ( echo "$OUT" | grep -q 'notempty2/notemptyi2/test4' ) && ! ( echo "$OUT" | grep -q 'notempty2/notemptyi2/test5' ) && ! ( echo "$OUT" | grep -q 'notemptyi2/ignoredir/testa' ) ); then
@@ -178,6 +178,6 @@ fi
echo "OK comparing archive last (3) and current with prefix "
rm -rf "$"$TESTDATADIR"/testdataDIR"
rm -rf "$TESTDATADIR"/testdataDIR
exit 0

View File

@@ -85,6 +85,6 @@ if ! ( ( echo "$OUT" | grep -q ' notempty/testfile' )\
fi
echo "List 1 ok"
rm -rf "$"$TESTDATADIR"/testdataDIR"
rm -rf "$TESTDATADIR"/testdataDIR
exit 0

View File

@@ -9,15 +9,15 @@
TEST(CLITest, Backup) {
int ret = system("../../../tests/clitests/scripts/backup.sh");
EXPECT_EQ(WEXITSTATUS(ret), 0);
ASSERT_EQ(WEXITSTATUS(ret), 0);
}
TEST(CLITest, Ignore) {
int ret = system("../../../tests/clitests/scripts/ignore.sh");
EXPECT_EQ(WEXITSTATUS(ret), 0);
ASSERT_EQ(WEXITSTATUS(ret), 0);
}
TEST(CLITest, Diff) {
int ret = system("../../../tests/clitests/scripts/diff.sh");
EXPECT_EQ(WEXITSTATUS(ret), 0);
ASSERT_EQ(WEXITSTATUS(ret), 0);
}

View File

@@ -1,4 +1,4 @@
cmake_minimum_required(VERSION 3.22)
cmake_minimum_required(VERSION 3.18)
add_executable(
CryptoTests
@@ -10,5 +10,5 @@ target_link_libraries(
GTest::gtest_main crypto
)
gtest_discover_tests(CryptoTests)
gtest_discover_tests(CryptoTests DISCOVERY_TIMEOUT 600)

View File

@@ -11,42 +11,42 @@ TEST(CryptoTests, AES) {
std::string in = "hello1";
auto enc = AES::encrypt(std::vector<char>(in.begin(), in.end()), "p1", "e");
auto dec = AES::decrypt(enc, "p1", "e");
EXPECT_EQ(in, std::string(dec.begin(), dec.end()));
ASSERT_EQ(in, std::string(dec.begin(), dec.end()));
in = "";
enc = AES::encrypt(std::vector<char>(in.begin(), in.end()), "p1", "e");
dec = AES::decrypt(enc, "p1", "e");
EXPECT_EQ(in, std::string(dec.begin(), dec.end()));
ASSERT_EQ(in, std::string(dec.begin(), dec.end()));
in = "1234567890asdfg";
enc = AES::encrypt(std::vector<char>(in.begin(), in.end()), "p1", "e");
dec = AES::decrypt(enc, "p1", "e");
EXPECT_EQ(in, std::string(dec.begin(), dec.end()));
ASSERT_EQ(in, std::string(dec.begin(), dec.end()));
in = "1234567890asdfgh";
enc = AES::encrypt(std::vector<char>(in.begin(), in.end()), "p1", "e");
dec = AES::decrypt(enc, "p1", "e");
EXPECT_EQ(in, std::string(dec.begin(), dec.end()));
ASSERT_EQ(in, std::string(dec.begin(), dec.end()));
in = "1234567890asdfghe";
enc = AES::encrypt(std::vector<char>(in.begin(), in.end()), "p1", "e");
dec = AES::decrypt(enc, "p1", "e");
EXPECT_EQ(in, std::string(dec.begin(), dec.end()));
ASSERT_EQ(in, std::string(dec.begin(), dec.end()));
in = "1234567890asdfgheq";
enc = AES::encrypt(std::vector<char>(in.begin(), in.end()), "p1", "e");
dec = AES::decrypt(enc, "p1", "e");
EXPECT_EQ(in, std::string(dec.begin(), dec.end()));
ASSERT_EQ(in, std::string(dec.begin(), dec.end()));
}
TEST(CryptoTests, SHA) {
std::vector<char> data{'h', 'e', 'l', 'l', 'o'};
std::array<unsigned char, 32> excepted{0x2c, 0xf2, 0x4d, 0xba, 0x5f, 0xb0, 0xa3, 0x0e, 0x26, 0xe8, 0x3b, 0x2a, 0xc5, 0xb9, 0xe2, 0x9e, 0x1b, 0x16, 0x1e, 0x5c, 0x1f, 0xa7, 0x42, 0x5e, 0x73, 0x04, 0x33, 0x62, 0x93, 0x8b, 0x98, 0x24};
std::array<unsigned char, 32> excepted{0x2c, 0xf2, 0x4d, 0xba, 0x5f, 0xb0, 0xa3, 0x0e, 0x26, 0xe8, 0x3b,
0x2a, 0xc5, 0xb9, 0xe2, 0x9e, 0x1b, 0x16, 0x1e, 0x5c, 0x1f, 0xa7,
0x42, 0x5e, 0x73, 0x04, 0x33, 0x62, 0x93, 0x8b, 0x98, 0x24};
auto out = SHA::calculate(data);
EXPECT_EQ(out.size(), 32);
for (int i = 0; i < out.size(); i++) {
EXPECT_EQ(static_cast<uint8_t>(out[i]), excepted[i]);
}
ASSERT_EQ(out.size(), 32);
for (int i = 0; i < out.size(); i++) { ASSERT_EQ(static_cast<uint8_t>(out[i]), excepted[i]); }
}

View File

@@ -1,4 +1,4 @@
cmake_minimum_required(VERSION 3.22)
cmake_minimum_required(VERSION 3.18)
add_executable(
FullTest
@@ -10,5 +10,5 @@ target_link_libraries(
GTest::gtest_main commands utils testUtils repo
)
gtest_discover_tests(FullTest)
gtest_discover_tests(FullTest DISCOVERY_TIMEOUT 600)

View File

@@ -27,7 +27,8 @@ TEST(FullTest, Simple) {
{
std::filesystem::create_directories("Simple/testfrom");
for (int i = 0; i < 257; i++) {
std::ofstream o(std::filesystem::path("Simple/testfrom") / ("f" + std::to_string(i)), std::ios::binary | std::ios::out | std::ios::trunc);
std::ofstream o(std::filesystem::path("Simple/testfrom") / ("f" + std::to_string(i)),
std::ios::binary | std::ios::out | std::ios::trunc);
for (int j = 0; j < i; j++) o.put(static_cast<char>(j % 256));
}
std::filesystem::create_directories("Simple/testfrom/testdir");
@@ -60,20 +61,21 @@ TEST(FullTest, Simple) {
}
{
try {
EXPECT_EQ(std::filesystem::is_directory("Simple/testtores/testdir"), true);
ASSERT_EQ(std::filesystem::is_directory("Simple/testtores/testdir"), true);
} catch (...) {
std::cerr << "Empty directory doesn't exist!" << std::endl;
throw;
}
for (int i = 0; i < 257; i++) {
std::ifstream o(std::filesystem::path("Simple/testtores") / ("f" + std::to_string(i)), std::ios::binary | std::ios::in);
std::ifstream o(std::filesystem::path("Simple/testtores") / ("f" + std::to_string(i)),
std::ios::binary | std::ios::in);
try {
EXPECT_EQ(o.is_open(), true);
ASSERT_EQ(o.is_open(), true);
for (int j = 0; j < i; j++) {
char c;
EXPECT_EQ(o.get(c).operator bool(), true);
EXPECT_EQ(static_cast<char>(j % 256), c);
ASSERT_EQ(o.get(c).operator bool(), true);
ASSERT_EQ(static_cast<char>(j % 256), c);
}
} catch (...) {
std::cerr << "Error comparing file " << i << std::endl;
@@ -89,7 +91,8 @@ TEST(FullTest, SimpleWithIgnore) {
{
std::filesystem::create_directories("SimpleWithIgnore/testfrom");
for (int i = 0; i < 257; i++) {
std::ofstream o(std::filesystem::path("SimpleWithIgnore/testfrom") / ("f" + std::to_string(i)), std::ios::binary | std::ios::out | std::ios::trunc);
std::ofstream o(std::filesystem::path("SimpleWithIgnore/testfrom") / ("f" + std::to_string(i)),
std::ios::binary | std::ios::out | std::ios::trunc);
for (int j = 0; j < i; j++) o.put(static_cast<char>(j % 256));
}
std::filesystem::create_directories("SimpleWithIgnore/testfrom/testdir");
@@ -134,7 +137,9 @@ TEST(FullTest, SimpleWithIgnore) {
}
{
Config conf;
conf.add("repo", "SimpleWithIgnore/testto").add("aid", std::to_string(aid)).add("to", "SimpleWithIgnore/testtores");
conf.add("repo", "SimpleWithIgnore/testto")
.add("aid", std::to_string(aid))
.add("to", "SimpleWithIgnore/testtores");
auto repo = std::make_unique<FileRepository>(conf);
repo->open();
@@ -145,15 +150,16 @@ TEST(FullTest, SimpleWithIgnore) {
cmd.run(Context{&logger, repo.get()});
}
{
EXPECT_EQ(std::filesystem::is_directory("SimpleWithIgnore/testtores/testdir"), true);
ASSERT_EQ(std::filesystem::is_directory("SimpleWithIgnore/testtores/testdir"), true);
for (int i = 0; i < 257; i++) {
std::ifstream o(std::filesystem::path("SimpleWithIgnore/testtores") / ("f" + std::to_string(i)), std::ios::binary | std::ios::in);
EXPECT_EQ(o.is_open(), true);
std::ifstream o(std::filesystem::path("SimpleWithIgnore/testtores") / ("f" + std::to_string(i)),
std::ios::binary | std::ios::in);
ASSERT_EQ(o.is_open(), true);
for (int j = 0; j < i; j++) {
char c;
EXPECT_EQ(o.get(c).operator bool(), true);
EXPECT_EQ(static_cast<char>(j % 256), c);
ASSERT_EQ(o.get(c).operator bool(), true);
ASSERT_EQ(static_cast<char>(j % 256), c);
}
}
@@ -161,31 +167,31 @@ TEST(FullTest, SimpleWithIgnore) {
std::ifstream file("SimpleWithIgnore/testtores/testdir2/.ignore");
std::string s;
file >> s;
EXPECT_EQ(s, "hello.txt");
ASSERT_EQ(s, "hello.txt");
}
{
std::ifstream file("SimpleWithIgnore/testtores/testdir2/testdir3/.ignore");
std::string s;
file >> s;
EXPECT_EQ(s, ".*\\.txt");
ASSERT_EQ(s, ".*\\.txt");
}
{
std::ifstream file("SimpleWithIgnore/testtores/testdir2/hello.txt");
EXPECT_EQ(!file, true);
ASSERT_EQ(!file, true);
}
{
std::ifstream file("SimpleWithIgnore/testtores/testdir2/testdir3/hello.txt");
EXPECT_EQ(!file, true);
ASSERT_EQ(!file, true);
}
{
std::ifstream file("SimpleWithIgnore/testtores/testdir2/testdir3/asdf.txt");
EXPECT_EQ(!file, true);
ASSERT_EQ(!file, true);
}
{
std::ifstream file("SimpleWithIgnore/testtores/testdir2/testdir4/asdf.txt");
std::string s;
file >> s;
EXPECT_EQ(s, "asdf2");
ASSERT_EQ(s, "asdf2");
}
}
}
@@ -196,13 +202,16 @@ TEST(FullTest, SimpleWithCompress) {
{
std::filesystem::create_directories("SimpleWithCompress/testfrom");
for (int i = 0; i < 257; i++) {
std::ofstream o(std::filesystem::path("SimpleWithCompress/testfrom") / ("f" + std::to_string(i)), std::ios::binary | std::ios::out | std::ios::trunc);
std::ofstream o(std::filesystem::path("SimpleWithCompress/testfrom") / ("f" + std::to_string(i)),
std::ios::binary | std::ios::out | std::ios::trunc);
for (int j = 0; j < i; j++) o.put(static_cast<char>(j % 256));
}
std::filesystem::create_directories("SimpleWithCompress/testfrom/testdir");
Config conf;
conf.add("repo", "SimpleWithCompress/testto").add("compression", "zlib").add("from", "SimpleWithCompress/testfrom");
conf.add("repo", "SimpleWithCompress/testto")
.add("compression", "zlib")
.add("from", "SimpleWithCompress/testfrom");
auto repo = std::make_unique<FileRepository>(conf);
repo->init();
@@ -215,7 +224,9 @@ TEST(FullTest, SimpleWithCompress) {
}
{
Config conf;
conf.add("repo", "SimpleWithCompress/testto").add("aid", std::to_string(aid)).add("to", "SimpleWithCompress/testtores");
conf.add("repo", "SimpleWithCompress/testto")
.add("aid", std::to_string(aid))
.add("to", "SimpleWithCompress/testtores");
auto repo = std::make_unique<FileRepository>(conf);
repo->open();
@@ -226,15 +237,16 @@ TEST(FullTest, SimpleWithCompress) {
cmd.run(Context{&logger, repo.get()});
}
{
EXPECT_EQ(std::filesystem::is_directory("SimpleWithCompress/testtores/testdir"), true);
ASSERT_EQ(std::filesystem::is_directory("SimpleWithCompress/testtores/testdir"), true);
for (int i = 0; i < 257; i++) {
std::ifstream o(std::filesystem::path("SimpleWithCompress/testtores") / ("f" + std::to_string(i)), std::ios::binary | std::ios::in);
EXPECT_EQ(o.is_open(), true);
std::ifstream o(std::filesystem::path("SimpleWithCompress/testtores") / ("f" + std::to_string(i)),
std::ios::binary | std::ios::in);
ASSERT_EQ(o.is_open(), true);
for (int j = 0; j < i; j++) {
char c;
EXPECT_EQ(o.get(c).operator bool(), true);
EXPECT_EQ(static_cast<char>(j % 256), c);
ASSERT_EQ(o.get(c).operator bool(), true);
ASSERT_EQ(static_cast<char>(j % 256), c);
}
}
}
@@ -246,13 +258,19 @@ TEST(FullTest, SimpleWithCompEnd) {
{
std::filesystem::create_directories("SimpleWithCompEnd/testfrom");
for (int i = 0; i < 257; i++) {
std::ofstream o(std::filesystem::path("SimpleWithCompEnd/testfrom") / ("f" + std::to_string(i)), std::ios::binary | std::ios::out | std::ios::trunc);
std::ofstream o(std::filesystem::path("SimpleWithCompEnd/testfrom") / ("f" + std::to_string(i)),
std::ios::binary | std::ios::out | std::ios::trunc);
for (int j = 0; j < i; j++) o.put(static_cast<char>(j % 256));
}
std::filesystem::create_directories("SimpleWithCompEnd/testfrom/testdir");
Config conf;
conf.add("repo", "SimpleWithCompEnd/testto").add("compression", "zlib").add("from", "SimpleWithCompEnd/testfrom").add("encryption", "aes").add("password", "testp").add("salt", "tests");
conf.add("repo", "SimpleWithCompEnd/testto")
.add("compression", "zlib")
.add("from", "SimpleWithCompEnd/testfrom")
.add("encryption", "aes")
.add("password", "testp")
.add("salt", "tests");
auto repo = std::make_unique<FileRepository>(conf);
repo->init();
@@ -272,14 +290,17 @@ TEST(FullTest, SimpleWithCompEnd) {
bool ok = true;
try {
EXPECT_EQ(repo->open(), false);
ASSERT_EQ(repo->open(), false);
ok = false;
} catch (...) {}
EXPECT_EQ(ok, true);
ASSERT_EQ(ok, true);
}
{
Config conf;
conf.add("repo", "SimpleWithCompEnd/testto").add("password", "testp").add("aid", std::to_string(aid)).add("to", "SimpleWithCompEnd/testtores");
conf.add("repo", "SimpleWithCompEnd/testto")
.add("password", "testp")
.add("aid", std::to_string(aid))
.add("to", "SimpleWithCompEnd/testtores");
auto repo = std::make_unique<FileRepository>(conf);
repo->open();
@@ -291,15 +312,16 @@ TEST(FullTest, SimpleWithCompEnd) {
cmd.run(Context{&logger, repo.get()});
}
{
EXPECT_EQ(std::filesystem::is_directory("SimpleWithCompEnd/testtores/testdir"), true);
ASSERT_EQ(std::filesystem::is_directory("SimpleWithCompEnd/testtores/testdir"), true);
for (int i = 0; i < 257; i++) {
std::ifstream o(std::filesystem::path("SimpleWithCompEnd/testtores") / ("f" + std::to_string(i)), std::ios::binary | std::ios::in);
EXPECT_EQ(o.is_open(), true);
std::ifstream o(std::filesystem::path("SimpleWithCompEnd/testtores") / ("f" + std::to_string(i)),
std::ios::binary | std::ios::in);
ASSERT_EQ(o.is_open(), true);
for (int j = 0; j < i; j++) {
char c;
EXPECT_EQ(o.get(c).operator bool(), true);
EXPECT_EQ(static_cast<char>(j % 256), c);
ASSERT_EQ(o.get(c).operator bool(), true);
ASSERT_EQ(static_cast<char>(j % 256), c);
}
}
}
@@ -310,16 +332,40 @@ TEST(FullTest, Fuzz) {
srand(time(nullptr));
std::vector<Config> confs;
Config conf;
conf.add("repo", "Fuzz/testto").add("compression", "none").add("from", "Fuzz/testfrom").add("encryption", "none").add("password", "testp").add("salt", "tests").add("progress", "none");
conf.add("repo", "Fuzz/testto")
.add("compression", "none")
.add("from", "Fuzz/testfrom")
.add("encryption", "none")
.add("password", "testp")
.add("salt", "tests")
.add("progress", "none");
confs.emplace_back(conf);
conf = Config();
conf.add("repo", "Fuzz/testto").add("compression", "zlib").add("from", "Fuzz/testfrom").add("encryption", "none").add("password", "testp").add("salt", "tests").add("progress", "none");
conf.add("repo", "Fuzz/testto")
.add("compression", "zlib")
.add("from", "Fuzz/testfrom")
.add("encryption", "none")
.add("password", "testp")
.add("salt", "tests")
.add("progress", "none");
confs.emplace_back(conf);
conf = Config();
conf.add("repo", "Fuzz/testto").add("compression", "none").add("from", "Fuzz/testfrom").add("encryption", "zlib").add("password", "testp").add("salt", "tests").add("progress", "none");
conf.add("repo", "Fuzz/testto")
.add("compression", "none")
.add("from", "Fuzz/testfrom")
.add("encryption", "zlib")
.add("password", "testp")
.add("salt", "tests")
.add("progress", "none");
confs.emplace_back(conf);
conf = Config();
conf.add("repo", "Fuzz/testto").add("compression", "zlib").add("from", "Fuzz/testfrom").add("encryption", "aes").add("password", "testp").add("salt", "tests").add("progress", "none");
conf.add("repo", "Fuzz/testto")
.add("compression", "zlib")
.add("from", "Fuzz/testfrom")
.add("encryption", "aes")
.add("password", "testp")
.add("salt", "tests")
.add("progress", "none");
confs.emplace_back(conf);
for (auto const &conf: confs) {
@@ -332,7 +378,8 @@ TEST(FullTest, Fuzz) {
{
std::filesystem::create_directories("Fuzz/testfrom");
for (int i = 0; i < 2; i++) {
std::ofstream o(std::filesystem::path("Fuzz/testfrom") / ("f" + std::to_string(i)), std::ios::binary | std::ios::out | std::ios::trunc);
std::ofstream o(std::filesystem::path("Fuzz/testfrom") / ("f" + std::to_string(i)),
std::ios::binary | std::ios::out | std::ios::trunc);
for (int j = 0; j < i; j++) o.put(j % 2);
}
@@ -350,28 +397,28 @@ TEST(FullTest, Fuzz) {
{
if (filetobreak & 0b00000001) {
for (int i = 0; i < cutoff; i++)
randomChange("Fuzz/testto/1");
for (int i = 0; i < cutoff; i++) randomChange("Fuzz/testto/1");
if (cutoff > 5)
std::filesystem::resize_file("Fuzz/testto/1", std::filesystem::file_size("Fuzz/testto/1") - cutoff);
std::filesystem::resize_file("Fuzz/testto/1",
std::filesystem::file_size("Fuzz/testto/1") - cutoff);
}
if (filetobreak & 0b00000010) {
for (int i = 0; i < cutoff; i++)
randomChange("Fuzz/testto/index");
for (int i = 0; i < cutoff; i++) randomChange("Fuzz/testto/index");
if (cutoff > 5)
std::filesystem::resize_file("Fuzz/testto/index", std::filesystem::file_size("Fuzz/testto/index") - cutoff);
std::filesystem::resize_file("Fuzz/testto/index",
std::filesystem::file_size("Fuzz/testto/index") - cutoff);
}
if (filetobreak & 0b00000100) {
for (int i = 0; i < cutoff; i++)
randomChange("Fuzz/testto/offsets");
for (int i = 0; i < cutoff; i++) randomChange("Fuzz/testto/offsets");
if (cutoff > 5)
std::filesystem::resize_file("Fuzz/testto/offsets", std::filesystem::file_size("Fuzz/testto/offsets") - cutoff);
std::filesystem::resize_file("Fuzz/testto/offsets",
std::filesystem::file_size("Fuzz/testto/offsets") - cutoff);
}
if (filetobreak & 0b00001000) {
for (int i = 0; i < cutoff; i++)
randomChange("Fuzz/testto/info");
for (int i = 0; i < cutoff; i++) randomChange("Fuzz/testto/info");
if (cutoff > 5)
std::filesystem::resize_file("Fuzz/testto/info", std::filesystem::file_size("Fuzz/testto/info") - cutoff);
std::filesystem::resize_file("Fuzz/testto/info",
std::filesystem::file_size("Fuzz/testto/info") - cutoff);
}
}
@@ -391,10 +438,9 @@ TEST(FullTest, Fuzz) {
CommandRestore cmd;
cmd.run(Context{&logger, repo.get()});
auto outstr = runnerout.str();
if (outstr.find("Error") == std::string::npos)
ok = false;
if (outstr.find("Error") == std::string::npos) ok = false;
} catch (...) {}
EXPECT_EQ(ok, true);
ASSERT_EQ(ok, true);
}
}
}
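
The fuzz loop above corrupts repository files through a randomChange helper plus a truncation via resize_file, then only requires that the restore either throws or reports an Error. randomChange itself is referenced but not defined anywhere in this diff; a purely hypothetical sketch of such a helper, assuming it overwrites a single random byte in place (the real implementation may well differ):

#include <cstdlib>
#include <filesystem>
#include <fstream>

// Hypothetical stand-in for the randomChange helper used above; not the repository's code.
static void randomChangeSketch(const std::filesystem::path &file) {
    const auto size = std::filesystem::file_size(file);
    if (size == 0) return;
    std::fstream f(file, std::ios::binary | std::ios::in | std::ios::out);
    f.seekp(static_cast<std::streamoff>(rand() % size)); // pick a random offset within the file
    f.put(static_cast<char>(rand() % 256));              // overwrite one byte with a random value
}
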

View File

@@ -1,4 +1,4 @@
cmake_minimum_required(VERSION 3.22)
cmake_minimum_required(VERSION 3.18)
add_executable(
FileRepositoryTest
@@ -20,5 +20,5 @@ target_link_libraries(
GTest::gtest_main commands utils testUtils repo
)
gtest_discover_tests(ChunkTest)
gtest_discover_tests(FileRepositoryTest)
gtest_discover_tests(ChunkTest DISCOVERY_TIMEOUT 600)
gtest_discover_tests(FileRepositoryTest DISCOVERY_TIMEOUT 600)

View File

@@ -29,26 +29,18 @@ TEST(Chunk, Deserialize) {
std::vector<char> data2{'q', 'w', 'e', 'r', 'b'};
Chunk o2e(777, {1}, data2);
EXPECT_EQ(o1.id, o1e.id);
EXPECT_EQ(o2.id, o2e.id);
EXPECT_EQ((int) o1.type, (int) o1e.type);
EXPECT_EQ((int) o2.type, (int) o2e.type);
ASSERT_EQ(o1.id, o1e.id);
ASSERT_EQ(o2.id, o2e.id);
ASSERT_EQ((int) o1.type, (int) o1e.type);
ASSERT_EQ((int) o2.type, (int) o2e.type);
EXPECT_EQ(o1.data.size(), o1e.data.size());
EXPECT_EQ(o2.data.size(), o2e.data.size());
for (int i = 0; i < o1.data.size(); i++) {
EXPECT_EQ(o1.data[i], o1e.data[i]);
}
for (int i = 0; i < o2.data.size(); i++) {
EXPECT_EQ(o2.data[i], o2e.data[i]);
}
ASSERT_EQ(o1.data.size(), o1e.data.size());
ASSERT_EQ(o2.data.size(), o2e.data.size());
for (int i = 0; i < o1.data.size(); i++) { ASSERT_EQ(o1.data[i], o1e.data[i]); }
for (int i = 0; i < o2.data.size(); i++) { ASSERT_EQ(o2.data[i], o2e.data[i]); }
for (int i = 0; i < o1.SHA.size(); i++) {
EXPECT_EQ(o1.SHA[i], o1e.SHA[i]);
}
for (int i = 0; i < o2.SHA.size(); i++) {
EXPECT_EQ(o2.SHA[i], o2e.SHA[i]);
}
for (int i = 0; i < o1.SHA.size(); i++) { ASSERT_EQ(o1.SHA[i], o1e.SHA[i]); }
for (int i = 0; i < o2.SHA.size(); i++) { ASSERT_EQ(o2.SHA[i], o2e.SHA[i]); }
}
}
@@ -57,9 +49,7 @@ TEST(Chunk, Garbage) {
auto eb = e.cbegin();
try {
Chunk o1(eb, e.cend());
} catch (...) {
return;
}
} catch (...) { return; }
FAIL() << "Object constructed with garbage data!";
}
@@ -68,9 +58,7 @@ TEST(Chunk, Garbage2) {
auto eb = e.cbegin();
try {
Chunk o1(eb, e.cend());
} catch (...) {
return;
}
} catch (...) { return; }
FAIL() << "Object constructed with garbage data!";
}
@@ -93,16 +81,13 @@ TEST(Chunk, Garbage3) {
try {
Chunk o1 = Serialize::deserialize<Chunk>(s1);
fail = true;
} catch (...) {
}
} catch (...) {}
try {
Chunk o2 = Serialize::deserialize<Chunk>(s2);
fail = true;
} catch (...) {
}
} catch (...) {}
if (fail)
FAIL() << "Object constructed with garbage data!";
if (fail) FAIL() << "Object constructed with garbage data!";
}
}

View File

@@ -17,8 +17,7 @@ TEST(FileRepository, Deserialize) {
conf.add("repo", "Deserizlize/testrepo");
FileRepository repo(conf);
repo.init();
std::vector<char>
data1{'a', 'b', 'c', 'e'};
std::vector<char> data1{'a', 'b', 'c', 'e'};
std::string o1k(16, '\0');
std::string o2k(16, '\0');
@@ -33,8 +32,8 @@ TEST(FileRepository, Deserialize) {
repo.putObject(o2);
EXPECT_EQ(repo.getObjectId(Object::ObjectType::Chunk, o1k), 666);
EXPECT_EQ(repo.getObjectId(Object::ObjectType::Chunk, o2k), 777);
ASSERT_EQ(repo.getObjectId(Object::ObjectType::Chunk, o1k), 666);
ASSERT_EQ(repo.getObjectId(Object::ObjectType::Chunk, o2k), 777);
}
{
Config conf;
@@ -46,11 +45,11 @@ TEST(FileRepository, Deserialize) {
std::string o2k(16, '\0');
o2k[0] = 1;
o2k[1] = 2;
EXPECT_EQ(repo.getObjectId(Object::ObjectType::Chunk, o1k), 666);
EXPECT_EQ(repo.getObjectId(Object::ObjectType::Chunk, o2k), 777);
ASSERT_EQ(repo.getObjectId(Object::ObjectType::Chunk, o1k), 666);
ASSERT_EQ(repo.getObjectId(Object::ObjectType::Chunk, o2k), 777);
auto o1o = repo.getObject(666);
auto o2o = repo.getObject(777);
auto o1o = repo.getObjectRaw(666);
auto o2o = repo.getObjectRaw(777);
auto o1ob = o1o.cbegin();
auto o2ob = o2o.cbegin();
@@ -62,22 +61,18 @@ TEST(FileRepository, Deserialize) {
std::vector<char> data2{'q', 'w', 'e', 'r', static_cast<char>(255)};
Chunk o2e(777, o2k, data2);
EXPECT_EQ(o1.id, o1e.id);
EXPECT_EQ(o2.id, o2e.id);
EXPECT_EQ((int) o1.type, (int) o1e.type);
EXPECT_EQ((int) o2.type, (int) o2e.type);
ASSERT_EQ(o1.id, o1e.id);
ASSERT_EQ(o2.id, o2e.id);
ASSERT_EQ((int) o1.type, (int) o1e.type);
ASSERT_EQ((int) o2.type, (int) o2e.type);
auto o1d = o1.data;
auto o1ed = o1e.data;
auto o2d = o2.data;
auto o2ed = o2e.data;
EXPECT_EQ(o1.data.size(), o1e.data.size());
EXPECT_EQ(o2.data.size(), o2e.data.size());
for (int i = 0; i < o1.data.size(); i++) {
EXPECT_EQ(o1.data[i], o1e.data[i]);
}
for (int i = 0; i < o2.data.size(); i++) {
EXPECT_EQ(o2.data[i], o2e.data[i]);
}
ASSERT_EQ(o1.data.size(), o1e.data.size());
ASSERT_EQ(o2.data.size(), o2e.data.size());
for (int i = 0; i < o1.data.size(); i++) { ASSERT_EQ(o1.data[i], o1e.data[i]); }
for (int i = 0; i < o2.data.size(); i++) { ASSERT_EQ(o2.data[i], o2e.data[i]); }
}
}
@@ -99,8 +94,7 @@ TEST(FileRepository, Filters) {
std::string o2k(16, '\0');
o2k[0] = 1;
o2k[1] = 2;
std::vector<char>
data1{'a', 'b', 'c', 'e'};
std::vector<char> data1{'a', 'b', 'c', 'e'};
Chunk o1(666, o1k, data1);
std::vector<char> data2{'q', 'w', 'e', 'r', 'b'};
Chunk o2(777, o2k, data2);
@@ -109,8 +103,8 @@ TEST(FileRepository, Filters) {
repo.putObject(o2);
EXPECT_EQ(repo.getObjectId(Object::ObjectType::Chunk, o1k), 666);
EXPECT_EQ(repo.getObjectId(Object::ObjectType::Chunk, o2k), 777);
ASSERT_EQ(repo.getObjectId(Object::ObjectType::Chunk, o1k), 666);
ASSERT_EQ(repo.getObjectId(Object::ObjectType::Chunk, o2k), 777);
}
{
Config conf;
@@ -132,7 +126,7 @@ TEST(FileRepository, Filters) {
try {
auto o1o = repo.getObject(666);
auto o1o = repo.getObjectRaw(666);
auto o1ob = o1o.cbegin();
Chunk o1(o1ob, o1o.cend());
@@ -140,15 +134,14 @@ TEST(FileRepository, Filters) {
} catch (...) {}
try {
auto o2o = repo.getObject(777);
auto o2o = repo.getObjectRaw(777);
auto o2ob = o2o.cbegin();
Chunk o2(o2ob, o2o.cend());
err = true;
} catch (...) {}
if (err)
throw Exception("Object constructed with garbage data!");
if (err) throw Exception("Object constructed with garbage data!");
}
{
Config conf;
@@ -167,11 +160,11 @@ TEST(FileRepository, Filters) {
std::string o2k(16, '\0');
o2k[0] = 1;
o2k[1] = 2;
EXPECT_EQ(repo.getObjectId(Object::ObjectType::Chunk, o1k), 666);
EXPECT_EQ(repo.getObjectId(Object::ObjectType::Chunk, o2k), 777);
ASSERT_EQ(repo.getObjectId(Object::ObjectType::Chunk, o1k), 666);
ASSERT_EQ(repo.getObjectId(Object::ObjectType::Chunk, o2k), 777);
auto o1o = repo.getObject(666);
auto o2o = repo.getObject(777);
auto o1o = repo.getObjectRaw(666);
auto o2o = repo.getObjectRaw(777);
auto o1ob = o1o.cbegin();
auto o2ob = o2o.cbegin();
@@ -183,27 +176,22 @@ TEST(FileRepository, Filters) {
std::vector<char> data2{'q', 'w', 'e', 'r', 'b'};
Chunk o2e(777, o2k, data2);
EXPECT_EQ(o1.id, o1e.id);
EXPECT_EQ(o2.id, o2e.id);
EXPECT_EQ((int) o1.type, (int) o1e.type);
EXPECT_EQ((int) o2.type, (int) o2e.type);
ASSERT_EQ(o1.id, o1e.id);
ASSERT_EQ(o2.id, o2e.id);
ASSERT_EQ((int) o1.type, (int) o1e.type);
ASSERT_EQ((int) o2.type, (int) o2e.type);
auto o1d = o1.data;
auto o1ed = o1e.data;
auto o2d = o2.data;
auto o2ed = o2e.data;
EXPECT_EQ(o1.data.size(), o1e.data.size());
EXPECT_EQ(o2.data.size(), o2e.data.size());
for (int i = 0; i < o1.data.size(); i++) {
EXPECT_EQ(o1.data[i], o1e.data[i]);
}
for (int i = 0; i < o2.data.size(); i++) {
EXPECT_EQ(o2.data[i], o2e.data[i]);
}
ASSERT_EQ(o1.data.size(), o1e.data.size());
ASSERT_EQ(o2.data.size(), o2e.data.size());
for (int i = 0; i < o1.data.size(); i++) { ASSERT_EQ(o1.data[i], o1e.data[i]); }
for (int i = 0; i < o2.data.size(); i++) { ASSERT_EQ(o2.data[i], o2e.data[i]); }
}
}
TEST(FileRepository, IDsDisabled) {
GTEST_SKIP();
Cleaner c({"IDS/testrepo"});
{
Config conf;
@@ -215,29 +203,28 @@ TEST(FileRepository, IDsDisabled) {
std::string o2k(16, '\0');
o2k[0] = 1;
std::vector<char>
data1{'a', 'b', 'c', 'e'};
std::vector<char> data1{'a', 'b', 'c', 'e'};
Chunk o1(repo.getId(), o1k, data1);
std::vector<char> data2{'q', 'w', 'e', 'r', 'b'};
Chunk o2(repo.getId(), o2k, data2);
EXPECT_EQ(o1.id, 1);
EXPECT_EQ(o2.id, 2);
ASSERT_EQ(o1.id, 1);
ASSERT_EQ(o2.id, 2);
repo.putObject(o1);
repo.putObject(o2);
EXPECT_EQ(repo.getObjectId(Object::ObjectType::Chunk, o1k), 1);
EXPECT_EQ(repo.getObjectId(Object::ObjectType::Chunk, o2k), 2);
ASSERT_EQ(repo.getObjectId(Object::ObjectType::Chunk, o1k), 1);
ASSERT_EQ(repo.getObjectId(Object::ObjectType::Chunk, o2k), 2);
}
{
Config conf;
conf.add("repo", "IDS/testrepo");
FileRepository repo(conf);
repo.open();
auto o1o = repo.getObject(1);
auto o2o = repo.getObject(2);
auto o1o = repo.getObjectRaw(1);
auto o2o = repo.getObjectRaw(2);
auto o1ob = o1o.cbegin();
auto o2ob = o2o.cbegin();
@@ -251,29 +238,25 @@ TEST(FileRepository, IDsDisabled) {
std::vector<char> data2{'q', 'w', 'e', 'r', 'b'};
Chunk o2e(2, o2k, data2);
EXPECT_EQ(o1.id, o1e.id);
EXPECT_EQ(o2.id, o2e.id);
EXPECT_EQ((int) o1.type, (int) o1e.type);
EXPECT_EQ((int) o2.type, (int) o2e.type);
ASSERT_EQ(o1.id, o1e.id);
ASSERT_EQ(o2.id, o2e.id);
ASSERT_EQ((int) o1.type, (int) o1e.type);
ASSERT_EQ((int) o2.type, (int) o2e.type);
auto o1d = o1.data;
auto o1ed = o1e.data;
auto o2d = o2.data;
auto o2ed = o2e.data;
EXPECT_EQ(o1.data.size(), o1e.data.size());
EXPECT_EQ(o2.data.size(), o2e.data.size());
for (int i = 0; i < o1.data.size(); i++) {
EXPECT_EQ(o1.data[i], o1e.data[i]);
}
for (int i = 0; i < o2.data.size(); i++) {
EXPECT_EQ(o2.data[i], o2e.data[i]);
}
ASSERT_EQ(o1.data.size(), o1e.data.size());
ASSERT_EQ(o2.data.size(), o2e.data.size());
for (int i = 0; i < o1.data.size(); i++) { ASSERT_EQ(o1.data[i], o1e.data[i]); }
for (int i = 0; i < o2.data.size(); i++) { ASSERT_EQ(o2.data[i], o2e.data[i]); }
EXPECT_EQ(repo.getObjectId(Object::ObjectType::Chunk, o1k), 1);
EXPECT_EQ(repo.getObjectId(Object::ObjectType::Chunk, o2k), 2);
ASSERT_EQ(repo.getObjectId(Object::ObjectType::Chunk, o1k), 1);
ASSERT_EQ(repo.getObjectId(Object::ObjectType::Chunk, o2k), 2);
repo.deleteObject(o1);
repo.deleteObjects({o1.id});
}
{
Config conf;
@@ -283,13 +266,12 @@ TEST(FileRepository, IDsDisabled) {
std::string o2k(16, '\0');
o2k[0] = 1;
EXPECT_EQ(repo.getObjectId(Object::ObjectType::Chunk, o2k), 2);
ASSERT_EQ(repo.getObjectId(Object::ObjectType::Chunk, o2k), 2);
auto id = repo.getId();
EXPECT_EQ(id, 1);
std::vector<char>
data1{'a', 'b', 'c', 'e'};
ASSERT_EQ(id, 1);
std::vector<char> data1{'a', 'b', 'c', 'e'};
Chunk o1(id, o2k, data1);
EXPECT_EQ(repo.getId(), 3);
ASSERT_EQ(repo.getId(), 3);
}
}

View File

@@ -1,4 +1,4 @@
cmake_minimum_required(VERSION 3.22)
cmake_minimum_required(VERSION 3.18)
add_library(testUtils srcs/Cleaner.cpp)

View File

@@ -5,13 +5,9 @@
#include "Cleaner.h"
Cleaner::Cleaner(std::vector<std::filesystem::path> toClean) : toClean(std::move(toClean)) {
for (const auto &p: this->toClean) {
std::filesystem::remove_all(p);
}
for (const auto &p: this->toClean) { std::filesystem::remove_all(p); }
}
Cleaner::~Cleaner() {
for (const auto &p: toClean) {
std::filesystem::remove_all(p);
}
for (const auto &p: toClean) { std::filesystem::remove_all(p); }
}
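
Cleaner is a small RAII guard: it removes the given paths once when constructed, so a test starts from a clean slate even after a previous crashed run left artifacts behind, and again when it goes out of scope. A usage sketch mirroring how the tests above employ it; the paths and test name are illustrative:

#include <filesystem>
#include <gtest/gtest.h>
// The Cleaner header path is not shown in this diff; include it as appropriate.

TEST(Example, CleanerUsage) {
    Cleaner c({"Example/testrepo", "Example/testfrom"}); // both directories wiped here
    std::filesystem::create_directories("Example/testrepo");
    // ... run code that writes into Example/testrepo ...
}                                                        // ... and wiped again here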