refactor
parent ff13633962
commit 7112fbee7e
@@ -65,10 +65,17 @@ private:
  static void create_item_meta(api_meta_map &meta, bool directory,
                               const api_file &file);

  auto do_directory_operation(
      const std::string &api_path,
      std::function<api_error(const encrypt_config &cfg,
                              const std::string &source_path)>
          callback) const -> api_error;

  auto
  process_directory_entry(const std::filesystem::directory_entry &dir_entry,
                          const encrypt_config &cfg,
                          std::string &api_path) const -> bool;

  void remove_deleted_files();

public:
@@ -169,6 +169,45 @@ auto encrypt_provider::create_directory(const std::string &api_path,
  return api_error::not_implemented;
}

auto encrypt_provider::do_directory_operation(
    const std::string &api_path,
    std::function<api_error(const encrypt_config &cfg,
                            const std::string &source_path)>
        callback) const -> api_error {
  bool exists{};
  auto res = is_file(api_path, exists);
  if (res != api_error::success) {
    return res;
  }
  if (exists) {
    return api_error::item_exists;
  }

  auto cfg = config_.get_encrypt_config();
  std::string source_path{api_path};
  if (api_path != "/") {
    res =
        utils::encryption::decrypt_file_path(cfg.encryption_token, source_path);
    if (res != api_error::success) {
      return res;
    }
  }

  source_path =
      utils::path::absolute(utils::path::combine(cfg.path, {source_path}));
  if (source_path != cfg.path &&
      not source_path.starts_with(cfg.path +
                                  utils::path::directory_seperator)) {
    return api_error::directory_not_found;
  }

  if (not utils::file::is_directory(source_path)) {
    return api_error::directory_not_found;
  }

  return callback(cfg, source_path);
}

auto encrypt_provider::get_api_path_from_source(const std::string &source_path,
                                                std::string &api_path) const
    -> api_error {
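
For orientation, a minimal sketch of how a caller might reuse the do_directory_operation() helper added above: it validates the api_path, decrypts it, confines the resolved source path to cfg.path, and only then invokes the callback with the encrypt_config and the on-disk source path. The method name example_is_directory_empty and the use of std::filesystem::is_empty are illustrative assumptions, not part of this commit.

// Hypothetical caller (illustrative only): report whether a directory is
// empty, relying on do_directory_operation() for all path validation.
auto encrypt_provider::example_is_directory_empty(const std::string &api_path,
                                                  bool &empty) const
    -> api_error {
  return do_directory_operation(
      api_path, [&empty](const encrypt_config & /* cfg */,
                         const std::string &source_path) -> api_error {
        std::error_code ec{};
        empty = std::filesystem::is_empty(source_path, ec);
        return ec ? api_error::error : api_error::success;
      });
}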
@@ -210,173 +249,143 @@ auto encrypt_provider::get_api_path_from_source(const std::string &source_path,

auto encrypt_provider::get_directory_item_count(
    const std::string &api_path) const -> std::uint64_t {
  auto result = db::db_select{*db_, source_table}
                    .column("source_path")
                    .where("api_path")
                    .equals(api_path)
                    .go();
  std::optional<db::db_select::row> row;
  if (not(result.get_row(row) && row.has_value())) {
    return 0U;
  }

  auto source_path = row->get_column("source_path").get_value<std::string>();
  result = db::db_select{*db_, directory_table}
               .column("api_path")
               .where("source_path")
               .equals(source_path)
               .go();
  if (not result.has_row()) {
    return 0U;
  }

  const auto cfg = config_.get_encrypt_config();
  static const auto *function_name = __FUNCTION__;

  std::uint64_t count{};
  try {
    for ([[maybe_unused]] const auto &dir_entry :
         std::filesystem::directory_iterator(source_path)) {
      count++;
    }

    return count;
  } catch (const std::exception &ex) {
    utils::error::raise_error(__FUNCTION__, ex, cfg.path,
                              "failed to get directory item count");
  auto res = do_directory_operation(
      api_path,
      [&api_path, &count](const encrypt_config & /* cfg */,
                          const std::string &source_path) -> api_error {
        try {
          for ([[maybe_unused]] const auto &dir_entry :
               std::filesystem::directory_iterator(source_path)) {
            count++;
          }
        } catch (const std::exception &ex) {
          utils::error::raise_api_path_error(
              function_name, api_path, source_path, ex,
              "failed to get directory item count");
        }
        return api_error::success;
      });
  if (res != api_error::success) {
    utils::error::raise_api_path_error(function_name, api_path, res,
                                       "failed to get directory item count");
  }

  return 0U;
  return count;
}

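As an aside, the counting loop above is equivalent to taking std::distance over a directory_iterator range; the helper below is only an illustrative shorthand (its name and free-function form are assumptions), not code from this commit.

// Illustrative equivalent of the counting loop (not part of this diff).
#include <cstdint>
#include <filesystem>
#include <iterator>
#include <string>

inline auto example_count_entries(const std::string &source_path)
    -> std::uint64_t {
  // std::distance walks the directory once, mirroring the explicit for loop.
  return static_cast<std::uint64_t>(
      std::distance(std::filesystem::directory_iterator{source_path},
                    std::filesystem::directory_iterator{}));
}
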
auto encrypt_provider::get_directory_items(const std::string &api_path,
                                           directory_item_list &list) const
    -> api_error {
  bool exists{};
  auto res = is_file(api_path, exists);
  if (res != api_error::success) {
    return res;
  }
  if (exists) {
    return api_error::item_exists;
  }
  static const auto *function_name = __FUNCTION__;

  auto cfg = config_.get_encrypt_config();
  std::string source_path{api_path};
  if (api_path != "/") {
    res =
        utils::encryption::decrypt_file_path(cfg.encryption_token, source_path);
    if (res != api_error::success) {
      return res;
    }
  }
  return do_directory_operation(
      api_path,
      [this, &list](const encrypt_config &cfg,
                    const std::string &source_path) -> api_error {
        try {
          for (const auto &dir_entry :
               std::filesystem::directory_iterator(source_path)) {
            try {
              std::string current_api_path{};
              if (dir_entry.is_directory()) {
                auto result = db::db_select{*db_, directory_table}
                                  .column("api_path")
                                  .where("source_path")
                                  .equals(dir_entry.path().string())
                                  .go();
                std::optional<db::db_select::row> row;
                if (result.get_row(row) && row.has_value()) {
                  current_api_path =
                      row->get_column("api_path").get_value<std::string>();
                }
                if (current_api_path.empty()) {
                  process_directory_entry(dir_entry, cfg, current_api_path);

  source_path =
      utils::path::absolute(utils::path::combine(cfg.path, {source_path}));
  if (source_path != cfg.path &&
      not source_path.starts_with(cfg.path +
                                  utils::path::directory_seperator)) {
    return api_error::directory_not_found;
  }
                  result = db::db_select{*db_, directory_table}
                               .column("api_path")
                               .where("source_path")
                               .equals(dir_entry.path().string())
                               .go();
                  row.reset();
                  if (not(result.get_row(row) && row.has_value())) {
                    continue;
                  }

  if (not utils::file::is_directory(source_path)) {
    return api_error::directory_not_found;
  }
                  current_api_path =
                      row->get_column("api_path").get_value<std::string>();
                }
              } else {
                std::string api_path_data{};
                auto result = db::db_select{*db_, file_table}
                                  .column("data")
                                  .where("source_path")
                                  .equals(dir_entry.path().string())
                                  .go();
                std::optional<db::db_select::row> row;
                if (result.get_row(row) && row.has_value()) {
                  api_path_data =
                      row->get_column("data").get_value<std::string>();
                }

  try {
    for (const auto &dir_entry :
         std::filesystem::directory_iterator(source_path)) {
      try {
        std::string current_api_path{};
        if (dir_entry.is_directory()) {
          auto result = db::db_select{*db_, directory_table}
                            .column("api_path")
                            .where("source_path")
                            .equals(dir_entry.path().string())
                            .go();
          std::optional<db::db_select::row> row;
          if (result.get_row(row) && row.has_value()) {
            current_api_path =
                row->get_column("api_path").get_value<std::string>();
          }
          if (current_api_path.empty()) {
            process_directory_entry(dir_entry, cfg, current_api_path);
                if (api_path_data.empty()) {
                  if (not process_directory_entry(dir_entry, cfg,
                                                  current_api_path)) {
                    continue;
                  }
                } else {
                  current_api_path = json::parse(api_path_data)
                                         .at("api_path")
                                         .get<std::string>();
                }
              }

            result = db::db_select{*db_, directory_table}
                         .column("api_path")
                         .where("source_path")
                         .equals(dir_entry.path().string())
                         .go();
            row.reset();
            if (not(result.get_row(row) && row.has_value())) {
              continue;
              auto file =
                  create_api_file(current_api_path, dir_entry.is_directory(),
                                  dir_entry.path().string());

              directory_item dir_item{};
              dir_item.api_parent = file.api_parent;
              dir_item.api_path = file.api_path;
              dir_item.directory = dir_entry.is_directory();
              dir_item.resolved = true;
              dir_item.size = file.file_size;
              create_item_meta(dir_item.meta, dir_item.directory, file);

              list.emplace_back(std::move(dir_item));
            } catch (const std::exception &ex) {
              utils::error::raise_error(function_name, ex,
                                        dir_entry.path().string(),
                                        "failed to process directory item");
            }

            current_api_path =
                row->get_column("api_path").get_value<std::string>();
          }
        } else {
          std::string api_path_data{};
          auto result = db::db_select{*db_, file_table}
                            .column("data")
                            .where("source_path")
                            .equals(dir_entry.path().string())
                            .go();
          std::optional<db::db_select::row> row;
          if (result.get_row(row) && row.has_value()) {
            api_path_data = row->get_column("data").get_value<std::string>();
          }

          if (api_path_data.empty()) {
            if (not process_directory_entry(dir_entry, cfg, current_api_path)) {
              continue;
            }
          } else {
            current_api_path =
                json::parse(api_path_data).at("api_path").get<std::string>();
          }
        } catch (const std::exception &ex) {
          utils::error::raise_error(function_name, ex, source_path,
                                    "failed to get directory items");
          return api_error::error;
        }

        auto file = create_api_file(current_api_path, dir_entry.is_directory(),
                                    dir_entry.path().string());
        std::sort(list.begin(), list.end(),
                  [](const auto &item1, const auto &item2) -> bool {
                    return (item1.directory && not item2.directory) ||
                           (not(item2.directory && not item1.directory) &&
                            (item1.api_path.compare(item2.api_path) < 0));
                  });

        directory_item dir_item{};
        dir_item.api_parent = file.api_parent;
        dir_item.api_path = file.api_path;
        dir_item.directory = dir_entry.is_directory();
        dir_item.resolved = true;
        dir_item.size = file.file_size;
        create_item_meta(dir_item.meta, dir_item.directory, file);

        list.emplace_back(std::move(dir_item));
      } catch (const std::exception &ex) {
        utils::error::raise_error(__FUNCTION__, ex, dir_entry.path().string(),
                                  "failed to process directory item");
      }
    }
  } catch (const std::exception &ex) {
    utils::error::raise_error(__FUNCTION__, ex, source_path,
                              "failed to get directory items");
    return api_error::error;
  }

  std::sort(list.begin(), list.end(), [](const auto &a, const auto &b) -> bool {
    return (a.directory && not b.directory) ||
           (not(b.directory && not a.directory) &&
            (a.api_path.compare(b.api_path) < 0));
  });

  list.insert(list.begin(), directory_item{
                                "..",
                                "",
                                true,
                            });
  list.insert(list.begin(), directory_item{
                                ".",
                                "",
                                true,
                            });

  return api_error::success;
        list.insert(list.begin(), directory_item{
                                      "..",
                                      "",
                                      true,
                                  });
        list.insert(list.begin(), directory_item{
                                      ".",
                                      "",
                                      true,
                                  });
        return api_error::success;
      });
}

auto encrypt_provider::get_file(const std::string &api_path,
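
For reference, a hypothetical consumer of the directory_item_list produced by get_directory_items(); the field names come from the assignments above, while the example_dump name and the assumption that directory_item_list is directly iterable are illustrative only.

// Illustrative consumer (not part of this diff): print each entry's type,
// path, and size as filled in by get_directory_items().
#include <iostream>

inline void example_dump(const directory_item_list &list) {
  for (const auto &item : list) {
    std::cout << (item.directory ? "dir  " : "file ") << item.api_path << " ("
              << item.size << " bytes)" << '\n';
  }
}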
@@ -857,12 +866,12 @@ auto encrypt_provider::read_file_bytes(const std::string &api_path,
    const auto relative_path =
        std::filesystem::path(source_path).lexically_relative(cfg.path);

    auto ri = std::make_shared<reader_info>();
    ri->reader = std::make_unique<utils::encryption::encrypting_reader>(
    auto info = std::make_shared<reader_info>();
    info->reader = std::make_unique<utils::encryption::encrypting_reader>(
        relative_path.filename().string(), source_path, stop_requested,
        cfg.encryption_token, relative_path.parent_path().string());
    reader_lookup_[source_path] = ri;
    iv_list = ri->reader->get_iv_list();
    reader_lookup_[source_path] = info;
    iv_list = info->reader->get_iv_list();

    file_data["original_file_size"] = file_size;
    file_data["iv_list"] = iv_list;
@@ -882,11 +891,11 @@ auto encrypt_provider::read_file_bytes(const std::string &api_path,
                 std::array<unsigned char,
                            crypto_aead_xchacha20poly1305_IETF_NPUBBYTES>>>();
    if (reader_lookup_.find(source_path) == reader_lookup_.end()) {
      auto ri = std::make_shared<reader_info>();
      ri->reader = std::make_unique<utils::encryption::encrypting_reader>(
      auto info = std::make_shared<reader_info>();
      info->reader = std::make_unique<utils::encryption::encrypting_reader>(
          api_path, source_path, stop_requested, cfg.encryption_token,
          std::move(iv_list));
      reader_lookup_[source_path] = ri;
      reader_lookup_[source_path] = info;
    }
  }

@@ -894,16 +903,16 @@ auto encrypt_provider::read_file_bytes(const std::string &api_path,
    return api_error::success;
  }

  auto ri = reader_lookup_.at(source_path);
  ri->last_access_time = std::chrono::system_clock::now();
  auto info = reader_lookup_.at(source_path);
  info->last_access_time = std::chrono::system_clock::now();
  reader_lookup_lock.unlock();

  mutex_lock reader_lock(ri->reader_mtx);
  ri->reader->set_read_position(offset);
  mutex_lock reader_lock(info->reader_mtx);
  info->reader->set_read_position(offset);
  data.resize(size);

  const auto res = ri->reader->reader_function(data.data(), 1u, data.size(),
                                               ri->reader.get());
  const auto res = info->reader->reader_function(data.data(), 1U, data.size(),
                                                 info->reader.get());
  if (res == 0) {
    return api_error::os_error;
  }
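
Finally, a sketch of the reader_info shape implied by the usage in read_file_bytes() above; the real declaration is not part of this diff, and the field types shown here are assumptions inferred from how ri/info is used.

// Assumed shape of reader_info (inferred from usage; types are guesses).
#include <chrono>
#include <memory>
#include <mutex>

struct reader_info {
  std::chrono::system_clock::time_point last_access_time{
      std::chrono::system_clock::now()};
  std::unique_ptr<utils::encryption::encrypting_reader> reader;
  std::mutex reader_mtx;
};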