123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284 |
- #include "datfile.h"
- #include "subfiledata.h"
- #include "subfiles/ddssubfile.h"
- #include <iostream>
- #include <yaml-cpp/yaml.h>
- #include <EasyLogging++/easylogging++.h>
- INITIALIZE_EASYLOGGINGPP
- namespace LOTRO_DAT {
// Shared handle to the native .dat export API. A single instance serves all
// DatFile objects; every call passes the per-object file_handle_ to select
// which open archive it operates on.
DatExportApi DatFile::api_;
- DatFile::DatFile(int file_handle) {
- initialized_ = false;
- file_handle_ = file_handle;
- el::Configurations defaultConf;
- el::Loggers::addFlag(el::LoggingFlag::LogDetailedCrashReason);
- el::Loggers::addFlag(el::LoggingFlag::ImmediateFlush);
- el::Loggers::addFlag(el::LoggingFlag::StrictLogFileSizeCheck);
- defaultConf.setToDefault();
- defaultConf.setGlobally(el::ConfigurationType::Format, "%datetime %level : %msg (function: %func)");
- defaultConf.setGlobally(el::ConfigurationType::ToFile, "true");
- defaultConf.setGlobally(el::ConfigurationType::Filename, "dat_library.log");
- defaultConf.setGlobally(el::ConfigurationType::ToStandardOutput, "false");
- defaultConf.setGlobally(el::ConfigurationType::MaxLogFileSize, "15728640"); // 15MB
- #ifndef NDEBUG
- defaultConf.set(el::Level::Debug, el::ConfigurationType::Enabled, "true");
- defaultConf.set(el::Level::Debug, el::ConfigurationType::Filename, "dat_library_debug.log");
- #elif NDEBUG
- defaultConf.set(el::Level::Debug, el::ConfigurationType::Enabled, "false");
- #endif
- el::Loggers::reconfigureAllLoggers(defaultConf);
- export_data_buf_ = BinaryData(64 * 1024 * 1024); // 64 MB - max file size;
- }
- DatFile::~DatFile() {
- Deinit();
- }
- bool DatFile::Init(const std::string& filename) {
- if (initialized_) {
- Deinit();
- }
- if (api_.OpenDatFile(file_handle_, filename.c_str(), 130) == file_handle_) {
- initialized_ = true;
- filename_ = filename;
- LoadAllFilesInfo();
- return true;
- }
- return false;
- }
- void DatFile::LoadAllFilesInfo() {
- int subfiles_num = api_.GetNumSubfiles(file_handle_);
- for (int i = 0; i < subfiles_num; ++i) {
- SubfileInfo file_info;
- api_.GetSubfileSizes(file_handle_, &file_info.file_id, &file_info.size, &file_info.iteration, i, 1);
- files_info_[file_info.file_id] = file_info;
- }
- }
- void DatFile::Deinit() {
- if (initialized_) {
- api_.CloseDatFile(file_handle_);
- files_info_.clear();
- initialized_ = false;
- }
- }
- bool DatFile::checkIfPatchedByLegacyV1(const std::string& filename) {
- FILE* file_handler = fopen(filename.c_str(), "r+b");
- if (file_handler == nullptr) {
- return false;
- }
- BinaryData data(1024);
- fread(data.data(), 1024, 1, file_handler);
- fclose(file_handler);
- return (data.ToNumber<4>(0x128) != 0 || data.ToNumber<4>(0x12C) != 0);
- }
- int DatFile::GetDatFileMaxIteration() const {
- int subfiles_num = api_.GetNumSubfiles(file_handle_);
- int max_iteration = -1;
- for (int i = 0; i < subfiles_num; ++i) {
- SubfileInfo file_info;
- api_.GetSubfileSizes(file_handle_, &file_info.file_id, &file_info.size, &file_info.iteration, i, 1);
- max_iteration = std::max(max_iteration, file_info.iteration);
- }
- return max_iteration;
- }
- bool DatFile::Initialized() const{
- return initialized_;
- }
- const std::string& DatFile::GetFilename() const {
- return filename_;
- }
- SubfileInfo DatFile::getSubfileInfo(int file_id) const {
- if (files_info_.count(file_id) == 0) {
- return SubfileInfo();
- } else {
- return files_info_.at(file_id);
- }
- }
- size_t DatFile::GetFilesNumInDatFile() {
- return api_.GetNumSubfiles(file_handle_);
- }
- size_t DatFile::PatchAllFilesFromDatabase(Database& db) {
- size_t patched_files_num = 0;
- SubfileData file;
- int i = 0;
- const int total_files = db.CountRows();
- std::cout << "Patching all files from database..." << std::endl;
- while (!(file = db.GetNextFile()).Empty()) {
- if (i * 100 / total_files != (i - 1) * 100 / total_files) {
- std::cout << "Completed " << i * 100 / total_files << "%" << std::endl;
- }
- ++i;
- if (!file.options["fid"]) {
- LOG(ERROR) << "Incorrect db entry - no file_id specified";
- continue;
- }
- PatchFile(file);
- ++patched_files_num;
- }
- return patched_files_num;
- }
// Writes one subfile into the open archive.
//
// Behavior:
//  * file_data.options["fid"] is mandatory — it selects the target subfile.
//  * create == true: the data is written as-is under `version`/`iteration`
//    without consulting the existing archive contents.
//  * create == false (default path): the current subfile is read back from
//    the archive, merged with the new data via BuildForImport(), and written
//    out. `version`/`iteration` values of -1 mean "keep the existing ones".
//
// NOTE(review): default values for version/iteration/create are presumably
// declared in the header — confirm there (-1/-1/false would match the logic).
void DatFile::PatchFile(SubfileData file_data, int version, int iteration, bool create) {
if (!file_data.options["fid"]) {
LOG(ERROR) << "Trying to patch file, but file id is not specified, skipping!";
return;
}
int file_id = file_data.options["fid"].as<int>();
// Without `create`, refuse to touch ids we have no metadata for.
if (files_info_.count(file_id) == 0 && !create) {
LOG(ERROR) << "Trying to patch file, not existing in files_info. File id = " << file_id;
return;
}
if (create) {
// Raw write path: no read-back, no merge.
BinaryData file = file_data.binary_data;
api_.PutSubfileData(file_handle_, file_id, file.data(), 0, file.size(), version, iteration);
return;
}
const SubfileInfo& file_info = files_info_[file_id];
int existing_file_version = 0; // will be evaluated with api_.GetSubfileData
// Read the current subfile contents into the shared export buffer.
int size = api_.GetSubfileData(file_handle_, file_id, export_data_buf_.data(), existing_file_version);
if (size <= 0) {
LOG(ERROR) << "Trying to patch file, not existing in .dat file. File id = " << file_id;
return;
}
BinaryData old_data = export_data_buf_.CutData(0, size);

// Merge old archive data with the incoming patch data.
BinaryData file = BuildForImport(old_data, file_data);
// -1 sentinels mean "preserve what the archive already has".
if (version == -1) {
version = existing_file_version;
}
if (iteration == -1) {
iteration = file_info.iteration;
}
api_.PutSubfileData(file_handle_, file_id, file.data(), 0, file.size(), version, iteration);
}
- void DatFile::PatchFile(int file_id, FILE_TYPE type, std::string path_to_file, int version, int iteration) {
- BinaryData new_data(64 * 1024 * 1024);
- std::ifstream in(path_to_file, std::ifstream::binary);
- in.read((char*)new_data.data(), new_data.size());
- std::streamsize data_size = in.gcount();
- in.close();
- SubfileData imported_subfile;
- imported_subfile.binary_data = new_data.CutData(0, data_size);
- imported_subfile.options["ext"] = StringFromFileType(type);
- imported_subfile.options["fid"] = file_id;
- PatchFile(imported_subfile, version, iteration);
- }
- FILE_TYPE DatFile::GetExistingFileType(int file_id) {
- int version = 0;
- api_.GetSubfileData(file_handle_, file_id, export_data_buf_.data(), version);
- return FileTypeFromFileContents(file_id, export_data_buf_);
- }
- void DatFile::PerformOperationOnAllSubfiles(const SubfileOperation& operation) {
- if (files_info_.empty()) {
- LoadAllFilesInfo();
- }
- std::cout << "Performing operation on all files...";
- int i = 0;
- for (const std::pair<int, SubfileInfo>& info : files_info_) {
- if (i * 100 / files_info_.size() != (i - 1) * 100 / files_info_.size()) {
- std::cout << "Completed " << i * 100 / files_info_.size() << "%" << std::endl;
- }
- operation(info.second);
- ++i;
- }
- }
- int DatFile::ExportFilesByType(FILE_TYPE type, Database& db) {
- int num_files = 0;
- SubfileOperation operation = [this, type, &db, &num_files](const SubfileInfo& info) {
- FILE_TYPE file_type = GetExistingFileType(info.file_id);
- if (file_type == type) {
- ExportFileById(info.file_id, db);
- ++num_files;
- }
- };
- PerformOperationOnAllSubfiles(operation);
- return num_files;
- }
- int DatFile::ExportFilesByType(FILE_TYPE type, std::string path_to_directory) {
- int num_files = 0;
- SubfileOperation operation = [this, type, path_to_directory, &num_files](const SubfileInfo& info) {
- FILE_TYPE file_type = GetExistingFileType(info.file_id);
- if (file_type == type) {
- ExportFileById(info.file_id, path_to_directory + "/" + std::to_string(info.file_id));
- ++num_files;
- }
- };
- PerformOperationOnAllSubfiles(operation);
- return num_files;
- }
- void DatFile::ExportFileById(int file_id, std::string target_file_path) {
- int version = 0;
- int size = api_.GetSubfileData(file_handle_, file_id, export_data_buf_.data(), version);
- auto data = export_data_buf_.CutData(0, size);
- FILE_TYPE file_type = FileTypeFromFileContents(file_id, data);
- SubfileData file = BuildForExport(file_id, data);
- std::ofstream out(target_file_path + StringFromFileType(file_type), std::ofstream::binary);
- out.write((char*)file.binary_data.data(), file.binary_data.size());
- out.close();
- }
- void DatFile::ExportFileById(int file_id, Database& db) {
- int version = 0;
- int size = api_.GetSubfileData(file_handle_, file_id, export_data_buf_.data(), version);
- auto data = export_data_buf_.CutData(0, size);
- SubfileData file = BuildForExport(file_id, data);
- db.PushFile(file);
- }
- int DatFile::GetFileVersion(int file_id) {
- return api_.GetSubfileVersion(file_handle_, file_id);
- }
- SubfileData DatFile::GetFile(int file_id) {
- int version = 0;
- int size = api_.GetSubfileData(file_handle_, file_id, export_data_buf_.data(), version);
- auto data = export_data_buf_.CutData(0, size);
- return BuildForExport(file_id, data);
- }
- }; // namespace LOTRO_DAT
|