Core/DataStores: Refactor DB2 loaders to be reusable by extractors

Shauren
2017-03-08 18:10:02 +01:00
parent f585c83124
commit 7b235ce6e4
31 changed files with 2874 additions and 3001 deletions
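
In short, the extractor no longer carries its own WDB5 loader (the deleted DB2.cpp/DB2.h below); it drives the shared Core/DataStores loader through a file source and a load-info descriptor instead. The sketch below is assembled purely from the new call sites visible in this diff (DB2CascFileSource, DB2FileLoadInfo, DB2Record, DB2RecordCopy); error handling and the surrounding extractor code are trimmed, so treat it as an outline rather than a verbatim excerpt.

// Minimal sketch of the refactored extractor-side flow (names taken from the
// lines added in this commit; not a verbatim excerpt).
DB2CascFileSource source(CascStorage, "DBFilesClient\\Map.db2");
DB2FileLoader db2;
if (!db2.Load(&source, MapLoadInfo::Instance()))           // shared loader + per-file load info
    exit(1);

for (uint32 x = 0; x < db2.GetRecordCount(); ++x)
{
    DB2Record record = db2.GetRecord(x);
    uint32 id = record.GetId();                             // index-field handling moved into the loader
    char const* directory = record.GetString(0, 0);
}

for (uint32 x = 0; x < db2.GetRecordCopyCount(); ++x)
{
    DB2RecordCopy copy = db2.GetRecordCopy(x);              // duplicated rows: SourceRowId -> NewRowId
}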

View File

@@ -28,7 +28,6 @@ target_include_directories(mapextractor
target_link_libraries(mapextractor
PUBLIC
common
extractor_common)
CollectIncludeDirectories(

View File

@@ -1,200 +0,0 @@
/*
* Copyright (C) 2008-2017 TrinityCore <http://www.trinitycore.org/>
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the
* Free Software Foundation; either version 2 of the License, or (at your
* option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
* more details.
*
* You should have received a copy of the GNU General Public License along
* with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#include "DB2.h"
#include <cassert>
DB2FileLoader::DB2FileLoader()
{
meta = nullptr;
recordSize = 0;
recordCount = 0;
fieldCount = 0;
stringSize = 0;
tableHash = 0;
layoutHash = 0;
minIndex = 0;
maxIndex = 0;
localeMask = 0;
copyIdSize = 0;
data = nullptr;
stringTable = nullptr;
idTable = nullptr;
idTableSize = 0;
copyTable = nullptr;
fields = nullptr;
}
bool DB2FileLoader::Load(CASC::FileHandle const& db2Handle, DB2Meta const* meta_)
{
if (data)
{
delete[] data;
data = nullptr;
}
DWORD bytesRead = 0;
meta = meta_;
std::uint32_t header;
CASC::ReadFile(db2Handle, &header, sizeof(header), &bytesRead);
if (bytesRead != sizeof(header)) // Signature
return false;
EndianConvert(header);
if (header != 0x35424457) // 'WDB5'
return false;
CASC::ReadFile(db2Handle, &recordCount, sizeof(recordCount), &bytesRead);
if (bytesRead != sizeof(recordCount)) // Number of records
return false;
EndianConvert(recordCount);
CASC::ReadFile(db2Handle, &fieldCount, sizeof(fieldCount), &bytesRead);
if (bytesRead != sizeof(fieldCount)) // Number of fields
return false;
EndianConvert(fieldCount);
CASC::ReadFile(db2Handle, &recordSize, sizeof(recordSize), &bytesRead);
if (bytesRead != sizeof(recordSize)) // Size of a record
return false;
EndianConvert(recordSize);
CASC::ReadFile(db2Handle, &stringSize, sizeof(stringSize), &bytesRead);
if (bytesRead != sizeof(stringSize)) // String size
return false;
EndianConvert(stringSize);
CASC::ReadFile(db2Handle, &tableHash, sizeof(tableHash), &bytesRead);
if (bytesRead != sizeof(tableHash)) // Table hash
return false;
EndianConvert(tableHash);
CASC::ReadFile(db2Handle, &layoutHash, sizeof(layoutHash), &bytesRead);
if (bytesRead != sizeof(layoutHash)) // Layout hash
return false;
EndianConvert(layoutHash);
if (layoutHash != meta->LayoutHash)
return false;
CASC::ReadFile(db2Handle, &minIndex, sizeof(minIndex), &bytesRead);
if (bytesRead != sizeof(minIndex)) // MinIndex WDB2
return false;
EndianConvert(minIndex);
CASC::ReadFile(db2Handle, &maxIndex, sizeof(maxIndex), &bytesRead);
if (bytesRead != sizeof(maxIndex)) // MaxIndex WDB2
return false;
EndianConvert(maxIndex);
CASC::ReadFile(db2Handle, &localeMask, sizeof(localeMask), &bytesRead);
if (bytesRead != sizeof(localeMask)) // Locales
return false;
EndianConvert(localeMask);
CASC::ReadFile(db2Handle, &copyIdSize, sizeof(copyIdSize), &bytesRead);
if (bytesRead != sizeof(copyIdSize))
return false;
EndianConvert(copyIdSize);
CASC::ReadFile(db2Handle, &metaFlags, sizeof(metaFlags), &bytesRead);
if (bytesRead != sizeof(metaFlags))
return false;
EndianConvert(metaFlags);
ASSERT((metaFlags & 0x1) == 0);
ASSERT((meta->IndexField == -1) || (meta->IndexField == int32((metaFlags >> 16))));
fields = new FieldEntry[fieldCount];
CASC::ReadFile(db2Handle, fields, fieldCount * sizeof(FieldEntry), &bytesRead);
if (bytesRead != fieldCount * sizeof(FieldEntry))
return false;
if (!meta->HasIndexFieldInData())
idTableSize = recordCount * sizeof(std::uint32_t);
data = new unsigned char[recordSize * recordCount + stringSize];
stringTable = data + recordSize * recordCount;
CASC::ReadFile(db2Handle, data, recordSize * recordCount + stringSize, &bytesRead);
if (bytesRead != recordSize * recordCount + stringSize)
return false;
if (idTableSize)
{
idTable = new unsigned char[idTableSize];
CASC::ReadFile(db2Handle, idTable, idTableSize, &bytesRead);
if (bytesRead != idTableSize)
return false;
}
if (copyIdSize)
{
copyTable = new unsigned char[copyIdSize];
CASC::ReadFile(db2Handle, copyTable, copyIdSize, &bytesRead);
if (bytesRead != copyIdSize)
return false;
}
return true;
}
DB2FileLoader::~DB2FileLoader()
{
delete[] data;
delete[] idTable;
delete[] copyTable;
delete[] fields;
}
DB2FileLoader::Record DB2FileLoader::getRecord(size_t id)
{
assert(data);
return Record(*this, data + id * recordSize);
}
std::pair<std::uint32_t, std::uint32_t> DB2FileLoader::GetRowCopy(std::uint32_t i) const
{
std::uint32_t* copyIds = (std::uint32_t*)copyTable;
std::uint32_t to = copyIds[i * 2];
std::uint32_t from = copyIds[i * 2 + 1];
return { from, to };
}
std::uint32_t DB2FileLoader::GetMaxId() const
{
std::uint32_t j = maxIndex;
for (std::uint32_t i = 0; i < GetNumRowCopies(); ++i)
if (j < GetRowCopy(i).second)
j = GetRowCopy(i).second;
return j;
}
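
For reference, the header that Load() above walks through field by field is the WDB5 layout; the struct below is only a readability aid inferred from the sequence of reads, not something the deleted file declared.

#pragma pack(push, 1)
struct WDB5HeaderSketch                  // inferred from the reads in Load(); illustrative only
{
    std::uint32_t Signature;             // 'WDB5' (0x35424457)
    std::uint32_t RecordCount;
    std::uint32_t FieldCount;
    std::uint32_t RecordSize;
    std::uint32_t StringTableSize;       // stringSize above
    std::uint32_t TableHash;
    std::uint32_t LayoutHash;            // must equal DB2Meta::LayoutHash
    std::uint32_t MinId;                 // minIndex
    std::uint32_t MaxId;                 // maxIndex
    std::uint32_t Locale;                // localeMask
    std::uint32_t CopyTableSize;         // copyIdSize, 8 bytes per (NewRowId, SourceRowId) pair
    std::uint32_t Flags;                 // metaFlags: bit 0 = offset map (rejected above), high 16 bits = index field
};
#pragma pack(pop)
// After the header come FieldCount { UnusedBits, Offset } entries, the record block
// (RecordSize * RecordCount bytes), the string table, then the optional id table and copy table.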

View File

@@ -1,176 +0,0 @@
/*
* Copyright (C) 2008-2017 TrinityCore <http://www.trinitycore.org/>
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the
* Free Software Foundation; either version 2 of the License, or (at your
* option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
* more details.
*
* You should have received a copy of the GNU General Public License along
* with this program. If not, see <http://www.gnu.org/licenses/>.
*/
#ifndef MapExtractor_DB2_h__
#define MapExtractor_DB2_h__
#include "DB2Meta.h"
#include "CascHandles.h"
#include "CascLib.h"
#include "Utilities/ByteConverter.h"
#include "Errors.h"
class DB2FileLoader
{
public:
DB2FileLoader();
~DB2FileLoader();
bool Load(CASC::FileHandle const& db2Handle, DB2Meta const* meta);
class Record
{
public:
float getFloat(std::uint32_t field, std::uint32_t arrayIndex) const
{
ASSERT(field < file.fieldCount);
float val = *reinterpret_cast<float*>(offset + GetOffset(field) + arrayIndex * sizeof(float));
EndianConvert(val);
return val;
}
std::uint32_t getUInt(std::uint32_t field, std::uint32_t arrayIndex) const
{
ASSERT(field < file.fieldCount);
return GetVarInt(field, GetByteSize(field), arrayIndex);
}
std::uint8_t getUInt8(std::uint32_t field, std::uint32_t arrayIndex) const
{
ASSERT(field < file.fieldCount);
ASSERT(GetByteSize(field) == 1);
return *reinterpret_cast<std::uint8_t*>(offset + GetOffset(field) + arrayIndex * sizeof(std::uint8_t));
}
std::uint16_t getUInt16(std::uint32_t field, std::uint32_t arrayIndex) const
{
ASSERT(field < file.fieldCount);
ASSERT(GetByteSize(field) == 2);
std::uint16_t val = *reinterpret_cast<std::uint16_t*>(offset + GetOffset(field) + arrayIndex * sizeof(std::uint16_t));
EndianConvert(val);
return val;
}
char const* getString(std::uint32_t field, std::uint32_t arrayIndex) const
{
ASSERT(field < file.fieldCount);
std::uint32_t stringOffset = *reinterpret_cast<std::uint32_t*>(offset + GetOffset(field) + arrayIndex * sizeof(std::uint32_t));
EndianConvert(stringOffset);
ASSERT(stringOffset < file.stringSize);
return reinterpret_cast<char*>(file.stringTable + stringOffset);
}
private:
std::uint16_t GetOffset(std::uint32_t field) const
{
ASSERT(field < file.fieldCount);
return file.fields[field].Offset;
}
std::uint16_t GetByteSize(std::uint32_t field) const
{
ASSERT(field < file.fieldCount);
return 4 - file.fields[field].UnusedBits / 8;
}
std::uint32_t GetVarInt(std::uint32_t field, std::uint16_t size, std::uint32_t arrayIndex) const
{
ASSERT(field < file.fieldCount);
switch (size)
{
case 1:
{
return *reinterpret_cast<std::uint8_t*>(offset + GetOffset(field) + arrayIndex * sizeof(std::uint8_t));
}
case 2:
{
std::uint16_t val = *reinterpret_cast<std::uint16_t*>(offset + GetOffset(field) + arrayIndex * sizeof(std::uint16_t));
EndianConvert(val);
return val;
}
case 3:
{
#pragma pack(push, 1)
struct dbcint24 { std::uint8_t v[3]; };
#pragma pack(pop)
dbcint24 val = *reinterpret_cast<dbcint24*>(offset + GetOffset(field) + arrayIndex * sizeof(dbcint24));
EndianConvert(val);
return std::uint32_t(val.v[0]) | (std::uint32_t(val.v[1]) << 8) | (std::uint32_t(val.v[2]) << 16);
}
case 4:
{
std::uint32_t val = *reinterpret_cast<std::uint32_t*>(offset + GetOffset(field) + arrayIndex * sizeof(std::uint32_t));
EndianConvert(val);
return val;
}
default:
break;
}
ASSERT(false, "GetByteSize(field) < 4");
return 0;
}
Record(DB2FileLoader &file_, unsigned char *offset_): offset(offset_), file(file_) {}
unsigned char *offset;
DB2FileLoader &file;
friend class DB2FileLoader;
};
// Get record by id
Record getRecord(size_t id);
std::uint32_t getId(size_t row) { return ((std::uint32_t*)idTable)[row]; }
std::pair<std::uint32_t, std::uint32_t> GetRowCopy(std::uint32_t i) const;
std::uint32_t GetNumRows() const { return recordCount; }
std::uint32_t GetNumRowCopies() const { return copyIdSize / 8; }
std::uint32_t GetMaxId() const;
private:
#pragma pack(push, 1)
struct FieldEntry
{
std::uint16_t UnusedBits;
std::uint16_t Offset;
};
#pragma pack(pop)
DB2Meta const* meta;
// WDB5 header fields
std::uint32_t recordSize;
std::uint32_t recordCount;
std::uint32_t fieldCount;
std::uint32_t stringSize;
std::uint32_t tableHash;
std::uint32_t layoutHash;
std::uint32_t minIndex;
std::uint32_t maxIndex;
std::uint32_t localeMask;
std::uint32_t copyIdSize;
std::uint32_t metaFlags;
unsigned char* data;
unsigned char* stringTable;
unsigned char* idTable;
std::uint32_t idTableSize;
unsigned char* copyTable;
FieldEntry* fields;
};
#endif // MapExtractor_DB2_h__

View File

@@ -17,7 +17,20 @@
*/
#define _CRT_SECURE_NO_DEPRECATE
#define WIN32_LEAN_AND_MEAN
#include "Banner.h"
#include "CascHandles.h"
#include "Common.h"
#include "DB2CascFileSource.h"
#include "DB2Meta.h"
#include "DBFilesClientList.h"
#include "StringFormat.h"
#include "adt.h"
#include "wdt.h"
#include <CascLib.h>
#include <boost/filesystem/path.hpp>
#include <boost/filesystem/operations.hpp>
#include <cstdio>
#include <deque>
#include <fstream>
@@ -25,46 +38,6 @@
#include <cstdlib>
#include <cstring>
#include <boost/filesystem/path.hpp>
#include <boost/filesystem/operations.hpp>
#include "Common.h"
#include "DBFilesClientList.h"
#include "CascLib.h"
#include "CascHandles.h"
#include "DB2.h"
#include "Banner.h"
#include "StringFormat.h"
#include "adt.h"
#include "wdt.h"
namespace
{
const char* HumanReadableCASCError(int error)
{
switch (error)
{
case ERROR_SUCCESS: return "SUCCESS";
case ERROR_FILE_CORRUPT: return "FILE_CORRUPT";
case ERROR_CAN_NOT_COMPLETE: return "CAN_NOT_COMPLETE";
case ERROR_HANDLE_EOF: return "HANDLE_EOF";
case ERROR_NO_MORE_FILES: return "NO_MORE_FILES";
case ERROR_BAD_FORMAT: return "BAD_FORMAT";
case ERROR_INSUFFICIENT_BUFFER: return "INSUFFICIENT_BUFFER";
case ERROR_ALREADY_EXISTS: return "ALREADY_EXISTS";
case ERROR_DISK_FULL: return "DISK_FULL";
case ERROR_INVALID_PARAMETER: return "INVALID_PARAMETER";
case ERROR_NOT_SUPPORTED: return "NOT_SUPPORTED";
case ERROR_NOT_ENOUGH_MEMORY: return "NOT_ENOUGH_MEMORY";
case ERROR_INVALID_HANDLE: return "INVALID_HANDLE";
case ERROR_ACCESS_DENIED: return "ACCESS_DENIED";
case ERROR_FILE_NOT_FOUND: return "FILE_NOT_FOUND";
default: return "UNKNOWN";
}
}
}
CASC::StorageHandle CascStorage;
typedef struct
@@ -79,36 +52,128 @@ std::set<std::string> CameraFileNames;
boost::filesystem::path input_path;
boost::filesystem::path output_path;
struct CinematicCameraMeta
struct CinematicCameraLoadInfo
{
static DB2Meta const* Instance()
static DB2FileLoadInfo const* Instance()
{
static DB2FieldMeta const fields[] =
{
{ false, FT_INT, "ID" },
{ false, FT_STRING_NOT_LOCALIZED, "Model" },
{ false, FT_FLOAT, "OriginX" },
{ false, FT_FLOAT, "OriginY" },
{ false, FT_FLOAT, "OriginZ" },
{ false, FT_FLOAT, "OriginFacing" },
{ false, FT_SHORT, "SoundID" },
};
static char const* types = "sffh";
static uint8 const arraySizes[4] = { 1, 3, 1, 1 };
static DB2Meta instance(-1, 4, 0xA7B95349, types, arraySizes);
return &instance;
static DB2Meta const meta(-1, 4, 0xA7B95349, types, arraySizes);
static DB2FileLoadInfo const loadInfo(&fields[0], std::extent<decltype(fields)>::value, &meta);
return &loadInfo;
}
};
struct LiquidTypeMeta
struct LiquidTypeLoadInfo
{
static DB2Meta const* Instance()
static DB2FileLoadInfo const* Instance()
{
static DB2FieldMeta const fields[] =
{
{ false, FT_INT, "ID" },
{ false, FT_STRING, "Name" },
{ false, FT_INT, "SpellID" },
{ false, FT_FLOAT, "MaxDarkenDepth" },
{ false, FT_FLOAT, "FogDarkenIntensity" },
{ false, FT_FLOAT, "AmbDarkenIntensity" },
{ false, FT_FLOAT, "DirDarkenIntensity" },
{ false, FT_FLOAT, "ParticleScale" },
{ false, FT_STRING_NOT_LOCALIZED, "Texture1" },
{ false, FT_STRING_NOT_LOCALIZED, "Texture2" },
{ false, FT_STRING_NOT_LOCALIZED, "Texture3" },
{ false, FT_STRING_NOT_LOCALIZED, "Texture4" },
{ false, FT_STRING_NOT_LOCALIZED, "Texture5" },
{ false, FT_STRING_NOT_LOCALIZED, "Texture6" },
{ false, FT_INT, "Color1" },
{ false, FT_INT, "Color2" },
{ false, FT_FLOAT, "Float1" },
{ false, FT_FLOAT, "Float2" },
{ false, FT_FLOAT, "Float3" },
{ false, FT_FLOAT, "Float4" },
{ false, FT_FLOAT, "Float5" },
{ false, FT_FLOAT, "Float6" },
{ false, FT_FLOAT, "Float7" },
{ false, FT_FLOAT, "Float8" },
{ false, FT_FLOAT, "Float9" },
{ false, FT_FLOAT, "Float10" },
{ false, FT_FLOAT, "Float11" },
{ false, FT_FLOAT, "Float12" },
{ false, FT_FLOAT, "Float13" },
{ false, FT_FLOAT, "Float14" },
{ false, FT_FLOAT, "Float15" },
{ false, FT_FLOAT, "Float16" },
{ false, FT_FLOAT, "Float17" },
{ false, FT_FLOAT, "Float18" },
{ false, FT_INT, "Int1" },
{ false, FT_INT, "Int2" },
{ false, FT_INT, "Int3" },
{ false, FT_INT, "Int4" },
{ false, FT_SHORT, "Flags" },
{ false, FT_SHORT, "LightID" },
{ false, FT_BYTE, "Type" },
{ false, FT_BYTE, "ParticleMovement" },
{ false, FT_BYTE, "ParticleTexSlots" },
{ false, FT_BYTE, "MaterialID" },
{ false, FT_BYTE, "DepthTexCount1" },
{ false, FT_BYTE, "DepthTexCount2" },
{ false, FT_BYTE, "DepthTexCount3" },
{ false, FT_BYTE, "DepthTexCount4" },
{ false, FT_BYTE, "DepthTexCount5" },
{ false, FT_BYTE, "DepthTexCount6" },
{ false, FT_INT, "SoundID" },
};
static char const* types = "sifffffsifihhbbbbbi";
static uint8 const arraySizes[19] = { 1, 1, 1, 1, 1, 1, 1, 6, 2, 18, 4, 1, 1, 1, 1, 1, 1, 6, 1 };
static DB2Meta instance(-1, 19, 0x99FC34E5, types, arraySizes);
return &instance;
static DB2Meta const meta(-1, 19, 0x99FC34E5, types, arraySizes);
static DB2FileLoadInfo const loadInfo(&fields[0], std::extent<decltype(fields)>::value, &meta);
return &loadInfo;
}
};
struct MapMeta
struct MapLoadInfo
{
static DB2Meta const* Instance()
static DB2FileLoadInfo const* Instance()
{
static DB2FieldMeta const fields[] =
{
{ false, FT_INT, "ID" },
{ false, FT_STRING_NOT_LOCALIZED, "Directory" },
{ false, FT_INT, "Flags1" },
{ false, FT_INT, "Flags2" },
{ false, FT_FLOAT, "MinimapIconScale" },
{ false, FT_FLOAT, "CorpsePosX" },
{ false, FT_FLOAT, "CorpsePosY" },
{ false, FT_STRING, "MapName" },
{ false, FT_STRING, "MapDescription0" },
{ false, FT_STRING, "MapDescription1" },
{ false, FT_SHORT, "AreaTableID" },
{ false, FT_SHORT, "LoadingScreenID" },
{ true, FT_SHORT, "CorpseMapID" },
{ false, FT_SHORT, "TimeOfDayOverride" },
{ true, FT_SHORT, "ParentMapID" },
{ true, FT_SHORT, "CosmeticParentMapID" },
{ false, FT_SHORT, "WindSettingsID" },
{ false, FT_BYTE, "InstanceType" },
{ false, FT_BYTE, "unk5" },
{ false, FT_BYTE, "ExpansionID" },
{ false, FT_BYTE, "MaxPlayers" },
{ false, FT_BYTE, "TimeOffset" },
};
static char const* types = "siffssshhhhhhhbbbbb";
static uint8 const arraySizes[19] = { 1, 2, 1, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1 };
static DB2Meta instance(-1, 19, 0xF7CF2DA2, types, arraySizes);
return &instance;
static DB2Meta const meta(-1, 19, 0xF7CF2DA2, types, arraySizes);
static DB2FileLoadInfo const loadInfo(&fields[0], std::extent<decltype(fields)>::value, &meta);
return &loadInfo;
}
};
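
A note on the numeric field indexes used further down: with these load infos the accessors address logical fields (one per character of the types string), and the ID stays outside the record data because the index field is -1. The helper below is hypothetical and only illustrates how the later call sites line up with the metas above; it is an inference from this diff, not something the commit spells out.

// Illustrative only -- hypothetical helper, not part of the commit.
void DumpFirstColumn(DB2FileLoader& db2)
{
    for (uint32 x = 0; x < db2.GetRecordCount(); ++x)
    {
        DB2Record record = db2.GetRecord(x);
        // Map: field 0 (types[0] == 's') -> "Directory".
        // Likewise, LiquidType's record.GetUInt8(13, 0) below resolves to types[13] == 'b' -> "Type".
        printf("%u: %s\n", record.GetId(), record.GetString(0, 0));
    }
}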
@@ -306,29 +371,22 @@ void ReadMapDBC()
{
printf("Read Map.db2 file...\n");
CASC::FileHandle dbcFile = CASC::OpenFile(CascStorage, "DBFilesClient\\Map.db2", CASC_LOCALE_NONE, true);
if (!dbcFile)
{
exit(1);
}
DB2CascFileSource source(CascStorage, "DBFilesClient\\Map.db2");
DB2FileLoader db2;
if (!db2.Load(dbcFile, MapMeta::Instance()))
if (!db2.Load(&source, MapLoadInfo::Instance()))
{
printf("Fatal error: Invalid Map.db2 file format! %s\n", HumanReadableCASCError(GetLastError()));
printf("Fatal error: Invalid Map.db2 file format! %s\n", CASC::HumanReadableCASCError(GetLastError()));
exit(1);
}
map_ids.resize(db2.GetNumRows());
map_ids.resize(db2.GetRecordCount());
std::unordered_map<uint32, uint32> idToIndex;
for (uint32 x = 0; x < db2.GetNumRows(); ++x)
for (uint32 x = 0; x < db2.GetRecordCount(); ++x)
{
if (MapMeta::Instance()->HasIndexFieldInData())
map_ids[x].id = db2.getRecord(x).getUInt(MapMeta::Instance()->GetIndexField(), 0);
else
map_ids[x].id = db2.getId(x);
DB2Record record = db2.GetRecord(x);
map_ids[x].id = record.GetId();
const char* map_name = db2.getRecord(x).getString(0, 0);
const char* map_name = record.GetString(0, 0);
size_t max_map_name_length = sizeof(map_ids[x].name);
if (strlen(map_name) >= max_map_name_length)
{
@@ -341,15 +399,14 @@ void ReadMapDBC()
idToIndex[map_ids[x].id] = x;
}
for (uint32 x = 0; x < db2.GetNumRowCopies(); ++x)
for (uint32 x = 0; x < db2.GetRecordCopyCount(); ++x)
{
uint32 from = db2.GetRowCopy(x).first;
uint32 to = db2.GetRowCopy(x).second;
auto itr = idToIndex.find(from);
DB2RecordCopy copy = db2.GetRecordCopy(x);
auto itr = idToIndex.find(copy.SourceRowId);
if (itr != idToIndex.end())
{
map_id id;
id.id = to;
id.id = copy.NewRowId;
strcpy(id.name, map_ids[itr->second].name);
map_ids.push_back(id);
}
@@ -361,14 +418,10 @@ void ReadMapDBC()
void ReadLiquidTypeTableDBC()
{
printf("Read LiquidType.db2 file...\n");
CASC::FileHandle dbcFile = CASC::OpenFile(CascStorage, "DBFilesClient\\LiquidType.db2", CASC_LOCALE_NONE, true);
if (!dbcFile)
{
exit(1);
}
DB2CascFileSource source(CascStorage, "DBFilesClient\\LiquidType.db2");
DB2FileLoader db2;
if (!db2.Load(dbcFile, LiquidTypeMeta::Instance()))
if (!db2.Load(&source, LiquidTypeLoadInfo::Instance()))
{
printf("Fatal error: Invalid LiquidType.db2 file format!\n");
exit(1);
@@ -376,19 +429,14 @@ void ReadLiquidTypeTableDBC()
LiqType.resize(db2.GetMaxId() + 1, 0xFFFF);
for (uint32 x = 0; x < db2.GetNumRows(); ++x)
for (uint32 x = 0; x < db2.GetRecordCount(); ++x)
{
uint32 liquidTypeId;
if (LiquidTypeMeta::Instance()->HasIndexFieldInData())
liquidTypeId = db2.getRecord(x).getUInt(LiquidTypeMeta::Instance()->GetIndexField(), 0);
else
liquidTypeId = db2.getId(x);
LiqType[liquidTypeId] = db2.getRecord(x).getUInt8(13, 0);
DB2Record record = db2.GetRecord(x);
LiqType[record.GetId()] = record.GetUInt8(13, 0);
}
for (uint32 x = 0; x < db2.GetNumRowCopies(); ++x)
LiqType[db2.GetRowCopy(x).second] = LiqType[db2.GetRowCopy(x).first];
for (uint32 x = 0; x < db2.GetRecordCopyCount(); ++x)
LiqType[db2.GetRecordCopy(x).NewRowId] = LiqType[db2.GetRecordCopy(x).SourceRowId];
printf("Done! (" SZFMTD " LiqTypes loaded)\n", LiqType.size());
}
@@ -397,24 +445,19 @@ bool ReadCinematicCameraDBC()
{
printf("Read CinematicCamera.db2 file...\n");
CASC::FileHandle dbcFile = CASC::OpenFile(CascStorage, "DBFilesClient\\CinematicCamera.db2", CASC_LOCALE_NONE, true);
if (!dbcFile)
{
printf("Unable to open CinematicCamera.db2. Camera extract aborted.\n");
return false;
}
DB2CascFileSource source(CascStorage, "DBFilesClient\\CinematicCamera.db2");
DB2FileLoader db2;
if (!db2.Load(dbcFile, CinematicCameraMeta::Instance()))
if (!db2.Load(&source, CinematicCameraLoadInfo::Instance()))
{
printf("Invalid CinematicCamera.db2 file format. Camera extract aborted. %s\n", HumanReadableCASCError(GetLastError()));
printf("Invalid CinematicCamera.db2 file format. Camera extract aborted. %s\n", CASC::HumanReadableCASCError(GetLastError()));
return false;
}
// get camera file list from DB2
for (size_t i = 0; i < db2.GetNumRows(); ++i)
for (size_t i = 0; i < db2.GetRecordCount(); ++i)
{
std::string camFile(db2.getRecord(i).getString(0, 0));
DB2Record record = db2.GetRecord(i);
std::string camFile(record.GetString(0, 0));
size_t loc = camFile.find(".mdx");
if (loc != std::string::npos)
camFile.replace(loc, 4, ".m2");
@@ -1089,23 +1132,6 @@ bool ConvertADT(std::string const& inputPath, std::string const& outputPath, int
return true;
}
void ExtractWmos(ChunkedFile& file, std::set<std::string>& wmoList)
{
if (FileChunk* chunk = file.GetChunk("MWMO"))
{
file_MWMO* wmo = chunk->As<file_MWMO>();
if (wmo->size)
{
char* fileName = wmo->FileList;
while (fileName < wmo->FileList + wmo->size)
{
wmoList.insert(fileName);
fileName += strlen(fileName) + 1;
}
}
}
}
void ExtractMaps(uint32 build)
{
std::string storagePath;
@@ -1119,8 +1145,6 @@ void ExtractMaps(uint32 build)
CreateDir(output_path / "maps");
std::set<std::string> wmoList;
printf("Convert map files\n");
for (std::size_t z = 0; z < map_ids.size(); ++z)
{
@@ -1131,8 +1155,6 @@ void ExtractMaps(uint32 build)
if (!wdt.loadFile(CascStorage, storagePath, false))
continue;
ExtractWmos(wdt, wmoList);
FileChunk* chunk = wdt.GetChunk("MAIN");
for (uint32 y = 0; y < WDT_MAP_SIZE; ++y)
{
@@ -1144,11 +1166,6 @@ void ExtractMaps(uint32 build)
storagePath = Trinity::StringFormat("World\\Maps\\%s\\%s_%u_%u.adt", map_ids[z].name, map_ids[z].name, x, y);
outputFileName = Trinity::StringFormat("%s/maps/%04u_%02u_%02u.map", output_path.string().c_str(), map_ids[z].id, y, x);
ConvertADT(storagePath, outputFileName, y, x, build);
storagePath = Trinity::StringFormat("World\\Maps\\%s\\%s_%u_%u_obj0.adt", map_ids[z].name, map_ids[z].name, x, y);
ChunkedFile adtObj;
if (adtObj.loadFile(CascStorage, storagePath, false))
ExtractWmos(adtObj, wmoList);
}
// draw progress bar
@@ -1156,17 +1173,6 @@ void ExtractMaps(uint32 build)
}
}
if (!wmoList.empty())
{
if (FILE* wmoListFile = fopen("wmo_list.txt", "w"))
{
for (std::string const& wmo : wmoList)
fprintf(wmoListFile, "%s\n", wmo.c_str());
fclose(wmoListFile);
}
}
printf("\n");
}
@@ -1226,7 +1232,7 @@ void ExtractDBFilesClient(int l)
++count;
}
else
printf("Unable to open file %s in the archive for locale %s: %s\n", fileName, localeNames[l], HumanReadableCASCError(GetLastError()));
printf("Unable to open file %s in the archive for locale %s: %s\n", fileName, localeNames[l], CASC::HumanReadableCASCError(GetLastError()));
fileName = DBFilesClientList[++index];
}
@@ -1260,7 +1266,7 @@ void ExtractCameraFiles()
++count;
}
else
printf("Unable to open file %s in the archive: %s\n", cameraFileName.c_str(), HumanReadableCASCError(GetLastError()));
printf("Unable to open file %s in the archive: %s\n", cameraFileName.c_str(), CASC::HumanReadableCASCError(GetLastError()));
}
printf("Extracted %u camera files\n", count);
@@ -1326,7 +1332,7 @@ void ExtractGameTables()
++count;
}
else
printf("Unable to open file %s in the archive: %s\n", fileName, HumanReadableCASCError(GetLastError()));
printf("Unable to open file %s in the archive: %s\n", fileName, CASC::HumanReadableCASCError(GetLastError()));
fileName = GameTables[++index];
}

View File

@@ -19,6 +19,7 @@
#define _CRT_SECURE_NO_DEPRECATE
#include "loadlib.h"
#include <CascLib.h>
u_map_fcc MverMagic = { { 'R','E','V','M' } };
@@ -40,9 +41,16 @@ bool ChunkedFile::loadFile(CASC::StorageHandle const& mpq, std::string const& fi
if (!file)
return false;
data_size = CASC::GetFileSize(file, nullptr);
DWORD fileSize = CASC::GetFileSize(file, nullptr);
if (fileSize == CASC_INVALID_SIZE)
return false;
data_size = fileSize;
data = new uint8[data_size];
CASC::ReadFile(file, data, data_size, nullptr/*bytesRead*/);
DWORD bytesRead = 0;
if (!CASC::ReadFile(file, data, data_size, &bytesRead) || bytesRead != data_size)
return false;
parseChunks();
if (prepareLoadedData())
return true;

View File

@@ -21,7 +21,6 @@
#include "Define.h"
#include "CascHandles.h"
#include "CascLib.h"
#include <map>
#include <string>