Buildsystem: Overhaul of the CMake buildsystem:

- use CMake to generate revision.h entirely (kills off the genrev and revision.h targets):
  - pull the correct revision ID/hash from Mercurial (hg) when building from a regular repository source tree
  - pull the correct revision ID/hash from archived releases (i.e. source archives downloaded from googlecode)
  - set the _BUILD_DIRECTIVE definition (previously generated into revision.h) as a compile-time definition
- delete genrev and the related buildsystem parts (now deprecated)
- move some files around to adhere to the buildsystem structure
Thanks to Shauren for figuring out the definition-behaviour for MSVC while he was in the shower

--HG--
branch : trunk
rename : cmake_uninstall.cmake.in => cmake/platform/unix/cmake_uninstall.in.cmake
click committed 2010-09-29 23:42:09 +02:00
parent af660f80ec
commit b87d8f4700
15 changed files with 83 additions and 614 deletions

View File

@@ -13,7 +13,7 @@ project(TrinityCore)
# CMake policies (can not be handled elsewhere)
cmake_minimum_required(VERSION 2.6)
cmake_policy(SET CMP0005 OLD)
cmake_policy(SET CMP0005 NEW)
# Set RPATH-handling (CMake parameters)
set(CMAKE_SKIP_BUILD_RPATH 0)
@@ -57,19 +57,51 @@ if( UNIX )
find_package(BZip2)
endif()
# Find current revision of downloaded sourcetree
execute_process(
COMMAND hg tip --template {rev}
WORKING_DIRECTORY "${CMAKE_SOURCE_DIR}"
OUTPUT_VARIABLE HG_REVISION
)
if(EXISTS ${CMAKE_SOURCE_DIR}/.hg_archival.txt)
set(hg_rev_id_str "Archive")
file(READ
${CMAKE_SOURCE_DIR}/.hg_archival.txt hg_rev_hash_str
LIMIT 10
OFFSET 7
NEWLINE_CONSUME
)
string(STRIP ${hg_rev_id_str} hg_rev_id_str)
string(STRIP ${hg_rev_hash_str} hg_rev_hash_str)
set(hg_rev_id "0")
set(hg_rev_hash ${hg_rev_hash_str})
else()
# Find revision ID and hash of the sourcetree
execute_process(
COMMAND hg id -n
WORKING_DIRECTORY "${CMAKE_SOURCE_DIR}"
OUTPUT_VARIABLE hg_rev_id_str
OUTPUT_STRIP_TRAILING_WHITESPACE
ERROR_QUIET
)
execute_process(
COMMAND hg id -i
WORKING_DIRECTORY "${CMAKE_SOURCE_DIR}"
OUTPUT_VARIABLE hg_rev_hash_str
OUTPUT_STRIP_TRAILING_WHITESPACE
ERROR_QUIET
)
# Strip off the trailing '+' that hg appends when the working copy has local modifications
string(REPLACE "+" "" hg_rev_id ${hg_rev_id_str})
string(REPLACE "+" "" hg_rev_hash ${hg_rev_hash_str})
endif()
# Create the actual revision.h file from the above params
configure_file(
"${CMAKE_SOURCE_DIR}/revision.h.in.cmake"
"${CMAKE_BINARY_DIR}/revision.h"
@ONLY
)
message(STATUS "Created revision.h")
# print out the results before continuing
include(cmake/showoptions.cmake)
# add and generate revision.h
add_subdirectory(src/genrevision)
# add dependencies
add_subdirectory(dep)
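Note on the archive branch above: the commit message implies the googlecode release archives are `hg archive` output, which drops an .hg_archival.txt file into the archive root, and the file(READ ... LIMIT 10 OFFSET 7) call pulls a short hash straight out of that file. A minimal sketch of just that branch, with the assumed file layout spelled out in comments (the field names match what the deleted genrevision.cpp parses further down; the hash values are placeholders):

# .hg_archival.txt as written by `hg archive` (placeholder hashes):
#   repo: 0123456789abcdef0123456789abcdef01234567
#   node: fedcba9876543210fedcba9876543210fedcba98
#   branch: trunk
#   latesttag: null
#   latesttagdistance: 42
if(EXISTS ${CMAKE_SOURCE_DIR}/.hg_archival.txt)
  # No repository metadata is available, so report "Archive" plus a short hash
  # read directly from the file instead of asking hg.
  set(hg_rev_id_str "Archive")
  set(hg_rev_id "0")
  file(READ ${CMAKE_SOURCE_DIR}/.hg_archival.txt hg_rev_hash_str LIMIT 10 OFFSET 7)
  string(STRIP ${hg_rev_hash_str} hg_rev_hash_str)
  set(hg_rev_hash ${hg_rev_hash_str})
endif()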

View File

@@ -1,3 +1,11 @@
# build in Release-mode by default if not explicitly set
if( NOT CMAKE_BUILD_TYPE )
set(CMAKE_BUILD_TYPE "Release")
endif()
# Set build-directive (used in core to tell which buildtype we used)
add_definitions(-D_BUILD_DIRECTIVE=${CMAKE_BUILD_TYPE})
add_definitions(-fno-delete-null-pointer-checks)
if( USE_SFMT)
@@ -6,18 +14,18 @@ if( USE_SFMT)
add_definitions(-msse2 -mfpmath=sse)
endif()
add_definitions(-DHAVE_SSE2 -D__SSE2__)
message(STATUS "- GCC: SFMT enabled, SSE2 flags forced")
message(STATUS "GCC: SFMT enabled, SSE2 flags forced")
endif()
if( WITH_WARNINGS )
add_definitions(-Wall -Wfatal-errors -Wextra)
message(STATUS "- GCC: All warnings enabled")
message(STATUS "GCC: All warnings enabled")
else()
add_definitions(--no-warnings)
message(STATUS "- GCC: All warnings disabled")
message(STATUS "GCC: All warnings disabled")
endif()
if( WITH_COREDEBUG )
add_definitions(-ggdb3)
message(STATUS "- GCC: Debug-flags set (-ggdb3)")
message(STATUS "GCC: Debug-flags set (-ggdb3)")
endif()

View File

@@ -10,46 +10,49 @@ if(PLATFORM EQUAL 64)
# here: http://tinyurl.com/2cb428. Syntax highlighting is important for proper
# debugger functionality.
add_definitions("-D_WIN64")
message(STATUS "- MSVC: 64-bit platform, enforced -D_WIN64 parameter")
message(STATUS "MSVC: 64-bit platform, enforced -D_WIN64 parameter")
#Enable extended object support for debug compiles on X64 (not required on X86)
set(CMAKE_CXX_FLAGS_DEBUG "${CMAKE_CXX_FLAGS_DEBUG} /bigobj")
message(STATUS "- MSVC: Enabled extended object-support for debug-compiles")
message(STATUS "MSVC: Enabled extended object-support for debug-compiles")
else()
# mark 32 bit executables large address aware so they can use > 2GB address space
set(CMAKE_EXE_LINKER_FLAGS "${CMAKE_EXE_LINKER_FLAGS} /LARGEADDRESSAWARE")
message(STATUS "- MSVC: Enabled large address awareness")
message(STATUS "MSVC: Enabled large address awareness")
# Test if we need SSE2-support
if(USE_SFMT)
add_definitions(/arch:SSE2)
message(STATUS "- MSVC: Enabled SSE2 support")
message(STATUS "MSVC: Enabled SSE2 support")
endif()
endif()
# Set build-directive (used in core to tell which buildtype we used)
add_definitions(-D_BUILD_DIRECTIVE="$(ConfigurationName)")
# multithreaded compiling on VS
if((NOT USE_COREPCH) AND (NOT USE_SCRIPTPCH))
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /MP")
message(STATUS "- MSVC: PCH not used - enabled multithreaded compiling")
message(STATUS "MSVC: PCH not used - enabled multithreaded compiling")
endif()
# Define _CRT_SECURE_CPP_OVERLOAD_STANDARD_NAMES - eliminates the warning by changing the strcpy call to strcpy_s, which prevents buffer overruns
add_definitions(-D_CRT_SECURE_CPP_OVERLOAD_STANDARD_NAMES)
message(STATUS "- MSVC: Overloaded standard names")
message(STATUS "MSVC: Overloaded standard names")
# Ignore warnings about older, less secure functions
add_definitions(-D_CRT_SECURE_NO_WARNINGS)
message(STATUS "- MSVC: Disabled NON-SECURE warnings")
message(STATUS "MSVC: Disabled NON-SECURE warnings")
#Ignore warnings about POSIX deprecation
add_definitions(-D_CRT_NONSTDC_NO_WARNINGS)
message(STATUS "- MSVC: Disabled POSIX warnings")
message(STATUS "MSVC: Disabled POSIX warnings")
# disable warnings in Visual Studio 8 and above if not wanted
if(NOT WITH_WARNINGS)
if(MSVC AND NOT CMAKE_GENERATOR MATCHES "Visual Studio 7")
set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} /wd4996 /wd4355 /wd4244 /wd4985 /wd4267 /wd4619")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} /wd4996 /wd4355 /wd4244 /wd4985 /wd4267 /wd4619")
message(STATUS "- MSVC: Disabled generic compiletime warnings")
message(STATUS "MSVC: Disabled generic compiletime warnings")
endif()
endif()
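A note on the _BUILD_DIRECTIVE line above (the MSVC definition behaviour the commit message credits Shauren for): $(ConfigurationName) is a Visual Studio project macro, not a CMake variable, so CMake passes it through to the generated project verbatim and Visual Studio expands it per configuration when the solution is built. A minimal sketch of the idea:

# Multi-config generators (Visual Studio) do not know the build type at configure
# time, so the VS macro $(ConfigurationName) is left in the compile flags and only
# expands to Debug/Release/... at build time.
if(MSVC)
  add_definitions(-D_BUILD_DIRECTIVE="$(ConfigurationName)")
else()
  # Single-config generators can bake CMAKE_BUILD_TYPE in at configure time,
  # as the GCC settings file in this commit does.
  add_definitions(-D_BUILD_DIRECTIVE=${CMAKE_BUILD_TYPE})
endif()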

View File

@@ -0,0 +1,3 @@
# Set build-directive (used in core to tell which buildtype we used)
add_definitions(-D_BUILD_DIRECTIVE="\"$(CONFIGURATION)\"")

View File

@@ -12,9 +12,9 @@ endif()
# configure uninstaller
configure_file(
"${CMAKE_CURRENT_SOURCE_DIR}/cmake_uninstall.cmake.in"
"${CMAKE_CURRENT_BINARY_DIR}/cmake_uninstall.cmake"
IMMEDIATE @ONLY
"${CMAKE_SOURCE_DIR}/cmake/platform/unix/cmake_uninstall.in.cmake"
"${CMAKE_BINARY_DIR}/cmake_uninstall.cmake"
@ONLY
)
message(STATUS "UNIX: Configuring uninstall target")

View File

@@ -1,7 +1,7 @@
# output generic information about the core and buildtype chosen
message("")
message("* TrinityCore revision : ${HG_REVISION}")
message("* TrinityCore revision : ${hg_rev_id_str} (${hg_rev_hash_str})")
if( UNIX )
message("* Build binaries in : ${CMAKE_BUILD_TYPE} mode")
endif()

revision.h.in.cmake (new file, 9 lines)
View File

@@ -0,0 +1,9 @@
#ifndef __REVISION_H__
#define __REVISION_H__
#define _REVISION "@hg_rev_id_str@"
#define _HASH "@hg_rev_hash_str@"
#define FILEVER 0,0,@hg_rev_id@,0
#define PRODUCTVER 0,0,@hg_rev_id@,0
#define STRFILEVER "0, 0, @hg_rev_id@, @hg_rev_hash@"
#define STRPRODUCTVER "0, 0, @hg_rev_id@, @hg_rev_hash@"
#endif // __REVISION_H__
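The template above is filled in by the configure_file(... @ONLY) call in the main CMakeLists.txt; with @ONLY only the @var@ placeholders are substituted and any ${...} occurrences are left untouched. A small standalone sketch (hypothetical script name, made-up placeholder values) that can be run with cmake -P to see the substitution:

# generate_revision.cmake -- hypothetical standalone script; run from the source
# root as `cmake -P generate_revision.cmake`. All values below are placeholders.
set(hg_rev_id_str   "12345+")
set(hg_rev_hash_str "0a1b2c3d4e5f+")
set(hg_rev_id       "12345")
set(hg_rev_hash     "0a1b2c3d4e5f")
configure_file(
  "${CMAKE_CURRENT_LIST_DIR}/revision.h.in.cmake"
  "${CMAKE_CURRENT_LIST_DIR}/revision.h"
  @ONLY
)
# The resulting revision.h then contains, e.g.:
#   #define _REVISION "12345+"
#   #define FILEVER 0,0,12345,0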

View File

@@ -1,37 +0,0 @@
# Copyright (C) 2008-2010 Trinity <http://www.trinitycore.org/>
#
# This file is free software; as a special exception the author gives
# unlimited permission to copy and/or distribute it, with or without
# modifications, as long as this notice is preserved.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY, to the extent permitted by law; without even the
# implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
set(GENREV_SRC
genrevision.cpp
)
add_executable(genrev
${GENREV_SRC}
)
if( CMAKE_GENERATOR MATCHES "Visual Studio" )
add_custom_target(revision.h ALL
COMMAND ${CMAKE_BINARY_DIR}/bin/$(ConfigurationName)/genrev -m $(ConfigurationName) ${CMAKE_SOURCE_DIR}
WORKING_DIRECTORY ${CMAKE_BINARY_DIR}
DEPENDS genrev
)
elseif( CMAKE_GENERATOR MATCHES "Xcode" )
add_custom_target(revision.h ALL
COMMAND ${CMAKE_BINARY_DIR}/src/genrevision/$(CONFIGURATION)/genrev -m ${CMAKE_BUILD_TYPE} ${CMAKE_SOURCE_DIR}
WORKING_DIRECTORY ${CMAKE_BINARY_DIR}
DEPENDS genrev
)
else()
add_custom_target(revision.h ALL
COMMAND ${CMAKE_BINARY_DIR}/src/genrevision/genrev -m ${CMAKE_BUILD_TYPE} ${CMAKE_SOURCE_DIR}
WORKING_DIRECTORY ${CMAKE_BINARY_DIR}
DEPENDS genrev
)
endif()

View File

@@ -1,539 +0,0 @@
/*
* Copyright (C) 2005-2009 MaNGOS <http://getmangos.com/>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*/
#include <fstream>
#include <sstream>
#include <time.h>
#include <stdio.h>
#include <string.h>
std::string build_directive = "Unknown";
struct RawData
{
char hash_str[200];
char rev_str[200];
char date_str[200];
char time_str[200];
};
void extractDataFromSvn(FILE* EntriesFile, std::string /*path*/, bool url, RawData& data)
{
char aux[800];
char buf[200];
char repo_str[200];
char num_str[200];
if (fgets(aux, 600, EntriesFile) && fgets(buf, 200, EntriesFile))
{
sscanf(buf, "%s", num_str);
if (fgets(buf, 200, EntriesFile))
{
sscanf(buf, "%s", repo_str);
if (fgets(aux, 800, EntriesFile) && fgets(buf, 200, EntriesFile))
{
sscanf(buf, "%10sT%8s", data.date_str, data.time_str);
if (url)
sprintf(data.rev_str,"%s at %s",num_str,repo_str);
else
strcpy(data.rev_str,num_str);
}
}
}
}
void extractDataFromHG(FILE* EntriesFile, std::string /*path*/, bool /*url*/, RawData& data)
{
char buf[200];
char hash_str[200];
char revision_str[200];
bool found = false;
while (fgets(buf,200,EntriesFile))
{
if (sscanf(buf,"%s %s",hash_str,revision_str)==2)
{
found = true;
break;
}
}
if (!found)
{
strcpy(data.hash_str,"*");
strcpy(data.rev_str,"*");
strcpy(data.date_str,"*");
strcpy(data.time_str,"*");
return;
}
char thash_str[200];
for (int i = 11; i >= 0; --i)
{
thash_str[i] = hash_str[i];
}
thash_str[12] = '\0';
strcpy(data.hash_str,thash_str);
strcpy(data.rev_str,revision_str);
strcpy(data.date_str,"*");
strcpy(data.time_str,"*");
}
void extractDataFromArchive(FILE* EntriesFile, std::string /*path*/, bool /*url*/, RawData& data)
{
char buf[200];
char hash_str[200];
//char revision_str[200];
char repo_str[200];
char branch_str[200];
char latesttag_str[200];
char latesttagdistance_str[200];
bool error = true;
if (fgets(buf,200,EntriesFile))
{
sscanf(buf,"repo: %s",repo_str);
if (fgets(buf,200,EntriesFile))
{
sscanf(buf,"node: %s",hash_str);
if (fgets(buf,200,EntriesFile))
{
sscanf(buf,"branch: %s",branch_str);
if (fgets(buf,200,EntriesFile))
{
sscanf(buf,"latesttag: %[^\n]",latesttag_str);
if (fgets(buf,200,EntriesFile))
{
sscanf(buf,"latesttagdistance: %s",latesttagdistance_str);
error = false;
}
}
}
}
}
if (!error)
{
char thash_str[200];
for (int i = 11; i >= 0; --i)
thash_str[i] = hash_str[i];
thash_str[12] = '\0';
strcpy(data.hash_str,thash_str);
strcpy(data.rev_str,"Archive");
}
else
{
strcpy(data.hash_str,"*");
strcpy(data.rev_str,"*");
}
strcpy(data.date_str,"*");
strcpy(data.time_str,"*");
}
void extractDataFromGit(FILE* EntriesFile, std::string path, bool url, RawData& data)
{
char buf[200];
char hash_str[200];
char branch_str[200];
char url_str[200];
bool found = false;
while (fgets(buf,200,EntriesFile))
{
if (sscanf(buf,"%s\t\tbranch %s of %s",hash_str,branch_str,url_str)==3)
{
found = true;
break;
}
}
if (!found)
{
strcpy(data.hash_str,"*");
strcpy(data.rev_str,"*");
strcpy(data.date_str,"*");
strcpy(data.time_str,"*");
return;
}
if (url)
{
char* host_str = NULL;
char* acc_str = NULL;
char* repo_str = NULL;
// parse URL like git@github.com:mangos/mangos
char url_buf[200];
int res = sscanf(url_str,"git@%s",url_buf);
if (res)
{
host_str = strtok(url_buf,":");
acc_str = strtok(NULL,"/");
repo_str = strtok(NULL," ");
}
else
{
res = sscanf(url_str,"git://%s",url_buf);
if (res)
{
host_str = strtok(url_buf,"/");
acc_str = strtok(NULL,"/");
repo_str = strtok(NULL,".");
}
}
// can generate nice link
if (res)
sprintf(data.rev_str,"http://%s/%s/%s/commit/%s",host_str,acc_str,repo_str,hash_str);
// unknown URL format, use as-is
else
sprintf(data.rev_str,"%s at %s",hash_str,url_str);
}
else
strcpy(data.rev_str,hash_str);
strcpy(data.hash_str,"*");
time_t rev_time = 0;
// extracting date/time
FILE* LogFile = fopen((path+".git/logs/HEAD").c_str(), "r");
if (LogFile)
{
while (fgets(buf,200,LogFile))
{
char buf2[200];
char new_hash[200];
int unix_time = 0;
int res2 = sscanf(buf,"%s %s %s %s %i",buf2,new_hash,buf2,buf2,&unix_time);
if (res2!=5)
continue;
if (strcmp(hash_str,new_hash))
continue;
rev_time = unix_time;
break;
}
fclose(LogFile);
if (rev_time)
{
tm* aTm = localtime(&rev_time);
// YYYY year
// MM month (2 digits 01-12)
// DD day (2 digits 01-31)
// HH hour (2 digits 00-23)
// MM minutes (2 digits 00-59)
// SS seconds (2 digits 00-59)
sprintf(data.date_str,"%04d-%02d-%02d",aTm->tm_year+1900,aTm->tm_mon+1,aTm->tm_mday);
sprintf(data.time_str,"%02d:%02d:%02d",aTm->tm_hour,aTm->tm_min,aTm->tm_sec);
}
else
{
strcpy(data.date_str,"*");
strcpy(data.time_str,"*");
}
}
else
{
strcpy(data.date_str,"*");
strcpy(data.time_str,"*");
}
}
bool extractDataFromGit(std::string filename, std::string path, bool url, RawData& data)
{
FILE* EntriesFile = fopen(filename.c_str(), "r");
if (!EntriesFile)
return false;
extractDataFromGit(EntriesFile,path,url,data);
fclose(EntriesFile);
return true;
}
bool extractDataFromHG(std::string filename, std::string path, bool url, RawData& data)
{
FILE* EntriesFile = fopen(filename.c_str(), "r");
if (!EntriesFile)
return false;
extractDataFromHG(EntriesFile,path,url,data);
fclose(EntriesFile);
return true;
}
bool extractDataFromArchive(std::string filename, std::string path, bool url, RawData& data)
{
FILE* EntriesFile = fopen(filename.c_str(), "r");
if (!EntriesFile)
return false;
extractDataFromArchive(EntriesFile,path,url,data);
fclose(EntriesFile);
return true;
}
bool extractDataFromSvn(std::string filename, std::string path, bool url, RawData& data)
{
FILE* EntriesFile = fopen(filename.c_str(), "r");
if (!EntriesFile)
return false;
extractDataFromSvn(EntriesFile,path,url,data);
fclose(EntriesFile);
return true;
}
std::string generateHeader(char const* rev_str, char const* date_str, char const* time_str, char const* hash_str)
{
std::ostringstream newData;
newData << "#ifndef __REVISION_H__" << std::endl;
newData << "#define __REVISION_H__" << std::endl;
newData << " #define _BUILD_DIRECTIVE \"" << build_directive << "\"" << std::endl;
newData << " #define _REVISION \"" << rev_str << "\"" << std::endl;
newData << " #define _HASH \"" << hash_str << "\"" << std::endl;
newData << " #define _REVISION_DATE \"" << date_str << "\"" << std::endl;
newData << " #define _REVISION_TIME \"" << time_str << "\""<< std::endl;
if (!strcmp(rev_str,"Archive") || !strcmp(rev_str,"*"))
{
newData << " #define FILEVER 0,0,0,0"<< std::endl;
newData << " #define PRODUCTVER 0,0,0,0"<< std::endl;
}
else
{
newData << " #define FILEVER 0,0," << rev_str << ",0"<< std::endl;
newData << " #define PRODUCTVER 0,0," << rev_str << ",0"<< std::endl;
}
newData << " #define STRFILEVER \"0, 0, " << rev_str << ", " << hash_str << "\""<< std::endl;
newData << " #define STRPRODUCTVER \"0, 0, " << rev_str << ", " << hash_str << "\""<< std::endl;
newData << "#endif // __REVISION_H__" << std::endl;
return newData.str();
}
int main(int argc, char **argv)
{
bool use_url = false;
bool file_prefered = true;
bool hg_prefered = false;
bool git_prefered = false;
bool svn_prefered = false;
std::string path;
// Call: tool {options} [path]
// -f use cmake generated file (default)
// -h use hg prefered
// -g use git prefered
// -s use svn prefered
// -r use only revision (without repo URL) (default)
// -u include repository URL as commit URL or "rev at URL"
// -m build mode string
for (int k = 1; k <= argc; ++k)
{
if (!argv[k] || !*argv[k])
break;
if (argv[k][0]!='-')
{
path = argv[k];
if (path.size() > 0 && (path[path.size()-1]!='/' || path[path.size()-1]!='\\'))
path += '/';
break;
}
switch(argv[k][1])
{
case 'f':
file_prefered = true;
hg_prefered = false;
git_prefered = false;
svn_prefered = false;
continue;
case 'h':
file_prefered = false;
hg_prefered = true;
git_prefered = false;
svn_prefered = false;
continue;
case 'g':
file_prefered = false;
hg_prefered = false;
git_prefered = true;
svn_prefered = false;
continue;
case 'r':
use_url = false;
continue;
case 's':
file_prefered = false;
hg_prefered = false;
git_prefered = false;
svn_prefered = true;
continue;
case 'u':
use_url = true;
continue;
case 'm':
build_directive = argv[++k];
continue;
default:
printf("Unknown option %s",argv[k]);
return 1;
}
}
/// new data extraction
std::string newData;
{
RawData data;
bool res = false;
if (svn_prefered)
{
/// SVN data
res = extractDataFromSvn(path+".svn/entries",path,use_url,data);
if (!res)
res = extractDataFromSvn(path+"_svn/entries",path,use_url,data);
// HG data
if (!res)
res = extractDataFromHG(path+".hg/branchheads.cache",path,use_url,data);
if (!res)
res = extractDataFromHG(path+"_hg/branchheads.cache",path,use_url,data);
if (!res)
res = extractDataFromHG(path+".hg/branch.cache",path,use_url,data);
if (!res)
res = extractDataFromHG(path+"_hg/branch.cache",path,use_url,data);
// GIT data
if (!res)
res = extractDataFromGit(path+".git/FETCH_HEAD",path,use_url,data);
if (!res)
res = extractDataFromGit(path+"_git/FETCH_HEAD",path,use_url,data);
}
else if (git_prefered)
{
// GIT data
res = extractDataFromGit(path+".git/FETCH_HEAD",path,use_url,data);
if (!res)
res = extractDataFromGit(path+"_git/FETCH_HEAD",path,use_url,data);
// HG data
if (!res)
res = extractDataFromHG(path+".hg/branchheads.cache",path,use_url,data);
if (!res)
res = extractDataFromHG(path+"_hg/branchheads.cache",path,use_url,data);
if (!res)
res = extractDataFromHG(path+".hg/branch.cache",path,use_url,data);
if (!res)
res = extractDataFromHG(path+"_hg/branch.cache",path,use_url,data);
/// SVN data
if (!res)
res = extractDataFromSvn(path+".svn/entries",path,use_url,data);
if (!res)
res = extractDataFromSvn(path+"_svn/entries",path,use_url,data);
}
else if (hg_prefered)
{
// HG data
res = extractDataFromHG(path+".hg/branchheads.cache",path,use_url,data);
if (!res)
res = extractDataFromHG(path+"_hg/branchheads.cache",path,use_url,data);
if (!res)
res = extractDataFromHG(path+".hg/branch.cache",path,use_url,data);
if (!res)
res = extractDataFromHG(path+"_hg/branch.cache",path,use_url,data);
/// SVN data
if (!res)
res = extractDataFromSvn(path+".svn/entries",path,use_url,data);
if (!res)
res = extractDataFromSvn(path+"_svn/entries",path,use_url,data);
// GIT data
if (!res)
res = extractDataFromGit(path+".git/FETCH_HEAD",path,use_url,data);
if (!res)
res = extractDataFromGit(path+"_git/FETCH_HEAD",path,use_url,data);
}
else if (file_prefered)
{
res = extractDataFromHG("hg_revision","/",use_url,data);
// HG data
if (!res)
res = extractDataFromHG(path+".hg/branchheads.cache",path,use_url,data);
if (!res)
res = extractDataFromHG(path+"_hg/branchheads.cache",path,use_url,data);
if (!res)
res = extractDataFromHG(path+".hg/branch.cache",path,use_url,data);
if (!res)
res = extractDataFromHG(path+"_hg/branch.cache",path,use_url,data);
/// SVN data
if (!res)
res = extractDataFromSvn(path+".svn/entries",path,use_url,data);
if (!res)
res = extractDataFromSvn(path+"_svn/entries",path,use_url,data);
// GIT data
if (!res)
res = extractDataFromGit(path+".git/FETCH_HEAD",path,use_url,data);
if (!res)
res = extractDataFromGit(path+"_git/FETCH_HEAD",path,use_url,data);
}
if (!res)
res = extractDataFromArchive(path+".hg_archival.txt",path,use_url,data);
if (!res)
res = extractDataFromArchive(path+"_hg_archival.txt",path,use_url,data);
if (res)
newData = generateHeader(data.rev_str,data.date_str,data.time_str,data.hash_str);
else
newData = generateHeader("*", "*", "*", "*");
}
/// get existed header data for compare
std::string oldData;
if (FILE* HeaderFile = fopen("revision.h","rb"))
{
while (!feof(HeaderFile))
{
int c = fgetc(HeaderFile);
if (c < 0)
break;
oldData += (char)c;
}
fclose(HeaderFile);
}
/// update header only if different data
if (newData != oldData)
{
if (FILE* OutputFile = fopen("revision.h","wb"))
{
fprintf(OutputFile,"%s",newData.c_str());
fclose(OutputFile);
}
}
return 0;
}

View File

@@ -68,10 +68,8 @@ add_executable(authserver
${authserver_SRCS}
)
add_dependencies(authserver revision.h)
if( NOT WIN32 )
add_definitions(-D_TRINITY_REALM_CONFIG='"${CONF_DIR}/authserver.conf"')
add_definitions(-D_TRINITY_REALM_CONFIG="${CONF_DIR}/authserver.conf")
endif()
if( UNIX )

View File

@@ -198,8 +198,6 @@ include_directories(
add_library(game STATIC ${game_STAT_SRCS})
add_dependencies(game revision.h)
# Generate precompiled header
if( USE_COREPCH )
if(CMAKE_COMPILER_IS_GNUCXX)

View File

@@ -141,8 +141,6 @@ include_directories(
add_library(scripts STATIC ${scripts_STAT_SRCS})
add_dependencies(scripts genrev)
# Generate precompiled header
if( USE_SCRIPTPCH )
if(CMAKE_COMPILER_IS_GNUCXX)

View File

@@ -76,8 +76,6 @@ include_directories(
add_library(shared STATIC ${shared_STAT_SRCS})
add_dependencies(shared revision.h)
target_link_libraries(shared
${ACE_LIBRARY}
)

View File

@@ -146,11 +146,9 @@ set(worldserver_LINK_FLAGS "")
add_executable(worldserver ${worldserver_SRCS})
if( NOT WIN32 )
add_definitions(-D_TRINITY_CORE_CONFIG='"${CONF_DIR}/worldserver.conf"')
add_definitions(-D_TRINITY_CORE_CONFIG="${CONF_DIR}/worldserver.conf")
endif()
add_dependencies(worldserver revision.h)
if( UNIX )
set(worldserver_LINK_FLAGS "-pthread -lncurses ${worldserver_LINK_FLAGS}")
endif()