latest sources commit
This commit is contained in:
163
apps/EnumUtils/enumutils_describe.py
Normal file
163
apps/EnumUtils/enumutils_describe.py
Normal file
@@ -0,0 +1,163 @@
|
||||
from re import compile, MULTILINE
|
||||
from os import walk, getcwd
|
||||
|
||||
notice = ('''/*
|
||||
* This file is part of the AzerothCore Project. See AUTHORS file for Copyright information
|
||||
*
|
||||
* This program is free software; you can redistribute it and/or modify it
|
||||
* under the terms of the GNU Affero General Public License as published by the
|
||||
* Free Software Foundation; either version 3 of the License, or (at your
|
||||
* option) any later version.
|
||||
*
|
||||
* This program is distributed in the hope that it will be useful, but WITHOUT
|
||||
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
|
||||
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for
|
||||
* more details.
|
||||
*
|
||||
* You should have received a copy of the GNU General Public License along
|
||||
* with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
*/
|
||||
|
||||
''')
|
||||
|
||||
# The directory walk at the bottom of this script is relative to the CWD,
# so it must be launched from the repository's src directory.
if not getcwd().endswith('src'):
    print('Run this from the src directory!')
    print('(Invoke as \'python ../apps/EnumUtils/enumutils_describe.py\')')
    exit(1)
|
||||
|
||||
# Matches an enum annotated with '// EnumUtils: DESCRIBE THIS', optionally
# followed by '(in <Scope>)' to qualify the values; captures (1) the scope
# prefix, (2) the enum name and (3) the raw body between the braces.
EnumPattern = compile(r'//\s*EnumUtils: DESCRIBE THIS(?:\s*\(in ([^\)]+)\))?\s+enum\s+([0-9A-Za-z]+)[^\n]*\s*{([^}]+)};')
# Splits an enum body into individual enumerator entries (comma- or
# end-terminated runs of non-blank text).
EnumValuesPattern = compile(r'\s+\S.+?(,|$)[^\n]*')
# Captures the identifier at the start of an enumerator entry.
EnumValueNamePattern = compile(r'^\s*([a-zA-Z0-9_]+)', flags=MULTILINE)
# Entries that are pure '//' comment lines are silently skipped.
EnumValueSkipLinePattern = compile(r'^\s*//')
# Captures the text of a trailing '// ...' comment on an enumerator line.
EnumValueCommentPattern = compile(r'//,?[ \t]*([^\n]+)$')
# Parses 'TITLE <text>' and/or 'DESCRIPTION <text>' out of an enumerator
# comment; groups 4 and 6 hold the respective payloads.
CommentMatchFormat = compile(r'^(((TITLE +(.+?))|(DESCRIPTION +(.+?))) *){1,2}$')
# An enumerator comment of exactly 'SKIP' excludes the value from the output.
CommentSkipFormat = compile(r'^SKIP *$')
|
||||
|
||||
def strescape(text):
    """Return *text* as a double-quoted C string literal.

    Backslashes, double quotes and any character outside printable ASCII
    (32..126) are emitted as octal escapes (\\NNN) so the output is always a
    valid, unambiguous C literal.

    Fixes: the parameter shadowed the builtin ``str``, and the character-by-
    character ``+=`` build was quadratic; pieces are now joined once.
    """
    escaped = []
    for char in text:
        if char in ('\\', '"') or not (32 <= ord(char) < 127):
            escaped.append('\\%03o' % ord(char))
        else:
            escaped.append(char)
    return '"' + ''.join(escaped) + '"'
|
||||
|
||||
def processFile(path, filename):
    """Generate 'enuminfo_<filename>.cpp' next to '<path>/<filename>.h'.

    Scans the header for enums tagged '// EnumUtils: DESCRIBE THIS' and emits
    EnumUtils<T> template specializations (ToString / Count / FromIndex /
    ToIndex) for each of them. Headers without tagged enums produce no output.
    """
    # open() raises on failure instead of returning None, so the previous
    # 'is None' checks were dead code; 'with' guarantees the handle closes.
    try:
        with open('%s/%s.h' % (path, filename), 'r') as header:
            contents = header.read()
    except OSError:
        print('Failed to open %s.h' % filename)
        return

    enums = []
    for enum in EnumPattern.finditer(contents):
        prefix = enum.group(1) or ''  # optional '(in Scope)' qualifier
        name = enum.group(2)
        values = []
        for value in EnumValuesPattern.finditer(enum.group(3)):
            valueData = value.group(0)

            valueNameMatch = EnumValueNamePattern.search(valueData)
            if valueNameMatch is None:
                # Pure '//' comment lines are expected; anything else means
                # the enum body did not parse the way we assumed.
                if EnumValueSkipLinePattern.search(valueData) is None:
                    print('Name of value not found: %s' % repr(valueData))
                continue
            valueName = valueNameMatch.group(1)

            # A trailing '// ...' comment may carry TITLE/DESCRIPTION/SKIP.
            valueCommentMatch = EnumValueCommentPattern.search(valueData)
            valueComment = valueCommentMatch.group(1) if valueCommentMatch else None

            valueTitle = None
            valueDescription = None

            if valueComment is not None:
                if CommentSkipFormat.match(valueComment) is not None:
                    continue  # value explicitly excluded via '// SKIP'
                commentMatch = CommentMatchFormat.match(valueComment)
                if commentMatch is not None:
                    valueTitle = commentMatch.group(4)
                    valueDescription = commentMatch.group(6)
                else:
                    # Free-form comment: treat the whole text as description.
                    valueDescription = valueComment

            # Fall back to the identifier / empty string when not annotated.
            if valueTitle is None:
                valueTitle = valueName
            if valueDescription is None:
                valueDescription = ''

            values.append((valueName, valueTitle, valueDescription))

        enums.append((prefix + name, prefix, values))
        print('%s.h: Enum %s parsed with %d values' % (filename, name, len(values)))

    if not enums:
        return

    print('Done parsing %s.h (in %s)\n' % (filename, path))
    try:
        output = open('%s/enuminfo_%s.cpp' % (path, filename), 'w')
    except OSError:
        print('Failed to create enuminfo_%s.cpp' % filename)
        return

    # write output file
    with output:
        output.write(notice)
        output.write('#include "%s.h"\n' % filename)
        output.write('#include "Define.h"\n')
        output.write('#include "SmartEnum.h"\n')
        output.write('#include <stdexcept>\n')
        output.write('\n')
        output.write('namespace Acore::Impl::EnumUtilsImpl\n')
        output.write('{\n')
        for name, prefix, values in enums:
            # Banner comment marking the generated section for this enum.
            tag = ('data for enum \'%s\' in \'%s.h\' auto-generated' % (name, filename))
            output.write('\n')
            output.write('/*' + ('*'*(len(tag)+2)) + '*\\\n')
            output.write('|* ' + tag + ' *|\n')
            output.write('\\*' + ('*'*(len(tag)+2)) + '*/\n')
            output.write('template <>\n')
            output.write('AC_API_EXPORT EnumText EnumUtils<%s>::ToString(%s value)\n' % (name, name))
            output.write('{\n')
            output.write('    switch (value)\n')
            output.write('    {\n')
            for label, title, description in values:
                output.write('        case %s: return { %s, %s, %s };\n' % (prefix + label, strescape(label), strescape(title), strescape(description)))
            output.write('        default: throw std::out_of_range("value");\n')
            output.write('    }\n')
            output.write('}\n')
            output.write('\n')
            output.write('template <>\n')
            output.write('AC_API_EXPORT size_t EnumUtils<%s>::Count() { return %d; }\n' % (name, len(values)))
            output.write('\n')
            output.write('template <>\n')
            output.write('AC_API_EXPORT %s EnumUtils<%s>::FromIndex(size_t index)\n' % (name, name))
            output.write('{\n')
            output.write('    switch (index)\n')
            output.write('    {\n')
            for (i, (label, title, description)) in enumerate(values):
                output.write('        case %d: return %s;\n' % (i, prefix + label))
            output.write('        default: throw std::out_of_range("index");\n')
            output.write('    }\n')
            output.write('}\n')
            output.write('\n')
            output.write('template <>\n')
            output.write('AC_API_EXPORT size_t EnumUtils<%s>::ToIndex(%s value)\n' % (name, name))
            output.write('{\n')
            output.write('    switch (value)\n')
            output.write('    {\n')
            for (i, (label, title, description)) in enumerate(values):
                output.write('        case %s: return %d;\n' % (prefix + label, i))
            output.write('        default: throw std::out_of_range("value");\n')
            output.write('    }\n')
            output.write('}\n')

        output.write('}\n')
|
||||
|
||||
# Matches header filenames and captures the basename without the .h extension.
FilenamePattern = compile(r'^(.+)\.h$')

# Recursively scan the current directory (expected: src/, see the guard at
# the top of the file) and process every header found.
for root, dirs, files in walk('.'):
    for n in files:
        nameMatch = FilenamePattern.match(n)
        if nameMatch is not None:
            processFile(root, nameMatch.group(1))
|
||||
238
apps/Fmt/FormatReplace.py
Normal file
238
apps/Fmt/FormatReplace.py
Normal file
@@ -0,0 +1,238 @@
|
||||
import pathlib
|
||||
from os import getcwd
|
||||
|
||||
# The path glob at the bottom of this script is relative to the CWD, so it
# must be launched from the repository's src or modules directory.
if not getcwd().endswith('src') and not getcwd().endswith('modules'):
    print('Run this from the src or modules directory!')
    print('(Invoke as \'python ../apps/Fmt/FormatReplace.py\')')
    exit(1)
|
||||
|
||||
def isASSERT(line):
    """Return True if the line contains an ASSERT macro invocation."""
    # 'in' already yields a bool; no need for an if/else returning literals.
    return 'ASSERT' in line
|
||||
|
||||
def isABORTMSG(line):
    """Return True if the line contains an ABORT_MSG macro invocation."""
    return 'ABORT_MSG' in line
|
||||
|
||||
def islog(line):
    """Return True if the line contains a LOG_* macro invocation."""
    return 'LOG_' in line
|
||||
|
||||
# def isSendSysMessage(line):
|
||||
# substring = 'SendSysMessage'
|
||||
# if substring in line:
|
||||
# return True
|
||||
# else :
|
||||
# return False
|
||||
|
||||
# def isPSendSysMessage(line):
|
||||
# substring = 'PSendSysMessage'
|
||||
# if substring in line:
|
||||
# return True
|
||||
# else :
|
||||
# return False
|
||||
|
||||
def isPQuery(line):
    """Return True if the line contains a PQuery (printf-style query) call."""
    return 'PQuery' in line
|
||||
|
||||
def isPExecute(line):
    """Return True if the line contains a PExecute (printf-style) call."""
    return 'PExecute' in line
|
||||
|
||||
def isPAppend(line):
    """Return True if the line contains a PAppend (printf-style) call."""
    return 'PAppend' in line
|
||||
|
||||
# def isStringFormat(line):
|
||||
# substring = 'StringFormat'
|
||||
# if substring in line:
|
||||
# return True
|
||||
# else :
|
||||
# return False
|
||||
|
||||
def haveDelimeter(line):
    """Return True if the line contains a ';' statement terminator.

    Used to decide whether a statement is complete on this line or continues
    onto following lines.
    """
    return ';' in line
|
||||
|
||||
def checkSoloLine(line):
    """Convert a single, fully-terminated statement line to fmt style.

    Returns a (line, in_multiline) pair; the flag is always False because the
    statement ends on this line. Lines that do not match any formatting
    macro are passed through untouched.
    """
    if isABORTMSG(line):
        return handleCleanup(line.replace("ABORT_MSG", "ABORT")), False
    if isASSERT(line) or islog(line):
        return handleCleanup(line), False
    if isPExecute(line):
        return handleCleanup(line.replace("PExecute", "Execute")), False
    if isPQuery(line):
        return handleCleanup(line.replace("PQuery", "Query")), False
    if isPAppend(line):
        return handleCleanup(line.replace("PAppend", "Append")), False
    return line, False
|
||||
|
||||
def startMultiLine(line):
    """Handle the first line of a statement that carries no ';' yet.

    Returns (line, True) when the line opens a formatting-relevant statement
    that continues on following lines, otherwise (line, False).
    """
    if isABORTMSG(line):
        return handleCleanup(line.replace("ABORT_MSG", "ABORT")), True
    if isASSERT(line) or islog(line):
        return handleCleanup(line), True
    if isPQuery(line):
        return handleCleanup(line.replace("PQuery", "Query")), True
    if isPExecute(line):
        return handleCleanup(line.replace("PExecute", "Execute")), True
    if isPAppend(line):
        return handleCleanup(line.replace("PAppend", "Append")), True
    return line, False
|
||||
|
||||
def continueMultiLine(line, existPrevLine):
    """Process a continuation line of an already-open multi-line statement.

    The multi-line state stays open until a line carrying ';' is seen; every
    continuation line is run through handleCleanup.
    """
    still_open = existPrevLine and not haveDelimeter(line)
    return handleCleanup(line), still_open
|
||||
|
||||
def checkTextLine(line, existPrevLine):
    """Dispatch a raw source line to the solo / multi-line handlers.

    existPrevLine tracks whether the previous line opened a statement that
    has not yet been terminated by ';'.
    """
    if existPrevLine:
        return continueMultiLine(line, existPrevLine)
    if haveDelimeter(line):
        return checkSoloLine(line)
    return startMultiLine(line)
|
||||
|
||||
def handleCleanup(line):
    """Translate printf-style format specifiers in *line* to fmt style.

    Also strips now-redundant '.c_str()' calls and the SZFMTD / UI64FMTD
    helper macros.

    Bug fix: precision specifiers used to be replaced with "{0:.2f}"-style
    text. fmt forbids mixing automatic ("{}") and manual ("{0}") argument
    indexing in one format string, so a line with e.g. "%s %.2f" produced an
    invalid format string. They now map to "{:.2f}".

    The replacement order is significant and mirrors the original chain
    (e.g. "%%" must be handled after the single-% specifiers).
    """
    replacements = (
        ('%s', '{}'),
        ('%u', '{}'),
        ('%hu', '{}'),
        ('%lu', '{}'),
        ('%llu', '{}'),
        ('%zu', '{}'),
        ('%02u', '{:02}'),
        ('%03u', '{:03}'),
        ('%04u', '{:04}'),
        ('%05u', '{:05}'),
        ('%02i', '{:02}'),
        ('%03i', '{:03}'),
        ('%04i', '{:04}'),
        ('%05i', '{:05}'),
        ('%02d', '{:02}'),
        ('%03d', '{:03}'),
        ('%04d', '{:04}'),
        ('%05d', '{:05}'),
        ('%d', '{}'),
        ('%i', '{}'),
        ('%x', '{:x}'),
        ('%X', '{:X}'),
        ('%lx', '{:x}'),
        ('%lX', '{:X}'),
        ('%02X', '{:02X}'),
        ('%08X', '{:08X}'),
        ('%f', '{}'),
        ('%.1f', '{:.1f}'),
        ('%.2f', '{:.2f}'),
        ('%.3f', '{:.3f}'),
        ('%.4f', '{:.4f}'),
        ('%.5f', '{:.5f}'),
        ('%3.1f', '{:3.1f}'),
        ('%%', '%'),
        ('.c_str()', ''),
        ('" SZFMTD "', '{}'),
        ('" UI64FMTD "', '{}'),
    )
    for old, new in replacements:
        line = line.replace(old, new)
    return line
|
||||
|
||||
def getDefaultfile(name):
    """Return the full text of file *name*, unchanged.

    Bug fix: the original referenced ``file1.close`` without calling it, so
    the handle was never closed; a context manager now guarantees cleanup.
    Read-only mode replaces the unnecessary "r+".
    """
    with open(name, 'r', encoding='utf8', errors='replace') as source:
        return source.read()
|
||||
|
||||
def getModifiedfile(name):
    """Return the text of file *name* with every line run through
    checkTextLine, tracking the multi-line statement state across lines.

    Bug fix: the original referenced ``file1.close`` without calling it, so
    the handle was never closed; a context manager now guarantees cleanup.
    """
    prevLines = False
    parts = []
    with open(name, 'r', encoding='utf8', errors='replace') as source:
        for line in source:
            line, prevLines = checkTextLine(line, prevLines)
            parts.append(line)
    return ''.join(parts)
|
||||
|
||||
def updModifiedfile(name, text):
    """Overwrite file *name* with *text*.

    Uses a context manager so the handle is flushed and closed even if the
    write raises.
    """
    with open(name, 'w', encoding='utf8', errors='replace') as out:
        out.write(text)
|
||||
|
||||
def handlefile(name):
    """Rewrite file *name* in place if the fmt conversion changes anything.

    Reads the file twice (once raw, once transformed) and only writes when
    the transformed text differs, so unchanged files are left untouched.
    """
    oldtext = getDefaultfile(name)
    newtext = getModifiedfile(name)

    if oldtext != newtext:
        updModifiedfile(name, newtext)
|
||||
|
||||
# Walk the whole working tree and run the converter over C++ sources/headers.
p = pathlib.Path('.')
for i in p.glob('**/*'):
    fname = i.absolute()
    # NOTE(review): substring matching also catches names like "x.cpp.orig"
    # or "x.hpp", and a name containing both ".cpp" and ".h" is processed
    # twice -- confirm this is intended before tightening to suffix checks.
    if '.cpp' in i.name:
        handlefile(fname)
    if '.h' in i.name:
        handlefile(fname)
|
||||
7
apps/account-create/.formatter.exs
Normal file
7
apps/account-create/.formatter.exs
Normal file
@@ -0,0 +1,7 @@
|
||||
# Used by "mix format"
# Restricts formatting to this config file and the account script.
[
  inputs: [
    ".formatter.exs",
    "account.exs"
  ]
]
|
||||
10
apps/account-create/Dockerfile
Normal file
10
apps/account-create/Dockerfile
Normal file
@@ -0,0 +1,10 @@
|
||||
FROM elixir:1.14-slim

# Install Hex and Rebar so Mix.install in account.exs can fetch its
# dependencies at container start.
RUN mix local.hex --force && \
    mix local.rebar --force

COPY account.exs /account.exs
COPY srp.exs /srp.exs
# The script is executed directly via its shebang line.
RUN chmod +x /account.exs

CMD /account.exs
|
||||
102
apps/account-create/README.md
Normal file
102
apps/account-create/README.md
Normal file
@@ -0,0 +1,102 @@
|
||||
# Account.exs
|
||||
|
||||
Simple script to create an account for AzerothCore
|
||||
|
||||
This script allows a server admin to create a user automatically after the `dbimport` tool runs, without needing to open up the `worldserver` console.
|
||||
|
||||
## How To Use
|
||||
|
||||
### Pre-requisites
|
||||
|
||||
- MySQL is running
|
||||
- The authserver database (`acore_auth`, typically) has a table named `account`
|
||||
|
||||
### Running
|
||||
|
||||
```bash
|
||||
$ elixir account.exs
|
||||
```
|
||||
|
||||
### Configuration
|
||||
|
||||
This script reads from environment variables in order to control which account it creates and the MySQL server it's communicating with
|
||||
|
||||
|
||||
- `ACORE_USERNAME` Username for account, default "admin"
|
||||
- `ACORE_PASSWORD` Password for account, default "admin"
|
||||
- `ACORE_GM_LEVEL` GM Level for account, default 3
|
||||
- `MYSQL_DATABASE` Database name, default "acore_auth"
|
||||
- `MYSQL_USERNAME` MySQL username, default "root"
|
||||
- `MYSQL_PASSWORD` MySQL password, default "password"
|
||||
- `MYSQL_PORT` MySQL Port, default 3306
|
||||
- `MYSQL_HOST` MySQL Host, default "localhost"
|
||||
|
||||
To use these environment variables, execute the script like so:
|
||||
|
||||
```bash
|
||||
$ MYSQL_HOST=mysql \
|
||||
MYSQL_PASSWORD="fourthehoard" \
|
||||
ACORE_USERNAME=drekthar \
|
||||
ACORE_PASSWORD=securepass22 \
|
||||
elixir account.exs
|
||||
```
|
||||
|
||||
This can also be used in a loop. Consider this csv file:
|
||||
|
||||
```csv
|
||||
user,pass,gm_level
|
||||
admin,adminpass,2
|
||||
soapuser,soappass,3
|
||||
mainuser,userpass,0
|
||||
```
|
||||
|
||||
You can then loop over this csv file, and manage users like so:
|
||||
|
||||
```bash
|
||||
$ while IFS=, read -r user pass gm; do
|
||||
ACORE_USERNAME=$user \
|
||||
ACORE_PASSWORD=$pass \
|
||||
    ACORE_GM_LEVEL=$gm \
|
||||
elixir account.exs
|
||||
done <<< $(tail -n '+2' users.csv)
|
||||
```
|
||||
|
||||
### Docker
|
||||
|
||||
Running and building with docker is simple:
|
||||
|
||||
```bash
|
||||
$ docker build -t acore/account-create .
|
||||
$ docker run \
|
||||
-e MYSQL_HOST=mysql \
|
||||
-v mix_cache:/root/.cache/mix/installs \
|
||||
acore/account-create
|
||||
```
|
||||
|
||||
Note that the `MYSQL_HOST` is required to be set with the docker container, as the default setting targets `localhost`.
|
||||
|
||||
### docker-compose
|
||||
|
||||
A simple way to integrate this into a docker-compose file.
|
||||
|
||||
This is why I wrote this script - an automatic way to have an admin account idempotently created on startup of the server.
|
||||
|
||||
```yaml
|
||||
services:
|
||||
account-create:
|
||||
image: acore/account-create:${DOCKER_IMAGE_TAG:-master}
|
||||
build:
|
||||
context: apps/account-create/
|
||||
dockerfile: apps/account-create/Dockerfile
|
||||
environment:
|
||||
MYSQL_HOST: ac-database
|
||||
MYSQL_PASSWORD: ${DOCKER_DB_ROOT_PASSWORD:-password}
|
||||
ACORE_USERNAME: ${ACORE_ROOT_ADMIN_ACCOUNT:-admin}
|
||||
ACORE_PASSWORD: ${ACORE_ROOT_ADMIN_PASSWORD:-password}
|
||||
volumes:
|
||||
- mix_cache:/root/.cache/mix/installs
|
||||
profiles: [local, app, db-import-local]
|
||||
depends_on:
|
||||
ac-db-import:
|
||||
condition: service_completed_successfully
|
||||
```
|
||||
134
apps/account-create/account.exs
Normal file
134
apps/account-create/account.exs
Normal file
@@ -0,0 +1,134 @@
|
||||
#!/usr/bin/env elixir
# Execute this Elixir script with the below command
#
# $ ACORE_USERNAME=foo ACORE_PASSWORD=barbaz123 elixir account.exs
#
# Set environment variables for basic configuration
#
# ACORE_USERNAME - Username for account, default "admin"
# ACORE_PASSWORD - Password for account, default "admin"
# ACORE_GM_LEVEL - GM level for account
# MYSQL_DATABASE - Database name, default "acore_auth"
# MYSQL_USERNAME - MySQL username, default "root"
# MYSQL_PASSWORD - MySQL password, default "password"
# MYSQL_PORT - MySQL Port, default 3306
# MYSQL_HOST - MySQL Host, default "localhost"

# Install remote dependencies
[
  {:myxql, "~> 0.6.0"}
]
|> Mix.install()

# Start the logger
Application.start(:logger)
require Logger

# Constants
default_credential = "admin"
default_gm_level = "3"
account_access_comment = "Managed via account-create script"

# Import srp functions
Code.require_file("srp.exs", Path.absname(__DIR__))

# Assume operator provided a "human-readable" name.
# The database stores usernames in all caps
username_lower =
  System.get_env("ACORE_USERNAME", default_credential)
  |> tap(&Logger.info("Account to create: #{&1}"))

username = String.upcase(username_lower)

password = System.get_env("ACORE_PASSWORD", default_credential)

gm_level = System.get_env("ACORE_GM_LEVEL", default_gm_level) |> String.to_integer()

# NOTE(review): an out-of-range GM level is only logged here -- the script
# still proceeds and inserts it below. Confirm whether this should abort.
if Range.new(0, 3) |> Enum.member?(gm_level) |> Kernel.not do
  Logger.info("Valid ACORE_GM_LEVEL values are 0, 1, 2, and 3. The given value was: #{gm_level}.")
end

# Connect to the auth database (connection parameters come from the
# MYSQL_* environment variables above).
{:ok, pid} =
  MyXQL.start_link(
    protocol: :tcp,
    database: System.get_env("MYSQL_DATABASE", "acore_auth"),
    username: System.get_env("MYSQL_USERNAME", "root"),
    password: System.get_env("MYSQL_PASSWORD", "password"),
    port: System.get_env("MYSQL_PORT", "3306") |> String.to_integer(),
    hostname: System.get_env("MYSQL_HOST", "localhost")
  )

Logger.info("MySQL connection created")

Logger.info("Checking database for user #{username_lower}")

# Check if user already exists in database
{:ok, result} = MyXQL.query(pid, "SELECT salt FROM account WHERE username=?", [username])

%{salt: salt, verifier: verifier} =
  case result do
    %{rows: [[salt | _] | _]} ->
      Logger.info("Salt for #{username_lower} found in database")
      # re-use the salt if the user exists in database
      Srp.generate_stored_values(username, password, salt)

    _ ->
      Logger.info("Salt not found in database for #{username_lower}. Generating a new one")
      Srp.generate_stored_values(username, password)
  end

Logger.info("New salt and verifier generated")

# Insert values into DB, replacing the verifier if the user already exists
result =
  MyXQL.query(
    pid,
    """
    INSERT INTO account
      (`username`, `salt`, `verifier`)
    VALUES
      (?, ?, ?)
    ON DUPLICATE KEY UPDATE verifier=?
    """,
    [username, salt, verifier, verifier]
  )

case result do
  {:error, %{message: message}} ->
    File.write("fail.log", message)

    Logger.info(
      "Account #{username_lower} failed to create. You can check the error message at fail.log."
    )

    exit({:shutdown, 1})

  # if num_rows changed and last_insert_id == 0, it means the verifier matched. No change necessary
  {:ok, %{num_rows: 1, last_insert_id: 0}} ->
    Logger.info(
      "Account #{username_lower} doesn't need to have its' password changed. You should be able to log in with that account"
    )

  {:ok, %{num_rows: 1}} ->
    Logger.info(
      "Account #{username_lower} has been created. You should now be able to login with that account"
    )

  {:ok, %{num_rows: 2}} ->
    Logger.info(
      "Account #{username_lower} has had its' password reset. You should now be able to login with that account"
    )
end

# Set GM level to configured value
{:ok, _} =
  MyXQL.query(
    pid,
    """
    INSERT INTO account_access
      (`id`, `gmlevel`, `comment`)
    VALUES
      ((SELECT id FROM account WHERE username=?), ?, ?)
    ON DUPLICATE KEY UPDATE gmlevel=?, comment=?
    """, [username, gm_level, account_access_comment, gm_level, account_access_comment])

Logger.info("GM Level for #{username_lower} set to #{gm_level}")
|
||||
59
apps/account-create/srp.exs
Normal file
59
apps/account-create/srp.exs
Normal file
@@ -0,0 +1,59 @@
|
||||
defmodule Srp do
  # SRP6 helper used by account.exs to derive the (salt, verifier) pair that
  # AzerothCore stores in the auth database's `account` table.

  # Constants required in WoW's SRP6 implementation
  @n <<137, 75, 100, 94, 137, 225, 83, 91, 189, 173, 91, 139, 41, 6, 80, 83, 8, 1, 177, 142, 191,
       191, 94, 143, 171, 60, 130, 135, 42, 62, 155, 183>>
  @g <<7>>
  @field_length 32

  # Wrapper function
  # Returns a map holding the intermediate values plus :salt and :verifier.
  def generate_stored_values(username, password, salt \\ "") do
    default_state()
    |> generate_salt(salt)
    |> calculate_x(username, password)
    |> calculate_v()
  end

  def default_state() do
    %{n: @n, g: @g}
  end

  # Generate salt if it's not defined
  def generate_salt(state, "") do
    salt = :crypto.strong_rand_bytes(32)
    Map.merge(state, %{salt: salt})
  end

  # Don't generate salt if it's already defined
  # NOTE(review): pad_binary assumes byte_size(salt) <= 32; a longer salt
  # would make the pad size negative -- confirm callers guarantee this.
  def generate_salt(state, salt) do
    padded_salt = pad_binary(salt)
    Map.merge(state, %{salt: padded_salt})
  end

  # Get h1: x = SHA1(salt <> SHA1(USERNAME <> ":" <> PASSWORD)), byte-reversed
  def calculate_x(state, username, password) do
    hash = :crypto.hash(:sha, String.upcase(username) <> ":" <> String.upcase(password))
    x = reverse(:crypto.hash(:sha, state.salt <> hash))
    Map.merge(state, %{x: x, username: username})
  end

  # Get h2: verifier = g^x mod N, byte-reversed and zero-padded to 32 bytes
  def calculate_v(state) do
    verifier =
      :crypto.mod_pow(state.g, state.x, state.n)
      |> reverse()
      |> pad_binary()

    Map.merge(state, %{verifier: verifier})
  end

  # Right-pad a binary with zero bytes up to @field_length bytes.
  defp pad_binary(blob) do
    pad = @field_length - byte_size(blob)
    <<blob::binary, 0::pad*8>>
  end

  # Flip byte order by round-tripping through an unsigned integer (this also
  # drops leading zero bytes, which is why pad_binary is applied afterwards).
  defp reverse(binary) do
    binary
    |> :binary.decode_unsigned(:big)
    |> :binary.encode_unsigned(:little)
  end
end
|
||||
26
apps/bash_shared/common.sh
Normal file
26
apps/bash_shared/common.sh
Normal file
@@ -0,0 +1,26 @@
|
||||
# Thin aliases over the acore event library so hook registration reads naturally.
function registerHooks() { acore_event_registerHooks "$@"; }
function runHooks() { acore_event_runHooks "$@"; }

source "$AC_PATH_CONF/dist/config.sh" # include dist to avoid missing conf variables

# first check if it's defined in env, otherwise use the default
USER_CONF_PATH=${USER_CONF_PATH:-"$AC_PATH_CONF/config.sh"}

if [ -f "$USER_CONF_PATH" ]; then
    source "$USER_CONF_PATH" # should overwrite previous
else
    echo "NOTICE: file <$USER_CONF_PATH> not found, we use default configuration only."
fi

#
# Load modules
#

# Source every module's include.sh, if present.
for entry in "$AC_PATH_MODULES/"*/include.sh
do
    if [ -e "$entry" ]; then
        source "$entry"
    fi
done

# Read the project version out of acore.json via the bundled JSONPath helper.
ACORE_VERSION=$("$AC_PATH_DEPS/jsonpath/JSONPath.sh" -f "$AC_PATH_ROOT/acore.json" -b '$.version')
|
||||
28
apps/bash_shared/defines.sh
Normal file
28
apps/bash_shared/defines.sh
Normal file
@@ -0,0 +1,28 @@
|
||||
# Resolve the repository root and derive the standard project paths.
# On macOS this may install Homebrew, bash >= 4 and coreutils (for greadlink).
unamestr=$(uname)
if [[ "$unamestr" == 'Darwin' ]]; then
    if ! command -v brew &>/dev/null ; then
        ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)"
    fi
    if ! [ "${BASH_VERSINFO}" -ge 4 ]; then
        brew install bash
    fi
    if ! command -v greadlink &>/dev/null ; then
        brew install coreutils
    fi
    AC_PATH_ROOT=$(greadlink -f "$AC_PATH_APPS/../")
else
    AC_PATH_ROOT=$(readlink -f "$AC_PATH_APPS/../")
fi

# Make AC_PATH_ROOT absolute if readlink returned a relative path.
case $AC_PATH_ROOT in
  /*) AC_PATH_ROOT=$AC_PATH_ROOT;;
  *) AC_PATH_ROOT=$PWD/$AC_PATH_ROOT;;
esac

AC_PATH_CONF="$AC_PATH_ROOT/conf"

AC_PATH_MODULES="$AC_PATH_ROOT/modules"

AC_PATH_DEPS="$AC_PATH_ROOT/deps"

AC_PATH_VAR="$AC_PATH_ROOT/var"
|
||||
16
apps/bash_shared/includes.sh
Normal file
16
apps/bash_shared/includes.sh
Normal file
@@ -0,0 +1,16 @@
|
||||
[[ ${GUARDYVAR:-} -eq 1 ]] && return || readonly GUARDYVAR=1 # include it once

# force default language for applications
LC_ALL=C

# Absolute path of the apps/ directory (this file lives in apps/bash_shared).
AC_PATH_APPS="$( cd "$( dirname "${BASH_SOURCE[0]}" )/../" && pwd )"

AC_PATH_SHARED="$AC_PATH_APPS/bash_shared"

# Path definitions first, then the event/hook library, then shared config.
source "$AC_PATH_SHARED/defines.sh"

source "$AC_PATH_DEPS/acore/bash-lib/src/event/hooks.sh"

source "$AC_PATH_SHARED/common.sh"

# On msys (Windows) binaries live directly in BINPATH; elsewhere under bin/.
[[ "$OSTYPE" = "msys" ]] && AC_BINPATH_FULL="$BINPATH" || AC_BINPATH_FULL="$BINPATH/bin"
|
||||
40
apps/ci/ci-codestyle.sh
Normal file
40
apps/ci/ci-codestyle.sh
Normal file
@@ -0,0 +1,40 @@
|
||||
#!/bin/bash
set -e

echo "Codestyle check script:"
echo

# Regexes checked line-by-line; grep -n reports the offending lines.
declare -A singleLineRegexChecks=(
    ["LOG_.+GetCounter"]="Use ObjectGuid::ToString().c_str() method instead of ObjectGuid::GetCounter() when logging. Check the lines above"
    ["[[:blank:]]$"]="Remove whitespace at the end of the lines above"
    ["\t"]="Replace tabs with 4 spaces in the lines above"
)

for check in "${!singleLineRegexChecks[@]}"; do
    echo "  Checking RegEx: '${check}'"

    # Quote the pattern: unquoted it would undergo word splitting and
    # pathname expansion before grep ever saw it.
    if grep -P -r -I -n "${check}" src; then
        echo
        echo "${singleLineRegexChecks[$check]}"
        exit 1
    fi
done

# Regexes that may span lines; grep -z treats each file as one record.
declare -A multiLineRegexChecks=(
    ["LOG_[^;]+GetCounter"]="Use ObjectGuid::ToString().c_str() method instead of ObjectGuid::GetCounter() when logging. Check the lines above"
    ["\n\n\n"]="Multiple blank lines detected, keep only one. Check the files above"
)

for check in "${!multiLineRegexChecks[@]}"; do
    echo "  Checking RegEx: '${check}'"

    if grep -Pzo -r -I "${check}" src; then
        echo
        echo
        echo "${multiLineRegexChecks[$check]}"
        exit 1
    fi
done

echo
echo "Everything looks good"
|
||||
8
apps/ci/ci-compile.sh
Normal file
8
apps/ci/ci-compile.sh
Normal file
@@ -0,0 +1,8 @@
|
||||
#!/bin/bash

set -e

echo "compile core"
# Enable ccache to speed up CI rebuilds, then build everything via the
# project's acore.sh dashboard.
export AC_CCACHE=true
./acore.sh "compiler" "all"
|
||||
|
||||
69
apps/ci/ci-conf-core.sh
Normal file
69
apps/ci/ci-conf-core.sh
Normal file
@@ -0,0 +1,69 @@
|
||||
#!/bin/bash

set -e

# Append the CI build settings consumed by acore.sh: use all cores (+2),
# release build with warnings-as-errors, static scripts/modules, tests on.
cat >>conf/config.sh <<CONFIG_SH
MTHREADS=$(($(grep -c ^processor /proc/cpuinfo) + 2))
CWARNINGS=ON
CDEBUG=OFF
CTYPE=Release
CTOOLS_BUILD=none
CSCRIPTS=static
CMODULES=static
CBUILD_TESTING=ON
CSCRIPTPCH=OFF
CCOREPCH=OFF
CCUSTOMOPTIONS='-DCMAKE_C_COMPILER_LAUNCHER=ccache -DCMAKE_CXX_COMPILER_LAUNCHER=ccache -DCMAKE_C_FLAGS="-Werror" -DCMAKE_CXX_FLAGS="-Werror"'
CONFIG_SH

# Install and select the compiler pair requested via the COMPILER env var.
case $COMPILER in

  # this is in order to use the "default" gcc version of the OS, without forcing a specific version
  "gcc" )
    time sudo apt-get install -y gcc g++
    echo "CCOMPILERC=\"gcc\"" >> ./conf/config.sh
    echo "CCOMPILERCXX=\"g++\"" >> ./conf/config.sh
    ;;

  "gcc8" )
    time sudo apt-get install -y gcc-8 g++-8
    echo "CCOMPILERC=\"gcc-8\"" >> ./conf/config.sh
    echo "CCOMPILERCXX=\"g++-8\"" >> ./conf/config.sh
    ;;

  "gcc10" )
    time sudo apt-get install -y gcc-10 g++-10
    echo "CCOMPILERC=\"gcc-10\"" >> ./conf/config.sh
    echo "CCOMPILERCXX=\"g++-10\"" >> ./conf/config.sh
    ;;

  # this is in order to use the "default" clang version of the OS, without forcing a specific version
  "clang" )
    time sudo apt-get install -y clang
    echo "CCOMPILERC=\"clang\"" >> ./conf/config.sh
    echo "CCOMPILERCXX=\"clang++\"" >> ./conf/config.sh
    ;;

  "clang10" )
    time sudo apt-get install -y clang-10
    echo "CCOMPILERC=\"clang-10\"" >> ./conf/config.sh
    echo "CCOMPILERCXX=\"clang++-10\"" >> ./conf/config.sh
    ;;

  "clang11" )
    time sudo apt-get install -y clang-11
    echo "CCOMPILERC=\"clang-11\"" >> ./conf/config.sh
    echo "CCOMPILERCXX=\"clang++-11\"" >> ./conf/config.sh
    ;;

  "clang12" )
    time sudo apt-get install -y clang-12
    echo "CCOMPILERC=\"clang-12\"" >> ./conf/config.sh
    echo "CCOMPILERCXX=\"clang++-12\"" >> ./conf/config.sh
    ;;

  * )
    echo "Unknown compiler $COMPILER"
    exit 1
    ;;
esac
|
||||
36
apps/ci/ci-conf-db.sh
Normal file
36
apps/ci/ci-conf-db.sh
Normal file
@@ -0,0 +1,36 @@
|
||||
#!/bin/bash

set -e

# Append the CI build settings for a database-only build (no apps,
# db-import tool only) to conf/config.sh. The heredoc is expanded here,
# so MTHREADS is computed (CPU count + 2) at generation time.
cat >>conf/config.sh <<CONFIG_SH
MTHREADS=$(($(grep -c ^processor /proc/cpuinfo) + 2))
CWARNINGS=ON
CDEBUG=OFF
CTYPE=Release
CAPPS_BUILD=none
CTOOLS_BUILD=db-only
CSCRIPTPCH=OFF
CCOREPCH=OFF
CCUSTOMOPTIONS='-DCMAKE_C_COMPILER_LAUNCHER=ccache -DCMAKE_CXX_COMPILER_LAUNCHER=ccache -DCMAKE_C_FLAGS="-Werror" -DCMAKE_CXX_FLAGS="-Werror"'
CONFIG_SH

# Install the compiler requested via $COMPILER and record it in config.sh.
case $COMPILER in

  # this is in order to use the "default" clang version of the OS, without forcing a specific version
  "clang" )
    time sudo apt-get install -y clang
    echo "CCOMPILERC=\"clang\"" >> ./conf/config.sh
    echo "CCOMPILERCXX=\"clang++\"" >> ./conf/config.sh
    ;;

  "clang12" )
    time sudo apt-get install -y clang-12
    echo "CCOMPILERC=\"clang-12\"" >> ./conf/config.sh
    echo "CCOMPILERCXX=\"clang++-12\"" >> ./conf/config.sh
    ;;

  * )
    echo "Unknown compiler $COMPILER"
    exit 1
    ;;
esac
|
||||
67
apps/ci/ci-conf-tools.sh
Normal file
67
apps/ci/ci-conf-tools.sh
Normal file
@@ -0,0 +1,67 @@
|
||||
#!/bin/bash

set -e

# Append the CI build settings for a tools-only build (map extractors,
# no server apps) to conf/config.sh. The heredoc is expanded here, so
# MTHREADS is computed (CPU count + 2) at generation time.
cat >>conf/config.sh <<CONFIG_SH
MTHREADS=$(($(grep -c ^processor /proc/cpuinfo) + 2))
CWARNINGS=ON
CDEBUG=OFF
CTYPE=Release
CAPPS_BUILD=none
CTOOLS_BUILD=maps-only
CSCRIPTPCH=OFF
CCOREPCH=OFF
CCUSTOMOPTIONS='-DCMAKE_C_COMPILER_LAUNCHER=ccache -DCMAKE_CXX_COMPILER_LAUNCHER=ccache -DCMAKE_C_FLAGS="-Werror" -DCMAKE_CXX_FLAGS="-Werror"'
CONFIG_SH

# Install the compiler requested via $COMPILER and record it in config.sh.
case $COMPILER in

  # this is in order to use the "default" gcc version of the OS, without forcing a specific version
  "gcc" )
    time sudo apt-get install -y gcc g++
    echo "CCOMPILERC=\"gcc\"" >> ./conf/config.sh
    echo "CCOMPILERCXX=\"g++\"" >> ./conf/config.sh
    ;;

  "gcc8" )
    time sudo apt-get install -y gcc-8 g++-8
    echo "CCOMPILERC=\"gcc-8\"" >> ./conf/config.sh
    echo "CCOMPILERCXX=\"g++-8\"" >> ./conf/config.sh
    ;;

  "gcc10" )
    time sudo apt-get install -y gcc-10 g++-10
    echo "CCOMPILERC=\"gcc-10\"" >> ./conf/config.sh
    echo "CCOMPILERCXX=\"g++-10\"" >> ./conf/config.sh
    ;;

  # this is in order to use the "default" clang version of the OS, without forcing a specific version
  "clang" )
    time sudo apt-get install -y clang
    echo "CCOMPILERC=\"clang\"" >> ./conf/config.sh
    echo "CCOMPILERCXX=\"clang++\"" >> ./conf/config.sh
    ;;

  "clang10" )
    time sudo apt-get install -y clang-10
    echo "CCOMPILERC=\"clang-10\"" >> ./conf/config.sh
    echo "CCOMPILERCXX=\"clang++-10\"" >> ./conf/config.sh
    ;;

  "clang11" )
    time sudo apt-get install -y clang-11
    echo "CCOMPILERC=\"clang-11\"" >> ./conf/config.sh
    echo "CCOMPILERCXX=\"clang++-11\"" >> ./conf/config.sh
    ;;

  "clang12" )
    time sudo apt-get install -y clang-12
    echo "CCOMPILERC=\"clang-12\"" >> ./conf/config.sh
    echo "CCOMPILERCXX=\"clang++-12\"" >> ./conf/config.sh
    ;;

  * )
    echo "Unknown compiler $COMPILER"
    exit 1
    ;;
esac
|
||||
15
apps/ci/ci-dry-run.sh
Normal file
15
apps/ci/ci-dry-run.sh
Normal file
@@ -0,0 +1,15 @@
|
||||
#!/bin/bash

set -e

# Run a server binary with -dry-run against a live MySQL instance to
# verify it starts up cleanly. $1 is the app name (worldserver/authserver).

# absolute path of the directory containing this script
CURRENT_PATH="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"

# Start mysql
sudo systemctl start mysql

# generate the .conf for the requested app (sets APP_NAME as a side effect)
source "$CURRENT_PATH/ci-gen-server-conf-files.sh" $1 "etc" "bin" "root"

# run in a subshell so the cwd of this script is untouched; abort after 5 minutes
(cd ./env/dist/bin/ && timeout 5m ./$APP_NAME -dry-run)

# Stop mysql
sudo systemctl stop mysql
|
||||
18
apps/ci/ci-error-check.sh
Normal file
18
apps/ci/ci-error-check.sh
Normal file
@@ -0,0 +1,18 @@
|
||||
#!/usr/bin/env bash

# Fail the CI run when the server wrote anything into Errors.log during
# startup; succeed (and say so) when the file is absent or empty.
ERRORS_FILE="./env/dist/bin/Errors.log";

echo "Checking Startup Errors"
echo

# -s: file exists and is non-empty
if [[ ! -s ${ERRORS_FILE} ]]; then
    echo "> No startup errors found in Errors.log";
else
    printf "The Errors.log file contains startup errors:\n\n";
    cat ${ERRORS_FILE};
    printf "\nPlease solve the startup errors listed above!\n";
    exit 1;
fi

echo
echo "Done"
|
||||
15
apps/ci/ci-gen-server-conf-files.sh
Normal file
15
apps/ci/ci-gen-server-conf-files.sh
Normal file
@@ -0,0 +1,15 @@
|
||||
# Generate a runnable <app>.conf from its .conf.dist template and point
# it at the CI MySQL instance.
#
# $1: app name (e.g. "worldserver", "authserver")
# $2: config folder under ./env/dist (default "etc")
# $3: binary folder under ./env/dist (default "bin")
# $4: mysql root password (default empty)
APP_NAME=$1
CONFIG_FOLDER=${2:-"etc"}
# use :- (like the other parameters) so an explicitly empty argument also
# falls back to the default instead of producing an empty path
BIN_FOLDER=${3:-"bin"}
MYSQL_ROOT_PASSWORD=${4:-""}

# copy dist files to conf files
cp ./env/dist/$CONFIG_FOLDER/$APP_NAME.conf.dist ./env/dist/$CONFIG_FOLDER/$APP_NAME.conf

# replace login info
sed -i "s/127.0.0.1;3306;acore;acore/localhost;3306;root;$MYSQL_ROOT_PASSWORD/" ./env/dist/$CONFIG_FOLDER/$APP_NAME.conf

if [[ $APP_NAME == "worldserver" ]]; then
    # the worldserver also needs the client data files next to the binaries
    sed -i 's/DataDir = \".\"/DataDir = \".\/data"/' ./env/dist/$CONFIG_FOLDER/$APP_NAME.conf
    git clone --depth=1 --branch=master --single-branch https://github.com/ac-data/ac-data.git ./env/dist/$BIN_FOLDER/data
fi
|
||||
29
apps/ci/ci-install-modules.sh
Normal file
29
apps/ci/ci-install-modules.sh
Normal file
@@ -0,0 +1,29 @@
|
||||
#!/bin/bash

set -e

# Clone a representative set of community modules into modules/ so the CI
# build verifies the core still compiles with them enabled. Note that a
# few modules use "main" instead of "master" as their default branch.
echo "install modules"
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-eluna.git modules/mod-eluna
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-autobalance.git modules/mod-autobalance
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-ah-bot.git modules/mod-ah-bot
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-anticheat.git modules/mod-anticheat
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-bg-item-reward.git modules/mod-bg-item-reward
# NOTE: disabled because it causes DB error
# git clone --depth=1 --branch=master https://github.com/azerothcore/mod-azerothshard.git modules/mod-azerothshard
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-cfbg.git modules/mod-cfbg
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-chat-transmitter modules/mod-chat-transmitter
# NOTE: disabled because it causes DB error
#git clone --depth=1 --branch=master https://github.com/azerothcore/mod-chromie-xp.git modules/mod-chromie-xp
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-cta-switch.git modules/mod-cta-switch
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-desertion-warnings.git modules/mod-desertion-warnings
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-duel-reset.git modules/mod-duel-reset
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-ip-tracker.git modules/mod-ip-tracker
git clone --depth=1 --branch=main https://github.com/azerothcore/mod-low-level-arena.git modules/mod-low-level-arena
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-low-level-rbg.git modules/mod-low-level-rbg
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-multi-client-check.git modules/mod-multi-client-check
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-pvp-titles.git modules/mod-pvp-titles
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-pvpstats-announcer.git modules/mod-pvpstats-announcer
git clone --depth=1 --branch=main https://github.com/azerothcore/mod-queue-list-cache.git modules/mod-queue-list-cache
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-server-auto-shutdown.git modules/mod-server-auto-shutdown
git clone --depth=1 --branch=master https://github.com/azerothcore/mod-transmog.git modules/mod-transmog
git clone --depth=1 --branch=main https://github.com/azerothcore/mod-progression-system.git modules/mod-progression-system
||||
74
apps/ci/ci-install.sh
Normal file
74
apps/ci/ci-install.sh
Normal file
@@ -0,0 +1,74 @@
|
||||
#!/bin/bash

set -e

# Append the CI build settings for a full build (servers + tools + static
# scripts + unit tests) to conf/config.sh, install the OS dependencies,
# then install the compiler requested via $COMPILER.
cat >>conf/config.sh <<CONFIG_SH
MTHREADS=$(($(grep -c ^processor /proc/cpuinfo) + 2))
CWARNINGS=ON
CDEBUG=OFF
CTYPE=Release
CSCRIPTS=static
CBUILD_TESTING=ON
CSERVERS=ON
CTOOLS=ON
CSCRIPTPCH=OFF
CCOREPCH=OFF
CCUSTOMOPTIONS='-DCMAKE_C_COMPILER_LAUNCHER=ccache -DCMAKE_CXX_COMPILER_LAUNCHER=ccache -DCMAKE_C_FLAGS="-Werror" -DCMAKE_CXX_FLAGS="-Werror"'
CONFIG_SH

# base packages needed before acore.sh can resolve the remaining deps
time sudo apt-get update -y
# time sudo apt-get upgrade -y
time sudo apt-get install -y git lsb-release sudo
time ./acore.sh install-deps

case $COMPILER in

  # this is in order to use the "default" gcc version of the OS, without forcing a specific version
  "gcc" )
    time sudo apt-get install -y gcc g++
    echo "CCOMPILERC=\"gcc\"" >> ./conf/config.sh
    echo "CCOMPILERCXX=\"g++\"" >> ./conf/config.sh
    ;;

  "gcc8" )
    time sudo apt-get install -y gcc-8 g++-8
    echo "CCOMPILERC=\"gcc-8\"" >> ./conf/config.sh
    echo "CCOMPILERCXX=\"g++-8\"" >> ./conf/config.sh
    ;;

  "gcc10" )
    time sudo apt-get install -y gcc-10 g++-10
    echo "CCOMPILERC=\"gcc-10\"" >> ./conf/config.sh
    echo "CCOMPILERCXX=\"g++-10\"" >> ./conf/config.sh
    ;;

  # this is in order to use the "default" clang version of the OS, without forcing a specific version
  "clang" )
    time sudo apt-get install -y clang
    echo "CCOMPILERC=\"clang\"" >> ./conf/config.sh
    echo "CCOMPILERCXX=\"clang++\"" >> ./conf/config.sh
    ;;

  "clang10" )
    time sudo apt-get install -y clang-10
    echo "CCOMPILERC=\"clang-10\"" >> ./conf/config.sh
    echo "CCOMPILERCXX=\"clang++-10\"" >> ./conf/config.sh
    ;;

  "clang11" )
    time sudo apt-get install -y clang-11
    echo "CCOMPILERC=\"clang-11\"" >> ./conf/config.sh
    echo "CCOMPILERCXX=\"clang++-11\"" >> ./conf/config.sh
    ;;

  "clang12" )
    time sudo apt-get install -y clang-12
    echo "CCOMPILERC=\"clang-12\"" >> ./conf/config.sh
    echo "CCOMPILERCXX=\"clang++-12\"" >> ./conf/config.sh
    ;;

  * )
    echo "Unknown compiler $COMPILER"
    exit 1
    ;;
esac
|
||||
54
apps/ci/ci-pending-changelogs.ts
Normal file
54
apps/ci/ci-pending-changelogs.ts
Normal file
@@ -0,0 +1,54 @@
|
||||
import * as semver from "https://deno.land/x/semver/mod.ts";
|
||||
|
||||
// specify the needed paths here
|
||||
const CHANGELOG_PATH = "doc/changelog";
|
||||
const CHANGELOG_PENDING_PATH = `${CHANGELOG_PATH}/pendings`;
|
||||
const CHANGELOG_MASTER_FILE = `${CHANGELOG_PATH}/master.md`;
|
||||
const ACORE_JSON = "./acore.json";
|
||||
|
||||
// read the acore.json file to work with the versioning
|
||||
const decoder = new TextDecoder("utf-8");
|
||||
const data = await Deno.readFile(ACORE_JSON);
|
||||
const acoreInfo = JSON.parse(decoder.decode(data));
|
||||
|
||||
let changelogText = await Deno.readTextFile(CHANGELOG_MASTER_FILE);
|
||||
|
||||
const currentVersion = acoreInfo.version;
|
||||
|
||||
const res=Deno.run({ cmd: [ "git", "rev-parse",
|
||||
"HEAD"],
|
||||
stdout: 'piped',
|
||||
stderr: 'piped',
|
||||
stdin: 'null' });
|
||||
await res.status();
|
||||
const gitVersion = new TextDecoder().decode(await res.output());
|
||||
|
||||
|
||||
for await (const dirEntry of Deno.readDir(CHANGELOG_PENDING_PATH)) {
|
||||
if (!dirEntry.isFile || !dirEntry.name.endsWith(".md")) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Upgrade the prerelease version number (e.g. 1.0.0-dev.1 -> 1.0.0-dev.2)
|
||||
acoreInfo.version = semver.inc(acoreInfo.version, "prerelease", {
|
||||
includePrerelease: true,
|
||||
});
|
||||
|
||||
// read the pending file found and add it at the beginning of the changelog text
|
||||
const data = await Deno.readTextFile(
|
||||
`${CHANGELOG_PENDING_PATH}/${dirEntry.name}`,
|
||||
);
|
||||
changelogText = `## ${acoreInfo.version} | Commit: [${gitVersion}](https://github.com/azerothcore/azerothcore-wotlk/commit/${gitVersion}\n\n${data}\n${changelogText}`;
|
||||
|
||||
// remove the pending file
|
||||
await Deno.remove(`${CHANGELOG_PENDING_PATH}/${dirEntry.name}`);
|
||||
}
|
||||
|
||||
// write to acore.json and master.md only if new version is available
|
||||
if (currentVersion != acoreInfo.version) {
|
||||
console.log(`Changelog version upgraded from ${currentVersion} to ${acoreInfo.version}`)
|
||||
Deno.writeTextFile(CHANGELOG_MASTER_FILE, changelogText);
|
||||
Deno.writeTextFile(ACORE_JSON, JSON.stringify(acoreInfo, null, 2)+"\n");
|
||||
} else {
|
||||
console.log("No changelogs to add")
|
||||
}
|
||||
84
apps/ci/ci-pending-sql.sh
Normal file
84
apps/ci/ci-pending-sql.sh
Normal file
@@ -0,0 +1,84 @@
|
||||
#!/usr/bin/env bash

# Move pending SQL files (data/sql/updates/pending_db_*) into the
# versioned updates folders, renaming them to the YYYY_MM_DD_NN.sql
# scheme and prepending a "-- DB update old -> new" header line.

CURRENT_PATH="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"

source "$CURRENT_PATH/../bash_shared/includes.sh"

UPDATES_PATH="$AC_PATH_ROOT/data/sql/updates/"

# space-separated list of the commits that introduced the pending files
COMMIT_HASH=

# Import all pending files for one database ($1: world|characters|auth).
function import() {
    db=$1
    folder="db_"$db
    pendingPath="$AC_PATH_ROOT/data/sql/updates/pending_$folder"
    updPath="$UPDATES_PATH/$folder"
    archivedPath="$AC_PATH_ROOT/data/sql/archive/$folder/6.x"

    # most recent update file already present for this db (lexicographic = chronological)
    latestUpd=$(ls -1 $updPath/ | tail -n 1)

    if [ -z $latestUpd ]; then
        latestUpd=$(ls -1 $archivedPath/ | tail -n 1)
        echo "> Last update file for db $db is missing! Using archived file" $latestUpd
    fi

    dateToday=$(date +%Y_%m_%d)
    counter=0

    # split the latest file name (YYYY_MM_DD_NN.sql) into date and counter parts
    dateLast=$latestUpd
    tmp=${dateLast#*_*_*_}
    oldCnt=${tmp%.sql}
    oldDate=${dateLast%_$tmp}

    # continue today's numbering if the last update is from today
    if [ "$oldDate" = "$dateToday" ]; then
        ((counter=10#$oldCnt+1)) # 10# is needed to explicitly add to a base 10 number
    fi;

    for entry in "$pendingPath"/*.sql
    do
        if [[ -e $entry ]]; then
            oldVer=$oldDate"_"$oldCnt

            # zero-pad the counter to two digits
            cnt=$(printf -v counter "%02d" $counter ; echo $counter)

            newVer=$dateToday"_"$cnt

            newFile="$updPath/"$dateToday"_"$cnt".sql"

            # detect whether the pending file follows the rev_<number>.sql naming
            oldFile=$(basename "$entry")
            prefix=${oldFile%_*.sql}
            suffix=${oldFile#rev_}
            rev=${suffix%.sql}

            isRev=0
            if [[ $prefix = "rev" && $rev =~ ^-?[0-9]+$ ]]; then
                isRev=1
            fi

            # header so the db updater knows the predecessor of this file
            echo "-- DB update $oldVer -> $newVer" > "$newFile";

            cat $entry >> "$newFile";

            # commit that added the pending file (for traceability)
            currentHash="$(git log --diff-filter=A "$entry" | grep "^commit " | sed -e 's/commit //')"

            if [[ "$COMMIT_HASH" != *"$currentHash"* ]]
            then
                COMMIT_HASH="$COMMIT_HASH $currentHash"
            fi

            rm $entry;

            oldDate=$dateToday
            oldCnt=$cnt

            ((counter+=1))
        fi
    done

}

import "world"
import "characters"
import "auth"

echo "Done."
|
||||
36
apps/ci/ci-pending.sh
Normal file
36
apps/ci/ci-pending.sh
Normal file
@@ -0,0 +1,36 @@
|
||||
#!/bin/bash
set -e

# Sanity-check pending SQL update files: every file must end with a
# semicolon (ignoring strings, comments and whitespace) and must not
# touch the sniff-sourced broadcast_text table.

echo "Pending SQL check script:"
echo

# We want to ensure the end of file has a semicolon and doesn't have extra
# newlines
find data/sql/updates/pending* -name "*.sql" -type f | while read -r file; do
    # The first sed script collapses all strings into an empty string. The
    # contents of strings aren't necessary for this check and its still valid
    # sql.
    #
    # The second rule removes sql comments.
    # FIX: the previous pattern 's/ --([^-])*$//' used literal parentheses
    # (sed BRE), so "-- comment" lines were never stripped and a trailing
    # comment after the final semicolon caused a false failure.
    ERR_AT_EOF="$(sed -e "s/'.*'/''/g" -e 's/ --.*$//' "$file" | tr -d '\n ' | tail -c 1)"
    if [[ "$ERR_AT_EOF" != ";" ]]; then
        echo "Missing Semicolon (;) or multiple newlines at the end of the file."
        exit 1
    else
        echo "> Semicolon check - OK"
    fi
done

# broadcast_text is generated from sniffs; manual edits are almost always wrong
find data/sql/updates/pending* -name "*.sql" -type f | while read -r file; do
    if sed "s/'.*'\(.*\)/\1/g" "$file" | grep -q -i -E "broadcast_text"; then
        echo "> broadcast_text check - Failed"
        echo "  - DON'T EDIT broadcast_text TABLE UNLESS YOU KNOW WHAT YOU ARE DOING!"
        echo "  - This error can safely be ignored if the changes are approved to be sniffed."
        exit 1
    else
        echo "> broadcast_text check - OK"
    fi
done

echo
echo "Everything looks good"
|
||||
3
apps/ci/ci-run-unit-tests.sh
Normal file
3
apps/ci/ci-run-unit-tests.sh
Normal file
@@ -0,0 +1,3 @@
|
||||
#!/bin/bash

# Run the compiled unit test binary, timing the execution; the script's
# exit status is the test suite's exit status.
time var/build/obj/src/test/unit_tests
|
||||
36
apps/ci/mac/ci-compile.sh
Normal file
36
apps/ci/mac/ci-compile.sh
Normal file
@@ -0,0 +1,36 @@
|
||||
#!/usr/bin/env bash

# macOS CI build: configure with Homebrew-provided dependencies and
# compile with ccache and -Werror enabled.

# Homebrew's openssl is keg-only, so its prefix must be passed explicitly
export OPENSSL_ROOT_DIR=$(brew --prefix openssl@3)

# ccache tuning: cap the cache size and compress entries aggressively
export CCACHE_CPP2=true
export CCACHE_MAXSIZE='500M'
export CCACHE_COMPRESS=1
export CCACHE_COMPRESSLEVEL=9
ccache -s

cd var/build/obj

time cmake ../../../ \
-DTOOLS=1 \
-DBUILD_TESTING=1 \
-DSCRIPTS=static \
-DCMAKE_BUILD_TYPE=Release \
-DMYSQL_ADD_INCLUDE_PATH=/usr/local/include \
-DMYSQL_LIBRARY=/usr/local/lib/libmysqlclient.dylib \
-DREADLINE_INCLUDE_DIR=/usr/local/opt/readline/include \
-DREADLINE_LIBRARY=/usr/local/opt/readline/lib/libreadline.dylib \
-DOPENSSL_INCLUDE_DIR="$OPENSSL_ROOT_DIR/include" \
-DOPENSSL_SSL_LIBRARIES="$OPENSSL_ROOT_DIR/lib/libssl.dylib" \
-DOPENSSL_CRYPTO_LIBRARIES="$OPENSSL_ROOT_DIR/lib/libcrypto.dylib" \
-DWITH_WARNINGS=1 \
-DCMAKE_C_FLAGS="-Werror" \
-DCMAKE_CXX_FLAGS="-Werror" \
-DCMAKE_C_COMPILER_LAUNCHER=ccache \
-DCMAKE_CXX_COMPILER_LAUNCHER=ccache \
-DUSE_SCRIPTPCH=0 \
-DUSE_COREPCH=0 \
;

# build with CPU count + 2 parallel jobs
time make -j $(($(sysctl -n hw.ncpu ) + 2))

# show what ccache saved us
ccache -s
|
||||
2
apps/compiler/.gitignore
vendored
Normal file
2
apps/compiler/.gitignore
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
config.sh
|
||||
|
||||
32
apps/compiler/README.md
Normal file
32
apps/compiler/README.md
Normal file
@@ -0,0 +1,32 @@
|
||||
## How to compile:
|
||||
|
||||
First of all, if you need some custom configuration, you have to copy
/conf/dist/config.sh to /conf/config.sh and configure it
|
||||
|
||||
* for a "clean" compilation you must run all scripts in their order:
|
||||
|
||||
./1-clean.sh
|
||||
./2-configure.sh
|
||||
./3-build.sh
|
||||
|
||||
* if you add/rename/delete some sources and you need to compile it you have to run:
|
||||
|
||||
./2-configure.sh
|
||||
./3-build.sh
|
||||
|
||||
* if you have modified code only, you just need to run
|
||||
|
||||
./3-build.sh
|
||||
|
||||
|
||||
## compiler.sh
|
||||
|
||||
compiler.sh script contains an interactive menu to clean/compile/build. You can also run actions directly by command lines specifying the option.
|
||||
Ex:
|
||||
./compiler.sh 3
|
||||
|
||||
It will start the build process (it's equivalent to ./3-build.sh)
|
||||
|
||||
## Note:
|
||||
|
||||
For an optimal development process and a **really faster** compilation time, it is suggested to use clang instead of gcc
|
||||
76
apps/compiler/compiler.sh
Normal file
76
apps/compiler/compiler.sh
Normal file
@@ -0,0 +1,76 @@
|
||||
#!/usr/bin/env bash

# Interactive clean/configure/compile menu. A choice can also be passed
# directly as the first command line argument (number or name), e.g.
# "./compiler.sh 3" or "./compiler.sh build".

set -e

CURRENT_PATH="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"

source "$CURRENT_PATH/includes/includes.sh"

# Dispatch a selection: a numeric choice runs the matching entry of
# comp_functions, a textual choice runs the comp_<name> function if it exists.
function run_option() {
    re='^[0-9]+$'
    if [[ $1 =~ $re ]] && test "${comp_functions[$1-1]+'test'}"; then
        ${comp_functions[$1-1]}
    elif [ -n "$(type -t comp_$1)" ] && [ "$(type -t comp_$1)" = function ]; then
        fun="comp_$1"
        $fun
    else
        echo "invalid option, use --help option for the commands list"
    fi
}

function comp_quit() {
    exit 0
}

# menu labels; kept in the same order as comp_functions below
comp_options=(
    "build: Configure and compile"
    "clean: Clean build files"
    "configure: Run CMake"
    "compile: Compile only"
    "all: clean, configure and compile"
    "ccacheClean: Clean ccache files, normally not needed"
    "ccacheShowStats: show ccache statistics"
    "quit: Close this menu")
comp_functions=(
    "comp_build"
    "comp_clean"
    "comp_configure"
    "comp_compile"
    "comp_all"
    "comp_ccacheClean"
    "comp_ccacheShowStats"
    "comp_quit")

PS3='[ Please enter your choice ]: '

runHooks "ON_AFTER_OPTIONS" #you can create your custom options

# Handle one reply: empty/--help prints the command list, anything else
# is dispatched through run_option.
function _switch() {
    _reply="$1"
    _opt="$2"

    case $_reply in
        ""|"--help")
            echo "Available commands:"
            # FIX: was "${options[@]}" (an undefined array), which printed
            # an empty list instead of the available commands
            printf '%s\n' "${comp_options[@]}"
            ;;
        *)
            run_option $_reply $_opt
            ;;
    esac
}


while true
do
    # run option directly if specified in argument
    [ ! -z $1 ] && _switch $@
    [ ! -z $1 ] && exit 0

    select opt in "${comp_options[@]}"
    do
        echo "==== ACORE COMPILER ===="
        _switch $REPLY
        break;
    done
done
|
||||
7
apps/compiler/includes/defines.sh
Normal file
7
apps/compiler/includes/defines.sh
Normal file
@@ -0,0 +1,7 @@
|
||||
# you can choose build type from cmd argument
# (e.g. "release" or "debug"; overrides the CCTYPE from config.sh)
if [ ! -z $1 ]
then
    CCTYPE=$1
    CCTYPE=${CCTYPE^} # capitalize first letter if it's not yet
fi
|
||||
|
||||
170
apps/compiler/includes/functions.sh
Normal file
170
apps/compiler/includes/functions.sh
Normal file
@@ -0,0 +1,170 @@
|
||||
|
||||
# Remove all generated build files from the build directory (the
# directory itself is kept).
function comp_clean() {
    DIRTOCLEAN=${BUILDPATH:-var/build/obj}
    PATTERN="$DIRTOCLEAN/*"

    echo "Cleaning build files in $DIRTOCLEAN"

    # only delete contents if the directory actually exists
    [ -d "$DIRTOCLEAN" ] && rm -rf $PATTERN
}
|
||||
|
||||
# Export the ccache environment (sizes, compression, sloppiness) and add
# the compiler-launcher flags to CCUSTOMOPTIONS. No-op unless AC_CCACHE=true.
function comp_ccacheEnable() {
    [ "$AC_CCACHE" != true ] && return

    # defaults may be overridden from the environment / config.sh
    export CCACHE_MAXSIZE=${CCACHE_MAXSIZE:-'1000MB'}
    #export CCACHE_DEPEND=true
    export CCACHE_SLOPPINESS=${CCACHE_SLOPPINESS:-pch_defines,time_macros,include_file_mtime}
    export CCACHE_CPP2=${CCACHE_CPP2:-true} # optimization for clang
    export CCACHE_COMPRESS=${CCACHE_COMPRESS:-1}
    export CCACHE_COMPRESSLEVEL=${CCACHE_COMPRESSLEVEL:-9}
    export CCACHE_COMPILERCHECK=${CCACHE_COMPILERCHECK:-content}
    export CCACHE_LOGFILE=${CCACHE_LOGFILE:-"$CCACHE_DIR/cache.debug"}
    #export CCACHE_NODIRECT=true

    # route both compilers through ccache in the cmake invocation
    export CCUSTOMOPTIONS="$CCUSTOMOPTIONS -DCMAKE_C_COMPILER_LAUNCHER=ccache -DCMAKE_CXX_COMPILER_LAUNCHER=ccache"
}
|
||||
|
||||
# Wipe the whole ccache and show the (now empty) statistics.
function comp_ccacheClean() {
    [ "$AC_CCACHE" != true ] && echo "ccache is disabled" && return

    echo "Cleaning ccache"
    ccache -C
    ccache -s
}

# Reset the ccache hit/miss counters (keeps the cached objects).
function comp_ccacheResetStats() {
    [ "$AC_CCACHE" != true ] && return

    ccache -zc
}

# Print the current ccache statistics.
function comp_ccacheShowStats() {
    [ "$AC_CCACHE" != true ] && return

    ccache -s
}
|
||||
|
||||
# Run CMake in $BUILDPATH with all options collected from config.sh and
# the environment (compiler, PCH, scripts, modules, platform paths).
# Restores the caller's working directory and fires ON_AFTER_CONFIG.
function comp_configure() {
    CWD=$(pwd)

    cd $BUILDPATH

    echo "Build path: $BUILDPATH"
    echo "DEBUG info: $CDEBUG"
    echo "Compilation type: $CTYPE"
    echo "CCache: $AC_CCACHE"
    # -DCMAKE_BUILD_TYPE=$CCTYPE disable optimization "slow and huge amount of ram"
    # -DWITH_COREDEBUG=$CDEBUG compiled with debug information

    #-DSCRIPTS_COMMANDS=$CSCRIPTS -DSCRIPTS_CUSTOM=$CSCRIPTS -DSCRIPTS_EASTERNKINGDOMS=$CSCRIPTS -DSCRIPTS_EVENTS=$CSCRIPTS -DSCRIPTS_KALIMDOR=$CSCRIPTS \
    #-DSCRIPTS_NORTHREND=$CSCRIPTS -DSCRIPTS_OUTDOORPVP=$CSCRIPTS -DSCRIPTS_OUTLAND=$CSCRIPTS -DSCRIPTS_PET=$CSCRIPTS -DSCRIPTS_SPELLS=$CSCRIPTS -DSCRIPTS_WORLD=$CSCRIPTS \
    #-DAC_WITH_UNIT_TEST=$CAC_UNIT_TEST -DAC_WITH_PLUGINS=$CAC_PLG \

    # optional custom configuration install directory
    local DCONF=""
    if [ ! -z "$CONFDIR" ]; then
        DCONF="-DCONF_DIR=$CONFDIR"
    fi

    comp_ccacheEnable

    OSOPTIONS=""

    # platform-specific library/include locations
    echo "Platform: $OSTYPE"
    case "$OSTYPE" in
      darwin*)
        OSOPTIONS=" -DMYSQL_ADD_INCLUDE_PATH=/usr/local/include -DMYSQL_LIBRARY=/usr/local/lib/libmysqlclient.dylib -DREADLINE_INCLUDE_DIR=/usr/local/opt/readline/include -DREADLINE_LIBRARY=/usr/local/opt/readline/lib/libreadline.dylib -DOPENSSL_INCLUDE_DIR=/usr/local/opt/openssl@3/include -DOPENSSL_SSL_LIBRARIES=/usr/local/opt/openssl@3/lib/libssl.dylib -DOPENSSL_CRYPTO_LIBRARIES=/usr/local/opt/openssl@3/lib/libcrypto.dylib "
        ;;
      msys*)
        OSOPTIONS=" -DMYSQL_INCLUDE_DIR=C:\tools\mysql\current\include -DMYSQL_LIBRARY=C:\tools\mysql\current\lib\mysqlclient.lib "
        ;;
    esac

    cmake $SRCPATH -DCMAKE_INSTALL_PREFIX=$BINPATH $DCONF \
    -DAPPS_BUILD=$CAPPS_BUILD \
    -DTOOLS_BUILD=$CTOOLS_BUILD \
    -DSCRIPTS=$CSCRIPTS \
    -DMODULES=$CMODULES \
    -DBUILD_TESTING=$CBUILD_TESTING \
    -DUSE_SCRIPTPCH=$CSCRIPTPCH \
    -DUSE_COREPCH=$CCOREPCH \
    -DCMAKE_BUILD_TYPE=$CTYPE \
    -DWITH_WARNINGS=$CWARNINGS \
    -DCMAKE_C_COMPILER=$CCOMPILERC \
    -DCMAKE_CXX_COMPILER=$CCOMPILERCXX \
    $CBUILD_APPS_LIST $CBUILD_TOOLS_LIST $OSOPTIONS $CCUSTOMOPTIONS

    cd $CWD

    runHooks "ON_AFTER_CONFIG"
}
|
||||
|
||||
# Build (and install) the project in $BUILDPATH using $MTHREADS parallel
# jobs, then perform the platform-specific install step and fire
# ON_AFTER_BUILD.
function comp_compile() {
    # MTHREADS=0 means auto-detect: CPU count + 2
    [ $MTHREADS == 0 ] && MTHREADS=$(grep -c ^processor /proc/cpuinfo) && MTHREADS=$(($MTHREADS + 2))

    echo "Using $MTHREADS threads"

    pushd "$BUILDPATH" >> /dev/null || exit 1

    comp_ccacheEnable

    comp_ccacheResetStats

    time cmake --build . --config $CTYPE -j $MTHREADS

    comp_ccacheShowStats

    echo "Platform: $OSTYPE"
    case "$OSTYPE" in
      msys*)
        cmake --install . --config $CTYPE

        popd >> /dev/null || exit 1

        echo "Done"
        ;;
      linux*|darwin*)
        local confDir=${CONFDIR:-"$AC_BINPATH_FULL/../etc"}

        # create the folders before installing to
        # set the current user and permissions
        echo "Creating $AC_BINPATH_FULL..."
        mkdir -p "$AC_BINPATH_FULL"
        echo "Creating $confDir..."
        mkdir -p "$confDir"

        echo "Cmake install..."
        sudo cmake --install . --config $CTYPE

        popd >> /dev/null || exit 1

        # set all applications SUID bit
        echo "Setting permissions on binary files"
        find "$AC_BINPATH_FULL" -mindepth 1 -maxdepth 1 -type f -exec sudo chown root:root -- {} +
        find "$AC_BINPATH_FULL" -mindepth 1 -maxdepth 1 -type f -exec sudo chmod u+s -- {} +

        # inside docker, materialize the default .conf files (cp -n: keep existing)
        if [[ -n "$DOCKER" ]]; then
            [[ -f "$confDir/worldserver.conf.dist" ]] && \
                cp -nv "$confDir/worldserver.conf.dist" "$confDir/worldserver.conf"
            [[ -f "$confDir/authserver.conf.dist" ]] && \
                cp -nv "$confDir/authserver.conf.dist" "$confDir/authserver.conf"
            [[ -f "$confDir/dbimport.conf.dist" ]] && \
                cp -nv "$confDir/dbimport.conf.dist" "$confDir/dbimport.conf"
        fi

        echo "Done"
        ;;
    esac

    runHooks "ON_AFTER_BUILD"
}
|
||||
|
||||
# Configure then compile (the standard incremental build entry point).
function comp_build() {
    comp_configure
    comp_compile
}

# Full rebuild: clean first, then configure and compile.
function comp_all() {
    comp_clean
    comp_build
}
|
||||
23
apps/compiler/includes/includes.sh
Normal file
23
apps/compiler/includes/includes.sh
Normal file
@@ -0,0 +1,23 @@
|
||||
# Shared setup for the compiler scripts: loads the common bash includes,
# applies the user's compiler config.sh overrides, registers the
# after-build hook and creates the build/bin directories.
CURRENT_PATH="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"

source "$CURRENT_PATH/../../bash_shared/includes.sh"

AC_PATH_COMPILER="$AC_PATH_APPS/compiler"

if [ -f "$AC_PATH_COMPILER/config.sh" ]; then
    source "$AC_PATH_COMPILER/config.sh" # should overwrite previous
fi

function ac_on_after_build() {
    # move the run engine
    cp -rvf "$AC_PATH_APPS/startup-scripts/"* "$BINPATH"
}

registerHooks "ON_AFTER_BUILD" ac_on_after_build

source "$AC_PATH_COMPILER/includes/defines.sh"

source "$AC_PATH_COMPILER/includes/functions.sh"

mkdir -p $BUILDPATH
mkdir -p $BINPATH
||||
22
apps/config-merger/README.md
Normal file
22
apps/config-merger/README.md
Normal file
@@ -0,0 +1,22 @@
|
||||
# ==== PHP merger (index.php + merge.php) ====
|
||||
|
||||
This is a PHP script for merging a new .dist file with your existing .conf file (worldserver.conf.dist and authserver.conf.dist)
|
||||
|
||||
It uses sessions so it is multi user safe, it adds any options that are removed to the bottom of the file commented out, just in case it removes something it shouldn't.
|
||||
If you add your custom patch configs below "# Custom" they will be copied exactly as they are.
|
||||
|
||||
Your new config will be found under $basedir/session_id/newconfig.conf.merge
|
||||
|
||||
If you do not run a PHP server on your machine you can read this guide on ["How to execute PHP code using command line?"](https://www.geeksforgeeks.org/how-to-execute-php-code-using-command-line/) on geeksforgeeks.org.
|
||||
|
||||
```
|
||||
php -S localhost:port -t E:\Azerothcore-wotlk\apps\config-merger\
|
||||
```
|
||||
|
||||
Change port to an available port to use. i.e 8000
|
||||
|
||||
Then go to your browser and type:
|
||||
|
||||
```
|
||||
localhost:8000/index.php
|
||||
```
|
||||
44
apps/config-merger/index.php
Normal file
44
apps/config-merger/index.php
Normal file
@@ -0,0 +1,44 @@
|
||||
<?php
/*
 * Project Name: Config File Merge For Mangos/Trinity/AzerothCore Server
 * Date: 01.01.2010 initial version (0.0.1a)
 * Author: Paradox
 * Copyright: Paradox
 * Email: iamparadox@netscape.net (paypal email)
 * License: GNU General Public License v2(GPL)
 *
 * Upload form: posts a .conf.dist file and the user's current .conf file
 * to merge.php, together with the desired line-ending style.
 */
?>
<meta http-equiv="Content-Type" content="text/html; charset=windows-1251">
<FORM enctype="multipart/form-data" ACTION="merge.php" METHOD="POST">
Dist File (.conf.dist)
<br />
<INPUT name="File1" TYPE="file">
<br />
<br />
Current Conf File (.conf)
<br />
<INPUT name="File2" TYPE="file">
<br />
<br />
<INPUT TYPE=RADIO NAME="eol" VALUE="0" CHECKED >Windows -
<INPUT TYPE=RADIO NAME="eol" VALUE="1" >UNIX/Linux
<br />
<br />
<INPUT TYPE="submit" VALUE="Submit">
<br />
<br />
If you have any custom settings, such as from patches,
<br />
make sure they are at the bottom of the file following
<br />
this block (add it if it's not there)
<br />
###############################################################################
<br />
# Custom
<br />
###############################################################################
<br />
<br />

</FORM>
|
||||
179
apps/config-merger/merge.php
Normal file
179
apps/config-merger/merge.php
Normal file
@@ -0,0 +1,179 @@
|
||||
<?php
|
||||
/*
|
||||
* Project Name: Config File Merge For Mangos/Trinity Server
|
||||
* Date: 01.01.2010 inital version (0.0.1a)
|
||||
* Author: Paradox
|
||||
* Copyright: Paradox
|
||||
* Email: iamparadox@netscape.net (paypal email)
|
||||
* License: GNU General Public License v2(GPL)
|
||||
*/
|
||||
|
||||
error_reporting(0);
|
||||
|
||||
if (!empty($_FILES['File1']) && !empty($_FILES['File2']))
|
||||
{
|
||||
session_id();
|
||||
session_start();
|
||||
$basedir = "merge";
|
||||
$eol = "\r\n";
|
||||
if ($_POST['eol'])
|
||||
$eol = "\n";
|
||||
else
|
||||
$eol = "\r\n";
|
||||
if (!file_exists($basedir))
|
||||
mkdir($basedir);
|
||||
if (!file_exists($basedir."/".session_id()))
|
||||
mkdir($basedir."/".session_id());
|
||||
$upload1 = $basedir."/".session_id()."/".basename($_FILES['File1']['name']);
|
||||
$upload2 = $basedir."/".session_id()."/".basename($_FILES['File2']['name']);
|
||||
|
||||
if (strpos($upload1, "worldserver") !== false)
|
||||
$newconfig = $basedir."/".session_id()."/worldserver.conf.merge";
|
||||
else if (strpos($upload1, "authserver") !== false)
|
||||
$newconfig = $basedir."/".session_id()."/authserver.conf.merge";
|
||||
else
|
||||
$newconfig = $basedir."/".session_id()."/UnkownConfigFile.conf.merge";
|
||||
|
||||
$out_file = fopen($newconfig, "w");
|
||||
$success = false;
|
||||
if (move_uploaded_file($_FILES['File1']['tmp_name'], $upload1))
|
||||
{
|
||||
$success = true;
|
||||
}
|
||||
else
|
||||
{
|
||||
$success = false;
|
||||
}
|
||||
if (move_uploaded_file($_FILES['File2']['tmp_name'], $upload2))
|
||||
{
|
||||
$success = true;
|
||||
}
|
||||
else
|
||||
{
|
||||
$success = false;
|
||||
}
|
||||
|
||||
if ($success)
|
||||
{
|
||||
$custom_found = false;
|
||||
$in_file1 = fopen($upload1,"r");
|
||||
$in_file2 = fopen($upload2,"r");
|
||||
$array1 = array();
|
||||
$array2 = array();
|
||||
$line = trim(fgets($in_file1));
|
||||
while (!feof($in_file1))
|
||||
{
|
||||
if ((substr($line,0,1) != '#' && substr($line,0,1) != ''))
|
||||
{
|
||||
list($key, $val) = explode("=",$line);
|
||||
$key = trim($key);
|
||||
$val = trim($val);
|
||||
$array1[$key] = $val;
|
||||
}
|
||||
$line = trim(fgets($in_file1));
|
||||
}
|
||||
$line = trim(fgets($in_file2));
|
||||
while (!feof($in_file2) && !$custom_found)
|
||||
{
|
||||
if (substr($line,0,1) != '#' && substr($line,0,1) != '')
|
||||
{
|
||||
list($key, $val) = explode("=",$line);
|
||||
$key = trim($key);
|
||||
$val = trim($val);
|
||||
$array2[$key] = $val;
|
||||
}
|
||||
if (strtolower($line) == "# custom")
|
||||
$custom_found = true;
|
||||
else
|
||||
$line = trim(fgets($in_file2));
|
||||
}
|
||||
fclose($in_file1);
|
||||
foreach($array2 as $k => $v)
|
||||
{
|
||||
if (array_key_exists($k, $array1))
|
||||
{
|
||||
$array1[$k] = $v;
|
||||
unset($array2[$k]);
|
||||
}
|
||||
}
|
||||
$in_file1 = fopen($upload1,"r");
|
||||
$line = trim(fgets($in_file1));
|
||||
while (!feof($in_file1))
|
||||
{
|
||||
if (substr($line,0,1) != '#' && substr($line,0,1) != '')
|
||||
{
|
||||
$array = array();
|
||||
while (substr($line,0,1) != '#' && substr($line,0,1) != '')
|
||||
{
|
||||
list($key, $val) = explode("=",$line);
|
||||
$key = trim($key);
|
||||
$val = trim($val);
|
||||
$array[$key] = $val;
|
||||
$line = trim(fgets($in_file1));
|
||||
}
|
||||
foreach($array as $k => $v)
|
||||
{
|
||||
if (array_key_exists($k, $array1))
|
||||
fwrite($out_file, $k."=".$array1[$k].$eol);
|
||||
else
|
||||
continue;
|
||||
}
|
||||
unset($array);
|
||||
if (!feof($in_file1))
|
||||
fwrite($out_file, $line.$eol);
|
||||
}
|
||||
else
|
||||
fwrite($out_file, $line.$eol);
|
||||
$line = trim(fgets($in_file1));
|
||||
}
|
||||
if ($custom_found)
|
||||
{
|
||||
fwrite($out_file, $eol);
|
||||
fwrite($out_file, "###############################################################################".$eol);
|
||||
fwrite($out_file, "# Custom".$eol);
|
||||
$line = trim(fgets($in_file2));
|
||||
while (!feof($in_file2))
|
||||
{
|
||||
fwrite($out_file, $line.$eol);
|
||||
$line = trim(fgets($in_file2));
|
||||
}
|
||||
}
|
||||
$first = true;
|
||||
foreach($array2 as $k => $v)
|
||||
{
|
||||
if ($first)
|
||||
{
|
||||
fwrite($out_file, $eol);
|
||||
fwrite($out_file, "###############################################################################".$eol);
|
||||
fwrite($out_file, "# The Following values were removed from the config.".$eol);
|
||||
$first = false;
|
||||
}
|
||||
fwrite($out_file, "# ".$k."=".$v.$eol);
|
||||
}
|
||||
|
||||
if (strpos($upload1, "worldserver") !== false)
|
||||
{
|
||||
file_put_contents($newconfig, str_replace("]=","]",file_get_contents($newconfig)));
|
||||
}
|
||||
else if (strpos($upload1, "authserver") !== false)
|
||||
{
|
||||
file_put_contents($newconfig, str_replace("]=","]",file_get_contents($newconfig)));
|
||||
}
|
||||
|
||||
unset($array1);
|
||||
unset($array2);
|
||||
fclose($in_file1);
|
||||
fclose($in_file2);
|
||||
fclose($out_file);
|
||||
unlink($upload1);
|
||||
unlink($upload2);
|
||||
|
||||
echo "Process done";
|
||||
echo "<br /><a href=".$newconfig.">Click here to retrieve your merged conf</a>";
|
||||
}
|
||||
}
|
||||
else
|
||||
{
|
||||
echo "An error has occurred";
|
||||
}
|
||||
?>
|
||||
1
apps/db_exporter/.gitignore
vendored
Normal file
1
apps/db_exporter/.gitignore
vendored
Normal file
@@ -0,0 +1 @@
|
||||
config.sh
|
||||
12
apps/db_exporter/README.md
Normal file
12
apps/db_exporter/README.md
Normal file
@@ -0,0 +1,12 @@
|
||||
This script is used by devs to export the databases to base directories
|
||||
|
||||
You should use it on clean databases
|
||||
|
||||
## USAGE
|
||||
|
||||
NOTE: this script is only working under unix currently
|
||||
|
||||
1) You must create a config.sh file changing DB connection configurations
|
||||
of /conf/config.sh.dist
|
||||
|
||||
2) Run the db_export.sh script and wait
|
||||
52
apps/db_exporter/db_export.sh
Normal file
52
apps/db_exporter/db_export.sh
Normal file
@@ -0,0 +1,52 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
ROOTPATH="$( cd "$( dirname "${BASH_SOURCE[0]}" )/../../" && pwd )"
|
||||
|
||||
source "$ROOTPATH/apps/bash_shared/includes.sh"
|
||||
|
||||
if [ -f "./config.sh" ]; then
|
||||
source "./config.sh" # should overwrite previous
|
||||
fi
|
||||
|
||||
echo "This is a dev-only procedure to export the DB into the SQL base files. All base files will be overwritten."
|
||||
read -p "Are you sure you want to continue (y/N)? " choice
|
||||
case "$choice" in
|
||||
y|Y ) echo "Exporting the DB into the SQL base files...";;
|
||||
* ) return;;
|
||||
esac
|
||||
|
||||
echo "===== STARTING PROCESS ====="
|
||||
|
||||
|
||||
function export() {
|
||||
echo "Working on: "$1
|
||||
database=$1
|
||||
|
||||
var_base_path="DB_"$database"_PATHS"
|
||||
base_path=${!var_base_path%/}
|
||||
|
||||
base_conf="TPATH="$base_path";\
|
||||
CLEANFOLDER=1; \
|
||||
CHMODE=0; \
|
||||
TEXTDUMPS=0; \
|
||||
PARSEDUMP=1; \
|
||||
FULL=0; \
|
||||
DUMPOPTS='--skip-comments --skip-set-charset --routines --extended-insert --order-by-primary --single-transaction --quick'; \
|
||||
"
|
||||
|
||||
var_base_conf="DB_"$database"_CONF"
|
||||
base_conf=$base_conf${!var_base_conf}
|
||||
|
||||
var_base_name="DB_"$database"_NAME"
|
||||
base_name=${!var_base_name}
|
||||
|
||||
|
||||
bash "$AC_PATH_DEPS/acore/mysql-tools/mysql-tools" "dump" "" "$base_name" "" "$base_conf"
|
||||
}
|
||||
|
||||
for db in ${DATABASES[@]}
|
||||
do
|
||||
export "$db"
|
||||
done
|
||||
|
||||
echo "===== DONE ====="
|
||||
344
apps/docker/Dockerfile
Normal file
344
apps/docker/Dockerfile
Normal file
@@ -0,0 +1,344 @@
|
||||
#syntax=docker/dockerfile:1.2
|
||||
|
||||
#================================================================
|
||||
#
|
||||
# DEV: Stage used for the development environment
|
||||
# and the locally built services
|
||||
#
|
||||
#=================================================================
|
||||
|
||||
FROM ubuntu:20.04 as base
|
||||
ARG USER_ID=1000
|
||||
ARG GROUP_ID=1000
|
||||
ARG DOCKER_USER=acore
|
||||
|
||||
LABEL description="AC base image for dev containers"
|
||||
|
||||
# List of timezones: http://en.wikipedia.org/wiki/List_of_tz_database_time_zones
|
||||
|
||||
ENV DOCKER=1
|
||||
|
||||
# Ensure ac-dev-server can properly pull versions
|
||||
ENV GIT_DISCOVERY_ACROSS_FILESYSTEM=1
|
||||
|
||||
# set timezone environment variable
|
||||
ENV TZ=Etc/UTC
|
||||
|
||||
# set noninteractive mode so tzdata doesn't ask to set timezone on install
|
||||
ENV DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
# Do not use acore dashboard to install
|
||||
# since it's not cacheable by docker
|
||||
RUN apt-get update && apt-get install -y gdb gdbserver git dos2unix lsb-core sudo curl unzip \
|
||||
make cmake clang libmysqlclient-dev \
|
||||
libboost-system1.7*-dev libboost-filesystem1.7*-dev libboost-program-options1.7*-dev libboost-iostreams1.7*-dev \
|
||||
build-essential libtool cmake-data openssl libgoogle-perftools-dev google-perftools \
|
||||
libssl-dev libmysql++-dev libreadline6-dev zlib1g-dev libbz2-dev mysql-client \
|
||||
libncurses5-dev ccache \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# Ensure git will work with the AzerothCore source directory
|
||||
RUN git config --global --add safe.directory /azerothcore
|
||||
|
||||
# change timezone in container
|
||||
RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone && dpkg-reconfigure --frontend noninteractive tzdata
|
||||
|
||||
# Create a non-root user
|
||||
RUN addgroup --gid "$GROUP_ID" "$DOCKER_USER" && \
|
||||
adduser --disabled-password --gecos '' --uid "$USER_ID" --gid "$GROUP_ID" "$DOCKER_USER" && \
|
||||
passwd -d "$DOCKER_USER" && \
|
||||
echo "$DOCKER_USER ALL=(ALL:ALL) NOPASSWD: ALL" >> /etc/sudoers
|
||||
|
||||
# must be created to set the correct permissions on them
|
||||
RUN mkdir -p /azerothcore/env/dist/bin
|
||||
RUN mkdir -p /azerothcore/env/dist/data/Cameras
|
||||
RUN mkdir -p /azerothcore/env/dist/data/dbc
|
||||
RUN mkdir -p /azerothcore/env/dist/data/maps
|
||||
RUN mkdir -p /azerothcore/env/dist/data/mmaps
|
||||
RUN mkdir -p /azerothcore/env/dist/data/vmaps
|
||||
RUN mkdir -p /azerothcore/env/dist/logs
|
||||
RUN mkdir -p /azerothcore/env/dist/temp
|
||||
RUN mkdir -p /azerothcore/env/dist/etc
|
||||
RUN mkdir -p /azerothcore/var/build/obj
|
||||
|
||||
# Correct permissions for non-root operations
|
||||
RUN chown -R $DOCKER_USER:$DOCKER_USER /home/acore
|
||||
RUN chown -R $DOCKER_USER:$DOCKER_USER /run
|
||||
RUN chown -R $DOCKER_USER:$DOCKER_USER /opt
|
||||
RUN chown -R $DOCKER_USER:$DOCKER_USER /azerothcore
|
||||
|
||||
USER $DOCKER_USER
|
||||
|
||||
# copy only necessary files for the acore dashboard
|
||||
COPY --chown=$DOCKER_USER:$DOCKER_USER apps /azerothcore/apps
|
||||
COPY --chown=$DOCKER_USER:$DOCKER_USER bin /azerothcore/bin
|
||||
COPY --chown=$DOCKER_USER:$DOCKER_USER conf /azerothcore/conf
|
||||
COPY --chown=$DOCKER_USER:$DOCKER_USER data /azerothcore/data
|
||||
COPY --chown=$DOCKER_USER:$DOCKER_USER deps /azerothcore/deps
|
||||
COPY --chown=$DOCKER_USER:$DOCKER_USER acore.json /azerothcore/acore.json
|
||||
COPY --chown=$DOCKER_USER:$DOCKER_USER acore.sh /azerothcore/acore.sh
|
||||
|
||||
# Download deno and make sure the dashboard works
|
||||
RUN bash /azerothcore/acore.sh quit
|
||||
|
||||
WORKDIR /azerothcore
|
||||
|
||||
#================================================================
|
||||
#
|
||||
# Dev: create dev server image
|
||||
#
|
||||
#=================================================================
|
||||
|
||||
FROM base as dev
|
||||
|
||||
LABEL description="AC dev image for dev containers"
|
||||
|
||||
USER $DOCKER_USER
|
||||
|
||||
# copy everything so we can work directly within the container
|
||||
# using tools such as vscode dev-container
|
||||
# NOTE: this folder is different by the /azerothcore (which is binded instead)
|
||||
COPY --chown=$DOCKER_USER:$DOCKER_USER . /azerothcore
|
||||
|
||||
#================================================================
|
||||
#
|
||||
# SERVICE BASE: prepare the OS for the production-ready services
|
||||
#
|
||||
#=================================================================
|
||||
|
||||
FROM ubuntu:20.04 as servicebase
|
||||
|
||||
ARG USER_ID=1000
|
||||
ARG GROUP_ID=1000
|
||||
ARG DOCKER_USER=acore
|
||||
|
||||
LABEL description="AC service image for server applications"
|
||||
|
||||
# List of timezones: http://en.wikipedia.org/wiki/List_of_tz_database_time_zones
|
||||
|
||||
# set timezone environment variable
|
||||
ENV TZ=Etc/UTC
|
||||
|
||||
# set noninteractive mode so tzdata doesn't ask to set timezone on install
|
||||
ENV DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
# Create a non-root user
|
||||
RUN addgroup --gid "$GROUP_ID" "$DOCKER_USER" && \
|
||||
adduser --disabled-password --gecos '' --uid "$USER_ID" --gid "$GROUP_ID" "$DOCKER_USER" && \
|
||||
passwd -d "$DOCKER_USER" && \
|
||||
echo "$DOCKER_USER ALL=(ALL:ALL) NOPASSWD: ALL" >> /etc/sudoers
|
||||
|
||||
# install the required dependencies to run the server
|
||||
RUN apt-get update && apt-get install -y dos2unix gdb gdbserver google-perftools libgoogle-perftools-dev net-tools \
|
||||
libboost-system1.7*-dev libboost-filesystem1.7*-dev libboost-program-options1.7*-dev libboost-iostreams1.7*-dev \
|
||||
tzdata libmysqlclient-dev mysql-client curl unzip && rm -rf /var/lib/apt/lists/* ;
|
||||
|
||||
# change timezone in container
|
||||
RUN ln -snf /usr/share/zoneinfo/$TZ /etc/localtime && echo $TZ > /etc/timezone && dpkg-reconfigure --frontend noninteractive tzdata
|
||||
|
||||
# Correct permissions for non-root operations
|
||||
RUN chown -R $DOCKER_USER:$DOCKER_USER /home/acore
|
||||
RUN chown -R $DOCKER_USER:$DOCKER_USER /run
|
||||
RUN chown -R $DOCKER_USER:$DOCKER_USER /opt
|
||||
|
||||
COPY --chown=$DOCKER_USER:$DOCKER_USER --from=base /azerothcore /azerothcore
|
||||
|
||||
USER $DOCKER_USER
|
||||
|
||||
# must be created to avoid permissions errors
|
||||
RUN mkdir -p /azerothcore/env/dist/data/Cameras
|
||||
RUN mkdir -p /azerothcore/env/dist/data/dbc
|
||||
RUN mkdir -p /azerothcore/env/dist/data/maps
|
||||
RUN mkdir -p /azerothcore/env/dist/data/mmaps
|
||||
RUN mkdir -p /azerothcore/env/dist/data/vmaps
|
||||
RUN mkdir -p /azerothcore/env/dist/logs
|
||||
RUN mkdir -p /azerothcore/env/dist/etc
|
||||
RUN mkdir -p /azerothcore/env/dist/bin
|
||||
|
||||
# Download deno and make sure the dashboard works
|
||||
RUN bash /azerothcore/acore.sh quit
|
||||
|
||||
WORKDIR /azerothcore/
|
||||
|
||||
#================================================================
|
||||
#
|
||||
# AUTH & WORLD local: images used for local services
|
||||
# These images don't include binaries by default
|
||||
#
|
||||
#=================================================================
|
||||
|
||||
FROM servicebase as authserver-local
|
||||
|
||||
LABEL description="AC authserver image for local environment"
|
||||
|
||||
CMD ./acore.sh run-authserver
|
||||
|
||||
USER $DOCKER_USER
|
||||
|
||||
FROM servicebase as worldserver-local
|
||||
|
||||
LABEL description="AC worldserver image for local environment"
|
||||
|
||||
CMD ./acore.sh run-worldserver
|
||||
|
||||
USER $DOCKER_USER
|
||||
|
||||
#================================================================
|
||||
#
|
||||
# BUILD: compile sources
|
||||
#
|
||||
#=================================================================
|
||||
FROM base as build
|
||||
|
||||
ARG DOCKER_USER=acore
|
||||
USER $DOCKER_USER
|
||||
|
||||
LABEL description="AC Image used by the build stage to generate production images"
|
||||
|
||||
RUN mkdir -p /azerothcore/env/etc/
|
||||
|
||||
# .git is needed by the compiler
|
||||
COPY --chown=$DOCKER_USER:$DOCKER_USER ./.git /azerothcore/.git
|
||||
COPY --chown=$DOCKER_USER:$DOCKER_USER ./CMakeLists.txt /azerothcore/CMakeLists.txt
|
||||
COPY --chown=$DOCKER_USER:$DOCKER_USER ./deps /azerothcore/deps
|
||||
COPY --chown=$DOCKER_USER:$DOCKER_USER ./src /azerothcore/src
|
||||
COPY --chown=$DOCKER_USER:$DOCKER_USER ./modules /azerothcore/modules
|
||||
# check if we have ccache files available outside
|
||||
RUN rm -rf /azerothcore/var/ccache/*
|
||||
COPY --chown=$DOCKER_USER:$DOCKER_USER var/docker/ccache /azerothcore/var/ccache
|
||||
|
||||
# install eluna
|
||||
RUN git clone --depth=1 --branch=master https://github.com/azerothcore/mod-eluna.git /azerothcore/modules/mod-eluna
|
||||
|
||||
ENV USER_CONF_PATH=/azerothcore/apps/docker/config-docker.sh
|
||||
ENV CTYPE=RelWithDebInfo
|
||||
ENV AC_CCACHE=true
|
||||
ENV CCACHE_CPP2=true
|
||||
ENV CSCRIPTPCH=OFF
|
||||
ENV CCOREPCH=OFF
|
||||
ENV CTOOLS_BUILD=all
|
||||
# ENV CTOOLS_BUILD=maps-only
|
||||
ENV CSCRIPTS=static
|
||||
RUN bash apps/docker/docker-build-prod.sh
|
||||
|
||||
#================================================================
|
||||
#
|
||||
# AUTH SERVICE: create a ready-to-use authserver image
|
||||
# with binaries included
|
||||
#
|
||||
#=================================================================
|
||||
FROM authserver-local as authserver
|
||||
|
||||
LABEL description="AC Production: authserver"
|
||||
|
||||
ARG DOCKER_USER=acore
|
||||
USER $DOCKER_USER
|
||||
|
||||
COPY --chown=$DOCKER_USER:$DOCKER_USER --from=build /azerothcore/env/dist/etc /azerothcore/env/dist/etc
|
||||
COPY --chown=$DOCKER_USER:$DOCKER_USER --from=build /azerothcore/env/dist/bin/authserver /azerothcore/env/dist/bin/authserver
|
||||
|
||||
#================================================================
|
||||
#
|
||||
# WORLD SERVICE: create a ready-to-use worldserver image
|
||||
# with binaries and data included
|
||||
#
|
||||
#=================================================================
|
||||
FROM worldserver-local as worldserver
|
||||
|
||||
LABEL description="AC Production: worldserver"
|
||||
|
||||
ARG DOCKER_USER=acore
|
||||
USER $DOCKER_USER
|
||||
|
||||
RUN mkdir -p /azerothcore/env/dist/bin/lua_scripts
|
||||
COPY --chown=$DOCKER_USER:$DOCKER_USER --from=build /azerothcore/env/dist/etc /azerothcore/env/dist/etc
|
||||
COPY --chown=$DOCKER_USER:$DOCKER_USER --from=build /azerothcore/env/dist/bin/worldserver /azerothcore/env/dist/bin/worldserver
|
||||
COPY --chown=$DOCKER_USER:$DOCKER_USER --from=build /azerothcore/env/dist/bin/lua_scripts /azerothcore/env/dist/bin/lua_scripts
|
||||
COPY --chown=$DOCKER_USER:$DOCKER_USER --from=build /azerothcore/env/dist/bin/dbimport /azerothcore/env/dist/bin/dbimport
|
||||
|
||||
#================================================================
|
||||
#
|
||||
# CLIENT DATA
|
||||
#
|
||||
#=================================================================
|
||||
|
||||
FROM ubuntu:20.04 as client-data
|
||||
ARG USER_ID=1000
|
||||
ARG GROUP_ID=1000
|
||||
ARG DOCKER_USER=acore
|
||||
|
||||
LABEL description="AC Production: client-data"
|
||||
|
||||
RUN apt-get update && apt-get install -y tzdata curl unzip && rm -rf /var/lib/apt/lists/* ;
|
||||
|
||||
# set timezone environment variable
|
||||
ENV TZ=Etc/UTC
|
||||
|
||||
# set noninteractive mode so tzdata doesn't ask to set timezone on install
|
||||
ENV DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
RUN addgroup --gid "$GROUP_ID" "$DOCKER_USER" && \
|
||||
adduser --disabled-password --gecos '' --uid "$USER_ID" --gid "$GROUP_ID" "$DOCKER_USER" && \
|
||||
passwd -d "$DOCKER_USER" && \
|
||||
echo "$DOCKER_USER ALL=(ALL:ALL) NOPASSWD: ALL" >> /etc/sudoers
|
||||
|
||||
# ENV DATAPATH=/azerothcore/env/dist/data-temp
|
||||
ENV DATAPATH=/azerothcore/env/dist/data
|
||||
ENV DATAPATH_ZIP=/tmp/data.zip
|
||||
|
||||
RUN mkdir -p "$DATAPATH"
|
||||
ARG CACHEBUST=1
|
||||
# RUN --mount=type=bind,target=/azerothcore-temp,readwrite --mount=type=cache,target=/azerothcore/env/dist/data-temp /azerothcore-temp/acore.sh client-data && cp -rT /azerothcore/env/dist/data-temp/ /azerothcore/env/dist/data && chown -R $DOCKER_USER:$DOCKER_USER /azerothcore
|
||||
RUN --mount=type=bind,target=/azerothcore-temp,readwrite /azerothcore-temp/acore.sh client-data && chown -R $DOCKER_USER:$DOCKER_USER /azerothcore
|
||||
|
||||
USER $DOCKER_USER
|
||||
|
||||
#================================================================
|
||||
#
|
||||
# TOOLS
|
||||
#
|
||||
#=================================================================
|
||||
|
||||
FROM ubuntu:20.04 as tools
|
||||
ARG USER_ID=1000
|
||||
ARG GROUP_ID=1000
|
||||
ARG DOCKER_USER=acore
|
||||
|
||||
LABEL description="AC Production: tools"
|
||||
|
||||
# List of timezones: http://en.wikipedia.org/wiki/List_of_tz_database_time_zones
|
||||
|
||||
# set timezone environment variable
|
||||
ENV TZ=Etc/UTC
|
||||
|
||||
# set noninteractive mode so tzdata doesn't ask to set timezone on install
|
||||
ENV DEBIAN_FRONTEND=noninteractive
|
||||
|
||||
RUN apt-get update && apt-get install -y libmysqlclient-dev libssl-dev libbz2-dev \
|
||||
libboost-system1.7*-dev libboost-filesystem1.7*-dev libboost-program-options1.7*-dev libboost-iostreams1.7*-dev \
|
||||
sudo && rm -rf /var/lib/apt/lists/* ;
|
||||
|
||||
# Create a non-root user
|
||||
RUN addgroup --gid "$GROUP_ID" "$DOCKER_USER" && \
|
||||
adduser --disabled-password --gecos '' --uid "$USER_ID" --gid "$GROUP_ID" "$DOCKER_USER" && \
|
||||
passwd -d "$DOCKER_USER" && \
|
||||
echo "$DOCKER_USER ALL=(ALL:ALL) NOPASSWD: ALL" >> /etc/sudoers
|
||||
|
||||
RUN mkdir -p /azerothcore/env/client/
|
||||
RUN chown -R $DOCKER_USER:$DOCKER_USER /azerothcore
|
||||
|
||||
USER $DOCKER_USER
|
||||
|
||||
WORKDIR /azerothcore/env/client/
|
||||
|
||||
RUN mkdir -p /azerothcore/env/client/Cameras
|
||||
RUN mkdir -p /azerothcore/env/client/dbc
|
||||
RUN mkdir -p /azerothcore/env/client/maps
|
||||
RUN mkdir -p /azerothcore/env/client/mmaps
|
||||
RUN mkdir -p /azerothcore/env/client/vmaps
|
||||
|
||||
COPY --chown=$DOCKER_USER:$DOCKER_USER --from=build /azerothcore/env/dist/bin/map_extractor /azerothcore/env/client/map_extractor
|
||||
COPY --chown=$DOCKER_USER:$DOCKER_USER --from=build /azerothcore/env/dist/bin/mmaps_generator /azerothcore/env/client/mmaps_generator
|
||||
COPY --chown=$DOCKER_USER:$DOCKER_USER --from=build /azerothcore/env/dist/bin/vmap4_assembler /azerothcore/env/client/vmap4_assembler
|
||||
COPY --chown=$DOCKER_USER:$DOCKER_USER --from=build /azerothcore/env/dist/bin/vmap4_extractor /azerothcore/env/client/vmap4_extractor
|
||||
|
||||
27
apps/docker/README.md
Normal file
27
apps/docker/README.md
Normal file
@@ -0,0 +1,27 @@
|
||||
# Run AzerothCore with Docker
|
||||
|
||||
*This readme it's a summary of the AzerothCore docker features.*
|
||||
|
||||
Docker. is a software that performs operating-system-level virtualization, allowing to wrap and launch applications inside containers.
|
||||
|
||||
Thanks to Docker, you can quickly setup and run AzerothCore in any operating system.
|
||||
|
||||
The **only** requirement is having [Docker](https://docs.docker.com/install/) installed into your system. Forget about installing mysql, visual studio, cmake, etc...
|
||||
|
||||
### Installation instructions
|
||||
|
||||
Check the [Install with Docker](https://www.azerothcore.org/wiki/Install-with-Docker) guide.
|
||||
|
||||
### Memory usage
|
||||
|
||||
The total amount of RAM when running all AzerothCore docker containers is **less than 2 GB**.
|
||||
|
||||

|
||||
|
||||
|
||||
### Docker containers vs Virtual machines
|
||||
|
||||
Using Docker will have the same benefits as using virtual machines, but with much less overhead:
|
||||
|
||||

|
||||
|
||||
8
apps/docker/config-docker.sh
Normal file
8
apps/docker/config-docker.sh
Normal file
@@ -0,0 +1,8 @@
|
||||
CUR_PATH="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
|
||||
|
||||
CTOOLS_BUILD=all
|
||||
|
||||
# allow the user to override configs
|
||||
if [ -f "$AC_PATH_CONF/config.sh" ]; then
|
||||
source "$AC_PATH_CONF/config.sh" # should overwrite previous
|
||||
fi
|
||||
14
apps/docker/docker-build-dev.sh
Normal file
14
apps/docker/docker-build-dev.sh
Normal file
@@ -0,0 +1,14 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
CUR_PATH="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
|
||||
|
||||
source "$CUR_PATH/docker-build-prod.sh"
|
||||
|
||||
echo "Fixing EOL..."
|
||||
# using -n (new file mode) should also fix the issue
|
||||
# when the file is created with the default acore user but you
|
||||
# set a different user into the docker configurations
|
||||
for file in "env/dist/etc/"*
|
||||
do
|
||||
dos2unix -n $file $file
|
||||
done
|
||||
5
apps/docker/docker-build-prod.sh
Normal file
5
apps/docker/docker-build-prod.sh
Normal file
@@ -0,0 +1,5 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
cd /azerothcore
|
||||
|
||||
bash acore.sh compiler build
|
||||
180
apps/docker/docker-cmd.sh
Normal file
180
apps/docker/docker-cmd.sh
Normal file
@@ -0,0 +1,180 @@
|
||||
#!/bin/bash
|
||||
|
||||
# TODO(michaeldelago) decide if we need a wrapper like this around docker
|
||||
# commands.
|
||||
#
|
||||
# Running the docker commands should be simple and familiar.
|
||||
# Introducting extra steps through the dashboard can cause issues with people
|
||||
# getting started, especially if they already know docker.
|
||||
#
|
||||
# If a new user knows docker, they will feel (pretty close) to right at home.
|
||||
# If a new user doesn't know docker, it's easy to learn and the knowledge
|
||||
# applies to much more than azerothcore
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
COMPOSE_DOCKER_CLI_BUILD="1"
|
||||
DOCKER_BUILDKIT="1"
|
||||
# BUILDKIT_INLINE_CACHE="1"
|
||||
|
||||
function usage () {
|
||||
cat <<EOF
|
||||
Wrapper for shell scripts around docker
|
||||
|
||||
usage: $(basename $0) ACTION [ ACTION... ] [ ACTION_ARG... ]
|
||||
|
||||
actions:
|
||||
EOF
|
||||
# the `-s` will remove the "#" and properly space the action and description
|
||||
cat <<EOF | column -t -l2 -s'#'
|
||||
> start:app # Start the development worldserver and authserver
|
||||
> start:app:d # Start the development worldserver and authserver in detached mode
|
||||
> build # build the development worldserver and authserver
|
||||
> pull # pull the development worldserver and authserver
|
||||
> build:nocache # build the development worldserver and authserver without cache
|
||||
> clean:build # clean build artifacts from the dev server
|
||||
> client-data # download client data in the dev server
|
||||
> dev:up start # the dev server
|
||||
> dev:build # compile azerothcore using the dev server
|
||||
> dev:dash # execute the dashboard in the dev server container
|
||||
> dev:shell [ ARGS... ] # open a bash shell in the dev server
|
||||
> prod:build # Build the service containers used by acore-docker
|
||||
> prod:pull # Pull the containers used by acore-docker
|
||||
> prod:up # Start the services used by acore-docker
|
||||
> prod:up:d # start the services used by acore-docker in the background
|
||||
> attach SERVICE # attach to a service currently running in docker compose
|
||||
EOF
|
||||
}
|
||||
|
||||
# If no args, just spit usage and exit
|
||||
[[ $# -eq 0 ]] && usage && exit
|
||||
|
||||
# loop through commands passed
|
||||
while [[ $# -gt 0 ]]; do
|
||||
case "$1" in
|
||||
start:app)
|
||||
set -x
|
||||
docker compose --profile app up
|
||||
set +x
|
||||
# pop the head off of the queue of args
|
||||
# After this, the value of $1 is the value of $2
|
||||
shift
|
||||
;;
|
||||
|
||||
start:app:d)
|
||||
set -x
|
||||
docker compose --profile app up -d
|
||||
set +x
|
||||
shift
|
||||
;;
|
||||
|
||||
build)
|
||||
set -x
|
||||
docker compose --profile local --profile dev --profile dev-build build
|
||||
docker compose --profile dev-build run --rm --no-deps ac-dev-build /bin/bash /azerothcore/apps/docker/docker-build-dev.sh
|
||||
set +x
|
||||
shift
|
||||
;;
|
||||
|
||||
pull)
|
||||
set -x
|
||||
docker compose --profile local --profile dev --profile dev-build pull
|
||||
set +x
|
||||
shift
|
||||
;;
|
||||
|
||||
build:nocache)
|
||||
set -x
|
||||
docker compose --profile local --profile dev --profile dev-build build --no-cache
|
||||
docker compose run --rm --no-deps ac-dev-build /bin/bash /azerothcore/apps/docker/docker-build-dev.sh
|
||||
set +x
|
||||
shift
|
||||
;;
|
||||
|
||||
clean:build)
|
||||
set -x
|
||||
docker compose run --rm --no-deps ac-dev-server bash acore.sh compiler clean
|
||||
docker compose run --rm --no-deps ac-dev-server bash acore.sh compiler ccacheClean
|
||||
set +x
|
||||
shift
|
||||
;;
|
||||
|
||||
client-data)
|
||||
set -x
|
||||
docker compose run --rm --no-deps ac-dev-server bash acore.sh client-data
|
||||
set +x
|
||||
shift
|
||||
;;
|
||||
|
||||
dev:up)
|
||||
set -x
|
||||
docker compose up -d ac-dev-server
|
||||
set +x
|
||||
shift
|
||||
;;
|
||||
|
||||
dev:build)
|
||||
set -x
|
||||
docker compose run --rm ac-dev-server bash acore.sh compiler build
|
||||
set +x
|
||||
shift
|
||||
;;
|
||||
|
||||
dev:dash)
|
||||
set -x
|
||||
docker compose run --rm ac-dev-server bash /azerothcore/acore.sh ${@:2}
|
||||
set +x
|
||||
shift
|
||||
;;
|
||||
|
||||
dev:shell)
|
||||
set -x
|
||||
docker compose up -d ac-dev-server
|
||||
docker compose exec ac-dev-server bash ${@:2}
|
||||
set +x
|
||||
shift
|
||||
;;
|
||||
|
||||
build:prod|prod:build)
|
||||
set -x
|
||||
docker compose --profile prod build
|
||||
set +x
|
||||
shift
|
||||
;;
|
||||
|
||||
pull:prod|prod:pull)
|
||||
set -x
|
||||
docker compose --profile prod pull
|
||||
set +x
|
||||
shift
|
||||
;;
|
||||
|
||||
prod:up|start:prod)
|
||||
set -x
|
||||
docker compose --profile prod-app up
|
||||
set +x
|
||||
shift
|
||||
;;
|
||||
|
||||
prod:up:d|start:prod:d)
|
||||
set -x
|
||||
docker compose --profile prod-app up -d
|
||||
set +x
|
||||
shift
|
||||
;;
|
||||
|
||||
attach)
|
||||
SERVICE="$2"
|
||||
set -x
|
||||
docker compose attach "$SERVICE"
|
||||
set +x
|
||||
shift
|
||||
shift # Second to pass the argument
|
||||
;;
|
||||
|
||||
*)
|
||||
echo "Unknown or empty arg"
|
||||
usage
|
||||
exit 1
|
||||
esac
|
||||
done
|
||||
83
apps/extractor/extractor.bat
Normal file
83
apps/extractor/extractor.bat
Normal file
@@ -0,0 +1,83 @@
|
||||
@ECHO OFF
|
||||
CLS
|
||||
|
||||
:MENU
|
||||
ECHO.
|
||||
ECHO ...............................................
|
||||
ECHO AzerothCore dbc, maps, vmaps, mmaps extractor
|
||||
ECHO ...............................................
|
||||
ECHO PRESS 1, 2, 3 OR 4 to select your task, or 5 to EXIT.
|
||||
ECHO ...............................................
|
||||
ECHO.
|
||||
ECHO WARNING! when extracting the vmaps extractor will
|
||||
ECHO output the text below, it's intended and not an error:
|
||||
ECHO ..........................................
|
||||
ECHO Extracting World\Wmo\Band\Final_Stage.wmo
|
||||
ECHO No such file.
|
||||
ECHO Couldn't open RootWmo!!!
|
||||
ECHO Done!
|
||||
ECHO ..........................................
|
||||
ECHO.
|
||||
ECHO Press 1, 2, 3 or 4 to start extracting or 5 to exit.
|
||||
ECHO 1 - Extract base files (NEEDED) and cameras.
|
||||
ECHO 2 - Extract vmaps (needs maps to be extracted before you run this) (OPTIONAL, highly recommended)
|
||||
ECHO 3 - Extract mmaps (needs vmaps to be extracted before you run this, may take hours) (OPTIONAL, highly recommended)
|
||||
ECHO 4 - Extract all (may take hours)
|
||||
ECHO 5 - EXIT
|
||||
ECHO.
|
||||
SET /P M=Type 1, 2, 3, 4 or 5 then press ENTER:
|
||||
IF %M%==1 GOTO MAPS
|
||||
IF %M%==2 GOTO VMAPS
|
||||
IF %M%==3 GOTO MMAPS
|
||||
IF %M%==4 GOTO ALL
|
||||
IF %M%==5 GOTO :EOF
|
||||
|
||||
:MAPS
|
||||
start /b /w map_extractor.exe
|
||||
GOTO MENU
|
||||
|
||||
:VMAPS
|
||||
start /b /w vmap4_extractor.exe
|
||||
if exist vmaps\ (
|
||||
echo folder found.
|
||||
) else (
|
||||
echo creating folder "vmaps".
|
||||
mkdir "vmaps"
|
||||
)
|
||||
start /b /w vmap4_assembler.exe Buildings vmaps
|
||||
rmdir Buildings /s /q
|
||||
GOTO MENU
|
||||
|
||||
:MMAPS
|
||||
ECHO This may take a few hours to complete. Please be patient.
|
||||
PAUSE
|
||||
if exist mmaps\ (
|
||||
echo folder found.
|
||||
) else (
|
||||
echo creating folder "mmaps".
|
||||
mkdir "mmaps"
|
||||
)
|
||||
start /b /w mmaps_generator.exe
|
||||
GOTO MENU
|
||||
|
||||
:ALL
|
||||
ECHO This may take a few hours to complete. Please be patient.
|
||||
PAUSE
|
||||
if exist vmaps\ (
|
||||
echo folder found.
|
||||
) else (
|
||||
echo creating folder "vmaps".
|
||||
mkdir "vmaps"
|
||||
)
|
||||
if exist mmaps\ (
|
||||
echo folder found.
|
||||
) else (
|
||||
echo creating folder "mmaps".
|
||||
mkdir "mmaps"
|
||||
)
|
||||
start /b /w map_extractor.exe
|
||||
start /b /w vmap4_extractor.exe
|
||||
start /b /w vmap4_assembler.exe Buildings vmaps
|
||||
rmdir Buildings /s /q
|
||||
start /b /w mmaps_generator.exe
|
||||
GOTO MENU
|
||||
83
apps/extractor/extractor_es.bat
Normal file
83
apps/extractor/extractor_es.bat
Normal file
@@ -0,0 +1,83 @@
|
||||
@ECHO OFF
|
||||
CLS
|
||||
|
||||
:MENU
|
||||
ECHO.
|
||||
ECHO ...............................................
|
||||
ECHO AzerothCore dbc, maps, vmaps, mmaps extractor
|
||||
ECHO ...............................................
|
||||
ECHO PRESIONE 1, 2, 3 O 4 para seleccionar su tarea, o 5 para SALIR.
|
||||
ECHO ...............................................
|
||||
ECHO.
|
||||
ECHO ADVERTENCIA: al extraer los vmaps del extractor
|
||||
ECHO la salida del texto de abajo, es intencional y no un error:
|
||||
ECHO ..........................................
|
||||
ECHO Extracting World\Wmo\Band\Final_Stage.wmo
|
||||
ECHO No such file.
|
||||
ECHO Couldn't open RootWmo!!!
|
||||
ECHO Done!
|
||||
ECHO ..........................................
|
||||
ECHO.
|
||||
ECHO Pulse 1, 2, 3 o 4 para iniciar la extraccion o 5 para salir.
|
||||
ECHO 1 - Extraer los archivos base (NECESARIOS) y las cámaras.
|
||||
ECHO 2 - Extraer vmaps (necesita que los mapas se extraigan antes de ejecutar esto) (OPCIONAL, muy recomendable)
|
||||
ECHO 3 - Extraer mmaps (necesita que los vmaps se extraigan antes de ejecutar esto, puede llevar horas) (OPCIONAL, muy recomendable)
|
||||
ECHO 4 - Extraer todo (puede llevar varias horas)
|
||||
ECHO 5 - SALIR
|
||||
ECHO.
|
||||
SET /P M=Escriba 1, 2, 3, 4 o 5 y pulse ENTER:
|
||||
IF %M%==1 GOTO MAPS
|
||||
IF %M%==2 GOTO VMAPS
|
||||
IF %M%==3 GOTO MMAPS
|
||||
IF %M%==4 GOTO ALL
|
||||
IF %M%==5 GOTO :EOF
|
||||
|
||||
:MAPS
|
||||
start /b /w map_extractor.exe
|
||||
GOTO MENU
|
||||
|
||||
:VMAPS
|
||||
start /b /w vmap4_extractor.exe
|
||||
if exist vmaps\ (
|
||||
echo folder found.
|
||||
) else (
|
||||
echo creating folder "vmaps".
|
||||
mkdir "vmaps"
|
||||
)
|
||||
start /b /w vmap4_assembler.exe Buildings vmaps
|
||||
rmdir Buildings /s /q
|
||||
GOTO MENU
|
||||
|
||||
:MMAPS
|
||||
ECHO Esto puede tardar unas horas en completarse. Por favor, tenga paciencia.
|
||||
PAUSE
|
||||
if exist mmaps\ (
|
||||
echo folder found.
|
||||
) else (
|
||||
echo creating folder "mmaps".
|
||||
mkdir "mmaps"
|
||||
)
|
||||
start /b /w mmaps_generator.exe
|
||||
GOTO MENU
|
||||
|
||||
:ALL
|
||||
ECHO Esto puede tardar unas horas en completarse. Por favor, tenga paciencia.
|
||||
PAUSE
|
||||
if exist vmaps\ (
|
||||
echo folder found.
|
||||
) else (
|
||||
echo creating folder "vmaps".
|
||||
mkdir "vmaps"
|
||||
)
|
||||
if exist mmaps\ (
|
||||
echo folder found.
|
||||
) else (
|
||||
echo creating folder "mmaps".
|
||||
mkdir "mmaps"
|
||||
)
|
||||
start /b /w map_extractor.exe
|
||||
start /b /w vmap4_extractor.exe
|
||||
start /b /w vmap4_assembler.exe Buildings vmaps
|
||||
rmdir Buildings /s /q
|
||||
start /b /w mmaps_generator.exe
|
||||
GOTO MENU
|
||||
5
apps/git_tools/setup_git_commit_template.sh
Normal file
5
apps/git_tools/setup_git_commit_template.sh
Normal file
@@ -0,0 +1,5 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
## Set a local git commit template
|
||||
git config --local commit.template ".git_commit_template.txt" ;
|
||||
echo "--- Successfully set the default commit template for this repository only. Verify with: git config -e"
|
||||
34
apps/git_tools/subrepo-update.sh
Normal file
34
apps/git_tools/subrepo-update.sh
Normal file
@@ -0,0 +1,34 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
#######################
|
||||
#
|
||||
# README
|
||||
#
|
||||
# This script is used to automatically update
|
||||
# submodules and subrepos included in this project
|
||||
# Subrepo are updated in bidirectional way (pull + push)
|
||||
# because they are intended to be developed by this organization
|
||||
#
|
||||
# NOTE: only maintainers and CI should run this script and
|
||||
# keep it updated
|
||||
#
|
||||
#######################
|
||||
|
||||
set -e
|
||||
ROOT_PATH="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )/../../"
|
||||
# update all submodules
|
||||
git submodule update --init --recursive
|
||||
git submodule foreach git pull origin master
|
||||
# include libraries for git subrepo
|
||||
source "$ROOT_PATH/deps/git-subrepo/.rc"
|
||||
source "$ROOT_PATH/deps/acore/bash-lib/src/git-utils/subrepo.sh"
|
||||
|
||||
echo "> Pulling and update all subrepos"
|
||||
|
||||
subrepoUpdate https://github.com/azerothcore/bash-lib master deps/acore/bash-lib
|
||||
|
||||
subrepoUpdate https://github.com/azerothcore/cmake-utils master deps/acore/cmake-utils
|
||||
|
||||
subrepoUpdate https://github.com/azerothcore/mysql-tools master deps/acore/mysql-tools
|
||||
|
||||
subrepoUpdate https://github.com/azerothcore/joiner master deps/acore/joiner
|
||||
1319
apps/grafana/1_General.json
Normal file
1319
apps/grafana/1_General.json
Normal file
File diff suppressed because it is too large
Load Diff
691
apps/grafana/2_Maps.json
Normal file
691
apps/grafana/2_Maps.json
Normal file
@@ -0,0 +1,691 @@
|
||||
{
|
||||
"annotations": {
|
||||
"list": [
|
||||
{
|
||||
"builtIn": 1,
|
||||
"datasource": "-- Grafana --",
|
||||
"enable": true,
|
||||
"hide": true,
|
||||
"iconColor": "rgba(0, 211, 255, 1)",
|
||||
"name": "Annotations & Alerts",
|
||||
"type": "dashboard"
|
||||
},
|
||||
{
|
||||
"datasource": "Influx",
|
||||
"enable": true,
|
||||
"iconColor": "#C0C6BE",
|
||||
"iconSize": 13,
|
||||
"lineColor": "rgba(255, 96, 96, 0.592157)",
|
||||
"name": "Global Events",
|
||||
"query": "select title, text from events where $timeFilter and realm =~ /$realm$/",
|
||||
"showLine": true,
|
||||
"textColumn": "text",
|
||||
"titleColumn": "title"
|
||||
}
|
||||
]
|
||||
},
|
||||
"editable": true,
|
||||
"gnetId": null,
|
||||
"graphTooltip": 0,
|
||||
"id": 6,
|
||||
"iteration": 1595939001794,
|
||||
"links": [],
|
||||
"panels": [
|
||||
{
|
||||
"aliasColors": {},
|
||||
"bars": false,
|
||||
"dashLength": 10,
|
||||
"dashes": false,
|
||||
"datasource": "Influx",
|
||||
"editable": true,
|
||||
"error": false,
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"custom": {}
|
||||
},
|
||||
"overrides": []
|
||||
},
|
||||
"fill": 1,
|
||||
"fillGradient": 0,
|
||||
"grid": {},
|
||||
"gridPos": {
|
||||
"h": 7,
|
||||
"w": 24,
|
||||
"x": 0,
|
||||
"y": 0
|
||||
},
|
||||
"hiddenSeries": false,
|
||||
"id": 2,
|
||||
"isNew": true,
|
||||
"legend": {
|
||||
"avg": false,
|
||||
"current": false,
|
||||
"max": false,
|
||||
"min": false,
|
||||
"show": true,
|
||||
"total": false,
|
||||
"values": false
|
||||
},
|
||||
"lines": true,
|
||||
"linewidth": 2,
|
||||
"links": [],
|
||||
"nullPointMode": "connected",
|
||||
"options": {
|
||||
"dataLinks": []
|
||||
},
|
||||
"percentage": false,
|
||||
"pointradius": 5,
|
||||
"points": false,
|
||||
"renderer": "flot",
|
||||
"seriesOverrides": [
|
||||
{
|
||||
"alias": "Unload tile",
|
||||
"transform": "negative-Y"
|
||||
}
|
||||
],
|
||||
"spaceLength": 10,
|
||||
"stack": false,
|
||||
"steppedLine": false,
|
||||
"targets": [
|
||||
{
|
||||
"alias": "Load tile",
|
||||
"dsType": "influxdb",
|
||||
"groupBy": [
|
||||
{
|
||||
"params": [
|
||||
"$interval"
|
||||
],
|
||||
"type": "time"
|
||||
},
|
||||
{
|
||||
"params": [
|
||||
"0"
|
||||
],
|
||||
"type": "fill"
|
||||
}
|
||||
],
|
||||
"query": "SELECT count(\"title\") FROM \"map_events\" WHERE \"realm\" =~ /$realm$/ AND \"title\" = 'LoadMapTile' AND $timeFilter GROUP BY time($interval) fill(0)",
|
||||
"rawQuery": true,
|
||||
"refId": "A",
|
||||
"resultFormat": "time_series",
|
||||
"select": [
|
||||
[
|
||||
{
|
||||
"params": [
|
||||
"value"
|
||||
],
|
||||
"type": "field"
|
||||
},
|
||||
{
|
||||
"params": [],
|
||||
"type": "mean"
|
||||
}
|
||||
]
|
||||
],
|
||||
"tags": []
|
||||
},
|
||||
{
|
||||
"alias": "Unload tile",
|
||||
"dsType": "influxdb",
|
||||
"groupBy": [
|
||||
{
|
||||
"params": [
|
||||
"$interval"
|
||||
],
|
||||
"type": "time"
|
||||
},
|
||||
{
|
||||
"params": [
|
||||
"null"
|
||||
],
|
||||
"type": "fill"
|
||||
}
|
||||
],
|
||||
"query": "SELECT count(\"title\") FROM \"map_events\" WHERE \"realm\" =~ /$realm$/ AND \"title\" = 'UnloadMapTile' AND $timeFilter GROUP BY time($interval) fill(0)",
|
||||
"rawQuery": true,
|
||||
"refId": "B",
|
||||
"resultFormat": "time_series",
|
||||
"select": [
|
||||
[
|
||||
{
|
||||
"params": [
|
||||
"value"
|
||||
],
|
||||
"type": "field"
|
||||
},
|
||||
{
|
||||
"params": [],
|
||||
"type": "mean"
|
||||
}
|
||||
]
|
||||
],
|
||||
"tags": []
|
||||
}
|
||||
],
|
||||
"thresholds": [],
|
||||
"timeFrom": null,
|
||||
"timeRegions": [],
|
||||
"timeShift": null,
|
||||
"title": "Map",
|
||||
"tooltip": {
|
||||
"shared": true,
|
||||
"sort": 0,
|
||||
"value_type": "cumulative"
|
||||
},
|
||||
"type": "graph",
|
||||
"xaxis": {
|
||||
"buckets": null,
|
||||
"mode": "time",
|
||||
"name": null,
|
||||
"show": true,
|
||||
"values": []
|
||||
},
|
||||
"yaxes": [
|
||||
{
|
||||
"format": "short",
|
||||
"logBase": 1,
|
||||
"max": null,
|
||||
"min": null,
|
||||
"show": true
|
||||
},
|
||||
{
|
||||
"format": "short",
|
||||
"logBase": 1,
|
||||
"max": null,
|
||||
"min": null,
|
||||
"show": true
|
||||
}
|
||||
],
|
||||
"yaxis": {
|
||||
"align": false,
|
||||
"alignLevel": null
|
||||
}
|
||||
},
|
||||
{
|
||||
"aliasColors": {},
|
||||
"bars": false,
|
||||
"dashLength": 10,
|
||||
"dashes": false,
|
||||
"datasource": "Influx",
|
||||
"editable": true,
|
||||
"error": false,
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"custom": {}
|
||||
},
|
||||
"overrides": []
|
||||
},
|
||||
"fill": 1,
|
||||
"fillGradient": 0,
|
||||
"grid": {},
|
||||
"gridPos": {
|
||||
"h": 7,
|
||||
"w": 24,
|
||||
"x": 0,
|
||||
"y": 7
|
||||
},
|
||||
"hiddenSeries": false,
|
||||
"id": 1,
|
||||
"isNew": true,
|
||||
"legend": {
|
||||
"avg": false,
|
||||
"current": false,
|
||||
"max": false,
|
||||
"min": false,
|
||||
"show": true,
|
||||
"total": false,
|
||||
"values": false
|
||||
},
|
||||
"lines": true,
|
||||
"linewidth": 2,
|
||||
"links": [],
|
||||
"nullPointMode": "connected",
|
||||
"options": {
|
||||
"dataLinks": []
|
||||
},
|
||||
"percentage": false,
|
||||
"pointradius": 5,
|
||||
"points": false,
|
||||
"renderer": "flot",
|
||||
"seriesOverrides": [],
|
||||
"spaceLength": 10,
|
||||
"stack": false,
|
||||
"steppedLine": false,
|
||||
"targets": [
|
||||
{
|
||||
"alias": "Pathfinding queries",
|
||||
"dsType": "influxdb",
|
||||
"groupBy": [
|
||||
{
|
||||
"params": [
|
||||
"$interval"
|
||||
],
|
||||
"type": "time"
|
||||
},
|
||||
{
|
||||
"params": [
|
||||
"null"
|
||||
],
|
||||
"type": "fill"
|
||||
}
|
||||
],
|
||||
"query": "SELECT count(\"title\") FROM \"mmap_events\" WHERE \"realm\" =~ /$realm$/ AND \"title\" = 'CalculatePath' AND $timeFilter GROUP BY time($interval) fill(0)",
|
||||
"rawQuery": true,
|
||||
"refId": "A",
|
||||
"resultFormat": "time_series",
|
||||
"select": [
|
||||
[
|
||||
{
|
||||
"params": [
|
||||
"value"
|
||||
],
|
||||
"type": "field"
|
||||
},
|
||||
{
|
||||
"params": [],
|
||||
"type": "mean"
|
||||
}
|
||||
]
|
||||
],
|
||||
"tags": []
|
||||
}
|
||||
],
|
||||
"thresholds": [],
|
||||
"timeFrom": null,
|
||||
"timeRegions": [],
|
||||
"timeShift": null,
|
||||
"title": "MMap",
|
||||
"tooltip": {
|
||||
"shared": true,
|
||||
"sort": 0,
|
||||
"value_type": "cumulative"
|
||||
},
|
||||
"type": "graph",
|
||||
"xaxis": {
|
||||
"buckets": null,
|
||||
"mode": "time",
|
||||
"name": null,
|
||||
"show": true,
|
||||
"values": []
|
||||
},
|
||||
"yaxes": [
|
||||
{
|
||||
"format": "short",
|
||||
"logBase": 1,
|
||||
"max": null,
|
||||
"min": null,
|
||||
"show": true
|
||||
},
|
||||
{
|
||||
"format": "short",
|
||||
"logBase": 1,
|
||||
"max": null,
|
||||
"min": null,
|
||||
"show": true
|
||||
}
|
||||
],
|
||||
"yaxis": {
|
||||
"align": false,
|
||||
"alignLevel": null
|
||||
}
|
||||
},
|
||||
{
|
||||
"aliasColors": {},
|
||||
"bars": false,
|
||||
"dashLength": 10,
|
||||
"dashes": false,
|
||||
"datasource": null,
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"custom": {}
|
||||
},
|
||||
"overrides": []
|
||||
},
|
||||
"fill": 5,
|
||||
"fillGradient": 0,
|
||||
"gridPos": {
|
||||
"h": 8,
|
||||
"w": 24,
|
||||
"x": 0,
|
||||
"y": 14
|
||||
},
|
||||
"hiddenSeries": false,
|
||||
"id": 4,
|
||||
"legend": {
|
||||
"avg": false,
|
||||
"current": false,
|
||||
"hideEmpty": false,
|
||||
"hideZero": true,
|
||||
"max": false,
|
||||
"min": false,
|
||||
"show": true,
|
||||
"total": false,
|
||||
"values": false
|
||||
},
|
||||
"lines": true,
|
||||
"linewidth": 1,
|
||||
"nullPointMode": "null",
|
||||
"options": {
|
||||
"dataLinks": []
|
||||
},
|
||||
"percentage": false,
|
||||
"pointradius": 2,
|
||||
"points": false,
|
||||
"renderer": "flot",
|
||||
"seriesOverrides": [],
|
||||
"spaceLength": 10,
|
||||
"stack": false,
|
||||
"steppedLine": false,
|
||||
"targets": [
|
||||
{
|
||||
"alias": "Map $tag_map_id Instance $tag_map_instanceid",
|
||||
"groupBy": [
|
||||
{
|
||||
"params": [
|
||||
"$__interval"
|
||||
],
|
||||
"type": "time"
|
||||
},
|
||||
{
|
||||
"params": [
|
||||
"map_id"
|
||||
],
|
||||
"type": "tag"
|
||||
},
|
||||
{
|
||||
"params": [
|
||||
"map_instanceid"
|
||||
],
|
||||
"type": "tag"
|
||||
},
|
||||
{
|
||||
"params": [
|
||||
"none"
|
||||
],
|
||||
"type": "fill"
|
||||
}
|
||||
],
|
||||
"measurement": "map_creatures",
|
||||
"orderByTime": "ASC",
|
||||
"policy": "default",
|
||||
"refId": "A",
|
||||
"resultFormat": "time_series",
|
||||
"select": [
|
||||
[
|
||||
{
|
||||
"params": [
|
||||
"value"
|
||||
],
|
||||
"type": "field"
|
||||
},
|
||||
{
|
||||
"params": [],
|
||||
"type": "mean"
|
||||
}
|
||||
]
|
||||
],
|
||||
"tags": [
|
||||
{
|
||||
"key": "realm",
|
||||
"operator": "=~",
|
||||
"value": "/^$realm$/"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"thresholds": [],
|
||||
"timeFrom": null,
|
||||
"timeRegions": [],
|
||||
"timeShift": null,
|
||||
"title": "Creatures",
|
||||
"tooltip": {
|
||||
"shared": true,
|
||||
"sort": 0,
|
||||
"value_type": "individual"
|
||||
},
|
||||
"type": "graph",
|
||||
"xaxis": {
|
||||
"buckets": null,
|
||||
"mode": "time",
|
||||
"name": null,
|
||||
"show": true,
|
||||
"values": []
|
||||
},
|
||||
"yaxes": [
|
||||
{
|
||||
"format": "short",
|
||||
"label": null,
|
||||
"logBase": 1,
|
||||
"max": null,
|
||||
"min": null,
|
||||
"show": true
|
||||
},
|
||||
{
|
||||
"format": "short",
|
||||
"label": null,
|
||||
"logBase": 1,
|
||||
"max": null,
|
||||
"min": null,
|
||||
"show": true
|
||||
}
|
||||
],
|
||||
"yaxis": {
|
||||
"align": false,
|
||||
"alignLevel": null
|
||||
}
|
||||
},
|
||||
{
|
||||
"aliasColors": {},
|
||||
"bars": false,
|
||||
"dashLength": 10,
|
||||
"dashes": false,
|
||||
"datasource": null,
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"custom": {}
|
||||
},
|
||||
"overrides": []
|
||||
},
|
||||
"fill": 5,
|
||||
"fillGradient": 0,
|
||||
"gridPos": {
|
||||
"h": 8,
|
||||
"w": 24,
|
||||
"x": 0,
|
||||
"y": 22
|
||||
},
|
||||
"hiddenSeries": false,
|
||||
"id": 5,
|
||||
"legend": {
|
||||
"avg": false,
|
||||
"current": false,
|
||||
"hideEmpty": false,
|
||||
"hideZero": true,
|
||||
"max": false,
|
||||
"min": false,
|
||||
"show": true,
|
||||
"total": false,
|
||||
"values": false
|
||||
},
|
||||
"lines": true,
|
||||
"linewidth": 1,
|
||||
"nullPointMode": "null",
|
||||
"options": {
|
||||
"dataLinks": []
|
||||
},
|
||||
"percentage": false,
|
||||
"pointradius": 2,
|
||||
"points": false,
|
||||
"renderer": "flot",
|
||||
"seriesOverrides": [],
|
||||
"spaceLength": 10,
|
||||
"stack": false,
|
||||
"steppedLine": false,
|
||||
"targets": [
|
||||
{
|
||||
"alias": "Map $tag_map_id Instance $tag_map_instanceid",
|
||||
"groupBy": [
|
||||
{
|
||||
"params": [
|
||||
"$__interval"
|
||||
],
|
||||
"type": "time"
|
||||
},
|
||||
{
|
||||
"params": [
|
||||
"map_id"
|
||||
],
|
||||
"type": "tag"
|
||||
},
|
||||
{
|
||||
"params": [
|
||||
"map_instanceid"
|
||||
],
|
||||
"type": "tag"
|
||||
},
|
||||
{
|
||||
"params": [
|
||||
"none"
|
||||
],
|
||||
"type": "fill"
|
||||
}
|
||||
],
|
||||
"measurement": "map_gameobjects",
|
||||
"orderByTime": "ASC",
|
||||
"policy": "default",
|
||||
"refId": "A",
|
||||
"resultFormat": "time_series",
|
||||
"select": [
|
||||
[
|
||||
{
|
||||
"params": [
|
||||
"value"
|
||||
],
|
||||
"type": "field"
|
||||
},
|
||||
{
|
||||
"params": [],
|
||||
"type": "mean"
|
||||
}
|
||||
]
|
||||
],
|
||||
"tags": [
|
||||
{
|
||||
"key": "realm",
|
||||
"operator": "=~",
|
||||
"value": "/^$realm$/"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"thresholds": [],
|
||||
"timeFrom": null,
|
||||
"timeRegions": [],
|
||||
"timeShift": null,
|
||||
"title": "Gameobjects",
|
||||
"tooltip": {
|
||||
"shared": true,
|
||||
"sort": 0,
|
||||
"value_type": "individual"
|
||||
},
|
||||
"type": "graph",
|
||||
"xaxis": {
|
||||
"buckets": null,
|
||||
"mode": "time",
|
||||
"name": null,
|
||||
"show": true,
|
||||
"values": []
|
||||
},
|
||||
"yaxes": [
|
||||
{
|
||||
"format": "short",
|
||||
"label": null,
|
||||
"logBase": 1,
|
||||
"max": null,
|
||||
"min": null,
|
||||
"show": true
|
||||
},
|
||||
{
|
||||
"format": "short",
|
||||
"label": null,
|
||||
"logBase": 1,
|
||||
"max": null,
|
||||
"min": null,
|
||||
"show": true
|
||||
}
|
||||
],
|
||||
"yaxis": {
|
||||
"align": false,
|
||||
"alignLevel": null
|
||||
}
|
||||
}
|
||||
],
|
||||
"refresh": "1m",
|
||||
"schemaVersion": 25,
|
||||
"style": "dark",
|
||||
"tags": [],
|
||||
"templating": {
|
||||
"list": [
|
||||
{
|
||||
"allFormat": "regex values",
|
||||
"allValue": null,
|
||||
"current": {
|
||||
"text": "Acore",
|
||||
"value": "Acore"
|
||||
},
|
||||
"datasource": "Influx",
|
||||
"definition": "",
|
||||
"hide": 0,
|
||||
"includeAll": false,
|
||||
"label": null,
|
||||
"multi": false,
|
||||
"multiFormat": "regex values",
|
||||
"name": "realm",
|
||||
"options": [],
|
||||
"query": "show tag values from events with key = realm",
|
||||
"refresh": 1,
|
||||
"regex": "",
|
||||
"skipUrlSync": false,
|
||||
"sort": 0,
|
||||
"tagValuesQuery": "",
|
||||
"tags": [],
|
||||
"tagsQuery": "",
|
||||
"type": "query",
|
||||
"useTags": false
|
||||
}
|
||||
]
|
||||
},
|
||||
"time": {
|
||||
"from": "now-15m",
|
||||
"to": "now"
|
||||
},
|
||||
"timepicker": {
|
||||
"now": true,
|
||||
"refresh_intervals": [
|
||||
"10s",
|
||||
"30s",
|
||||
"1m",
|
||||
"5m",
|
||||
"15m",
|
||||
"30m",
|
||||
"1h",
|
||||
"2h",
|
||||
"1d"
|
||||
],
|
||||
"time_options": [
|
||||
"5m",
|
||||
"15m",
|
||||
"1h",
|
||||
"6h",
|
||||
"12h",
|
||||
"24h",
|
||||
"2d",
|
||||
"7d",
|
||||
"30d"
|
||||
]
|
||||
},
|
||||
"timezone": "browser",
|
||||
"title": "Maps, vmaps and mmaps",
|
||||
"uid": "6IhqWiWGz",
|
||||
"version": 2
|
||||
}
|
||||
280
apps/grafana/3_Network.json
Normal file
280
apps/grafana/3_Network.json
Normal file
@@ -0,0 +1,280 @@
|
||||
{
|
||||
"annotations": {
|
||||
"list": [
|
||||
{
|
||||
"builtIn": 1,
|
||||
"datasource": "-- Grafana --",
|
||||
"enable": true,
|
||||
"hide": true,
|
||||
"iconColor": "rgba(0, 211, 255, 1)",
|
||||
"name": "Annotations & Alerts",
|
||||
"type": "dashboard"
|
||||
},
|
||||
{
|
||||
"datasource": "Influx",
|
||||
"enable": true,
|
||||
"iconColor": "#C0C6BE",
|
||||
"iconSize": 13,
|
||||
"lineColor": "rgba(255, 96, 96, 0.592157)",
|
||||
"name": "Global Events",
|
||||
"query": "select title, text from events where $timeFilter and realm =~ /$realm$/",
|
||||
"showLine": true,
|
||||
"textColumn": "text",
|
||||
"titleColumn": "title"
|
||||
}
|
||||
]
|
||||
},
|
||||
"editable": true,
|
||||
"gnetId": null,
|
||||
"graphTooltip": 0,
|
||||
"id": 7,
|
||||
"iteration": 1595939048589,
|
||||
"links": [],
|
||||
"panels": [
|
||||
{
|
||||
"aliasColors": {},
|
||||
"bars": false,
|
||||
"dashLength": 10,
|
||||
"dashes": false,
|
||||
"datasource": "Influx",
|
||||
"editable": true,
|
||||
"error": false,
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"custom": {}
|
||||
},
|
||||
"overrides": []
|
||||
},
|
||||
"fill": 1,
|
||||
"fillGradient": 0,
|
||||
"grid": {},
|
||||
"gridPos": {
|
||||
"h": 7,
|
||||
"w": 24,
|
||||
"x": 0,
|
||||
"y": 0
|
||||
},
|
||||
"hiddenSeries": false,
|
||||
"id": 1,
|
||||
"isNew": true,
|
||||
"legend": {
|
||||
"avg": false,
|
||||
"current": false,
|
||||
"max": false,
|
||||
"min": false,
|
||||
"show": true,
|
||||
"total": false,
|
||||
"values": false
|
||||
},
|
||||
"lines": true,
|
||||
"linewidth": 2,
|
||||
"links": [],
|
||||
"nullPointMode": "connected",
|
||||
"options": {
|
||||
"dataLinks": []
|
||||
},
|
||||
"percentage": false,
|
||||
"pointradius": 5,
|
||||
"points": false,
|
||||
"renderer": "flot",
|
||||
"seriesOverrides": [],
|
||||
"spaceLength": 10,
|
||||
"stack": false,
|
||||
"steppedLine": false,
|
||||
"targets": [
|
||||
{
|
||||
"alias": "Processed packets",
|
||||
"dsType": "influxdb",
|
||||
"groupBy": [
|
||||
{
|
||||
"params": [
|
||||
"$interval"
|
||||
],
|
||||
"type": "time"
|
||||
},
|
||||
{
|
||||
"params": [
|
||||
"0"
|
||||
],
|
||||
"type": "fill"
|
||||
}
|
||||
],
|
||||
"measurement": "processed_packets",
|
||||
"query": "SELECT sum(\"value\") FROM \"processed_packets\" WHERE \"realm\" =~ /$realm$/ AND $timeFilter GROUP BY time($interval) fill(0)",
|
||||
"refId": "A",
|
||||
"resultFormat": "time_series",
|
||||
"select": [
|
||||
[
|
||||
{
|
||||
"params": [
|
||||
"value"
|
||||
],
|
||||
"type": "field"
|
||||
},
|
||||
{
|
||||
"params": [],
|
||||
"type": "sum"
|
||||
}
|
||||
]
|
||||
],
|
||||
"tags": [
|
||||
{
|
||||
"key": "realm",
|
||||
"operator": "=~",
|
||||
"value": "/$realm$/"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"alias": "Processed packets / mean per session",
|
||||
"dsType": "influxdb",
|
||||
"groupBy": [
|
||||
{
|
||||
"params": [
|
||||
"$interval"
|
||||
],
|
||||
"type": "time"
|
||||
},
|
||||
{
|
||||
"params": [
|
||||
"0"
|
||||
],
|
||||
"type": "fill"
|
||||
}
|
||||
],
|
||||
"measurement": "processed_packets",
|
||||
"query": "SELECT mean(\"value\") FROM \"processed_packets\" WHERE \"realm\" =~ /$realm$/ AND $timeFilter GROUP BY time($interval) fill(0)",
|
||||
"refId": "B",
|
||||
"resultFormat": "time_series",
|
||||
"select": [
|
||||
[
|
||||
{
|
||||
"params": [
|
||||
"value"
|
||||
],
|
||||
"type": "field"
|
||||
},
|
||||
{
|
||||
"params": [],
|
||||
"type": "mean"
|
||||
}
|
||||
]
|
||||
],
|
||||
"tags": [
|
||||
{
|
||||
"key": "realm",
|
||||
"operator": "=~",
|
||||
"value": "/$realm$/"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"thresholds": [],
|
||||
"timeFrom": null,
|
||||
"timeRegions": [],
|
||||
"timeShift": null,
|
||||
"title": "Processed packets",
|
||||
"tooltip": {
|
||||
"shared": true,
|
||||
"sort": 0,
|
||||
"value_type": "cumulative"
|
||||
},
|
||||
"type": "graph",
|
||||
"xaxis": {
|
||||
"buckets": null,
|
||||
"mode": "time",
|
||||
"name": null,
|
||||
"show": true,
|
||||
"values": []
|
||||
},
|
||||
"yaxes": [
|
||||
{
|
||||
"format": "short",
|
||||
"logBase": 1,
|
||||
"max": null,
|
||||
"min": null,
|
||||
"show": true
|
||||
},
|
||||
{
|
||||
"format": "short",
|
||||
"logBase": 1,
|
||||
"max": null,
|
||||
"min": null,
|
||||
"show": true
|
||||
}
|
||||
],
|
||||
"yaxis": {
|
||||
"align": false,
|
||||
"alignLevel": null
|
||||
}
|
||||
}
|
||||
],
|
||||
"refresh": "1m",
|
||||
"schemaVersion": 25,
|
||||
"style": "dark",
|
||||
"tags": [],
|
||||
"templating": {
|
||||
"list": [
|
||||
{
|
||||
"allFormat": "regex values",
|
||||
"allValue": null,
|
||||
"current": {
|
||||
"text": "Acore",
|
||||
"value": "Acore"
|
||||
},
|
||||
"datasource": "Influx",
|
||||
"definition": "",
|
||||
"hide": 0,
|
||||
"includeAll": false,
|
||||
"label": null,
|
||||
"multi": false,
|
||||
"multiFormat": "regex values",
|
||||
"name": "realm",
|
||||
"options": [],
|
||||
"query": "show tag values from events with key = realm",
|
||||
"refresh": 1,
|
||||
"regex": "",
|
||||
"skipUrlSync": false,
|
||||
"sort": 0,
|
||||
"tagValuesQuery": "",
|
||||
"tags": [],
|
||||
"tagsQuery": "",
|
||||
"type": "query",
|
||||
"useTags": false
|
||||
}
|
||||
]
|
||||
},
|
||||
"time": {
|
||||
"from": "now-15m",
|
||||
"to": "now"
|
||||
},
|
||||
"timepicker": {
|
||||
"now": true,
|
||||
"refresh_intervals": [
|
||||
"10s",
|
||||
"30s",
|
||||
"1m",
|
||||
"5m",
|
||||
"15m",
|
||||
"30m",
|
||||
"1h",
|
||||
"2h",
|
||||
"1d"
|
||||
],
|
||||
"time_options": [
|
||||
"5m",
|
||||
"15m",
|
||||
"1h",
|
||||
"6h",
|
||||
"12h",
|
||||
"24h",
|
||||
"2d",
|
||||
"7d",
|
||||
"30d"
|
||||
]
|
||||
},
|
||||
"timezone": "browser",
|
||||
"title": "Network",
|
||||
"uid": "_QtkMmWMk",
|
||||
"version": 2
|
||||
}
|
||||
1677
apps/grafana/4_Performance_profiling.json
Normal file
1677
apps/grafana/4_Performance_profiling.json
Normal file
File diff suppressed because it is too large
Load Diff
253
apps/installer/includes/functions.sh
Normal file
253
apps/installer/includes/functions.sh
Normal file
@@ -0,0 +1,253 @@
|
||||
function inst_configureOS() {
|
||||
echo "Platform: $OSTYPE"
|
||||
case "$OSTYPE" in
|
||||
solaris*) echo "Solaris is not supported yet" ;;
|
||||
darwin*) source "$AC_PATH_INSTALLER/includes/os_configs/osx.sh" ;;
|
||||
linux*)
|
||||
# If $OSDISTRO is set, use this value (from config.sh)
|
||||
if [ ! -z "$OSDISTRO" ]; then
|
||||
DISTRO=$OSDISTRO
|
||||
# If available, use LSB to identify distribution
|
||||
elif command -v lsb_release >/dev/null 2>&1 ; then
|
||||
DISTRO=$(lsb_release -is)
|
||||
# Otherwise, use release info file
|
||||
else
|
||||
DISTRO=$(ls -d /etc/[A-Za-z]*[_-][rv]e[lr]* | grep -v "lsb" | cut -d'/' -f3 | cut -d'-' -f1 | cut -d'_' -f1)
|
||||
fi
|
||||
|
||||
case $DISTRO in
|
||||
# add here distro that are debian or ubuntu based
|
||||
# TODO: find a better way, maybe checking the existance
|
||||
# of a package manager
|
||||
"neon" | "ubuntu" | "Ubuntu")
|
||||
DISTRO="ubuntu"
|
||||
;;
|
||||
"debian" | "Debian")
|
||||
DISTRO="debian"
|
||||
;;
|
||||
*)
|
||||
echo "Distro: $DISTRO, is not supported. If your distribution is based on debian or ubuntu,
|
||||
please set the 'OSDISTRO' environment variable to one of these distro (you can use config.sh file)"
|
||||
;;
|
||||
esac
|
||||
|
||||
|
||||
DISTRO=${DISTRO,,}
|
||||
|
||||
echo "Distro: $DISTRO"
|
||||
|
||||
# TODO: implement different configurations by distro
|
||||
source "$AC_PATH_INSTALLER/includes/os_configs/$DISTRO.sh"
|
||||
;;
|
||||
bsd*) echo "BSD is not supported yet" ;;
|
||||
msys*) source "$AC_PATH_INSTALLER/includes/os_configs/windows.sh" ;;
|
||||
*) echo "This platform is not supported" ;;
|
||||
esac
|
||||
}
|
||||
|
||||
function inst_updateRepo() {
|
||||
cd "$AC_PATH_ROOT"
|
||||
if [ ! -z $INSTALLER_PULL_FROM ]; then
|
||||
git pull "$ORIGIN_REMOTE" "$INSTALLER_PULL_FROM"
|
||||
else
|
||||
git pull "$ORIGIN_REMOTE" $(git rev-parse --abbrev-ref HEAD)
|
||||
fi
|
||||
}
|
||||
|
||||
function inst_resetRepo() {
|
||||
cd "$AC_PATH_ROOT"
|
||||
git reset --hard $(git rev-parse --abbrev-ref HEAD)
|
||||
git clean -f
|
||||
}
|
||||
|
||||
function inst_compile() {
|
||||
comp_configure
|
||||
comp_build
|
||||
}
|
||||
|
||||
function inst_cleanCompile() {
|
||||
comp_clean
|
||||
inst_compile
|
||||
}
|
||||
|
||||
function inst_allInOne() {
|
||||
inst_configureOS
|
||||
inst_compile
|
||||
dbasm_import true true true
|
||||
}
|
||||
|
||||
function inst_getVersionBranch() {
|
||||
local res="master"
|
||||
local v="not-defined"
|
||||
local MODULE_MAJOR=0
|
||||
local MODULE_MINOR=0
|
||||
local MODULE_PATCH=0
|
||||
local MODULE_SPECIAL=0;
|
||||
local ACV_MAJOR=0
|
||||
local ACV_MINOR=0
|
||||
local ACV_PATCH=0
|
||||
local ACV_SPECIAL=0;
|
||||
local curldata=$(curl -f --silent -H 'Cache-Control: no-cache' "$1" || echo "{}")
|
||||
local parsed=$(echo "$curldata" | "$AC_PATH_DEPS/jsonpath/JSONPath.sh" -b '$.compatibility.*.[version,branch]')
|
||||
|
||||
semverParseInto "$ACORE_VERSION" ACV_MAJOR ACV_MINOR ACV_PATCH ACV_SPECIAL
|
||||
|
||||
if [[ ! -z "$parsed" ]]; then
|
||||
readarray -t vers < <(echo "$parsed")
|
||||
local idx
|
||||
res="none"
|
||||
# since we've the pair version,branch alternated in not associative and one-dimensional
|
||||
# array, we've to simulate the association with length/2 trick
|
||||
for idx in `seq 0 $((${#vers[*]}/2-1))`; do
|
||||
semverParseInto "${vers[idx*2]}" MODULE_MAJOR MODULE_MINOR MODULE_PATCH MODULE_SPECIAL
|
||||
if [[ $MODULE_MAJOR -eq $ACV_MAJOR && $MODULE_MINOR -le $ACV_MINOR ]]; then
|
||||
res="${vers[idx*2+1]}"
|
||||
v="${vers[idx*2]}"
|
||||
fi
|
||||
done
|
||||
fi
|
||||
|
||||
echo "$v" "$res"
|
||||
}
|
||||
|
||||
function inst_module_search {
|
||||
|
||||
local res="$1"
|
||||
local idx=0;
|
||||
|
||||
if [ -z "$1" ]; then
|
||||
echo "Type what to search or leave blank for full list"
|
||||
read -p "Insert name: " res
|
||||
fi
|
||||
|
||||
local search="+$res"
|
||||
|
||||
echo "Searching $res..."
|
||||
echo "";
|
||||
|
||||
readarray -t MODS < <(curl --silent "https://api.github.com/search/repositories?q=org%3Aazerothcore${search}+fork%3Atrue+topic%3Acore-module+sort%3Astars&type=" \
|
||||
| "$AC_PATH_DEPS/jsonpath/JSONPath.sh" -b '$.items.*.name')
|
||||
while (( ${#MODS[@]} > idx )); do
|
||||
mod="${MODS[idx++]}"
|
||||
read v b < <(inst_getVersionBranch "https://raw.githubusercontent.com/azerothcore/$mod/master/acore-module.json")
|
||||
|
||||
if [[ "$b" != "none" ]]; then
|
||||
echo "-> $mod (tested with AC version: $v)"
|
||||
else
|
||||
echo "-> $mod (no revision available for AC v$AC_VERSION, it could not work!)"
|
||||
fi
|
||||
done
|
||||
|
||||
echo "";
|
||||
echo "";
|
||||
}
|
||||
|
||||
function inst_module_install {
|
||||
local res
|
||||
if [ -z "$1" ]; then
|
||||
echo "Type the name of the module to install"
|
||||
read -p "Insert name: " res
|
||||
else
|
||||
res="$1"
|
||||
fi
|
||||
|
||||
read v b < <(inst_getVersionBranch "https://raw.githubusercontent.com/azerothcore/$res/master/acore-module.json")
|
||||
|
||||
if [[ "$b" != "none" ]]; then
|
||||
Joiner:add_repo "https://github.com/azerothcore/$res" "$res" "$b" && echo "Done, please re-run compiling and db assembly. Read instruction on module repository for more information"
|
||||
else
|
||||
echo "Cannot install $res module: it doesn't exists or no version compatible with AC v$ACORE_VERSION are available"
|
||||
fi
|
||||
|
||||
echo "";
|
||||
echo "";
|
||||
}
|
||||
|
||||
function inst_module_update {
|
||||
local res;
|
||||
local _tmp;
|
||||
local branch;
|
||||
local p;
|
||||
|
||||
if [ -z "$1" ]; then
|
||||
echo "Type the name of the module to update"
|
||||
read -p "Insert name: " res
|
||||
else
|
||||
res="$1"
|
||||
fi
|
||||
|
||||
_tmp=$PWD
|
||||
|
||||
if [ -d "$J_PATH_MODULES/$res/" ]; then
|
||||
read v b < <(inst_getVersionBranch "https://raw.githubusercontent.com/azerothcore/$res/master/acore-module.json")
|
||||
|
||||
cd "$J_PATH_MODULES/$res/"
|
||||
|
||||
# use current branch if something wrong with json
|
||||
if [[ "$v" == "none" || "$v" == "not-defined" ]]; then
|
||||
b=`git rev-parse --abbrev-ref HEAD`
|
||||
fi
|
||||
|
||||
Joiner:upd_repo "https://github.com/azerothcore/$res" "$res" "$b" && echo "Done, please re-run compiling and db assembly" || echo "Cannot update"
|
||||
cd $_tmp
|
||||
else
|
||||
echo "Cannot update! Path doesn't exist"
|
||||
fi;
|
||||
|
||||
echo "";
|
||||
echo "";
|
||||
}
|
||||
|
||||
function inst_module_remove {
|
||||
if [ -z "$1" ]; then
|
||||
echo "Type the name of the module to remove"
|
||||
read -p "Insert name: " res
|
||||
else
|
||||
res="$1"
|
||||
fi
|
||||
|
||||
Joiner:remove "$res" && echo "Done, please re-run compiling" || echo "Cannot remove"
|
||||
|
||||
echo "";
|
||||
echo "";
|
||||
}
|
||||
|
||||
|
||||
function inst_simple_restarter {
|
||||
echo "Running $1 ..."
|
||||
bash "$AC_PATH_APPS/startup-scripts/simple-restarter" "$AC_BINPATH_FULL" "$1"
|
||||
echo
|
||||
#disown -a
|
||||
#jobs -l
|
||||
}
|
||||
|
||||
function inst_download_client_data {
    # Download and unpack the client data archive (maps/dbc/etc.) from the
    # wowgaming/client-data GitHub releases into $DATAPATH (default: the
    # server binary directory). Skips the download when the recorded
    # INSTALLED_VERSION already matches.
    # change the following version when needed
    local VERSION=v16

    echo "#######################"
    echo "Client data downloader"
    echo "#######################"

    # first check if it's defined in env, otherwise use the default
    local path="${DATAPATH:-$AC_BINPATH_FULL}"
    local zipPath="${DATAPATH_ZIP:-"$path/data.zip"}"

    # marker file written after a successful install; sourcing it defines
    # INSTALLED_VERSION, used below to decide whether to re-download
    dataVersionFile="$path/data-version"

    [ -f "$dataVersionFile" ] && source "$dataVersionFile"

    # create the path if doesn't exists
    mkdir -p "$path"

    if [ "$VERSION" == "$INSTALLED_VERSION" ]; then
        echo "Data $VERSION already installed. If you want to force the download remove the following file: $dataVersionFile"
        return
    fi

    echo "Downloading client data in: $zipPath ..."
    # download -> unzip -> remove zip -> record version; each step runs only
    # if the previous one succeeded, so a failed unzip leaves no marker and
    # the next run retries
    curl -L https://github.com/wowgaming/client-data/releases/download/$VERSION/data.zip > "$zipPath" \
        && echo "unzip downloaded file in $path..." && unzip -q -o "$zipPath" -d "$path/" \
        && echo "Remove downloaded file" && rm "$zipPath" \
        && echo "INSTALLED_VERSION=$VERSION" > "$dataVersionFile"
}
|
||||
22
apps/installer/includes/includes.sh
Normal file
22
apps/installer/includes/includes.sh
Normal file
@@ -0,0 +1,22 @@
|
||||
# Installer include file: pulls in the shared bash helpers, the joiner
# (module manager), the compiler helpers and the installer's own functions.

[[ ${INSTALLER_GUARDYVAR:-} -eq 1 ]] && return || readonly INSTALLER_GUARDYVAR=1 # include it once

# absolute directory of this script, regardless of the caller's cwd
CURRENT_PATH=$( cd "$(dirname "${BASH_SOURCE[0]}")" ; pwd )

source "$CURRENT_PATH/../../bash_shared/includes.sh"

AC_PATH_INSTALLER="$AC_PATH_APPS/installer"

# joiner: tool used to install/update/remove modules
J_PATH="$AC_PATH_DEPS/acore/joiner"
J_PATH_MODULES="$AC_PATH_MODULES"

source "$J_PATH/joiner.sh"

# optional user configuration
if [ -f "$AC_PATH_INSTALLER/config.sh" ]; then
    source "$AC_PATH_INSTALLER/config.sh" # should overwrite previous
fi

source "$AC_PATH_APPS/compiler/includes/includes.sh"

# semantic-version comparison helpers
source "$AC_PATH_DEPS/semver_bash/semver.sh"

source "$AC_PATH_INSTALLER/includes/functions.sh"
|
||||
18
apps/installer/includes/os_configs/debian.sh
Normal file
18
apps/installer/includes/os_configs/debian.sh
Normal file
@@ -0,0 +1,18 @@
|
||||
# Install the AzerothCore build dependencies on Debian.

# lsb_release is needed below to detect the Debian release number
if ! command -v lsb_release &>/dev/null ; then
    sudo apt-get install -y lsb-release
fi

DEBIAN_VERSION=$(lsb_release -sr)

sudo apt-get update -y

# toolchain and utilities common to all supported releases
sudo apt-get install -y gdbserver gdb unzip curl \
    libncurses-dev libreadline-dev clang g++ \
    gcc git cmake make ccache

if [[ $DEBIAN_VERSION -eq "10" ]]; then
    # FIX: the line continuation was misplaced — the libboost packages sat on
    # their own line (executed as a failing command) and the stray trailing
    # backslash swallowed the `else`, breaking the if/else/fi entirely.
    sudo apt-get install -y default-libmysqlclient-dev libssl-dev libreadline-dev libncurses-dev mariadb-server \
        libboost-system1.6*-dev libboost-filesystem1.6*-dev libboost-program-options1.6*-dev libboost-iostreams1.6*-dev
else # Debian 8 and 9 should work using this
    sudo apt-get install -y libmysqlclient-dev libssl1.0-dev mysql-server
fi
|
||||
29
apps/installer/includes/os_configs/osx.sh
Normal file
29
apps/installer/includes/os_configs/osx.sh
Normal file
@@ -0,0 +1,29 @@
|
||||
##########################################
## workaround for python upgrade issue https://github.com/actions/runner-images/issues/6817
# remove every pre-installed python stub that blocks the upgrade; `|| true`
# keeps the script going when a stub is already absent
for _py_stub in 2to3 2to3-3.10 2to3-3.11 \
                idle3 idle3.10 idle3.11 \
                pydoc3 pydoc3.10 pydoc3.11 \
                python3 python3.10 python3.11 \
                python3-config python3.10-config python3.11-config; do
    rm "/usr/local/bin/$_py_stub" || true
done
##########################################

brew update

##########################################
## workaround for cmake already being installed in the github runners
if ! command -v cmake &>/dev/null ; then
    brew install cmake
fi
##########################################

brew install openssl@3 readline boost@1.82 bash-completion curl unzip mysql@8.1 ccache
|
||||
29
apps/installer/includes/os_configs/ubuntu.sh
Normal file
29
apps/installer/includes/os_configs/ubuntu.sh
Normal file
@@ -0,0 +1,29 @@
|
||||
# Install the AzerothCore build dependencies on Ubuntu.
# CI/Docker runs use a pinned boost from the mhier PPA; bare-metal installs
# choose the package set per release in the case statement below.

# lsb_release is needed to detect the Ubuntu release number
if ! command -v lsb_release &>/dev/null ; then
    sudo apt-get install -y lsb-release
fi

UBUNTU_VERSION=$(lsb_release -sr);

sudo apt update

# shared deps
sudo apt-get -y install ccache clang cmake curl google-perftools libmysqlclient-dev make unzip

# [[ $VAR ]] is true when the variable is set to any non-empty value
if [[ $CONTINUOUS_INTEGRATION || $DOCKER ]]; then
    sudo add-apt-repository -y ppa:mhier/libboost-latest && sudo apt update && sudo apt-get -y install build-essential cmake-data \
        libboost1.74-dev libbz2-dev libncurses5-dev libmysql++-dev libgoogle-perftools-dev libreadline6-dev libssl-dev libtool \
        openssl zlib1g-dev
else
    case $UBUNTU_VERSION in
        "20.04")
            sudo apt-get install -y g++ gdb gdbserver gcc git \
                libboost-all-dev libbz2-dev libncurses-dev libreadline-dev \
                libssl-dev mysql-server
            ;;
        *)
            # any other release: same set, plus the boost PPA for a recent boost
            sudo add-apt-repository -y ppa:mhier/libboost-latest && sudo apt update && sudo apt-get install -y g++ gdb gdbserver gcc git \
                libboost-all-dev libbz2-dev libncurses-dev libreadline-dev \
                libssl-dev mysql-server
            ;;
    esac
fi
|
||||
30
apps/installer/includes/os_configs/windows.sh
Normal file
30
apps/installer/includes/os_configs/windows.sh
Normal file
@@ -0,0 +1,30 @@
|
||||
# install chocolatey before

# NOTE(review): the line below is the official chocolatey bootstrap for
# cmd.exe — it is NOT valid bash and will fail if this file is executed by
# bash directly; presumably it documents the manual prerequisite step.
# Confirm intended invocation before changing it.
@"%SystemRoot%\System32\WindowsPowerShell\v1.0\powershell.exe" -NoProfile -InputFormat None -ExecutionPolicy Bypass -Command "iex ((New-Object System.Net.WebClient).DownloadString('https://chocolatey.org/install.ps1'))" && SET "PATH=%PATH%;%ALLUSERSPROFILE%\chocolatey\bin"

# install automatically following packages:
# cmake
# git
# microsoft-build-tools
# mysql

INSTALL_ARGS=""

if [[ $CONTINUOUS_INTEGRATION ]]; then
    # CI: suppress chocolatey progress bars to keep logs readable
    INSTALL_ARGS=" --no-progress "
else
    { # try
        choco uninstall -y -n cmake.install cmake # needed to make sure that following install set the env properly
    } || { # catch
        echo "nothing to do"
    }

    choco install -y --skip-checksums $INSTALL_ARGS git visualstudio2022community
fi

choco install -y --skip-checksums $INSTALL_ARGS cmake.install -y --installargs 'ADD_CMAKE_TO_PATH=System'
choco install -y --skip-checksums $INSTALL_ARGS visualstudio2022-workload-nativedesktop
choco install -y --skip-checksums $INSTALL_ARGS openssl --version=3.1.1
choco install -y --skip-checksums $INSTALL_ARGS boost-msvc-14.3 --version=1.82.0
choco install -y --skip-checksums $INSTALL_ARGS mysql --version=8.0.31
|
||||
|
||||
99
apps/installer/main.sh
Normal file
99
apps/installer/main.sh
Normal file
@@ -0,0 +1,99 @@
|
||||
#!/usr/bin/env bash

# AzerothCore installer dashboard: interactive `select` menu (or direct
# command-line dispatch) over the inst_* functions.

CURRENT_PATH="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"

source "$CURRENT_PATH/includes/includes.sh"

# prompt shown by the `select` menu in the main loop below
PS3='[Please enter your choice]: '
# menu entries; the trailing comment is the number `select` assigns to each
options=(
    "init (i): First Installation" # 1
    "install-deps (d): Configure OS dep" # 2
    "pull (u): Update Repository" # 3
    "reset (r): Reset & Clean Repository" # 4
    "compiler (c): Run compiler tool" # 5
    "module-search (ms): Module Search by keyword" # 6
    "module-install (mi): Module Install by name" # 7
    "module-update (mu): Module Update by name" # 8
    "module-remove: (mr): Module Remove by name" # 9
    "client-data: (gd): download client data from github repository (beta)" # 10
    "run-worldserver (rw): execute a simple restarter for worldserver" # 11
    "run-authserver (ra): execute a simple restarter for authserver" # 12
    "docker (dr): Run docker tools" # 13
    "quit: Exit from this menu" # 14
)
|
||||
|
||||
function _switch() {
    # Dispatch one menu choice. $1 is the option number printed by `select`,
    # the short alias (i, d, u, ...) or the long command name; $2 and beyond
    # are forwarded to the selected sub-command.
    # (The unreachable ""| alternatives were dropped: `select` re-displays the
    # menu on empty input without running the body, and the CLI path only
    # calls _switch with a non-empty $1.)
    _reply="$1"
    _opt="$2"

    case $_reply in
        "i"|"init"|"1")
            inst_allInOne
            ;;
        "d"|"install-deps"|"2")
            inst_configureOS
            ;;
        "u"|"pull"|"3")
            inst_updateRepo
            ;;
        "r"|"reset"|"4")
            inst_resetRepo
            ;;
        "c"|"compiler"|"5")
            bash "$AC_PATH_APPS/compiler/compiler.sh" $_opt
            ;;
        "ms"|"module-search"|"6")
            inst_module_search "$_opt"
            ;;
        "mi"|"module-install"|"7")
            inst_module_install "$_opt"
            ;;
        "mu"|"module-update"|"8")
            inst_module_update "$_opt"
            ;;
        "mr"|"module-remove"|"9")
            inst_module_remove "$_opt"
            ;;
        "gd"|"client-data"|"10")
            inst_download_client_data
            ;;
        "rw"|"run-worldserver"|"11")
            inst_simple_restarter worldserver
            ;;
        "ra"|"run-authserver"|"12")
            inst_simple_restarter authserver
            ;;
        "dr"|"docker"|"13")
            DOCKER=1 bash "$AC_PATH_ROOT/apps/docker/docker-cmd.sh" "${@:2}"
            exit
            ;;
        "v"|"version")
            # denoRunFile "$AC_PATH_APPS/installer/main.ts" "version"
            printf "AzerothCore Rev. %s\n" "$ACORE_VERSION"
            exit
            ;;
        "quit"|"14")
            # FIX: the menu lists quit as entry 14, but this case used to
            # match "15" while "14" printed the version — picking "quit"
            # from the menu showed the version instead of exiting.
            echo "Goodbye!"
            exit
            ;;
        "--help")
            echo "Available commands:"
            printf '%s\n' "${options[@]}"
            ;;
        *) echo "invalid option, use --help option for the commands list";;
    esac
}
|
||||
|
||||
# Main loop: dispatch the command-line argument directly when given,
# otherwise show the interactive dashboard menu.
while true
do
    # run option directly if specified in argument
    # FIX: quote the expansions — the old unquoted `[ ! -z $1 ]` / `_switch $@`
    # misparsed arguments containing spaces.
    if [ -n "${1:-}" ]; then
        _switch "$@" # old method: "${options[$cmdopt-1]}"
        exit 0
    fi

    echo "==== ACORE DASHBOARD ===="
    select opt in "${options[@]}"
    do
        _switch "$REPLY"
        break
    done
done
|
||||
50
apps/startup-scripts/conf.sh.dist
Normal file
50
apps/startup-scripts/conf.sh.dist
Normal file
@@ -0,0 +1,50 @@
|
||||
# Template configuration for the startup scripts (run-engine).
# Copy this file, fill in the values, and source it from your starter or
# restarter script (see apps/startup-scripts/examples/).

# enable/disable GDB execution
export GDB_ENABLED=0

# [optional] gdb file
# default: gdb.conf
export GDB=""

# directory where the binaries are stored
export BINPATH=""

# Put here the pid you configured on your worldserver.conf file
# needed when GDB_ENABLED=1
export SERVERPID=""

# path to configuration file (including the file name)
# ex: /home/user/azerothcore/etc/worldserver.conf
export CONFIG=""

# path of log files
# needed by restarter to store its logs
export LOGS_PATH="";

# exec name
# ex: worldserver
export SERVERBIN=""

# prefix name for log files
# to avoid collision with other restarters
export LOG_PREFIX_NAME=""

# [optional] name of screen service
# if not specified, the screen util won't be used
export SCREEN_NAME=""

# [optional] overwrite default screen options: -A -m -d -S
# WARNING: if you are running it under a systemd service
# please do not remove the -m -d arguments from screen if you are using it,
# or keep WITH_CONSOLE=0 .
# otherwise the journald-logging system will take 100% of CPU slowing
# down the whole machine. It's because a systemd service should have
# low console output.
export SCREEN_OPTIONS=""

# enable/disable showing the output
# in the console; if disabled, the output will be redirected to
# the logging files
#
export WITH_CONSOLE=0
|
||||
|
||||
|
||||
14
apps/startup-scripts/examples/restarter-auth.sh
Normal file
14
apps/startup-scripts/examples/restarter-auth.sh
Normal file
@@ -0,0 +1,14 @@
|
||||
#!/usr/bin/env bash

# Example: run the authserver under the run-engine restarter loop.

# NOTE(review): relative path — assumes the script is launched from its own
# directory; confirm before moving it.
PATH_RUNENGINE="./"

source "$PATH_RUNENGINE/run-engine"

# you must create your conf
# copying conf.sh.dist
# and renaming as below
source "./conf-auth.sh"

restarter
|
||||
|
||||
|
||||
14
apps/startup-scripts/examples/restarter-world.sh
Normal file
14
apps/startup-scripts/examples/restarter-world.sh
Normal file
@@ -0,0 +1,14 @@
|
||||
#!/usr/bin/env bash

# Example: run the worldserver under the run-engine restarter loop.

# NOTE(review): relative path — assumes the script is launched from its own
# directory; confirm before moving it.
PATH_RUNENGINE="./"

source "$PATH_RUNENGINE/run-engine"

# you must create your conf
# copying conf.sh.dist
# and renaming as below
source "./conf-world.sh"

restarter
|
||||
|
||||
|
||||
13
apps/startup-scripts/examples/starter-auth.sh
Normal file
13
apps/startup-scripts/examples/starter-auth.sh
Normal file
@@ -0,0 +1,13 @@
|
||||
#!/usr/bin/env bash

# Example: start the authserver once (no restart loop) via run-engine.

# NOTE(review): relative path — assumes the script is launched from its own
# directory; confirm before moving it.
PATH_RUNENGINE="./"

source "$PATH_RUNENGINE/run-engine"

# you must create your conf
# copying conf.sh.dist
# and renaming as below
source "./conf-auth.sh"

starter
|
||||
|
||||
14
apps/startup-scripts/examples/starter-world.sh
Normal file
14
apps/startup-scripts/examples/starter-world.sh
Normal file
@@ -0,0 +1,14 @@
|
||||
#!/usr/bin/env bash

# Example: start the worldserver once (no restart loop) via run-engine.

# NOTE(review): relative path — assumes the script is launched from its own
# directory; confirm before moving it.
PATH_RUNENGINE="./"

source "$PATH_RUNENGINE/run-engine"

# you must create your conf
# copying conf.sh.dist
# and renaming as below
source "./conf-world.sh"

starter
|
||||
|
||||
|
||||
7
apps/startup-scripts/gdb.conf
Normal file
7
apps/startup-scripts/gdb.conf
Normal file
@@ -0,0 +1,7 @@
|
||||
# Batch GDB command file used by simple-restarter (`gdb -x gdb.conf --batch`):
# run the server and, when it stops/crashes, dump full backtraces for every
# thread into the log.
set logging on
set debug timestamp
run
bt
bt full
info thread
thread apply all backtrace full
|
||||
115
apps/startup-scripts/run-engine
Normal file
115
apps/startup-scripts/run-engine
Normal file
@@ -0,0 +1,115 @@
|
||||
# Absolute directory of this script, used to locate the starter and the
# default configuration next to it.
export RUN_ENGINE_PATH="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"

# load default conf
# FIX: the existence check tested "conf.dist" while sourcing "conf.sh.dist",
# so the shipped defaults were never actually loaded.
if [ -e "$RUN_ENGINE_PATH/conf.sh.dist" ]; then
    source "$RUN_ENGINE_PATH/conf.sh.dist"
fi
|
||||
|
||||
function finish {
    # Tear down the managed screen session, when one was configured.
    if [ -n "$SCREEN_NAME" ]; then
        screen -X -S "$SCREEN_NAME" quit
    fi
}
|
||||
|
||||
# disabled for now, but could be useful if we want
|
||||
# shutdown the process if restarter crashes for some reason
|
||||
# trap finish EXIT
|
||||
|
||||
function configureFiles() {
    # Derive every log/trace/error file path from LOGS_PATH and
    # LOG_PREFIX_NAME (both come from the sourced configuration).
    TRACE_BEGIN_STRING="SIGSEGV"
    TRACE_FILE="${LOGS_PATH}/${LOG_PREFIX_NAME}_trace.log"
    ERR_FILE="${LOGS_PATH}/${LOG_PREFIX_NAME}_error.log"
    SYSLOG="${LOGS_PATH}/${LOG_PREFIX_NAME}_system.log"
    SYSERR="${LOGS_PATH}/${LOG_PREFIX_NAME}_system.err"
    LINKS_FILE="${LOGS_PATH}/${LOG_PREFIX_NAME}_crash_links.link"
}
|
||||
|
||||
function checkStatus() {
    # Return 0 ("needs a (re)start") when neither a gdb-wrapped nor a plain
    # server process matching our binary/config is running; 1 otherwise.
    # wipe do : destroy old screens + ls
    #screen -wipe
    #if screen -ls $1 | grep -q "No Sockets found"
    #then
    #    return 0
    #fi

    local gdbres
    gdbres=$(pgrep -f "gdb -x $GDB --batch $SERVERBIN")
    # FIX: quote the expansion — pgrep may print several PIDs (one per line)
    # and the old unquoted test then failed with "too many arguments".
    if [[ $GDB_ENABLED -eq 1 && -n "$gdbres" ]]; then
        return 1
    fi

    #
    # This is a specific check for Azeroth Core in case of screen failure
    # It is possible since same binary file cannot be launched with same configuration file
    # This is an extra check
    #
    local binres
    binres=$(pgrep -f "$SERVERBIN -c $CONFIG")
    if [ -n "$binres" ]; then
        return 1
    fi

    return 0
}
|
||||
|
||||
function run() {
    # Launch the starter, inside a detached screen session when $1 (the
    # screen name) is non-empty, or directly in the foreground otherwise.
    # Positional args (see the starter script, shifted by one):
    #   $1 screen name   $2 server binary   $3 gdb script file   $4 config
    #   $5 stdout log    $6 stderr log      $7 gdb enabled (0/1)
    echo $1
    if [ ! -z $1 ]; then
        local OPTIONS="-A -m -d -S"
        if [ ! -z "$SCREEN_OPTIONS" ]; then
            OPTIONS=$SCREEN_OPTIONS  # user override from the sourced conf
        fi

        echo "> Starting with screen ( screen $OPTIONS )"

        screen $OPTIONS $1 "$RUN_ENGINE_PATH/starter" $2 $3 "$4" "$5" "$6" $7 "$BINPATH/crashes"
    else
        $RUN_ENGINE_PATH/starter $2 $3 "$4" "$5" "$6" $7 "$BINPATH/crashes"
    fi
}
|
||||
|
||||
function starter() {
    # Single-shot launch: prepare the log/crash directories, derive the log
    # file names, then run the server once with the configured settings.
    # FIX: quoted cd (path may contain spaces) and bail out if it fails so
    # the server is never launched from the wrong working directory.
    cd "$BINPATH" || return

    mkdir -p "$LOGS_PATH"
    mkdir -p "$BINPATH"/crashes

    configureFiles

    run "$SCREEN_NAME" "$SERVERBIN" "$GDB" "$CONFIG" "$SYSLOG" "$SYSERR" "$GDB_ENABLED"
}
|
||||
|
||||
|
||||
function restarter() {
    # Supervision loop: every 10 seconds, when no matching server process is
    # alive (see checkStatus), (re)start it — under gdb when GDB_ENABLED=1,
    # harvesting the crash backtrace from the previous run's syslog first.
    cd $BINPATH

    mkdir -p "$LOGS_PATH"
    mkdir -p "$BINPATH"/crashes

    configureFiles

    # the trace file is only ever appended to; make sure it exists up front
    if [ ! -f $TRACE_FILE ]; then
        touch $TRACE_FILE
    fi

    while :
    do
        if checkStatus $SCREEN_NAME; then
            DATE=$(date)
            echo "Restarting $SCREEN_NAME Core blizz($DATE)"
            if [ $GDB_ENABLED -eq 1 ]; then
                echo "GDB enabled"
                # copy the crash backtrace (from TRACE_BEGIN_STRING onwards)
                # out of the previous run's syslog before it gets overwritten
                grep -B 10 -A 1800 "$TRACE_BEGIN_STRING" "$SYSLOG" >> "$TRACE_FILE"
                cat "$SYSERR" > "$ERR_FILE"
                run "$SCREEN_NAME" "$SERVERBIN" "$GDB" "$CONFIG" "$SYSLOG" "$SYSERR" 1
            fi

            if [ $GDB_ENABLED -eq 0 ]; then
                echo "GDB disabled"
                # "null" placeholders: no gdb script and no log redirection
                run "$SCREEN_NAME" "$SERVERBIN" null "$CONFIG" null null 0
            fi
        fi

        sleep 10
    done
}
|
||||
|
||||
70
apps/startup-scripts/simple-restarter
Normal file
70
apps/startup-scripts/simple-restarter
Normal file
@@ -0,0 +1,70 @@
|
||||
#!/usr/bin/env bash

# Minimal restart-on-exit loop for a server binary, with protection
# against infinite crash loops.
#
#PARAMETER 1: directory        (fallback: $AC_RESTARTER_BINPATH)
#PARAMETER 2: binary file      (fallback: $AC_RESTARTER_BINFILE)
#PARAMETER 3: gdb on/off       (fallback: $AC_RESTARTER_WITHGDB)

bin_path="${1:-$AC_RESTARTER_BINPATH}"
bin_file="${2:-$AC_RESTARTER_BINFILE}"
with_gdb="${3:-$AC_RESTARTER_WITHGDB}"

CURRENT_PATH=$( cd "$(dirname "${BASH_SOURCE[0]}")" ; pwd )

_instant_crash_count=0  # consecutive runs shorter than 10 seconds
_restart_count=0        # total restarts since launch

# FIX: the old guard required exactly 2 positional args, which rejected both
# the documented 3rd (gdb) parameter and the AC_RESTARTER_* env fallbacks
# above. Validate the resolved values instead.
if [ -z "$bin_path" ] || [ -z "$bin_file" ]; then
    echo "Usage: $0 path filename"
    echo "Example: $0 $HOME/azerothcore/bin worldserver"
    exit 1
fi

while true
do
    if [ ! -f "$bin_path/$bin_file" ]; then
        echo "$bin_path/$bin_file doesn't exists!"
        exit 1
    fi

    STARTING_TIME=$(date +%s)

    cd "$bin_path";

    if [ "$with_gdb" = true ]; then
        echo "Running with GDB enabled"
        gdb -x "$CURRENT_PATH/gdb.conf" --batch "./$bin_file"
    else
        echo "Running without GDB"
        "./$bin_file"
    fi

    _exit_code=$?

    echo "exit code: $_exit_code"
    # stop restarter on SIGKILL (disabled for now)
    # 128 + 9 (SIGKILL)
    #if [ $_exit_code -eq 137 ]; then
    #    echo "$bin_file has been killed"
    #    exit 0
    #fi

    echo "$bin_file terminated, restarting..."

    ENDING_TIME=$(date +%s)
    DIFFERENCE=$(( ENDING_TIME - STARTING_TIME ))

    ((_restart_count++))
    echo "$bin_file Terminated after $DIFFERENCE seconds, termination count: : $_restart_count"

    if [ $DIFFERENCE -lt 10 ]; then
        # increment instant crash if runtime is lower than 10 seconds
        ((_instant_crash_count++))
    else
        _instant_crash_count=0 # reset count
    fi

    if [ $_instant_crash_count -gt 5 ]; then
        echo "$bin_file Restarter exited. Infinite crash loop prevented. Please check your system"
        exit 1
    fi
done
|
||||
32
apps/startup-scripts/starter
Normal file
32
apps/startup-scripts/starter
Normal file
@@ -0,0 +1,32 @@
|
||||
#!/usr/bin/env bash

# Launch a server binary ($1) once, optionally under gdb with crash logging.
#   $1 binary file    $2 gdb script file to generate   $3 config file
#   $4 stdout log     $5 stderr log                    $6 gdb enabled (0/1)
#   $7 crashes directory

GDB_FILE="$2"
CONFIG="$3"
SYSLOG="$4"
SYSERR="$5"
GDB_ENABLED="$6"
CRASHES_PATH="$7"

# FIX: default to 0 when unset — the old unquoted `[ $GDB_ENABLED -eq 1 ]`
# errored out ("unary operator expected") when the variable was empty.
# 0 matches the default shipped in conf.sh.dist.
if [ "${GDB_ENABLED:-0}" -eq 1 ]; then
    # generate a batch gdb script that logs a full backtrace on crash
    echo "set logging file "$CRASHES_PATH"/gdb-$(date +%Y-%m-%d-%H-%M-%S).txt" > "$GDB_FILE"
    echo "set logging on" >> "$GDB_FILE"
    echo "set debug timestamp" >> "$GDB_FILE"
    echo "run -c $CONFIG" >> "$GDB_FILE"
    echo "bt" >> "$GDB_FILE"
    echo "bt full" >> "$GDB_FILE"
    echo "info thread" >> "$GDB_FILE"
    echo "thread apply all backtrace full" >> "$GDB_FILE"

    [ ! -f "$SYSLOG" ] && touch "$SYSLOG"
    [ ! -f "$SYSERR" ] && touch "$SYSERR"

    if [ "${WITH_CONSOLE:-0}" -eq 0 ]; then
        gdb -x "$GDB_FILE" --batch "$1" >> "$SYSLOG" 2>> "$SYSERR"
    else
        echo "> Console enabled"
        gdb -x "$GDB_FILE" --batch "$1" > >(tee ${SYSLOG}) 2> >(tee ${SYSERR} >&2)
    fi

elif [ "${GDB_ENABLED:-0}" -eq 0 ]; then
    "./$1" -c "$CONFIG"
fi
|
||||
116
apps/valgrind/helgrind.supp
Normal file
116
apps/valgrind/helgrind.supp
Normal file
@@ -0,0 +1,116 @@
|
||||
{
|
||||
[1] ACE_Future::ready() race in WorldSession::ProcessQueryCallbacks(), a lock is used anyway in ACE_Future::get()/set()
|
||||
Helgrind:Race
|
||||
...
|
||||
fun:_ZN14ACE_Future_RepIN7Trinity7AutoPtrI17PreparedResultSet16ACE_Thread_MutexEEE3setERKS4_R10ACE_FutureIS4_E
|
||||
fun:_ZN10ACE_FutureIN7Trinity7AutoPtrI17PreparedResultSet16ACE_Thread_MutexEEE3setERKS4_
|
||||
fun:_ZN21PreparedStatementTask7ExecuteEv
|
||||
}
|
||||
{
|
||||
[1] ACE_Future::ready() race in WorldSession::ProcessQueryCallbacks(), a lock is used anyway in ACE_Future::get()/set()
|
||||
Helgrind:Race
|
||||
...
|
||||
fun:_ZN14ACE_Future_RepIP14SQLQueryHolderE3setERKS1_R10ACE_FutureIS1_E
|
||||
fun:_ZN10ACE_FutureIP14SQLQueryHolderE3setERKS1_
|
||||
fun:_ZN18SQLQueryHolderTask7ExecuteEv
|
||||
}
|
||||
{
|
||||
[2] ACE_Future::ready() race in WorldSession::ProcessQueryCallbacks(), a lock is used anyway in ACE_Future::get()/set()
|
||||
Helgrind:Race
|
||||
...
|
||||
fun:_ZNK14ACE_Future_RepIP14SQLQueryHolderE5readyEv
|
||||
fun:_ZNK10ACE_FutureIP14SQLQueryHolderE5readyEv
|
||||
fun:_ZN12WorldSession21ProcessQueryCallbacksEv
|
||||
}
|
||||
{
|
||||
[3] ACE_Future::attach()/detach() false positive in WorldSession::HandleCharEnumOpcode()
|
||||
Helgrind:Race
|
||||
...
|
||||
fun:_ZN14ACE_Future_RepIN7Trinity7AutoPtrI17PreparedResultSet16ACE_Thread_MutexEEE6attachERPS5_
|
||||
}
|
||||
{
|
||||
[4] ACE_Future::get() race in WorldSession::ProcessQueryCallbacks() , a lock is used anyway in ACE_Future::get()/set(), the only case when this is a race is if the same ACE_Future is reused by another thread
|
||||
Helgrind:Race
|
||||
...
|
||||
fun:_ZNK14ACE_Future_RepIP14SQLQueryHolderE3getERS1_P14ACE_Time_Value
|
||||
fun:_ZNK10ACE_FutureIP14SQLQueryHolderE3getERS1_P14ACE_Time_Value
|
||||
fun:_ZN12WorldSession21ProcessQueryCallbacksEv
|
||||
}
|
||||
{
|
||||
[4] ACE_Future::get() race in WorldSession::ProcessQueryCallbacks() , a lock is used anyway in ACE_Future::get()/set(), the only case when this is a race is if the same ACE_Future is reused by another thread
|
||||
Helgrind:Race
|
||||
...
|
||||
fun:_ZNK10ACE_FutureIN7Trinity7AutoPtrI17PreparedResultSet16ACE_Thread_MutexEEE5readyEv
|
||||
fun:_ZN12WorldSession21ProcessQueryCallbacksEv
|
||||
}
|
||||
{
|
||||
[5] Race in WorldSession::ProcessQueryCallbacks(), added ASSERT(!m_result.ready())
|
||||
Helgrind:Race
|
||||
...
|
||||
fun:_ZN6Player10LoadFromDBEjP14SQLQueryHolder
|
||||
fun:_ZN12WorldSession17HandlePlayerLoginEP16LoginQueryHolder
|
||||
fun:_ZN12WorldSession21ProcessQueryCallbacksEv
|
||||
}
|
||||
{
|
||||
[5] Race in WorldSession::ProcessQueryCallbacks(), added ASSERT(!m_result.ready())
|
||||
Helgrind:Race
|
||||
...
|
||||
fun:_ZN14SQLQueryHolder17GetPreparedResultEm
|
||||
fun:_ZN12WorldSession17HandlePlayerLoginEP16LoginQueryHolder
|
||||
fun:_ZN12WorldSession21ProcessQueryCallbacksEv
|
||||
}
|
||||
{
|
||||
[5] Race in WorldSession::ProcessQueryCallbacks(), added ASSERT(!m_result.ready())
|
||||
Helgrind:Race
|
||||
...
|
||||
fun:_ZN12WorldSession15LoadAccountDataEN7Trinity7AutoPtrI17PreparedResultSet16ACE_Thread_MutexEEj
|
||||
fun:_ZN12WorldSession17HandlePlayerLoginEP16LoginQueryHolder
|
||||
fun:_ZN12WorldSession21ProcessQueryCallbacksEv
|
||||
}
|
||||
{
|
||||
[5] Race in WorldSession::ProcessQueryCallbacks(), added ASSERT(!m_result.ready())
|
||||
Helgrind:Race
|
||||
...
|
||||
fun:_ZNK17PreparedResultSet5FetchEv
|
||||
fun:_ZN12WorldSession17HandlePlayerLoginEP16LoginQueryHolder
|
||||
fun:_ZN12WorldSession21ProcessQueryCallbacksEv
|
||||
}
|
||||
{
|
||||
[5] Race in WorldSession::ProcessQueryCallbacks(), added ASSERT(!m_result.ready())
|
||||
Helgrind:Race
|
||||
...
|
||||
fun:_ZNK5Field9GetUInt32Ev
|
||||
fun:_ZN12WorldSession17HandlePlayerLoginEP16LoginQueryHolder
|
||||
fun:_ZN12WorldSession21ProcessQueryCallbacksEv
|
||||
}
|
||||
{
|
||||
[5] Race in WorldSession::ProcessQueryCallbacks(), added ASSERT(!m_result.ready())
|
||||
Helgrind:Race
|
||||
...
|
||||
fun:_ZNK5Field8GetUInt8Ev
|
||||
fun:_ZN12WorldSession17HandlePlayerLoginEP16LoginQueryHolder
|
||||
fun:_ZN12WorldSession21ProcessQueryCallbacksEv
|
||||
}
|
||||
{
|
||||
[6] False positive of possible race about ACE_Strong_Bound_Ptr
|
||||
Helgrind:Race
|
||||
...
|
||||
fun:_ZN17PreparedResultSetD1Ev
|
||||
fun:_ZN20ACE_Strong_Bound_PtrI17PreparedResultSet16ACE_Thread_MutexED1Ev
|
||||
fun:_ZN7Trinity7AutoPtrI17PreparedResultSet16ACE_Thread_MutexED1Ev
|
||||
fun:_ZN12WorldSession17HandlePlayerLoginEP16LoginQueryHolder
|
||||
fun:_ZN12WorldSession21ProcessQueryCallbacksEv
|
||||
}
|
||||
{
|
||||
[7] Race condition on bool in ACE, ignore
|
||||
Helgrind:Race
|
||||
fun:_ZN11WorldSocket12handle_closeEim
|
||||
fun:_ZN20ACE_Dev_Poll_Reactor16remove_handler_iEimP17ACE_Event_Handler
|
||||
}
|
||||
{
|
||||
[7] Race condition on bool in ACE, ignore
|
||||
Helgrind:Race
|
||||
fun:_ZNK11WorldSocket8IsClosedEv
|
||||
fun:_ZN12WorldSession6UpdateEjR12PacketFilter
|
||||
fun:_ZN5World14UpdateSessionsEj
|
||||
}
|
||||
3
apps/whitespace_remover/whitespace_remover.sh
Normal file
3
apps/whitespace_remover/whitespace_remover.sh
Normal file
@@ -0,0 +1,3 @@
|
||||
# Strip trailing whitespace from matching source files under the current dir.
# Change '*.cpp' and '*.h' to the extension you want to remove whitespaces in.
# FIX: pass an explicit starting path — `find` with no path operand is a
# GNU extension and fails on BSD/macOS find.
find . -name '*.cpp' -print0 | xargs -r0 sed -e 's/[[:blank:]]\+$//' -i
find . -name '*.h' -print0 | xargs -r0 sed -e 's/[[:blank:]]\+$//' -i
|
||||
Reference in New Issue
Block a user